OpenCMISS-Iron Internal API Documentation
distributed_matrix_vector.f90

!>This module handles all distributed matrix and vector routines.
MODULE distributed_matrix_vector

  USE base_routines
  USE cmiss_mpi
  USE cmisspetsc
  USE input_output
  USE iso_c_binding
  USE iso_varying_string
  USE kinds
  USE matrix_vector
#ifndef NOMPIMOD
  USE mpi
#endif
  USE strings
  USE types

#include "macros.h"

  IMPLICIT NONE

  PRIVATE

#ifdef NOMPIMOD
#include "mpif.h"
#endif
#include "petscversion.h"

  !Module parameters

  !Library types for distributed matrices and vectors
  INTEGER(INTG), PARAMETER :: distributed_matrix_vector_cmiss_type=library_cmiss_type
  INTEGER(INTG), PARAMETER :: distributed_matrix_vector_petsc_type=library_petsc_type

  !Data types for distributed matrices and vectors
  INTEGER(INTG), PARAMETER :: distributed_matrix_vector_intg_type=matrix_vector_intg_type
  INTEGER(INTG), PARAMETER :: distributed_matrix_vector_sp_type=matrix_vector_sp_type
  INTEGER(INTG), PARAMETER :: distributed_matrix_vector_dp_type=matrix_vector_dp_type
  INTEGER(INTG), PARAMETER :: distributed_matrix_vector_l_type=matrix_vector_l_type

  !Storage types for distributed matrices
  INTEGER(INTG), PARAMETER :: distributed_matrix_block_storage_type=matrix_block_storage_type
  INTEGER(INTG), PARAMETER :: distributed_matrix_diagonal_storage_type=matrix_diagonal_storage_type
  INTEGER(INTG), PARAMETER :: distributed_matrix_column_major_storage_type=matrix_column_major_storage_type
  INTEGER(INTG), PARAMETER :: distributed_matrix_row_major_storage_type=matrix_row_major_storage_type
  INTEGER(INTG), PARAMETER :: distributed_matrix_compressed_row_storage_type=matrix_compressed_row_storage_type
  INTEGER(INTG), PARAMETER :: distributed_matrix_compressed_column_storage_type=matrix_compressed_column_storage_type
  INTEGER(INTG), PARAMETER :: distributed_matrix_row_column_storage_type=matrix_row_column_storage_type

  !Ghosting types for distributed matrices and vectors
  INTEGER(INTG), PARAMETER :: distributed_matrix_vector_include_ghosts_type=1
  INTEGER(INTG), PARAMETER :: distributed_matrix_vector_no_ghosts_type=2

  !Module types

  !Module variables

  INTEGER(INTG), SAVE :: distributed_data_id=100000000

  !Interfaces

  INTERFACE distributed_matrix_all_values_set
    MODULE PROCEDURE distributed_matrix_all_values_set_intg
    MODULE PROCEDURE distributed_matrix_all_values_set_sp
    MODULE PROCEDURE distributed_matrix_all_values_set_dp
    MODULE PROCEDURE distributed_matrix_all_values_set_l
  END INTERFACE distributed_matrix_all_values_set

  INTERFACE distributedmatrix_allvaluesset
    MODULE PROCEDURE distributed_matrix_all_values_set_intg
    MODULE PROCEDURE distributed_matrix_all_values_set_sp
    MODULE PROCEDURE distributed_matrix_all_values_set_dp
    MODULE PROCEDURE distributed_matrix_all_values_set_l
  END INTERFACE distributedmatrix_allvaluesset

  INTERFACE distributedmatrix_createfinish
    MODULE PROCEDURE distributed_matrix_create_finish
  END INTERFACE distributedmatrix_createfinish

  INTERFACE distributedmatrix_createstart
    MODULE PROCEDURE distributed_matrix_create_start
  END INTERFACE distributedmatrix_createstart

  INTERFACE distributed_matrix_data_get
    MODULE PROCEDURE distributed_matrix_data_get_intg
    MODULE PROCEDURE distributed_matrix_data_get_sp
    MODULE PROCEDURE distributed_matrix_data_get_dp
    MODULE PROCEDURE distributed_matrix_data_get_l
  END INTERFACE distributed_matrix_data_get

  INTERFACE distributedmatrix_dataget
    MODULE PROCEDURE distributed_matrix_data_get_intg
    MODULE PROCEDURE distributed_matrix_data_get_sp
    MODULE PROCEDURE distributed_matrix_data_get_dp
    MODULE PROCEDURE distributed_matrix_data_get_l
  END INTERFACE distributedmatrix_dataget

  INTERFACE distributed_matrix_data_restore
    MODULE PROCEDURE distributed_matrix_data_restore_intg
    MODULE PROCEDURE distributed_matrix_data_restore_sp
    MODULE PROCEDURE distributed_matrix_data_restore_dp
    MODULE PROCEDURE distributed_matrix_data_restore_l
  END INTERFACE distributed_matrix_data_restore

  INTERFACE distributedmatrix_datarestore
    MODULE PROCEDURE distributed_matrix_data_restore_intg
    MODULE PROCEDURE distributed_matrix_data_restore_sp
    MODULE PROCEDURE distributed_matrix_data_restore_dp
    MODULE PROCEDURE distributed_matrix_data_restore_l
  END INTERFACE distributedmatrix_datarestore

  INTERFACE distributedmatrix_datatypeset
    MODULE PROCEDURE distributed_matrix_data_type_set
  END INTERFACE distributedmatrix_datatypeset

  INTERFACE distributedmatrix_destroy
    MODULE PROCEDURE distributed_matrix_destroy
  END INTERFACE distributedmatrix_destroy

  INTERFACE distributedmatrix_duplicate
    MODULE PROCEDURE distributed_matrix_duplicate
  END INTERFACE distributedmatrix_duplicate

  INTERFACE distributedmatrix_form
    MODULE PROCEDURE distributed_matrix_form
  END INTERFACE distributedmatrix_form

  INTERFACE distributedmatrix_ghostingtypeset
    MODULE PROCEDURE distributed_matrix_ghosting_type_set
  END INTERFACE distributedmatrix_ghostingtypeset

  INTERFACE distributedmatrix_librarytypeset
    MODULE PROCEDURE distributed_matrix_library_type_set
  END INTERFACE distributedmatrix_librarytypeset

  INTERFACE distributedmatrix_output
    MODULE PROCEDURE distributed_matrix_output
  END INTERFACE distributedmatrix_output

  INTERFACE distributedmatrix_overrideseton
    MODULE PROCEDURE distributed_matrix_override_set_on
  END INTERFACE distributedmatrix_overrideseton

  INTERFACE distributedmatrix_overridesetoff
    MODULE PROCEDURE distributed_matrix_override_set_off
  END INTERFACE distributedmatrix_overridesetoff

  INTERFACE distributedmatrix_storagetypeget
    MODULE PROCEDURE distributed_matrix_storage_type_get
  END INTERFACE distributedmatrix_storagetypeget

  INTERFACE distributedmatrix_storagetypeset
    MODULE PROCEDURE distributed_matrix_storage_type_set
  END INTERFACE distributedmatrix_storagetypeset

  INTERFACE distributedmatrix_updatefinish
    MODULE PROCEDURE distributed_matrix_update_finish
  END INTERFACE distributedmatrix_updatefinish

  INTERFACE distributedmatrix_updatestart
    MODULE PROCEDURE distributed_matrix_update_start
  END INTERFACE distributedmatrix_updatestart

  INTERFACE distributedmatrix_updateisfinished
    MODULE PROCEDURE distributed_matrix_update_isfinished
  END INTERFACE distributedmatrix_updateisfinished

  INTERFACE distributed_matrix_values_add
    MODULE PROCEDURE distributed_matrix_values_add_intg
    MODULE PROCEDURE distributed_matrix_values_add_intg1
    MODULE PROCEDURE distributed_matrix_values_add_intg2
    MODULE PROCEDURE distributed_matrix_values_add_sp
    MODULE PROCEDURE distributed_matrix_values_add_sp1
    MODULE PROCEDURE distributed_matrix_values_add_sp2
    MODULE PROCEDURE distributed_matrix_values_add_dp
    MODULE PROCEDURE distributed_matrix_values_add_dp1
    MODULE PROCEDURE distributed_matrix_values_add_dp2
    MODULE PROCEDURE distributed_matrix_values_add_l
    MODULE PROCEDURE distributed_matrix_values_add_l1
    MODULE PROCEDURE distributed_matrix_values_add_l2
  END INTERFACE distributed_matrix_values_add

  INTERFACE distributedmatrix_valuesadd
    MODULE PROCEDURE distributed_matrix_values_add_intg
    MODULE PROCEDURE distributed_matrix_values_add_intg1
    MODULE PROCEDURE distributed_matrix_values_add_intg2
    MODULE PROCEDURE distributed_matrix_values_add_sp
    MODULE PROCEDURE distributed_matrix_values_add_sp1
    MODULE PROCEDURE distributed_matrix_values_add_sp2
    MODULE PROCEDURE distributed_matrix_values_add_dp
    MODULE PROCEDURE distributed_matrix_values_add_dp1
    MODULE PROCEDURE distributed_matrix_values_add_dp2
    MODULE PROCEDURE distributed_matrix_values_add_l
    MODULE PROCEDURE distributed_matrix_values_add_l1
    MODULE PROCEDURE distributed_matrix_values_add_l2
  END INTERFACE distributedmatrix_valuesadd

  INTERFACE distributed_matrix_values_get
    MODULE PROCEDURE distributed_matrix_values_get_intg
    MODULE PROCEDURE distributed_matrix_values_get_intg1
    MODULE PROCEDURE distributed_matrix_values_get_intg2
    MODULE PROCEDURE distributed_matrix_values_get_sp
    MODULE PROCEDURE distributed_matrix_values_get_sp1
    MODULE PROCEDURE distributed_matrix_values_get_sp2
    MODULE PROCEDURE distributed_matrix_values_get_dp
    MODULE PROCEDURE distributed_matrix_values_get_dp1
    MODULE PROCEDURE distributed_matrix_values_get_dp2
    MODULE PROCEDURE distributed_matrix_values_get_l
    MODULE PROCEDURE distributed_matrix_values_get_l1
    MODULE PROCEDURE distributed_matrix_values_get_l2
  END INTERFACE distributed_matrix_values_get

  INTERFACE distributedmatrix_valuesget
    MODULE PROCEDURE distributed_matrix_values_get_intg
    MODULE PROCEDURE distributed_matrix_values_get_intg1
    MODULE PROCEDURE distributed_matrix_values_get_intg2
    MODULE PROCEDURE distributed_matrix_values_get_sp
    MODULE PROCEDURE distributed_matrix_values_get_sp1
    MODULE PROCEDURE distributed_matrix_values_get_sp2
    MODULE PROCEDURE distributed_matrix_values_get_dp
    MODULE PROCEDURE distributed_matrix_values_get_dp1
    MODULE PROCEDURE distributed_matrix_values_get_dp2
    MODULE PROCEDURE distributed_matrix_values_get_l
    MODULE PROCEDURE distributed_matrix_values_get_l1
    MODULE PROCEDURE distributed_matrix_values_get_l2
  END INTERFACE distributedmatrix_valuesget

  INTERFACE distributed_matrix_values_set
    MODULE PROCEDURE distributed_matrix_values_set_intg
    MODULE PROCEDURE distributed_matrix_values_set_intg1
    MODULE PROCEDURE distributed_matrix_values_set_intg2
    MODULE PROCEDURE distributed_matrix_values_set_sp
    MODULE PROCEDURE distributed_matrix_values_set_sp1
    MODULE PROCEDURE distributed_matrix_values_set_sp2
    MODULE PROCEDURE distributed_matrix_values_set_dp
    MODULE PROCEDURE distributed_matrix_values_set_dp1
    MODULE PROCEDURE distributed_matrix_values_set_dp2
    MODULE PROCEDURE distributed_matrix_values_set_l
    MODULE PROCEDURE distributed_matrix_values_set_l1
    MODULE PROCEDURE distributed_matrix_values_set_l2
  END INTERFACE distributed_matrix_values_set

  INTERFACE distributedmatrix_valuesset
    MODULE PROCEDURE distributed_matrix_values_set_intg
    MODULE PROCEDURE distributed_matrix_values_set_intg1
    MODULE PROCEDURE distributed_matrix_values_set_intg2
    MODULE PROCEDURE distributed_matrix_values_set_sp
    MODULE PROCEDURE distributed_matrix_values_set_sp1
    MODULE PROCEDURE distributed_matrix_values_set_sp2
    MODULE PROCEDURE distributed_matrix_values_set_dp
    MODULE PROCEDURE distributed_matrix_values_set_dp1
    MODULE PROCEDURE distributed_matrix_values_set_dp2
    MODULE PROCEDURE distributed_matrix_values_set_l
    MODULE PROCEDURE distributed_matrix_values_set_l1
    MODULE PROCEDURE distributed_matrix_values_set_l2
  END INTERFACE distributedmatrix_valuesset

  INTERFACE distributedmatrix_matrixbyvectoradd
    MODULE PROCEDURE distributed_matrix_by_vector_add
  END INTERFACE distributedmatrix_matrixbyvectoradd

  INTERFACE distributed_vector_all_values_set
    MODULE PROCEDURE distributed_vector_all_values_set_intg
    MODULE PROCEDURE distributed_vector_all_values_set_sp
    MODULE PROCEDURE distributed_vector_all_values_set_dp
    MODULE PROCEDURE distributed_vector_all_values_set_l
  END INTERFACE distributed_vector_all_values_set

  INTERFACE distributedvector_allvaluesset
    MODULE PROCEDURE distributed_vector_all_values_set_intg
    MODULE PROCEDURE distributed_vector_all_values_set_sp
    MODULE PROCEDURE distributed_vector_all_values_set_dp
    MODULE PROCEDURE distributed_vector_all_values_set_l
  END INTERFACE distributedvector_allvaluesset

  INTERFACE distributed_vector_copy
    MODULE PROCEDURE distributed_vector_copy_intg
    MODULE PROCEDURE distributed_vector_copy_sp
    MODULE PROCEDURE distributed_vector_copy_dp
    MODULE PROCEDURE distributed_vector_copy_l
  END INTERFACE distributed_vector_copy

  INTERFACE distributedvector_copy
    MODULE PROCEDURE distributed_vector_copy_intg
    MODULE PROCEDURE distributed_vector_copy_sp
    MODULE PROCEDURE distributed_vector_copy_dp
    MODULE PROCEDURE distributed_vector_copy_l
  END INTERFACE distributedvector_copy

  INTERFACE distributedvector_createfinish
    MODULE PROCEDURE distributed_vector_create_finish
  END INTERFACE distributedvector_createfinish

  INTERFACE distributedvector_createstart
    MODULE PROCEDURE distributed_vector_create_start
  END INTERFACE distributedvector_createstart

  INTERFACE distributed_vector_data_get
    MODULE PROCEDURE distributed_vector_data_get_intg
    MODULE PROCEDURE distributed_vector_data_get_sp
    MODULE PROCEDURE distributed_vector_data_get_dp
    MODULE PROCEDURE distributed_vector_data_get_l
  END INTERFACE distributed_vector_data_get

  INTERFACE distributedvector_dataget
    MODULE PROCEDURE distributed_vector_data_get_intg
    MODULE PROCEDURE distributed_vector_data_get_sp
    MODULE PROCEDURE distributed_vector_data_get_dp
    MODULE PROCEDURE distributed_vector_data_get_l
  END INTERFACE distributedvector_dataget

  INTERFACE distributed_vector_data_restore
    MODULE PROCEDURE distributed_vector_data_restore_intg
    MODULE PROCEDURE distributed_vector_data_restore_sp
    MODULE PROCEDURE distributed_vector_data_restore_dp
    MODULE PROCEDURE distributed_vector_data_restore_l
  END INTERFACE distributed_vector_data_restore

  INTERFACE distributedvector_datarestore
    MODULE PROCEDURE distributed_vector_data_restore_intg
    MODULE PROCEDURE distributed_vector_data_restore_sp
    MODULE PROCEDURE distributed_vector_data_restore_dp
    MODULE PROCEDURE distributed_vector_data_restore_l
  END INTERFACE distributedvector_datarestore

  INTERFACE distributedvector_destroy
    MODULE PROCEDURE distributed_vector_destroy
  END INTERFACE distributedvector_destroy

  INTERFACE distributedvector_duplicate
    MODULE PROCEDURE distributed_vector_duplicate
  END INTERFACE distributedvector_duplicate

  INTERFACE distributedvector_ghostingtypeset
    MODULE PROCEDURE distributed_vector_ghosting_type_set
  END INTERFACE distributedvector_ghostingtypeset

  INTERFACE distributedvector_librarytypeset
    MODULE PROCEDURE distributed_vector_library_type_set
  END INTERFACE distributedvector_librarytypeset

  INTERFACE distributedvector_output
    MODULE PROCEDURE distributed_vector_output
  END INTERFACE distributedvector_output

  INTERFACE distributedvector_overrideseton
    MODULE PROCEDURE distributed_vector_override_set_on
  END INTERFACE distributedvector_overrideseton

  INTERFACE distributedvector_overridesetoff
    MODULE PROCEDURE distributed_vector_override_set_off
  END INTERFACE distributedvector_overridesetoff

  INTERFACE distributedvector_updatefinish
    MODULE PROCEDURE distributed_vector_update_finish
  END INTERFACE distributedvector_updatefinish

  INTERFACE distributedvector_updatestart
    MODULE PROCEDURE distributed_vector_update_start
  END INTERFACE distributedvector_updatestart

  INTERFACE distributedvector_updateisfinished
    MODULE PROCEDURE distributed_vector_update_isfinished
  END INTERFACE distributedvector_updateisfinished

  INTERFACE distributed_vector_values_add
    MODULE PROCEDURE distributed_vector_values_add_intg
    MODULE PROCEDURE distributed_vector_values_add_intg1
    MODULE PROCEDURE distributed_vector_values_add_sp
    MODULE PROCEDURE distributed_vector_values_add_sp1
    MODULE PROCEDURE distributed_vector_values_add_dp
    MODULE PROCEDURE distributed_vector_values_add_dp1
    MODULE PROCEDURE distributed_vector_values_add_l
    MODULE PROCEDURE distributed_vector_values_add_l1
  END INTERFACE distributed_vector_values_add

  INTERFACE distributedvector_valuesadd
    MODULE PROCEDURE distributed_vector_values_add_intg
    MODULE PROCEDURE distributed_vector_values_add_intg1
    MODULE PROCEDURE distributed_vector_values_add_sp
    MODULE PROCEDURE distributed_vector_values_add_sp1
    MODULE PROCEDURE distributed_vector_values_add_dp
    MODULE PROCEDURE distributed_vector_values_add_dp1
    MODULE PROCEDURE distributed_vector_values_add_l
    MODULE PROCEDURE distributed_vector_values_add_l1
  END INTERFACE distributedvector_valuesadd

  INTERFACE distributed_vector_values_get
    MODULE PROCEDURE distributed_vector_values_get_intg
    MODULE PROCEDURE distributed_vector_values_get_intg1
    MODULE PROCEDURE distributed_vector_values_get_sp
    MODULE PROCEDURE distributed_vector_values_get_sp1
    MODULE PROCEDURE distributed_vector_values_get_dp
    MODULE PROCEDURE distributed_vector_values_get_dp1
    MODULE PROCEDURE distributed_vector_values_get_l
    MODULE PROCEDURE distributed_vector_values_get_l1
  END INTERFACE distributed_vector_values_get

  INTERFACE distributedvector_valuesget
    MODULE PROCEDURE distributed_vector_values_get_intg
    MODULE PROCEDURE distributed_vector_values_get_intg1
    MODULE PROCEDURE distributed_vector_values_get_sp
    MODULE PROCEDURE distributed_vector_values_get_sp1
    MODULE PROCEDURE distributed_vector_values_get_dp
    MODULE PROCEDURE distributed_vector_values_get_dp1
    MODULE PROCEDURE distributed_vector_values_get_l
    MODULE PROCEDURE distributed_vector_values_get_l1
  END INTERFACE distributedvector_valuesget

  INTERFACE distributed_vector_values_set
    MODULE PROCEDURE distributed_vector_values_set_intg
    MODULE PROCEDURE distributed_vector_values_set_intg1
    MODULE PROCEDURE distributed_vector_values_set_sp
    MODULE PROCEDURE distributed_vector_values_set_sp1
    MODULE PROCEDURE distributed_vector_values_set_dp
    MODULE PROCEDURE distributed_vector_values_set_dp1
    MODULE PROCEDURE distributed_vector_values_set_l
    MODULE PROCEDURE distributed_vector_values_set_l1
  END INTERFACE distributed_vector_values_set

  INTERFACE distributedvector_valuesset
    MODULE PROCEDURE distributed_vector_values_set_intg
    MODULE PROCEDURE distributed_vector_values_set_intg1
    MODULE PROCEDURE distributed_vector_values_set_sp
    MODULE PROCEDURE distributed_vector_values_set_sp1
    MODULE PROCEDURE distributed_vector_values_set_dp
    MODULE PROCEDURE distributed_vector_values_set_dp1
    MODULE PROCEDURE distributed_vector_values_set_l
    MODULE PROCEDURE distributed_vector_values_set_l1
  END INTERFACE distributedvector_valuesset

  INTERFACE distributedvector_vecdot
    MODULE PROCEDURE distributedvector_vecdotintg
    MODULE PROCEDURE distributedvector_vecdotsp
    MODULE PROCEDURE distributedvector_vecdotdp
  END INTERFACE distributedvector_vecdot

  PUBLIC distributed_matrix_vector_cmiss_type,distributed_matrix_vector_petsc_type

  PUBLIC distributed_matrix_vector_intg_type,distributed_matrix_vector_sp_type, &
    & distributed_matrix_vector_dp_type,distributed_matrix_vector_l_type

  PUBLIC distributed_matrix_block_storage_type,distributed_matrix_diagonal_storage_type, &
    & distributed_matrix_column_major_storage_type,distributed_matrix_row_major_storage_type, &
    & distributed_matrix_compressed_row_storage_type,distributed_matrix_compressed_column_storage_type, &
    & distributed_matrix_row_column_storage_type

  PUBLIC distributed_matrix_vector_include_ghosts_type,distributed_matrix_vector_no_ghosts_type

  PUBLIC distributed_matrix_all_values_set,distributedmatrix_allvaluesset
  PUBLIC distributed_matrix_create_finish,distributedmatrix_createfinish
  PUBLIC distributed_matrix_create_start,distributedmatrix_createstart
  PUBLIC distributed_matrix_data_get,distributedmatrix_dataget
  PUBLIC distributed_matrix_data_restore,distributedmatrix_datarestore
  PUBLIC distributedmatrix_datatypeget
  PUBLIC distributed_matrix_data_type_set,distributedmatrix_datatypeset
  PUBLIC distributedmatrix_dimensionsget
  PUBLIC distributed_matrix_destroy,distributedmatrix_destroy
  PUBLIC distributed_matrix_duplicate,distributedmatrix_duplicate
  PUBLIC distributed_matrix_form,distributedmatrix_form
  PUBLIC distributed_matrix_ghosting_type_set,distributedmatrix_ghostingtypeset
  PUBLIC distributed_matrix_library_type_set,distributedmatrix_librarytypeset
  PUBLIC distributed_matrix_output,distributedmatrix_output
  PUBLIC distributed_matrix_override_set_on,distributedmatrix_overrideseton
  PUBLIC distributed_matrix_override_set_off,distributedmatrix_overridesetoff
  PUBLIC distributed_matrix_storage_type_get,distributedmatrix_storagetypeget
  PUBLIC distributed_matrix_storage_type_set,distributedmatrix_storagetypeset
  PUBLIC distributed_matrix_update_finish,distributedmatrix_updatefinish
  PUBLIC distributed_matrix_update_start,distributedmatrix_updatestart
  PUBLIC distributed_matrix_update_isfinished,distributedmatrix_updateisfinished
  PUBLIC distributed_matrix_values_add,distributedmatrix_valuesadd
  PUBLIC distributed_matrix_values_get,distributedmatrix_valuesget
  PUBLIC distributed_matrix_values_set,distributedmatrix_valuesset
  PUBLIC distributed_matrix_by_vector_add,distributedmatrix_matrixbyvectoradd
  PUBLIC distributed_vector_all_values_set,distributedvector_allvaluesset
  PUBLIC distributed_vector_copy,distributedvector_copy
  PUBLIC distributed_vector_create_finish,distributedvector_createfinish
  PUBLIC distributed_vector_create_start,distributedvector_createstart
  PUBLIC distributed_vector_data_get,distributedvector_dataget
  PUBLIC distributed_vector_data_restore,distributedvector_datarestore
  PUBLIC distributed_vector_destroy,distributedvector_destroy
  PUBLIC distributed_vector_duplicate,distributedvector_duplicate
  PUBLIC distributed_vector_ghosting_type_set,distributedvector_ghostingtypeset
  PUBLIC distributed_vector_library_type_set,distributedvector_librarytypeset
  PUBLIC distributed_vector_output,distributedvector_output
  PUBLIC distributed_vector_override_set_on,distributedvector_overrideseton
  PUBLIC distributed_vector_override_set_off,distributedvector_overridesetoff
  PUBLIC distributed_vector_update_finish,distributedvector_updatefinish
  PUBLIC distributed_vector_update_start,distributedvector_updatestart
  PUBLIC distributed_vector_update_isfinished,distributedvector_updateisfinished
  PUBLIC distributed_vector_values_add,distributedvector_valuesadd
  PUBLIC distributed_vector_values_get,distributedvector_valuesget
  PUBLIC distributed_vector_values_set,distributedvector_valuesset
  PUBLIC distributedvector_vecdot

CONTAINS

  !
  !================================================================================================================================
  !

  !>Sets all values in an integer distributed matrix to the specified value.
  SUBROUTINE distributed_matrix_all_values_set_intg(DISTRIBUTED_MATRIX,VALUE,ERR,ERROR,*)
    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(varying_string), INTENT(OUT) :: ERROR
    !Local variables
    TYPE(varying_string) :: LOCAL_ERROR

    enters("DISTRIBUTED_MATRIX_ALL_VALUES_SET_INTG",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(distributed_matrix_vector_cmiss_type)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_all_values_set(distributed_matrix%CMISS%MATRIX,VALUE,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(distributed_matrix_vector_petsc_type)
          CALL flagerror("Cannot set all values for an integer PETSc distributed matrix.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_INTG")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_INTG",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_all_values_set_intg

  !
  !================================================================================================================================
  !

  !>Sets all values in a single precision distributed matrix to the specified value.
  SUBROUTINE distributed_matrix_all_values_set_sp(DISTRIBUTED_MATRIX,VALUE,ERR,ERROR,*)
    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
    REAL(SP), INTENT(IN) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(varying_string), INTENT(OUT) :: ERROR
    !Local variables
    TYPE(varying_string) :: LOCAL_ERROR

    enters("DISTRIBUTED_MATRIX_ALL_VALUES_SET_SP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(distributed_matrix_vector_cmiss_type)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_all_values_set(distributed_matrix%CMISS%MATRIX,VALUE,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(distributed_matrix_vector_petsc_type)
          CALL flagerror("Cannot set all values for a single precision PETSc distributed matrix.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_SP")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_SP",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_all_values_set_sp

  !
  !================================================================================================================================
  !

  !>Sets all values in a double precision distributed matrix to the specified value.
  SUBROUTINE distributed_matrix_all_values_set_dp(DISTRIBUTED_MATRIX,VALUE,ERR,ERROR,*)
    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
    REAL(DP), INTENT(IN) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(varying_string), INTENT(OUT) :: ERROR
    !Local variables
    TYPE(varying_string) :: LOCAL_ERROR

    enters("DISTRIBUTED_MATRIX_ALL_VALUES_SET_DP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(distributed_matrix_vector_cmiss_type)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_all_values_set(distributed_matrix%CMISS%MATRIX,VALUE,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(distributed_matrix_vector_petsc_type)
          IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
            IF(abs(VALUE)<=zero_tolerance) THEN
              IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
                CALL petsc_matzeroentries(distributed_matrix%PETSC%OVERRIDE_MATRIX,err,error,*999)
              ELSE
                CALL petsc_matzeroentries(distributed_matrix%PETSC%MATRIX,err,error,*999)
              ENDIF
            ELSE
              CALL flagerror("Not implemented.",err,error,*999)
            ENDIF
          ELSE
            CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
          ENDIF
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_DP")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_DP",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_all_values_set_dp

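  !Note that for a double precision PETSc matrix only zeroing is supported above, so
  !the typical call through the generic interface is (illustrative sketch; "matrix",
  !"err" and "error" are assumed caller variables, *999 an assumed error label):
  !
  !  CALL distributed_matrix_all_values_set(matrix,0.0_DP,err,error,*999)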
  !
  !================================================================================================================================
  !

  !>Sets all values in a logical distributed matrix to the specified value.
  SUBROUTINE distributed_matrix_all_values_set_l(DISTRIBUTED_MATRIX,VALUE,ERR,ERROR,*)
    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
    LOGICAL, INTENT(IN) :: VALUE
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(varying_string), INTENT(OUT) :: ERROR
    !Local variables
    TYPE(varying_string) :: LOCAL_ERROR

    enters("DISTRIBUTED_MATRIX_ALL_VALUES_SET_L",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(distributed_matrix_vector_cmiss_type)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_all_values_set(distributed_matrix%CMISS%MATRIX,VALUE,err,error,*999)
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(distributed_matrix_vector_petsc_type)
          CALL flagerror("Cannot set all values for a logical PETSc distributed matrix.",err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ELSE
        CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_L")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_ALL_VALUES_SET_L",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_all_values_set_l

  !
  !================================================================================================================================
  !

  !>Finishes the creation of a CMISS distributed matrix.
  SUBROUTINE distributed_matrix_cmiss_create_finish(CMISS_MATRIX,ERR,ERROR,*)
    !Argument variables
    TYPE(distributed_matrix_cmiss_type), POINTER :: CMISS_MATRIX
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(varying_string), INTENT(OUT) :: ERROR
    !Local Variables
    TYPE(domain_mapping_type), POINTER :: DOMAIN_MAPPING
    INTEGER(INTG) :: DUMMY_ERR
    TYPE(varying_string) :: DUMMY_ERROR

    enters("DISTRIBUTED_MATRIX_CMISS_CREATE_FINISH",err,error,*998)

    IF(ASSOCIATED(cmiss_matrix)) THEN
      cmiss_matrix%BASE_TAG_NUMBER=distributed_data_id
      domain_mapping=>cmiss_matrix%DISTRIBUTED_MATRIX%ROW_DOMAIN_MAPPING
      IF(ASSOCIATED(domain_mapping)) THEN
        IF(domain_mapping%NUMBER_OF_DOMAINS==1) THEN
          distributed_data_id=distributed_data_id+1
        ELSE
          distributed_data_id=distributed_data_id+ &
            & domain_mapping%ADJACENT_DOMAINS_PTR(domain_mapping%NUMBER_OF_DOMAINS)
        END IF
        CALL matrix_create_finish(cmiss_matrix%MATRIX,err,error,*999)
      ELSE
        CALL flagerror("Distributed matrix row domain mapping is not associated.",err,error,*998)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*998)
    ENDIF

    exits("DISTRIBUTED_MATRIX_CMISS_CREATE_FINISH")
    RETURN
999 CALL distributed_matrix_cmiss_finalise(cmiss_matrix,dummy_err,dummy_error,*998)
998 errorsexits("DISTRIBUTED_MATRIX_CMISS_CREATE_FINISH",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_cmiss_create_finish

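  !Worked illustration of the tag allocation above (assumed values, not from the
  !source): with distributed_data_id=100000000 and a row mapping whose
  !ADJACENT_DOMAINS_PTR(NUMBER_OF_DOMAINS)=12, this matrix gets
  !BASE_TAG_NUMBER=100000000 and distributed_data_id advances to 100000012, so
  !each distributed object draws a unique, contiguous block of identifiers.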
  !
  !================================================================================================================================
  !

930 
932  SUBROUTINE distributed_matrix_cmiss_finalise(CMISS_MATRIX,ERR,ERROR,*)
934  !Argument variables
935  TYPE(distributed_matrix_cmiss_type), POINTER :: CMISS_MATRIX
936  INTEGER(INTG), INTENT(OUT) :: ERR
937  TYPE(varying_string), INTENT(OUT) :: ERROR
938  !Local Variables
939 
940  enters("DISTRIBUTED_MATRIX_CMISS_FINALISE",err,error,*999)
941 
942  IF(ASSOCIATED(cmiss_matrix)) THEN
943  CALL matrix_destroy(cmiss_matrix%MATRIX,err,error,*999)
944  DEALLOCATE(cmiss_matrix)
945  ENDIF
946 
947  exits("DISTRIBUTED_MATRIX_CMISS_FINALSE")
948  RETURN
949 999 errorsexits("DISTRIBUTED_MATRIX_CMISS_FINALISE",err,error)
950  RETURN 1
951  END SUBROUTINE distributed_matrix_cmiss_finalise
952 
953  !
954  !================================================================================================================================
955  !
956 
958  SUBROUTINE distributed_matrix_cmiss_initialise(DISTRIBUTED_MATRIX,ERR,ERROR,*)
960  !Argument variables
961  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
962  INTEGER(INTG), INTENT(OUT) :: ERR
963  TYPE(varying_string), INTENT(OUT) :: ERROR
964  !Local Variables
965  INTEGER(INTG) :: DUMMY_ERR
966  TYPE(domain_mapping_type), POINTER :: ROW_DOMAIN_MAPPING,COLUMN_DOMAIN_MAPPING
967  TYPE(varying_string) :: DUMMY_ERROR,LOCAL_ERROR
968 
969  enters("DISTRIBUTED_MATRIX_CMISS_INITIALISE",err,error,*998)
970 
971  IF(ASSOCIATED(distributed_matrix)) THEN
972  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
973  CALL flagerror("CMISS is already associated for this distributed matrix.",err,error,*998)
974  ELSE
975  row_domain_mapping=>distributed_matrix%ROW_DOMAIN_MAPPING
976  column_domain_mapping=>distributed_matrix%COLUMN_DOMAIN_MAPPING
977  IF(ASSOCIATED(row_domain_mapping)) THEN
978  IF(ASSOCIATED(column_domain_mapping)) THEN
979  ALLOCATE(distributed_matrix%CMISS,stat=err)
980  IF(err/=0) CALL flagerror("Could not allocate CMISS distributed matrix.",err,error,*999)
981  distributed_matrix%CMISS%DISTRIBUTED_MATRIX=>distributed_matrix
982  distributed_matrix%LIBRARY_TYPE=distributed_matrix_vector_cmiss_type
983  NULLIFY(distributed_matrix%CMISS%MATRIX)
984  !Set the defaults
985  CALL matrix_create_start(distributed_matrix%CMISS%MATRIX,err,error,*999)
986  CALL matrix_data_type_set(distributed_matrix%CMISS%MATRIX,matrix_vector_dp_type,err,error,*999)
987  CALL matrix_storage_type_set(distributed_matrix%CMISS%MATRIX,matrix_block_storage_type,err,error,*999)
988  SELECT CASE(distributed_matrix%GHOSTING_TYPE)
990  CALL matrix_size_set(distributed_matrix%CMISS%MATRIX,row_domain_mapping%TOTAL_NUMBER_OF_LOCAL, &
991  & column_domain_mapping%NUMBER_OF_GLOBAL,err,error,*999)
993  CALL matrix_size_set(distributed_matrix%CMISS%MATRIX,row_domain_mapping%NUMBER_OF_LOCAL, &
994  & column_domain_mapping%NUMBER_OF_GLOBAL,err,error,*999)
995  CASE DEFAULT
996  local_error="The distributed matrix ghosting type of "// &
997  & trim(numbertovstring(distributed_matrix%GHOSTING_TYPE,"*",err,error))//" is invalid."
998  CALL flagerror(local_error,err,error,*999)
999  END SELECT
1000  ELSE
1001  CALL flagerror("Distributed matrix column domain mapping is not associated.",err,error,*998)
1002  ENDIF
1003  ELSE
1004  CALL flagerror("Distributed matrix row domain mapping is not associated.",err,error,*998)
1005  ENDIF
1006  ENDIF
1007  ELSE
1008  CALL flagerror("Distributed matrix is not associated.",err,error,*998)
1009  ENDIF
1010 
1011  exits("DISTRIBUTED_MATRIX_CMISS_INITIALSE")
1012  RETURN
1013 999 IF(ASSOCIATED(distributed_matrix%CMISS)) &
1014  & CALL distributed_matrix_cmiss_finalise(distributed_matrix%CMISS,dummy_err,dummy_error,*999)
1015 998 errorsexits("DISTRIBUTED_MATRIX_CMISS_INITIALISE",err,error)
1016  RETURN 1
1018 
1019  !
1020  !================================================================================================================================
1021  !
1022 
  !>Finishes the creation of a distributed matrix.
  SUBROUTINE distributed_matrix_create_finish(DISTRIBUTED_MATRIX,ERR,ERROR,*)
    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(varying_string), INTENT(OUT) :: ERROR
    !Local Variables
    INTEGER(INTG) :: DUMMY_ERR
    TYPE(varying_string) :: DUMMY_ERROR,LOCAL_ERROR

    enters("DISTRIBUTED_MATRIX_CREATE_FINISH",err,error,*998)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        CALL flagerror("The distributed matrix has already been finished.",err,error,*998)
      ELSE
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(distributed_matrix_vector_cmiss_type)
          CALL distributed_matrix_cmiss_create_finish(distributed_matrix%CMISS,err,error,*999)
        CASE(distributed_matrix_vector_petsc_type)
          CALL distributed_matrix_petsc_create_finish(distributed_matrix%PETSC,err,error,*999)
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
        distributed_matrix%MATRIX_FINISHED=.TRUE.
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*998)
    ENDIF

    exits("DISTRIBUTED_MATRIX_CREATE_FINISH")
    RETURN
999 CALL distributed_matrix_finalise(distributed_matrix,dummy_err,dummy_error,*998)
998 errorsexits("DISTRIBUTED_MATRIX_CREATE_FINISH",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_create_finish

  !
  !================================================================================================================================
  !

  !>Starts the creation of a distributed matrix.
  SUBROUTINE distributed_matrix_create_start(ROW_DOMAIN_MAPPING,COLUMN_DOMAIN_MAPPING,DISTRIBUTED_MATRIX,ERR,ERROR,*)
    !Argument variables
    TYPE(domain_mapping_type), POINTER :: ROW_DOMAIN_MAPPING
    TYPE(domain_mapping_type), POINTER :: COLUMN_DOMAIN_MAPPING
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(varying_string), INTENT(OUT) :: ERROR
    !Local Variables
    INTEGER(INTG) :: DUMMY_ERR
    TYPE(varying_string) :: DUMMY_ERROR,LOCAL_ERROR

    enters("DISTRIBUTED_MATRIX_CREATE_START",err,error,*998)

    IF(ASSOCIATED(row_domain_mapping)) THEN
      IF(ASSOCIATED(column_domain_mapping)) THEN
        IF(ASSOCIATED(distributed_matrix)) THEN
          CALL flagerror("Distributed matrix is already associated.",err,error,*998)
        ELSE
          IF(row_domain_mapping%NUMBER_OF_DOMAINS==column_domain_mapping%NUMBER_OF_DOMAINS) THEN
            CALL distributed_matrix_initialise(row_domain_mapping,column_domain_mapping,distributed_matrix,err,error,*999)
            !The default values are set in DISTRIBUTED_MATRIX_INITIALISE
          ELSE
            local_error="The number of domains in the row domain mapping ("// &
              & trim(numbertovstring(row_domain_mapping%NUMBER_OF_DOMAINS,"*",err,error))// &
              & ") does not match the number of domains in the column domain mapping ("// &
              & trim(numbertovstring(column_domain_mapping%NUMBER_OF_DOMAINS,"*",err,error))//")."
            CALL flagerror(local_error,err,error,*999)
          ENDIF
        ENDIF
      ELSE
        CALL flagerror("Column domain mapping is not associated.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Row domain mapping is not associated.",err,error,*998)
    ENDIF

    exits("DISTRIBUTED_MATRIX_CREATE_START")
    RETURN
999 CALL distributed_matrix_finalise(distributed_matrix,dummy_err,dummy_error,*998)
998 errorsexits("DISTRIBUTED_MATRIX_CREATE_START",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_create_start

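  !A minimal usage sketch of the create-start/create-finish pattern above
  !(illustrative only; the mapping, matrix and error variables are assumed to be
  !declared by the caller, and *999 to be its error label):
  !
  !  NULLIFY(matrix)
  !  CALL distributed_matrix_create_start(rowMapping,columnMapping,matrix,err,error,*999)
  !  CALL distributed_matrix_data_type_set(matrix,distributed_matrix_vector_dp_type,err,error,*999)
  !  CALL distributed_matrix_storage_type_set(matrix,distributed_matrix_compressed_row_storage_type,err,error,*999)
  !  CALL distributed_matrix_create_finish(matrix,err,error,*999)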
  !
  !================================================================================================================================
  !

  !>Returns a pointer to the data of an integer distributed matrix. Note: the data must be
  !>restored with a call to DISTRIBUTED_MATRIX_DATA_RESTORE.
  SUBROUTINE distributed_matrix_data_get_intg(DISTRIBUTED_MATRIX,DATA,ERR,ERROR,*)
    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), POINTER :: DATA(:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(varying_string), INTENT(OUT) :: ERROR
    !Local Variables
    TYPE(varying_string) :: LOCAL_ERROR

    enters("DISTRIBUTED_MATRIX_DATA_GET_INTG",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(ASSOCIATED(data)) THEN
        CALL flagerror("Data is already associated.",err,error,*999)
      ELSE
        NULLIFY(data)
        IF(distributed_matrix%MATRIX_FINISHED) THEN
          SELECT CASE(distributed_matrix%LIBRARY_TYPE)
          CASE(distributed_matrix_vector_cmiss_type)
            IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
              CALL matrix_data_get(distributed_matrix%CMISS%MATRIX,DATA,err,error,*999)
            ELSE
              CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
            ENDIF
          CASE(distributed_matrix_vector_petsc_type)
            CALL flagerror("Cannot get data for an integer PETSc distributed matrix.",err,error,*999)
          CASE DEFAULT
            local_error="The distributed matrix library type of "// &
              & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
        ELSE
          CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
        ENDIF
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DATA_GET_INTG")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_DATA_GET_INTG",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_data_get_intg

  !
  !================================================================================================================================
  !

  !>Returns a pointer to the data of a single precision distributed matrix. Note: the data
  !>must be restored with a call to DISTRIBUTED_MATRIX_DATA_RESTORE.
  SUBROUTINE distributed_matrix_data_get_sp(DISTRIBUTED_MATRIX,DATA,ERR,ERROR,*)
    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
    REAL(SP), POINTER :: DATA(:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(varying_string), INTENT(OUT) :: ERROR
    !Local Variables
    TYPE(varying_string) :: LOCAL_ERROR

    enters("DISTRIBUTED_MATRIX_DATA_GET_SP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(ASSOCIATED(data)) THEN
        CALL flagerror("Data is already associated.",err,error,*999)
      ELSE
        NULLIFY(data)
        IF(distributed_matrix%MATRIX_FINISHED) THEN
          SELECT CASE(distributed_matrix%LIBRARY_TYPE)
          CASE(distributed_matrix_vector_cmiss_type)
            IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
              CALL matrix_data_get(distributed_matrix%CMISS%MATRIX,DATA,err,error,*999)
            ELSE
              CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
            ENDIF
          CASE(distributed_matrix_vector_petsc_type)
            CALL flagerror("Cannot get data for a single precision PETSc distributed matrix.",err,error,*999)
          CASE DEFAULT
            local_error="The distributed matrix library type of "// &
              & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
        ELSE
          CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
        ENDIF
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DATA_GET_SP")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_DATA_GET_SP",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_data_get_sp

  !
  !================================================================================================================================
  !

  !>Returns a pointer to the data of a double precision distributed matrix. Note: the data
  !>must be restored with a call to DISTRIBUTED_MATRIX_DATA_RESTORE.
  SUBROUTINE distributed_matrix_data_get_dp(DISTRIBUTED_MATRIX,DATA,ERR,ERROR,*)
    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
    REAL(DP), POINTER :: DATA(:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(varying_string), INTENT(OUT) :: ERROR
    !Local Variables
    REAL(DP), POINTER :: petscData(:,:)
    TYPE(varying_string) :: LOCAL_ERROR
    TYPE(c_ptr) :: TEMP_ME

    enters("DISTRIBUTED_MATRIX_DATA_GET_DP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(ASSOCIATED(data)) THEN
        CALL flagerror("Data is already associated.",err,error,*999)
      ELSE
        NULLIFY(data)
        IF(distributed_matrix%MATRIX_FINISHED) THEN
          SELECT CASE(distributed_matrix%LIBRARY_TYPE)
          CASE(distributed_matrix_vector_cmiss_type)
            IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
              CALL matrix_data_get(distributed_matrix%CMISS%MATRIX,DATA,err,error,*999)
            ELSE
              CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
            ENDIF
          CASE(distributed_matrix_vector_petsc_type)
            IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
              IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
                SELECT CASE(distributed_matrix%PETSC%STORAGE_TYPE)
                CASE(distributed_matrix_block_storage_type)
                  CALL petsc_matdensegetarrayf90(distributed_matrix%PETSC%OVERRIDE_MATRIX,petscdata,err,error,*999)
                CASE(distributed_matrix_diagonal_storage_type)
                  CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_column_major_storage_type)
                  CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_row_major_storage_type)
                  CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_compressed_row_storage_type)
                  CALL petsc_matseqaijgetarrayf90(distributed_matrix%PETSC%OVERRIDE_MATRIX,petscdata,err,error,*999)
                CASE(distributed_matrix_compressed_column_storage_type)
                  CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_row_column_storage_type)
                  CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
                CASE DEFAULT
                  local_error="The PETSc matrix storage type of "//trim(numbertovstring( &
                    & distributed_matrix%PETSC%STORAGE_TYPE,"*",err,error))//" is invalid."
                  CALL flagerror(local_error,err,error,*999)
                END SELECT
              ELSE
                SELECT CASE(distributed_matrix%PETSC%STORAGE_TYPE)
                CASE(distributed_matrix_block_storage_type)
                  CALL petsc_matdensegetarrayf90(distributed_matrix%PETSC%MATRIX,petscdata,err,error,*999)
                CASE(distributed_matrix_diagonal_storage_type)
                  CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_column_major_storage_type)
                  CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_row_major_storage_type)
                  CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_compressed_row_storage_type)
                  CALL petsc_matseqaijgetarrayf90(distributed_matrix%PETSC%MATRIX,petscdata,err,error,*999)
                CASE(distributed_matrix_compressed_column_storage_type)
                  CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_row_column_storage_type)
                  CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
                CASE DEFAULT
                  local_error="The PETSc matrix storage type of "//trim(numbertovstring( &
                    & distributed_matrix%PETSC%STORAGE_TYPE,"*",err,error))//" is invalid."
                  CALL flagerror(local_error,err,error,*999)
                END SELECT
              ENDIF
              !Convert the 2D array from PETSc to a 1D array.
              !Using C_F_POINTER(C_LOC(...)) is a bit ugly, but TRANSFER does not work with pointers.
              SELECT CASE(distributed_matrix%PETSC%STORAGE_TYPE)
              CASE(distributed_matrix_block_storage_type)
                temp_me=c_loc(petscdata(1,1))
                CALL c_f_pointer(temp_me,DATA,[distributed_matrix%PETSC%M*distributed_matrix%PETSC%N])
              CASE(distributed_matrix_diagonal_storage_type)
                CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
              CASE(distributed_matrix_column_major_storage_type)
                CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
              CASE(distributed_matrix_row_major_storage_type)
                CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
              CASE(distributed_matrix_compressed_row_storage_type)
                !PETSc returns an m * n array rather than one of length number-of-non-zeros, so the
                !returned 2D array actually contains junk data outside of the actual matrix. This is
                !a bug in PETSc, but the correct 1D data can be recovered here.
                temp_me=c_loc(petscdata(1,1))
                CALL c_f_pointer(temp_me,DATA,[distributed_matrix%PETSC%NUMBER_NON_ZEROS])
              CASE(distributed_matrix_compressed_column_storage_type)
                CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
              CASE(distributed_matrix_row_column_storage_type)
                CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
              CASE DEFAULT
                local_error="The PETSc matrix storage type of "//trim(numbertovstring( &
                  & distributed_matrix%PETSC%STORAGE_TYPE,"*",err,error))//" is invalid."
                CALL flagerror(local_error,err,error,*999)
              END SELECT
            ELSE
              CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
            ENDIF
          CASE DEFAULT
            local_error="The distributed matrix library type of "// &
              & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
        ELSE
          CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
        ENDIF
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DATA_GET_DP")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_DATA_GET_DP",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_data_get_dp

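  !The 2D-to-1D conversion above relies on C interoperability because TRANSFER does
  !not work with pointers. A self-contained sketch of the same trick (array names
  !and sizes are assumed for illustration):
  !
  !  USE iso_c_binding
  !  REAL(DP), POINTER :: array2D(:,:),array1D(:)
  !  TYPE(c_ptr) :: cPtr
  !
  !  ALLOCATE(array2D(3,4))
  !  cPtr=c_loc(array2D(1,1))                        !address of the first element
  !  CALL c_f_pointer(cPtr,array1D,[SIZE(array2D)])  !view the same memory as 1D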
  !
  !================================================================================================================================
  !

  !>Returns a pointer to the data of a logical distributed matrix. Note: the data must be
  !>restored with a call to DISTRIBUTED_MATRIX_DATA_RESTORE.
  SUBROUTINE distributed_matrix_data_get_l(DISTRIBUTED_MATRIX,DATA,ERR,ERROR,*)
    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
    LOGICAL, POINTER :: DATA(:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(varying_string), INTENT(OUT) :: ERROR
    !Local Variables
    TYPE(varying_string) :: LOCAL_ERROR

    enters("DISTRIBUTED_MATRIX_DATA_GET_L",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(ASSOCIATED(data)) THEN
        CALL flagerror("Data is already associated.",err,error,*999)
      ELSE
        NULLIFY(data)
        IF(distributed_matrix%MATRIX_FINISHED) THEN
          SELECT CASE(distributed_matrix%LIBRARY_TYPE)
          CASE(distributed_matrix_vector_cmiss_type)
            IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
              CALL matrix_data_get(distributed_matrix%CMISS%MATRIX,DATA,err,error,*999)
            ELSE
              CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
            ENDIF
          CASE(distributed_matrix_vector_petsc_type)
            CALL flagerror("Cannot get data for a logical PETSc distributed matrix.",err,error,*999)
          CASE DEFAULT
            local_error="The distributed matrix library type of "// &
              & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
        ELSE
          CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
        ENDIF
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DATA_GET_L")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_DATA_GET_L",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_data_get_l

  !
  !================================================================================================================================
  !

  !>Restores the integer data pointer returned from DISTRIBUTED_MATRIX_DATA_GET once the data has finished being used.
  SUBROUTINE distributed_matrix_data_restore_intg(DISTRIBUTED_MATRIX,DATA,ERR,ERROR,*)
    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), POINTER :: DATA(:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(varying_string), INTENT(OUT) :: ERROR
    !Local Variables
    TYPE(varying_string) :: LOCAL_ERROR

    enters("DISTRIBUTED_MATRIX_DATA_RESTORE_INTG",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(ASSOCIATED(data)) THEN
        IF(distributed_matrix%MATRIX_FINISHED) THEN
          SELECT CASE(distributed_matrix%LIBRARY_TYPE)
          CASE(distributed_matrix_vector_cmiss_type)
            NULLIFY(data)
          CASE(distributed_matrix_vector_petsc_type)
            CALL flagerror("Cannot restore data for an integer PETSc distributed matrix.",err,error,*999)
          CASE DEFAULT
            local_error="The distributed matrix library type of "// &
              & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
        ELSE
          CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
        ENDIF
      ELSE
        CALL flagerror("Data is not associated.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DATA_RESTORE_INTG")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_DATA_RESTORE_INTG",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_data_restore_intg

  !
  !================================================================================================================================
  !

  !>Restores the single precision data pointer returned from DISTRIBUTED_MATRIX_DATA_GET once the data has finished being used.
  SUBROUTINE distributed_matrix_data_restore_sp(DISTRIBUTED_MATRIX,DATA,ERR,ERROR,*)
    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
    REAL(SP), POINTER :: DATA(:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(varying_string), INTENT(OUT) :: ERROR
    !Local Variables
    TYPE(varying_string) :: LOCAL_ERROR

    enters("DISTRIBUTED_MATRIX_DATA_RESTORE_SP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(ASSOCIATED(data)) THEN
        IF(distributed_matrix%MATRIX_FINISHED) THEN
          SELECT CASE(distributed_matrix%LIBRARY_TYPE)
          CASE(distributed_matrix_vector_cmiss_type)
            NULLIFY(data)
          CASE(distributed_matrix_vector_petsc_type)
            CALL flagerror("Cannot restore data for a single precision PETSc distributed matrix.",err,error,*999)
          CASE DEFAULT
            local_error="The distributed matrix library type of "// &
              & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
        ELSE
          CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
        ENDIF
      ELSE
        CALL flagerror("Data is not associated.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DATA_RESTORE_SP")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_DATA_RESTORE_SP",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_data_restore_sp

  !
  !================================================================================================================================
  !

  !>Restores the double precision data pointer returned from DISTRIBUTED_MATRIX_DATA_GET once the data has finished being used.
  SUBROUTINE distributed_matrix_data_restore_dp(DISTRIBUTED_MATRIX,DATA,ERR,ERROR,*)
    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
    REAL(DP), POINTER :: DATA(:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(varying_string), INTENT(OUT) :: ERROR
    !Local Variables
    REAL(DP), POINTER :: petscData(:,:)
    TYPE(varying_string) :: LOCAL_ERROR
    TYPE(c_ptr) :: TEMP_ME

    enters("DISTRIBUTED_MATRIX_DATA_RESTORE_DP",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(ASSOCIATED(data)) THEN
        IF(distributed_matrix%MATRIX_FINISHED) THEN
          SELECT CASE(distributed_matrix%LIBRARY_TYPE)
          CASE(distributed_matrix_vector_cmiss_type)
            NULLIFY(data)
          CASE(distributed_matrix_vector_petsc_type)
            IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
              SELECT CASE(distributed_matrix%PETSC%STORAGE_TYPE)
              CASE(distributed_matrix_block_storage_type)
                !Convert the 1D array back to the 2D array expected by PETSc
                temp_me=c_loc(DATA(1))
                CALL c_f_pointer(temp_me,petscdata,[distributed_matrix%PETSC%M,distributed_matrix%PETSC%N])
              CASE(distributed_matrix_diagonal_storage_type)
                CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
              CASE(distributed_matrix_column_major_storage_type)
                CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
              CASE(distributed_matrix_row_major_storage_type)
                CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
              CASE(distributed_matrix_compressed_row_storage_type)
                !PETSc expects an m * n 2D array rather than a 1D array with length equal to the
                !number of non-zeros. This is a bug in PETSc, so it has to be given back a 2D array
                !with junk at the end.
                temp_me=c_loc(DATA(1))
                CALL c_f_pointer(temp_me,petscdata,[distributed_matrix%PETSC%M,distributed_matrix%PETSC%N])
              CASE(distributed_matrix_compressed_column_storage_type)
                CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
              CASE(distributed_matrix_row_column_storage_type)
                CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
              CASE DEFAULT
                local_error="The PETSc matrix storage type of "//trim(numbertovstring( &
                  & distributed_matrix%PETSC%STORAGE_TYPE,"*",err,error))//" is invalid."
                CALL flagerror(local_error,err,error,*999)
              END SELECT
              IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
                SELECT CASE(distributed_matrix%PETSC%STORAGE_TYPE)
                CASE(distributed_matrix_block_storage_type)
                  CALL petsc_matdenserestorearrayf90(distributed_matrix%PETSC%OVERRIDE_MATRIX,petscdata,err,error,*999)
                CASE(distributed_matrix_diagonal_storage_type)
                  CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_column_major_storage_type)
                  CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_row_major_storage_type)
                  CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_compressed_row_storage_type)
                  CALL petsc_matseqaijrestorearrayf90(distributed_matrix%PETSC%OVERRIDE_MATRIX,petscdata,err,error,*999)
                CASE(distributed_matrix_compressed_column_storage_type)
                  CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_row_column_storage_type)
                  CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
                CASE DEFAULT
                  local_error="The PETSc matrix storage type of "//trim(numbertovstring( &
                    & distributed_matrix%PETSC%STORAGE_TYPE,"*",err,error))//" is invalid."
                  CALL flagerror(local_error,err,error,*999)
                END SELECT
              ELSE
                SELECT CASE(distributed_matrix%PETSC%STORAGE_TYPE)
                CASE(distributed_matrix_block_storage_type)
                  CALL petsc_matdenserestorearrayf90(distributed_matrix%PETSC%MATRIX,petscdata,err,error,*999)
                CASE(distributed_matrix_diagonal_storage_type)
                  CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_column_major_storage_type)
                  CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_row_major_storage_type)
                  CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_compressed_row_storage_type)
                  CALL petsc_matseqaijrestorearrayf90(distributed_matrix%PETSC%MATRIX,petscdata,err,error,*999)
                CASE(distributed_matrix_compressed_column_storage_type)
                  CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
                CASE(distributed_matrix_row_column_storage_type)
                  CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
                CASE DEFAULT
                  local_error="The PETSc matrix storage type of "//trim(numbertovstring( &
                    & distributed_matrix%PETSC%STORAGE_TYPE,"*",err,error))//" is invalid."
                  CALL flagerror(local_error,err,error,*999)
                END SELECT
              ENDIF
            ELSE
              CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
            ENDIF
          CASE DEFAULT
            local_error="The distributed matrix library type of "// &
              & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
        ELSE
          CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
        ENDIF
      ELSE
        CALL flagerror("Data is not associated.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DATA_RESTORE_DP")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_DATA_RESTORE_DP",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_data_restore_dp

  !
  !================================================================================================================================
  !

  !>Restores the logical data pointer returned from DISTRIBUTED_MATRIX_DATA_GET once the data has finished being used.
  SUBROUTINE distributed_matrix_data_restore_l(DISTRIBUTED_MATRIX,DATA,ERR,ERROR,*)
    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
    LOGICAL, POINTER :: DATA(:)
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(varying_string), INTENT(OUT) :: ERROR
    !Local Variables
    TYPE(varying_string) :: LOCAL_ERROR

    enters("DISTRIBUTED_MATRIX_DATA_RESTORE_L",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(ASSOCIATED(data)) THEN
        IF(distributed_matrix%MATRIX_FINISHED) THEN
          SELECT CASE(distributed_matrix%LIBRARY_TYPE)
          CASE(distributed_matrix_vector_cmiss_type)
            NULLIFY(data)
          CASE(distributed_matrix_vector_petsc_type)
            CALL flagerror("Cannot restore data for a logical PETSc distributed matrix.",err,error,*999)
          CASE DEFAULT
            local_error="The distributed matrix library type of "// &
              & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
        ELSE
          CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
        ENDIF
      ELSE
        CALL flagerror("Data is not associated.",err,error,*999)
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DATA_RESTORE_L")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_DATA_RESTORE_L",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_data_restore_l

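  !The data get/restore routines above are used as a pair: obtain a pointer to the
  !local matrix data, work on it in place, then restore it. A sketch (assumes
  !"matrix" is a finished double precision distributed matrix and *999 the
  !caller's error label):
  !
  !  REAL(DP), POINTER :: matrixData(:)
  !
  !  NULLIFY(matrixData)
  !  CALL distributed_matrix_data_get(matrix,matrixData,err,error,*999)
  !  matrixData=0.5_DP*matrixData   !scale the local entries in place
  !  CALL distributed_matrix_data_restore(matrix,matrixData,err,error,*999)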
  !
  !================================================================================================================================
  !

  !>Gets the data type of a distributed matrix.
  SUBROUTINE distributedmatrix_datatypeget(matrix,dataType,err,error,*)
    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: matrix
    INTEGER(INTG), INTENT(OUT) :: dataType
    INTEGER(INTG), INTENT(OUT) :: err
    TYPE(varying_string), INTENT(OUT) :: error

    enters("DistributedMatrix_DataTypeGet",err,error,*999)

    IF(ASSOCIATED(matrix)) THEN
      IF(.NOT.matrix%matrix_finished) THEN
        CALL flagerror("The matrix has not been finished.",err,error,*999)
      ELSE
        dataType=matrix%data_type
      END IF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    END IF

    exits("DistributedMatrix_DataTypeGet")
    RETURN
999 errorsexits("DistributedMatrix_DataTypeGet",err,error)
    RETURN 1
  END SUBROUTINE distributedmatrix_datatypeget

  !
  !================================================================================================================================
  !

  !>Sets/changes the data type of a distributed matrix.
  SUBROUTINE distributed_matrix_data_type_set(DISTRIBUTED_MATRIX,DATA_TYPE,ERR,ERROR,*)
    !Argument variables
    TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
    INTEGER(INTG), INTENT(IN) :: DATA_TYPE
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(varying_string), INTENT(OUT) :: ERROR
    !Local Variables
    TYPE(varying_string) :: LOCAL_ERROR

    enters("DISTRIBUTED_MATRIX_DATA_TYPE_SET",err,error,*999)

    IF(ASSOCIATED(distributed_matrix)) THEN
      IF(distributed_matrix%MATRIX_FINISHED) THEN
        CALL flagerror("The distributed matrix has already been finished.",err,error,*999)
      ELSE
        SELECT CASE(distributed_matrix%LIBRARY_TYPE)
        CASE(distributed_matrix_vector_cmiss_type)
          IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
            CALL matrix_data_type_set(distributed_matrix%CMISS%MATRIX,data_type,err,error,*999)
            distributed_matrix%DATA_TYPE=data_type
          ELSE
            CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
          ENDIF
        CASE(distributed_matrix_vector_petsc_type)
          SELECT CASE(data_type)
          CASE(distributed_matrix_vector_intg_type)
            CALL flagerror("An integer distributed PETSc matrix is not implemented.",err,error,*999)
          CASE(distributed_matrix_vector_sp_type)
            CALL flagerror("A single precision distributed PETSc matrix is not implemented.",err,error,*999)
          CASE(distributed_matrix_vector_dp_type)
            distributed_matrix%DATA_TYPE=distributed_matrix_vector_dp_type
          CASE(distributed_matrix_vector_l_type)
            CALL flagerror("A logical distributed PETSc matrix is not implemented.",err,error,*999)
          CASE DEFAULT
            local_error="The specified data type of "//trim(numbertovstring(data_type,"*",err,error))//" is invalid."
            CALL flagerror(local_error,err,error,*999)
          END SELECT
        CASE DEFAULT
          local_error="The distributed matrix library type of "// &
            & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
          CALL flagerror(local_error,err,error,*999)
        END SELECT
      ENDIF
    ELSE
      CALL flagerror("Distributed matrix is not associated.",err,error,*999)
    ENDIF

    exits("DISTRIBUTED_MATRIX_DATA_TYPE_SET")
    RETURN
999 errorsexits("DISTRIBUTED_MATRIX_DATA_TYPE_SET",err,error)
    RETURN 1
  END SUBROUTINE distributed_matrix_data_type_set

  !
  !================================================================================================================================
  !

1745  SUBROUTINE distributedmatrix_dimensionsget(distributedMatrix,m,n,err,error,*)
1747  !Argument variables
1748  TYPE(distributed_matrix_type), POINTER :: distributedMatrix
1749  INTEGER(INTG), INTENT(OUT) :: m
1750  INTEGER(INTG), INTENT(OUT) :: n
1751  INTEGER(INTG), INTENT(OUT) :: err
1752  TYPE(varying_string), INTENT(OUT) :: error
1753  !Local variables
1754  TYPE(matrix_type), POINTER :: matrix
1755  TYPE(distributed_matrix_petsc_type), POINTER :: petscMatrix
1756  TYPE(varying_string) :: localError
1757 
1758  enters("DistributedMatrix_DimensionsGet",err,error,*999)
1759 
1760  IF(ASSOCIATED(distributedmatrix)) THEN
1761  SELECT CASE(distributedmatrix%library_type)
1763  IF(ASSOCIATED(distributedmatrix%cmiss)) THEN
1764  matrix=>distributedmatrix%cmiss%matrix
1765  IF(ASSOCIATED(matrix)) THEN
1766  IF(.NOT.matrix%matrix_finished) THEN
1767  CALL flag_error("The matrix has not been finished.",err,error,*999)
1768  ELSE
1769  m=matrix%m
1770  n=matrix%n
1771  END IF
1772  ELSE
1773  CALL flag_error("Distributed matrix CMISS matrix is not associated.",err,error,*999)
1774  END IF
1775  ELSE
1776  CALL flag_error("Distributed matrix CMISS is not associated.",err,error,*999)
1777  END IF
1779  petscmatrix=>distributedmatrix%petsc
1780  IF(ASSOCIATED(petscmatrix)) THEN
1781  m=petscmatrix%m
1782  n=petscmatrix%n
1783  ELSE
1784  CALL flag_error("Distributed matrix PETSc is not associated.",err,error,*999)
1785  END IF
1786  CASE DEFAULT
1787  localerror="The distributed matrix library type of "// &
1788  & trim(number_to_vstring(distributedmatrix%library_type,"*",err,error))//" is invalid."
1789  CALL flag_error(localerror,err,error,*999)
1790  END SELECT
1791  ELSE
1792  CALL flag_error("Distributed matrix is not associated.",err,error,*999)
1793  END IF
1794 
1795  exits("DistributedMatrix_DimensionsGet")
1796  RETURN
1797 999 errorsexits("DistributedMatrix_DimensionsGet",err,error)
1798  RETURN 1
1799  END SUBROUTINE distributedmatrix_dimensionsget
1800 
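  !Illustrative usage sketch (assumed caller context; 'matrix' is a finished distributed matrix):
  !  INTEGER(INTG) :: m,n
  !  CALL DistributedMatrix_DimensionsGet(matrix,m,n,err,error,*999)
  !On return m and n hold the local row and column dimensions of the underlying matrix.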
1801  !
1802  !================================================================================================================================
1803  !
1804 
1806  SUBROUTINE distributed_matrix_destroy(DISTRIBUTED_MATRIX,ERR,ERROR,*)
1808  !Argument variables
1809  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
1810  INTEGER(INTG), INTENT(OUT) :: ERR
1811  TYPE(varying_string), INTENT(OUT) :: ERROR
1812  !Local Variables
1813 
1814  enters("DISTRIBUTED_MATRIX_DESTROY",err,error,*999)
1815 
1816  IF(ASSOCIATED(distributed_matrix)) THEN
1817  CALL distributed_matrix_finalise(distributed_matrix,err,error,*999)
1818  ELSE
1819  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
1820  ENDIF
1821 
1822  exits("DISTRIBUTED_MATRIX_DESTROY")
1823  RETURN
1824 999 errorsexits("DISTRIBUTED_MATRIX_DESTROY",err,error)
1825  RETURN 1
1826  END SUBROUTINE distributed_matrix_destroy
1827 
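  !Illustrative lifecycle sketch (assumed caller context): destroying a matrix finalises the
  !CMISS and PETSc storage and deallocates the matrix itself, so the pointer must not be
  !reused without a new create:
  !  CALL DISTRIBUTED_MATRIX_CREATE_START(rowMapping,columnMapping,matrix,err,error,*999)
  !  CALL DISTRIBUTED_MATRIX_CREATE_FINISH(matrix,err,error,*999)
  !  !...use the matrix...
  !  CALL DISTRIBUTED_MATRIX_DESTROY(matrix,err,error,*999)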
1828  !
1829  !================================================================================================================================
1830  !
1831 
1833  SUBROUTINE distributed_matrix_duplicate(DISTRIBUTED_MATRIX,NEW_DISTRIBUTED_MATRIX,ERR,ERROR,*)
1835  !Argument variables
1836  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
1837  TYPE(distributed_matrix_type), POINTER :: NEW_DISTRIBUTED_MATRIX
1838  INTEGER(INTG), INTENT(OUT) :: ERR
1839  TYPE(varying_string), INTENT(OUT) :: ERROR
1840  !Local Variables
1841  INTEGER(INTG) :: DUMMY_ERR
1842  TYPE(varying_string) :: DUMMY_ERROR,LOCAL_ERROR
1843 
1844  enters("DISTRIBUTED_MATRIX_DUPLICATE",err,error,*998)
1845 
1846  IF(ASSOCIATED(distributed_matrix)) THEN
1847  IF(ASSOCIATED(new_distributed_matrix)) THEN
1848  CALL flagerror("New distributed matrix is already associated.",err,error,*998)
1849  ELSE
1850  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
1852  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
1853  CALL distributed_matrix_create_start(distributed_matrix%ROW_DOMAIN_MAPPING,distributed_matrix%COLUMN_DOMAIN_MAPPING, &
1854  & new_distributed_matrix,err,error,*999)
1855  CALL matrix_duplicate(distributed_matrix%CMISS%MATRIX,new_distributed_matrix%CMISS%MATRIX,err,error,*999)
1856  CALL distributed_matrix_create_finish(new_distributed_matrix,err,error,*999)
1857  ELSE
1858  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
1859  ENDIF
1861  CALL distributed_matrix_create_start(distributed_matrix%ROW_DOMAIN_MAPPING,distributed_matrix%COLUMN_DOMAIN_MAPPING, &
1862  & new_distributed_matrix,err,error,*999)
1863  CALL distributed_matrix_library_type_set(new_distributed_matrix,distributed_matrix_vector_petsc_type,err,error,*999)
1864  CALL distributed_matrix_create_finish(new_distributed_matrix,err,error,*999)
1865  CASE DEFAULT
1866  local_error="The distributed matrix library type of "// &
1867  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
1868  CALL flagerror(local_error,err,error,*999)
1869  END SELECT
1870  ENDIF
1871  ELSE
1872  CALL flagerror("Distributed matrix is not associated.",err,error,*998)
1873  ENDIF
1874 
1875  exits("DISTRIBUTED_MATRIX_DUPLICATE")
1876  RETURN
1877 999 CALL distributed_matrix_finalise(new_distributed_matrix,dummy_err,dummy_error,*998)
1878 998 errorsexits("DISTRIBUTED_MATRIX_DUPLICATE",err,error)
1879  RETURN 1
1880  END SUBROUTINE distributed_matrix_duplicate
1881 
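  !Illustrative usage sketch (assumed caller context): the new matrix pointer must be
  !unassociated on entry, as checked above:
  !  TYPE(distributed_matrix_type), POINTER :: newMatrix
  !  NULLIFY(newMatrix)
  !  CALL DISTRIBUTED_MATRIX_DUPLICATE(matrix,newMatrix,err,error,*999)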
1882  !
1883  !================================================================================================================================
1884  !
1885 
1887  SUBROUTINE distributed_matrix_finalise(DISTRIBUTED_MATRIX,ERR,ERROR,*)
1889  !Argument variables
1890  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
1891  INTEGER(INTG), INTENT(OUT) :: ERR
1892  TYPE(varying_string), INTENT(OUT) :: ERROR
1893  !Local Variables
1894 
1895  enters("DISTRIBUTED_MATRIX_FINALISE",err,error,*999)
1896 
1897  IF(ASSOCIATED(distributed_matrix)) THEN
1898  CALL distributed_matrix_cmiss_finalise(distributed_matrix%CMISS,err,error,*999)
1899  CALL distributed_matrix_petsc_finalise(distributed_matrix%PETSC,err,error,*999)
1900  DEALLOCATE(distributed_matrix)
1901  ENDIF
1902 
1903  exits("DISTRIBUTED_MATRIX_FINALISE")
1904  RETURN
1905 999 errorsexits("DISTRIBUTED_MATRIX_FINALISE",err,error)
1906  RETURN 1
1907  END SUBROUTINE distributed_matrix_finalise
1908 
1909  !
1910  !================================================================================================================================
1911  !
1912 
1914  SUBROUTINE distributed_matrix_form(DISTRIBUTED_MATRIX,ERR,ERROR,*)
1916  !Argument variables
1917  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
1918  INTEGER(INTG), INTENT(OUT) :: ERR
1919  TYPE(varying_string), INTENT(OUT) :: ERROR
1920  !Local Variables
1921  INTEGER(INTG) :: column_idx,row
1922  TYPE(distributed_matrix_petsc_type), POINTER :: PETSC_MATRIX
1923  TYPE(varying_string) :: LOCAL_ERROR
1924 
1925  enters("DISTRIBUTED_MATRIX_FORM",err,error,*999)
1926 
1927  IF(ASSOCIATED(distributed_matrix)) THEN
1928  IF(distributed_matrix%MATRIX_FINISHED) THEN
1929  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
1931  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
1932  CALL flagerror("Not implemented.",err,error,*999)
1933  ELSE
1934  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
1935  ENDIF
1937  petsc_matrix=>distributed_matrix%PETSC
1938  IF(ASSOCIATED(petsc_matrix)) THEN
1939  SELECT CASE(petsc_matrix%STORAGE_TYPE)
1941  CALL petsc_matzeroentries(petsc_matrix%MATRIX,err,error,*999)
1943  CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
1945  CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
1947  CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
1949  IF(petsc_matrix%USE_OVERRIDE_MATRIX) THEN
1950  DO row=1,petsc_matrix%M
1951  DO column_idx=petsc_matrix%ROW_INDICES(row),petsc_matrix%ROW_INDICES(row+1)-1
1952  CALL petsc_matsetvalue(petsc_matrix%OVERRIDE_MATRIX,petsc_matrix%GLOBAL_ROW_NUMBERS(row), &
1953  & petsc_matrix%COLUMN_INDICES(column_idx)-1,0.0_dp,petsc_insert_values, &
1954  & err,error,*999) !PETSc uses 0-based indices
1955  ENDDO !column_idx
1956  ENDDO !row
1957  ELSE
1958  DO row=1,petsc_matrix%M
1959  DO column_idx=petsc_matrix%ROW_INDICES(row),petsc_matrix%ROW_INDICES(row+1)-1
1960  CALL petsc_matsetvalue(petsc_matrix%MATRIX,petsc_matrix%GLOBAL_ROW_NUMBERS(row), &
1961  & petsc_matrix%COLUMN_INDICES(column_idx)-1,0.0_dp,petsc_insert_values, &
1962  & err,error,*999) !PETSc uses 0-based indices
1963  ENDDO !column_idx
1964  ENDDO !row
1965  ENDIF
1967  CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
1969  CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
1970  CASE DEFAULT
1971  local_error="The PETSc matrix storage type of "//trim(numbertovstring(petsc_matrix%STORAGE_TYPE,"*",err,error))// &
1972  & " is invalid."
1973  CALL flagerror(local_error,err,error,*999)
1974  END SELECT
1975  IF(petsc_matrix%USE_OVERRIDE_MATRIX) THEN
1976  CALL petsc_matassemblybegin(petsc_matrix%OVERRIDE_MATRIX,petsc_mat_final_assembly,err,error,*999)
1977  CALL petsc_matassemblyend(petsc_matrix%OVERRIDE_MATRIX,petsc_mat_final_assembly,err,error,*999)
1978  ELSE
1979  CALL petsc_matassemblybegin(petsc_matrix%MATRIX,petsc_mat_final_assembly,err,error,*999)
1980  CALL petsc_matassemblyend(petsc_matrix%MATRIX,petsc_mat_final_assembly,err,error,*999)
1981  ENDIF
1982  ELSE
1983  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
1984  ENDIF
1985  CASE DEFAULT
1986  local_error="The distributed matrix library type of "// &
1987  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
1988  CALL flagerror(local_error,err,error,*999)
1989  END SELECT
1990  ELSE
1991  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
1992  ENDIF
1993  ENDIF
1994 
1995  exits("DISTRIBUTED_MATRIX_FORM")
1996  RETURN
1997 999 errorsexits("DISTRIBUTED_MATRIX_FORM",err,error)
1998  RETURN 1
1999  END SUBROUTINE distributed_matrix_form
2000 
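  !Illustrative usage sketch (assumed caller context): for a PETSc compressed-row matrix,
  !forming the matrix inserts an explicit zero at every pre-allocated location and assembles
  !it, so that later value insertions cannot hit an unallocated location:
  !  CALL DISTRIBUTED_MATRIX_STORAGE_LOCATIONS_SET(matrix,rowIndices,columnIndices,err,error,*999)
  !  CALL DISTRIBUTED_MATRIX_CREATE_FINISH(matrix,err,error,*999)
  !  CALL DISTRIBUTED_MATRIX_FORM(matrix,err,error,*999)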
2001  !
2002  !================================================================================================================================
2003  !
2004 
2006  SUBROUTINE distributed_matrix_ghosting_type_set(DISTRIBUTED_MATRIX,GHOSTING_TYPE,ERR,ERROR,*)
2008  !Argument variables
2009  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2010  INTEGER(INTG), INTENT(IN) :: GHOSTING_TYPE
2011  INTEGER(INTG), INTENT(OUT) :: ERR
2012  TYPE(varying_string), INTENT(OUT) :: ERROR
2013  !Local Variables
2014  TYPE(domain_mapping_type), POINTER :: ROW_DOMAIN_MAPPING,COLUMN_DOMAIN_MAPPING
2015  TYPE(varying_string) :: LOCAL_ERROR
2016 
2017  enters("DISTRIBUTED_MATRIX_GHOSTING_TYPE_SET",err,error,*999)
2018 
2019  IF(ASSOCIATED(distributed_matrix)) THEN
2020  IF(distributed_matrix%MATRIX_FINISHED) THEN
2021  CALL flagerror("The distributed matrix has already been finished.",err,error,*999)
2022  ELSE
2023  row_domain_mapping=>distributed_matrix%ROW_DOMAIN_MAPPING
2024  column_domain_mapping=>distributed_matrix%COLUMN_DOMAIN_MAPPING
2025  IF(ASSOCIATED(row_domain_mapping)) THEN
2026  IF(ASSOCIATED(column_domain_mapping)) THEN
2027  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
2029  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
2030  SELECT CASE(ghosting_type)
2032  CALL matrix_size_set(distributed_matrix%CMISS%MATRIX,row_domain_mapping%TOTAL_NUMBER_OF_LOCAL, &
2033  & column_domain_mapping%NUMBER_OF_GLOBAL,err,error,*999)
2035  CALL matrix_size_set(distributed_matrix%CMISS%MATRIX,row_domain_mapping%NUMBER_OF_LOCAL, &
2036  & column_domain_mapping%NUMBER_OF_GLOBAL,err,error,*999)
2037  CASE DEFAULT
2038  local_error="The given ghosting type of "//trim(numbertovstring(ghosting_type,"*",err,error))//" is invalid."
2039  CALL flagerror(local_error,err,error,*999)
2040  END SELECT
2041  ELSE
2042  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
2043  ENDIF
2045  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
2046  SELECT CASE(ghosting_type)
2048  distributed_matrix%PETSC%M=row_domain_mapping%TOTAL_NUMBER_OF_LOCAL !the ghosting type determines the local row count M, cf. DISTRIBUTED_MATRIX_PETSC_INITIALISE
2050  distributed_matrix%PETSC%M=row_domain_mapping%NUMBER_OF_LOCAL
2051  CASE DEFAULT
2052  local_error="The given ghosting type of "//trim(numbertovstring(ghosting_type,"*",err,error))//" is invalid."
2053  CALL flagerror(local_error,err,error,*999)
2054  END SELECT
2055  ELSE
2056  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
2057  ENDIF
2058  CASE DEFAULT
2059  local_error="The distributed matrix library type of "// &
2060  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
2061  CALL flagerror(local_error,err,error,*999)
2062  END SELECT
2063  distributed_matrix%GHOSTING_TYPE=ghosting_type
2064  ELSE
2065  CALL flagerror("Distributed matrix column domain mapping is not associated.",err,error,*999)
2066  ENDIF
2067  ELSE
2068  CALL flagerror("Distributed matrix row domain mapping is not associated.",err,error,*999)
2069  ENDIF
2070  ENDIF
2071  ELSE
2072  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
2073  ENDIF
2074 
2075  exits("DISTRIBUTED_MATRIX_GHOSTING_TYPE_SET")
2076  RETURN
2077 999 errorsexits("DISTRIBUTED_MATRIX_GHOSTING_TYPE_SET",err,error)
2078  RETURN 1
2079  END SUBROUTINE distributed_matrix_ghosting_type_set
2080 
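  !Illustrative usage sketch (assumed caller context): excluding ghost rows must be requested
  !before the matrix is finished:
  !  CALL DISTRIBUTED_MATRIX_GHOSTING_TYPE_SET(matrix,DISTRIBUTED_MATRIX_VECTOR_NO_GHOSTS_TYPE, &
  !    & err,error,*999)
  !  CALL DISTRIBUTED_MATRIX_CREATE_FINISH(matrix,err,error,*999)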
2081  !
2082  !================================================================================================================================
2083  !
2084 
2086  SUBROUTINE distributed_matrix_library_type_set(DISTRIBUTED_MATRIX,LIBRARY_TYPE,ERR,ERROR,*)
2088  !Argument variables
2089  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2090  INTEGER(INTG), INTENT(IN) :: LIBRARY_TYPE
2091  INTEGER(INTG), INTENT(OUT) :: ERR
2092  TYPE(varying_string), INTENT(OUT) :: ERROR
2093  !Local Variables
2094  INTEGER(INTG) :: DUMMY_ERR,OLD_LIBRARY_TYPE
2095  TYPE(varying_string) :: DUMMY_ERROR,LOCAL_ERROR
2096 
2097  enters("DISTRIBUTED_MATRIX_LIBRARY_TYPE_SET",err,error,*998)
2098 
2099  IF(ASSOCIATED(distributed_matrix)) THEN
2100  IF(distributed_matrix%MATRIX_FINISHED) THEN
2101  CALL flagerror("The distributed matrix has already been finished.",err,error,*998)
2102  ELSE
2103  old_library_type=distributed_matrix%LIBRARY_TYPE
2104  IF(library_type/=old_library_type) THEN
2105  !Initialise the new library type
2106  SELECT CASE(library_type)
2108  CALL distributed_matrix_cmiss_initialise(distributed_matrix,err,error,*999)
2110  CALL distributed_matrix_petsc_initialise(distributed_matrix,err,error,*999)
2111  CASE DEFAULT
2112  local_error="The library type of "//trim(numbertovstring(library_type,"*",err,error))//" is invalid."
2113  CALL flagerror(local_error,err,error,*999)
2114  END SELECT
2115  !Finalise the old library type
2116  SELECT CASE(old_library_type)
2118  CALL distributed_matrix_cmiss_finalise(distributed_matrix%CMISS,err,error,*999)
2120  CALL distributed_matrix_petsc_finalise(distributed_matrix%PETSC,err,error,*999)
2121  CASE DEFAULT
2122  local_error="The distributed matrix library type of "// &
2123  & trim(numbertovstring(old_library_type,"*",err,error))//" is invalid."
2124  CALL flagerror(local_error,err,error,*999)
2125  END SELECT
2126  distributed_matrix%LIBRARY_TYPE=library_type
2127  ENDIF
2128  ENDIF
2129  ELSE
2130  CALL flagerror("Distributed matrix is not associated.",err,error,*998)
2131  ENDIF
2132 
2133  exits("DISTRIBUTED_MATRIX_LIBRARY_TYPE_SET")
2134  RETURN
2135 999 SELECT CASE(library_type)
2137  CALL distributed_matrix_cmiss_finalise(distributed_matrix%CMISS,dummy_err,dummy_error,*998)
2139  CALL distributed_matrix_petsc_finalise(distributed_matrix%PETSC,dummy_err,dummy_error,*998)
2140  END SELECT
2141 998 errorsexits("DISTRIBUTED_MATRIX_LIBRARY_TYPE_SET",err,error)
2142  RETURN 1
2143  END SUBROUTINE distributed_matrix_library_type_set
2144 
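  !Illustrative usage sketch (assumed caller context): switching an unfinished matrix from the
  !default CMISS library to PETSc; the old library's storage is finalised as shown above:
  !  CALL DISTRIBUTED_MATRIX_LIBRARY_TYPE_SET(matrix,DISTRIBUTED_MATRIX_VECTOR_PETSC_TYPE,err,error,*999)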
2145  !
2146  !================================================================================================================================
2147  !
2148 
2150  SUBROUTINE distributed_matrix_initialise(ROW_DOMAIN_MAPPING,COLUMN_DOMAIN_MAPPING,DISTRIBUTED_MATRIX,ERR,ERROR,*)
2152  !Argument variables
2153  TYPE(domain_mapping_type), POINTER :: ROW_DOMAIN_MAPPING
2154  TYPE(domain_mapping_type), POINTER :: COLUMN_DOMAIN_MAPPING
2155  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2156  INTEGER(INTG), INTENT(OUT) :: ERR
2157  TYPE(varying_string), INTENT(OUT) :: ERROR
2158  !Local Variables
2159  INTEGER(INTG) :: DUMMY_ERR
2160  TYPE(varying_string) :: DUMMY_ERROR
2161 
2162  enters("DISTRIBUTED_MATRIX_INITIALISE",err,error,*998)
2163 
2164  IF(ASSOCIATED(row_domain_mapping)) THEN
2165  IF(ASSOCIATED(column_domain_mapping)) THEN
2166  IF(ASSOCIATED(distributed_matrix)) THEN
2167  CALL flagerror("Distributed matrix is already associated.",err,error,*998)
2168  ELSE
2169  ALLOCATE(distributed_matrix,stat=err)
2170  IF(err/=0) CALL flagerror("Could not allocate the distributed matrix.",err,error,*999)
2171  distributed_matrix%MATRIX_FINISHED=.false.
2172  distributed_matrix%LIBRARY_TYPE=0
2173  distributed_matrix%GHOSTING_TYPE=distributed_matrix_vector_include_ghosts_type
2174  distributed_matrix%ROW_DOMAIN_MAPPING=>row_domain_mapping
2175  distributed_matrix%COLUMN_DOMAIN_MAPPING=>column_domain_mapping
2176  distributed_matrix%DATA_TYPE=matrix_vector_dp_type
2177  NULLIFY(distributed_matrix%CMISS)
2178  NULLIFY(distributed_matrix%PETSC)
2179  CALL distributed_matrix_cmiss_initialise(distributed_matrix,err,error,*999)
2180  ENDIF
2181  ELSE
2182  CALL flagerror("Column domain mapping is not associated.",err,error,*999)
2183  ENDIF
2184  ELSE
2185  CALL flagerror("Row domain mapping is not associated.",err,error,*998)
2186  ENDIF
2187 
2188  exits("DISTRIBUTED_MATRIX_INITIALISE")
2189  RETURN
2190 999 CALL distributed_matrix_finalise(distributed_matrix,dummy_err,dummy_error,*998)
2191 998 errorsexits("DISTRIBUTED_MATRIX_INITIALISE",err,error)
2192  RETURN 1
2193  END SUBROUTINE distributed_matrix_initialise
2194 
2195  !
2196  !================================================================================================================================
2197  !
2198 
2200  SUBROUTINE distributed_matrix_max_columns_per_row_get(DISTRIBUTED_MATRIX,MAX_COLUMNS_PER_ROW,ERR,ERROR,*)
2202  !Argument variables
2203  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2204  INTEGER(INTG), INTENT(OUT) :: MAX_COLUMNS_PER_ROW
2205  INTEGER(INTG), INTENT(OUT) :: ERR
2206  TYPE(varying_string), INTENT(OUT) :: ERROR
2207  !Local Variables
2208  TYPE(varying_string) :: LOCAL_ERROR
2209 
2210  enters("DISTRIBUTED_MATRIX_MAX_COLUMNS_PER_ROW_GET",err,error,*999)
2211 
2212  IF(ASSOCIATED(distributed_matrix)) THEN
2213  IF(distributed_matrix%MATRIX_FINISHED) THEN
2214  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
2216  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
2217  CALL matrix_max_columns_per_row_get(distributed_matrix%CMISS%MATRIX,max_columns_per_row,err,error,*999)
2218  ELSE
2219  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
2220  ENDIF
2222  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
2223  max_columns_per_row=distributed_matrix%PETSC%MAXIMUM_COLUMN_INDICES_PER_ROW
2224  ELSE
2225  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
2226  ENDIF
2227  CASE DEFAULT
2228  local_error="The distributed matrix library type of "// &
2229  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
2230  CALL flagerror(local_error,err,error,*999)
2231  END SELECT
2232  ELSE
2233  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
2234  ENDIF
2235  ELSE
2236  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
2237  ENDIF
2238 
2239  exits("DISTRIBUTED_MATRIX_MAX_COLUMNS_PER_ROW_GET")
2240  RETURN
2241 999 errorsexits("DISTRIBUTED_MATRIX_MAX_COLUMNS_PER_ROW_GET",err,error)
2242  RETURN 1
2243  END SUBROUTINE distributed_matrix_max_columns_per_row_get
2244 
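  !Illustrative usage sketch (assumed caller context): the maximum number of stored columns in
  !any local row is a safe size for row-extraction work arrays:
  !  INTEGER(INTG) :: maxColumnsPerRow
  !  CALL DISTRIBUTED_MATRIX_MAX_COLUMNS_PER_ROW_GET(matrix,maxColumnsPerRow,err,error,*999)
  !  ALLOCATE(columns(maxColumnsPerRow),values(maxColumnsPerRow))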
2245  !
2246  !================================================================================================================================
2247  !
2248 
2250  SUBROUTINE distributed_matrix_number_non_zeros_set(DISTRIBUTED_MATRIX,NUMBER_NON_ZEROS,ERR,ERROR,*)
2252  !Argument variables
2253  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2254  INTEGER(INTG), INTENT(IN) :: NUMBER_NON_ZEROS
2255  INTEGER(INTG), INTENT(OUT) :: ERR
2256  TYPE(varying_string), INTENT(OUT) :: ERROR
2257  !Local Variables
2258  TYPE(varying_string) :: LOCAL_ERROR
2259 
2260  enters("DISTRIBUTED_MATRIX_NUMBER_NON_ZEROS_SET",err,error,*999)
2261 
2262  IF(ASSOCIATED(distributed_matrix)) THEN
2263  IF(distributed_matrix%MATRIX_FINISHED) THEN
2264  CALL flagerror("The distributed matrix has been finished.",err,error,*999)
2265  ELSE
2266  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
2268  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
2269  CALL matrix_number_non_zeros_set(distributed_matrix%CMISS%MATRIX,number_non_zeros,err,error,*999)
2270  ELSE
2271  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
2272  ENDIF
2274  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
2275  IF(number_non_zeros>0) THEN
2276  distributed_matrix%PETSC%NUMBER_NON_ZEROS=number_non_zeros
2277  ELSE
2278  local_error="The specified number of non zeros ("//trim(numbertovstring(number_non_zeros,"*",err,error))// &
2279  & ") is invalid. The number must be > 0."
2280  CALL flagerror(local_error,err,error,*999)
2281  ENDIF
2282  ELSE
2283  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
2284  ENDIF
2285  CASE DEFAULT
2286  local_error="The distributed matrix library type of "// &
2287  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
2288  CALL flagerror(local_error,err,error,*999)
2289  END SELECT
2290  ENDIF
2291  ELSE
2292  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
2293  ENDIF
2294 
2295  exits("DISTRIBUTED_MATRIX_NUMBER_NON_ZEROS_SET")
2296  RETURN
2297 999 errorsexits("DISTRIBUTED_MATRIX_NUMBER_NON_ZEROS_SET",err,error)
2298  RETURN 1
2299  END SUBROUTINE distributed_matrix_number_non_zeros_set
2300 
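  !Illustrative usage sketch (assumed caller context): for a PETSc compressed-row matrix the
  !number of non-zeros must be set before the storage locations, since the column index array
  !is checked against it in DISTRIBUTED_MATRIX_STORAGE_LOCATIONS_SET below:
  !  CALL DISTRIBUTED_MATRIX_NUMBER_NON_ZEROS_SET(matrix,numberNonZeros,err,error,*999)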
2301  !
2302  !================================================================================================================================
2303  !
2304 
2306  SUBROUTINE distributed_matrix_number_non_zeros_get(DISTRIBUTED_MATRIX,NUMBER_NON_ZEROS,ERR,ERROR,*)
2308  !Argument variables
2309  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2310  INTEGER(INTG), INTENT(OUT) :: NUMBER_NON_ZEROS
2311  INTEGER(INTG), INTENT(OUT) :: ERR
2312  TYPE(varying_string), INTENT(OUT) :: ERROR
2313  !Local Variables
2314  TYPE(varying_string) :: LOCAL_ERROR
2315 
2316  enters("DISTRIBUTED_MATRIX_NUMBER_NON_ZEROS_GET",err,error,*999)
2317 
2318  IF(ASSOCIATED(distributed_matrix)) THEN
2319  IF(distributed_matrix%MATRIX_FINISHED) THEN
2320  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
2322  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
2323  CALL matrix_number_non_zeros_get(distributed_matrix%CMISS%MATRIX,number_non_zeros,err,error,*999)
2324  ELSE
2325  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
2326  ENDIF
2328  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
2329  number_non_zeros=distributed_matrix%PETSC%NUMBER_NON_ZEROS
2330  ELSE
2331  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
2332  ENDIF
2333  CASE DEFAULT
2334  local_error="The distributed matrix library type of "// &
2335  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
2336  CALL flagerror(local_error,err,error,*999)
2337  END SELECT
2338  ELSE
2339  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
2340  ENDIF
2341  ELSE
2342  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
2343  ENDIF
2344 
2345  exits("DISTRIBUTED_MATRIX_NUMBER_NON_ZEROS_GET")
2346  RETURN
2347 999 errorsexits("DISTRIBUTED_MATRIX_NUMBER_NON_ZEROS_GET",err,error)
2348  RETURN 1
2349  END SUBROUTINE distributed_matrix_number_non_zeros_get
2350 
2351  !
2352  !================================================================================================================================
2353  !
2356 
2358  SUBROUTINE distributed_matrix_linklist_set(DISTRIBUTED_MATRIX,LIST,ERR,ERROR,*)
2360  !Argument variables
2361  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2362  type(linkedlist),pointer :: list(:)
2363  !INTEGER(INTG), INTENT(IN) :: NUMBER_NON_ZEROS !<The number of non zeros in the matrix to set
2364  INTEGER(INTG), INTENT(OUT) :: ERR
2365  TYPE(varying_string), INTENT(OUT) :: ERROR
2366  !Local Variables
2367  TYPE(varying_string) :: LOCAL_ERROR
2368 
2369  enters("DISTRIBUTED_MATRIX_LINKLIST_SET",err,error,*999)
2370 
2371  IF(ASSOCIATED(distributed_matrix)) THEN
2372  IF(distributed_matrix%MATRIX_FINISHED) THEN
2373  CALL flagerror("The distributed matrix has been finished.",err,error,*999)
2374  ELSE
2375  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
2377  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
2378  CALL matrix_linklist_set(distributed_matrix%CMISS%MATRIX,list,err,error,*999)
2379  !DISTRIBUTED_MATRIX%CMISS%list=list
2380  ELSE
2381  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
2382  ENDIF
2384  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
2385  !IF(NUMBER_NON_ZEROS>0) THEN
2386  ! Check this
2387  distributed_matrix%PETSC%list=>list
2388  !ELSE
2389  ! LOCAL_ERROR="The specified number of non zeros ("//TRIM(NumberToVString(NUMBER_NON_ZEROS,"*",ERR,ERROR))// &
2390  ! & ") is invalid. The number must be > 0."
2391  ! CALL FlagError(LOCAL_ERROR,ERR,ERROR,*999)
2392  !ENDIF
2393  ELSE
2394  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
2395  ENDIF
2396  CASE DEFAULT
2397  local_error="The distributed matrix library type of "// &
2398  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
2399  CALL flagerror(local_error,err,error,*999)
2400  END SELECT
2401  ENDIF
2402  ELSE
2403  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
2404  ENDIF
2405 
2406  exits("DISTRIBUTED_MATRIX_LINKLIST_SET")
2407  RETURN
2408 999 errorsexits("DISTRIBUTED_MATRIX_LINKLIST_SET",err,error)
2409  RETURN 1
2410  END SUBROUTINE distributed_matrix_linklist_set
2411 
2412  !
2413  !================================================================================================================================
2414  !
2415 
2417  SUBROUTINE distributed_matrix_linklist_get(DISTRIBUTED_MATRIX,LIST,ERR,ERROR,*)
2419  !Argument variables
2420  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2421  type(linkedlist),pointer :: list(:)
2422 ! INTEGER(INTG), INTENT(OUT) :: NUMBER_NON_ZEROS !<On return, the number of non zeros in the matrix to get
2423  INTEGER(INTG), INTENT(OUT) :: ERR
2424  TYPE(varying_string), INTENT(OUT) :: ERROR
2425  !Local Variables
2426  TYPE(varying_string) :: LOCAL_ERROR
2427 
2428  enters("DISTRIBUTED_MATRIX_LINKLIST_GET",err,error,*999)
2429 
2430  IF(ASSOCIATED(distributed_matrix)) THEN
2431  IF(distributed_matrix%MATRIX_FINISHED) THEN
2432  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
2434  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
2435  !list=DISTRIBUTED_MATRIX%CMISS%list
2436  CALL matrix_linklist_get(distributed_matrix%CMISS%MATRIX,list,err,error,*999)
2437  ELSE
2438  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
2439  ENDIF
2441  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
2442  list=>distributed_matrix%PETSC%list
2443  ELSE
2444  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
2445  ENDIF
2446  CASE DEFAULT
2447  local_error="The distributed matrix library type of "// &
2448  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
2449  CALL flagerror(local_error,err,error,*999)
2450  END SELECT
2451  ELSE
2452  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
2453  ENDIF
2454  ELSE
2455  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
2456  ENDIF
2457 
2458  exits("DISTRIBUTED_MATRIX_LINKLIST_GET")
2459  RETURN
2460 999 errorsexits("DISTRIBUTED_MATRIX_LINKLIST_GET",err,error)
2461  RETURN 1
2462  END SUBROUTINE distributed_matrix_linklist_get
2463 
2464  !
2465  !================================================================================================================================
2466  !
2468  SUBROUTINE distributed_matrix_output(ID,DISTRIBUTED_MATRIX,ERR,ERROR,*)
2470  !Argument variables
2471  INTEGER(INTG), INTENT(IN) :: ID
2472  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2473  INTEGER(INTG), INTENT(OUT) :: ERR
2474  TYPE(varying_string), INTENT(OUT) :: ERROR
2475  !Local Variables
2476 ! INTEGER(INTG) :: i,NUMBER_OF_COLUMNS
2477 ! INTEGER(INTG), ALLOCATABLE :: COLUMNS(:)
2478 ! REAL(DP), ALLOCATABLE :: VALUES(:)
2479 ! CHARACTER(LEN=9) :: ROW_STRING
2480 ! CHARACTER(LEN=39) :: INITIAL_STRING
2481  TYPE(varying_string) :: LOCAL_ERROR
2482 
2483  enters("DISTRIBUTED_MATRIX_OUTPUT",err,error,*999)
2484 
2485  IF(ASSOCIATED(distributed_matrix)) THEN
2486  IF(distributed_matrix%MATRIX_FINISHED) THEN
2487  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
2489  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
2490  CALL matrix_output(id,distributed_matrix%CMISS%MATRIX,err,error,*999)
2491  ELSE
2492  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
2493  ENDIF
2495  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
2496  IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
2497  CALL petsc_matview(distributed_matrix%PETSC%OVERRIDE_MATRIX,petsc_viewer_stdout_world,err,error,*999)
2498  ELSE
2499  CALL petsc_matview(distributed_matrix%PETSC%MATRIX,petsc_viewer_stdout_world,err,error,*999)
2500  ENDIF
2501  !ALLOCATE(COLUMNS(DISTRIBUTED_MATRIX%PETSC%MAXIMUM_COLUMN_INDICES_PER_ROW),STAT=ERR)
2502  !IF(ERR/=0) CALL FlagError("Could not allocate columns.",ERR,ERROR,*999)
2503  !ALLOCATE(VALUES(DISTRIBUTED_MATRIX%PETSC%MAXIMUM_COLUMN_INDICES_PER_ROW),STAT=ERR)
2504  !IF(ERR/=0) CALL FlagError("Could not allocate values.",ERR,ERROR,*999)
2505  !DO i=1,DISTRIBUTED_MATRIX%PETSC%M
2506  ! IF(DISTRIBUTED_MATRIX%PETSC%USE_OVERRIDE_MATRIX) THEN
2507  ! CALL Petsc_MatGetRow(DISTRIBUTED_MATRIX%PETSC%OVERRIDE_MATRIX,i-1,NUMBER_OF_COLUMNS,COLUMNS,VALUES, &
2508  ! & ERR,ERROR,*999)
2509  ! ELSE
2510  ! CALL Petsc_MatGetRow(DISTRIBUTED_MATRIX%PETSC%MATRIX,i-1,NUMBER_OF_COLUMNS,COLUMNS,VALUES,ERR,ERROR,*999)
2511  ! ENDIF
2512  ! ROW_STRING=NUMBER_TO_CHARACTER(i,"I9",ERR,ERROR)
2513  ! INITIAL_STRING='("Matrix('//ROW_STRING//',:):",8(X,E13.6))'
2514  ! CALL WRITE_STRING_VECTOR(ID,1,1,NUMBER_OF_COLUMNS,8,8,VALUES,INITIAL_STRING,'(20X,8(X,E13.6))', &
2515  ! & ERR,ERROR,*999)
2516  ! IF(DISTRIBUTED_MATRIX%PETSC%USE_OVERRIDE_MATRIX) THEN
2517  ! CALL Petsc_MatRestoreRow(DISTRIBUTED_MATRIX%PETSC%OVERRIDE_MATRIX,i-1,NUMBER_OF_COLUMNS,COLUMNS,VALUES, &
2518  ! & ERR,ERROR,*999)
2519  ! ELSE
2520  ! CALL Petsc_MatRestoreRow(DISTRIBUTED_MATRIX%PETSC%MATRIX,i-1,NUMBER_OF_COLUMNS,COLUMNS,VALUES,ERR,ERROR,*999)
2521  ! ENDIF
2522  !ENDDO !i
2523  !IF(ALLOCATED(VALUES)) DEALLOCATE(VALUES)
2524  !IF(ALLOCATED(COLUMNS)) DEALLOCATE(COLUMNS)
2525  ELSE
2526  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
2527  ENDIF
2528  CASE DEFAULT
2529  local_error="The distributed matrix library type of "// &
2530  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
2531  CALL flagerror(local_error,err,error,*999)
2532  END SELECT
2533  ELSE
2534  CALL flagerror("Distributed matrix has not been finished.",err,error,*999)
2535  ENDIF
2536  ELSE
2537  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
2538  ENDIF
2539 
2540  exits("DISTRIBUTED_MATRIX_OUTPUT")
2541  RETURN
2542 !999 IF(ALLOCATED(VALUES)) DEALLOCATE(VALUES)
2543 ! IF(ALLOCATED(COLUMNS)) DEALLOCATE(COLUMNS)
2544 999 errorsexits("DISTRIBUTED_MATRIX_OUTPUT",err,error)
2545  RETURN 1
2546  END SUBROUTINE distributed_matrix_output
2547 
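  !Illustrative usage sketch (assumed caller context; 'GENERAL_OUTPUT_TYPE' is an assumed
  !output stream identifier from base_routines): for PETSc matrices this delegates to MatView
  !on the world viewer:
  !  CALL DISTRIBUTED_MATRIX_OUTPUT(GENERAL_OUTPUT_TYPE,matrix,err,error,*999)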
2548  !
2549  !================================================================================================================================
2550  !
2551 
2553  SUBROUTINE distributed_matrix_override_set_on(DISTRIBUTED_MATRIX,OVERRIDE_MATRIX,ERR,ERROR,*)
2555  !Argument variables
2556  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2557  TYPE(petscmattype), INTENT(IN) :: OVERRIDE_MATRIX
2558  INTEGER(INTG), INTENT(OUT) :: ERR
2559  TYPE(varying_string), INTENT(OUT) :: ERROR
2560  !Local Variables
2561  TYPE(varying_string) :: LOCAL_ERROR
2562 
2563  enters("DISTRIBUTED_MATRIX_OVERRIDE_SET_ON",err,error,*999)
2564 
2565  IF(ASSOCIATED(distributed_matrix)) THEN
2566  IF(distributed_matrix%MATRIX_FINISHED) THEN
2567  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
2569  CALL flagerror("Not implemented.",err,error,*999)
2571  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
2572  distributed_matrix%PETSC%USE_OVERRIDE_MATRIX=.true.
2573  distributed_matrix%PETSC%OVERRIDE_MATRIX=override_matrix
2574  ELSE
2575  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
2576  ENDIF
2577  CASE DEFAULT
2578  local_error="The distributed matrix library type of "// &
2579  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
2580  CALL flagerror(local_error,err,error,*999)
2581  END SELECT
2582  ELSE
2583  CALL flagerror("Distributed matrix has not been finished.",err,error,*999)
2584  ENDIF
2585  ELSE
2586  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
2587  ENDIF
2588 
2589  exits("DISTRIBUTED_MATRIX_OVERRIDE_SET_ON")
2590  RETURN
2591 999 errorsexits("DISTRIBUTED_MATRIX_OVERRIDE_SET_ON",err,error)
2592  RETURN 1
2593  END SUBROUTINE distributed_matrix_override_set_on
2594 
2595  !
2596  !================================================================================================================================
2597  !
2598 
2600  SUBROUTINE distributed_matrix_override_set_off(DISTRIBUTED_MATRIX,ERR,ERROR,*)
2602  !Argument variables
2603  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2604  INTEGER(INTG), INTENT(OUT) :: ERR
2605  TYPE(varying_string), INTENT(OUT) :: ERROR
2606  !Local Variables
2607  TYPE(varying_string) :: LOCAL_ERROR
2608 
2609  enters("DISTRIBUTED_MATRIX_OVERRIDE_SET_OFF",err,error,*999)
2610 
2611  IF(ASSOCIATED(distributed_matrix)) THEN
2612  IF(distributed_matrix%MATRIX_FINISHED) THEN
2613  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
2615  CALL flagerror("Not implemented.",err,error,*999)
2617  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
2618  distributed_matrix%PETSC%USE_OVERRIDE_MATRIX=.false.
2619  CALL petsc_matinitialise(distributed_matrix%PETSC%OVERRIDE_MATRIX,err,error,*999)
2620  ELSE
2621  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
2622  ENDIF
2623  CASE DEFAULT
2624  local_error="The distributed matrix library type of "// &
2625  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
2626  CALL flagerror(local_error,err,error,*999)
2627  END SELECT
2628  ELSE
2629  CALL flagerror("Distributed matrix has not been finished.",err,error,*999)
2630  ENDIF
2631  ELSE
2632  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
2633  ENDIF
2634 
2635  exits("DISTRIBUTED_MATRIX_OVERRIDE_SET_OFF")
2636  RETURN
2637 999 errorsexits("DISTRIBUTED_MATRIX_OVERRIDE_SET_OFF",err,error)
2638  RETURN 1
2639  END SUBROUTINE distributed_matrix_override_set_off
2640 
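  !Illustrative usage sketch (assumed caller context; 'overrideMat' is an assumed
  !TYPE(PetscMatType) matrix assembled elsewhere, e.g. by a solver): operations between the
  !two calls act on the override matrix instead of the matrix's own PETSc matrix:
  !  CALL DISTRIBUTED_MATRIX_OVERRIDE_SET_ON(matrix,overrideMat,err,error,*999)
  !  !...assemble or solve using the override matrix...
  !  CALL DISTRIBUTED_MATRIX_OVERRIDE_SET_OFF(matrix,err,error,*999)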
2641  !
2642  !================================================================================================================================
2643  !
2644 
2646  SUBROUTINE distributed_matrix_petsc_create_finish(PETSC_MATRIX,ERR,ERROR,*)
2648  !Argument variables
2649  TYPE(distributed_matrix_petsc_type), POINTER :: PETSC_MATRIX
2650  INTEGER(INTG), INTENT(OUT) :: ERR
2651  TYPE(varying_string), INTENT(OUT) :: ERROR
2652  !Local Variables
2653  INTEGER(INTG) :: DUMMY_ERR,i
2654  !INTEGER(INTG), ALLOCATABLE :: GLOBAL_NUMBERS(:)
2655  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2656  TYPE(domain_mapping_type), POINTER :: ROW_DOMAIN_MAPPING,COLUMN_DOMAIN_MAPPING
2657  TYPE(varying_string) :: DUMMY_ERROR,LOCAL_ERROR
2658 
2659  enters("DISTRIBUTED_MATRIX_PETSC_CREATE_FINISH",err,error,*999)
2660 
2661  IF(ASSOCIATED(petsc_matrix)) THEN
2662  distributed_matrix=>petsc_matrix%DISTRIBUTED_MATRIX
2663  IF(ASSOCIATED(distributed_matrix)) THEN
2664  row_domain_mapping=>distributed_matrix%ROW_DOMAIN_MAPPING
2665  column_domain_mapping=>distributed_matrix%COLUMN_DOMAIN_MAPPING
2666  IF(ASSOCIATED(row_domain_mapping)) THEN
2667  IF(ASSOCIATED(column_domain_mapping)) THEN
2668  SELECT CASE(petsc_matrix%STORAGE_TYPE)
2670  petsc_matrix%NUMBER_NON_ZEROS=petsc_matrix%M*petsc_matrix%GLOBAL_N
2671  petsc_matrix%MAXIMUM_COLUMN_INDICES_PER_ROW=petsc_matrix%GLOBAL_N
2672  petsc_matrix%DATA_SIZE=petsc_matrix%NUMBER_NON_ZEROS
2673  !Set up the Local to Global mappings
2674  ALLOCATE(petsc_matrix%GLOBAL_ROW_NUMBERS(petsc_matrix%M),stat=err)
2675  IF(err/=0) CALL flagerror("Could not allocate global row numbers for PETSc distributed matrix.",err,error,*999)
2676  DO i=1,petsc_matrix%M
2677  petsc_matrix%GLOBAL_ROW_NUMBERS(i)=row_domain_mapping%LOCAL_TO_GLOBAL_MAP(i)-1 !PETSc uses 0 based indexing
2678  ENDDO !i
2679  !Set up the matrix
2680  ALLOCATE(petsc_matrix%DATA_DP(petsc_matrix%DATA_SIZE),stat=err)
2681  IF(err/=0) CALL flagerror("Could not allocate PETSc matrix data.",err,error,*999)
2682  CALL petsc_matcreatedense(computational_environment%MPI_COMM,petsc_matrix%M,petsc_matrix%N, &
2683  & petsc_matrix%GLOBAL_M,petsc_matrix%GLOBAL_N,petsc_matrix%DATA_DP,petsc_matrix%MATRIX,err,error,*999)
2685  petsc_matrix%NUMBER_NON_ZEROS=petsc_matrix%M
2686  petsc_matrix%MAXIMUM_COLUMN_INDICES_PER_ROW=1
2687  petsc_matrix%DATA_SIZE=petsc_matrix%NUMBER_NON_ZEROS
2688  !Set up the Local to Global mappings
2689  ALLOCATE(petsc_matrix%GLOBAL_ROW_NUMBERS(petsc_matrix%M),stat=err)
2690  IF(err/=0) CALL flagerror("Could not allocate global row numbers for PETSc distributed matrix.",err,error,*999)
2691  DO i=1,petsc_matrix%M
2692  petsc_matrix%GLOBAL_ROW_NUMBERS(i)=row_domain_mapping%LOCAL_TO_GLOBAL_MAP(i)-1 !PETSc uses 0 based indexing
2693  ENDDO !i
2694  !Set up the matrix
2695  ALLOCATE(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS(petsc_matrix%M),stat=err) !one entry per local row
2696  IF(err/=0) CALL flagerror("Could not allocate diagonal number of non zeros.",err,error,*999)
2697  ALLOCATE(petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS(petsc_matrix%M),stat=err)
2698  IF(err/=0) CALL flagerror("Could not allocate off diagonal number of non zeros.",err,error,*999)
2699  petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS=1
2700  petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS=0
2701  !Create the PETSc AIJ matrix
2702  CALL petsc_matcreateaij(computational_environment%MPI_COMM,petsc_matrix%M,petsc_matrix%N, &
2703  & petsc_matrix%GLOBAL_M,petsc_matrix%GLOBAL_N,petsc_null_integer,petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS, &
2704  & petsc_null_integer,petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS,petsc_matrix%MATRIX,err,error,*999)
2706  CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
2708  CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
2710  IF(ALLOCATED(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS)) THEN
2711  IF(ALLOCATED(petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS)) THEN
2712  !Create the PETSc AIJ matrix
2713  CALL petsc_matcreateaij(computational_environment%MPI_COMM,petsc_matrix%M,petsc_matrix%N, &
2714  & petsc_matrix%GLOBAL_M,petsc_matrix%GLOBAL_N,petsc_null_integer,petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS, &
2715  & petsc_null_integer,petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS,petsc_matrix%MATRIX,err,error,*999)
2716  !Set matrix options
2717  CALL petsc_matsetoption(petsc_matrix%MATRIX,petsc_mat_new_nonzero_location_err,.true.,err,error,*999)
2718  CALL petsc_matsetoption(petsc_matrix%MATRIX,petsc_mat_new_nonzero_allocation_err,.true.,err,error,*999)
2719  CALL petsc_matsetoption(petsc_matrix%MATRIX,petsc_mat_unused_nonzero_location_err,.true.,err,error,*999)
2720  !Set up the Local to Global mappings
2721  ALLOCATE(petsc_matrix%GLOBAL_ROW_NUMBERS(petsc_matrix%M),stat=err)
2722  IF(err/=0) CALL flagerror("Could not allocate global row numbers for PETSc distributed matrix.",err,error,*999)
2723  petsc_matrix%MAXIMUM_COLUMN_INDICES_PER_ROW=0
2724  DO i=1,petsc_matrix%M
2725  petsc_matrix%GLOBAL_ROW_NUMBERS(i)=row_domain_mapping%LOCAL_TO_GLOBAL_MAP(i)-1 !PETSc uses 0 based indexing
2726  IF((petsc_matrix%ROW_INDICES(i+1)-petsc_matrix%ROW_INDICES(i))>petsc_matrix%MAXIMUM_COLUMN_INDICES_PER_ROW) &
2727  & petsc_matrix%MAXIMUM_COLUMN_INDICES_PER_ROW=petsc_matrix%ROW_INDICES(i+1)-petsc_matrix%ROW_INDICES(i)
2728  ENDDO !i
2729  ELSE
2730  CALL flagerror("Matrix off diagonal storage locations have not been set.",err,error,*999)
2731  ENDIF
2732  ELSE
2733  CALL flagerror("Matrix diagonal storage locations have not been set.",err,error,*999)
2734  ENDIF
2736  CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
2738  CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
2739  CASE DEFAULT
2740  local_error="The PETSc matrix storage type of "//trim(numbertovstring(petsc_matrix%STORAGE_TYPE,"*",err,error))// &
2741  & " is invalid."
2742  CALL flagerror(local_error,err,error,*999)
2743  END SELECT
2744  ELSE
2745  CALL flagerror("PETSc matrix distributed matrix column domain mapping is not associated.",err,error,*999)
2746  ENDIF
2747  ELSE
2748  CALL flagerror("PETSc matrix distributed matrix row domain mapping is not associated.",err,error,*999)
2749  ENDIF
2750  ENDIF
2751  ELSE
2752  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*998)
2753  ENDIF
2754 
2755  exits("DISTRIBUTED_MATRIX_PETSC_CREATE_FINISH")
2756  RETURN
2757 999 CALL distributed_matrix_petsc_finalise(petsc_matrix,dummy_err,dummy_error,*998)
2758 998 errorsexits("DISTRIBUTED_MATRIX_PETSC_CREATE_FINISH",err,error)
2759  RETURN 1
2760  END SUBROUTINE distributed_matrix_petsc_create_finish
2761 
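  !Note on the compressed-row pre-allocation above: DIAGONAL_NUMBER_NON_ZEROS(i) counts the
  !stored columns of local row i that fall inside this rank's own global range and
  !OFFDIAGONAL_NUMBER_NON_ZEROS(i) those outside it (they are computed in
  !DISTRIBUTED_MATRIX_STORAGE_LOCATIONS_SET below and passed to petsc_matcreateaij as the
  !per-row d_nnz/o_nnz pre-allocation hints). A worked sketch for a rank owning global rows
  !4..6 of a 9x9 matrix, where local row 1 stores columns 3, 4 and 8:
  !  DIAGONAL_NUMBER_NON_ZEROS(1)=1    !only column 4 lies in the owned range 4..6
  !  OFFDIAGONAL_NUMBER_NON_ZEROS(1)=2 !columns 3 and 8 lie outside it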
2762  !
2763  !================================================================================================================================
2764  !
2765 
2767  SUBROUTINE distributed_matrix_petsc_finalise(PETSC_MATRIX,ERR,ERROR,*)
2769  !Argument variables
2770  TYPE(distributed_matrix_petsc_type), POINTER :: PETSC_MATRIX
2771  INTEGER(INTG), INTENT(OUT) :: ERR
2772  TYPE(varying_string), INTENT(OUT) :: ERROR
2773  !Local Variables
2774 
2775  enters("DISTRIBUTED_MATRIX_PETSC_FINALISE",err,error,*999)
2776 
2777  IF(ASSOCIATED(petsc_matrix)) THEN
2778  IF(ALLOCATED(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS)) DEALLOCATE(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS)
2779  IF(ALLOCATED(petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS)) DEALLOCATE(petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS)
2780  IF(ALLOCATED(petsc_matrix%ROW_INDICES)) DEALLOCATE(petsc_matrix%ROW_INDICES)
2781  IF(ALLOCATED(petsc_matrix%COLUMN_INDICES)) DEALLOCATE(petsc_matrix%COLUMN_INDICES)
2782  IF(ALLOCATED(petsc_matrix%GLOBAL_ROW_NUMBERS)) DEALLOCATE(petsc_matrix%GLOBAL_ROW_NUMBERS)
2783  !IF(ALLOCATED(PETSC_MATRIX%DATA_DP)) DEALLOCATE(PETSC_MATRIX%DATA_DP)
2784  CALL petsc_matfinalise(petsc_matrix%MATRIX,err,error,*999)
2785  CALL petsc_matfinalise(petsc_matrix%OVERRIDE_MATRIX,err,error,*999)
2786  DEALLOCATE(petsc_matrix)
2787  ENDIF
2788 
2789  exits("DISTRIBUTED_MATRIX_PETSC_FINALISE")
2790  RETURN
2791 999 errorsexits("DISTRIBUTED_MATRIX_PETSC_FINALISE",err,error)
2792  RETURN 1
2793  END SUBROUTINE distributed_matrix_petsc_finalise
2794 
2795  !
2796  !================================================================================================================================
2797  !
2798 
2800  SUBROUTINE distributed_matrix_petsc_initialise(DISTRIBUTED_MATRIX,ERR,ERROR,*)
2802  !Argument variables
2803  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2804  INTEGER(INTG), INTENT(OUT) :: ERR
2805  TYPE(varying_string), INTENT(OUT) :: ERROR
2806  !Local Variables
2807  INTEGER(INTG) :: DUMMY_ERR
2808  TYPE(domain_mapping_type), POINTER :: ROW_DOMAIN_MAPPING,COLUMN_DOMAIN_MAPPING
2809  TYPE(varying_string) :: DUMMY_ERROR,LOCAL_ERROR
2810 
2811  enters("DISTRIBUTED_MATRIX_PETSC_INITIALISE",err,error,*998)
2812 
2813  IF(ASSOCIATED(distributed_matrix)) THEN
2814  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
2815  CALL flagerror("PETSc is already associated for this distributed matrix.",err,error,*998)
2816  ELSE
2817  row_domain_mapping=>distributed_matrix%ROW_DOMAIN_MAPPING
2818  column_domain_mapping=>distributed_matrix%COLUMN_DOMAIN_MAPPING
2819  IF(ASSOCIATED(row_domain_mapping)) THEN
2820  IF(ASSOCIATED(column_domain_mapping)) THEN
2821  ALLOCATE(distributed_matrix%PETSC,stat=err)
2822  IF(err/=0) CALL flagerror("Could not allocate PETSc distributed matrix.",err,error,*999)
2823  distributed_matrix%PETSC%DISTRIBUTED_MATRIX=>distributed_matrix
2824  distributed_matrix%LIBRARY_TYPE=distributed_matrix_vector_petsc_type
2825  !Set the defaults
2826  SELECT CASE(distributed_matrix%GHOSTING_TYPE)
2828  distributed_matrix%PETSC%M=row_domain_mapping%TOTAL_NUMBER_OF_LOCAL
2830  distributed_matrix%PETSC%M=row_domain_mapping%NUMBER_OF_LOCAL
2831  CASE DEFAULT
2832  local_error="The distributed matrix ghosting type of "// &
2833  & trim(numbertovstring(distributed_matrix%GHOSTING_TYPE,"*",err,error))//" is invalid."
2834  CALL flagerror(local_error,err,error,*999)
2835  END SELECT
2836  distributed_matrix%PETSC%N=column_domain_mapping%TOTAL_NUMBER_OF_LOCAL
2837  distributed_matrix%PETSC%GLOBAL_M=row_domain_mapping%NUMBER_OF_GLOBAL
2838  distributed_matrix%PETSC%GLOBAL_N=column_domain_mapping%NUMBER_OF_GLOBAL
2839  distributed_matrix%PETSC%STORAGE_TYPE=distributed_matrix_compressed_row_storage_type
2840  distributed_matrix%PETSC%DATA_SIZE=0
2841  distributed_matrix%PETSC%MAXIMUM_COLUMN_INDICES_PER_ROW=0
2842  distributed_matrix%PETSC%USE_OVERRIDE_MATRIX=.false.
2843  CALL petsc_matinitialise(distributed_matrix%PETSC%MATRIX,err,error,*999)
2844  CALL petsc_matinitialise(distributed_matrix%PETSC%OVERRIDE_MATRIX,err,error,*999)
2845  ELSE
2846  CALL flagerror("Distributed matrix column domain mapping is not associated.",err,error,*998)
2847  ENDIF
2848  ELSE
2849  CALL flagerror("Distributed matrix row domain mapping is not associated.",err,error,*998)
2850  ENDIF
2851  ENDIF
2852  ELSE
2853  CALL flagerror("Distributed matrix is not associated.",err,error,*998)
2854  ENDIF
2855 
2856  exits("DISTRIBUTED_MATRIX_PETSC_INITIALISE")
2857  RETURN
2858 999 IF(ASSOCIATED(distributed_matrix%PETSC)) &
2859  & CALL distributed_matrix_petsc_finalise(distributed_matrix%PETSC,dummy_err,dummy_error,*998)
2860 998 errorsexits("DISTRIBUTED_MATRIX_PETSC_INITIALISE",err,error)
2861  RETURN 1
2862  END SUBROUTINE distributed_matrix_petsc_initialise
2863 
2864  !
2865  !================================================================================================================================
2866  !
2867 
2869  SUBROUTINE distributed_matrix_storage_locations_get(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,ERR,ERROR,*)
2871  !Argument variables
2872  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2873  INTEGER(INTG), POINTER :: ROW_INDICES(:)
2874  INTEGER(INTG), POINTER :: COLUMN_INDICES(:)
2875  INTEGER(INTG), INTENT(OUT) :: ERR
2876  TYPE(varying_string), INTENT(OUT) :: ERROR
2877  !Local Variables
2878  TYPE(distributed_matrix_cmiss_type), POINTER :: CMISS_MATRIX
2879  TYPE(distributed_matrix_petsc_type), POINTER :: PETSC_MATRIX
2880  TYPE(varying_string) :: LOCAL_ERROR
2881 
2882  enters("DISTRIBUTED_MATRIX_STORAGE_LOCATIONS_GET",err,error,*999)
2883 
2884  IF(ASSOCIATED(distributed_matrix)) THEN
2885  IF(distributed_matrix%MATRIX_FINISHED) THEN
2886  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
2888  cmiss_matrix=>distributed_matrix%CMISS
2889  IF(ASSOCIATED(cmiss_matrix)) THEN
2890  CALL matrix_storage_locations_get(cmiss_matrix%MATRIX,row_indices,column_indices,err,error,*999)
2891  ELSE
2892  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
2893  ENDIF
2895  petsc_matrix=>distributed_matrix%PETSC
2896  IF(ASSOCIATED(petsc_matrix)) THEN
2897  SELECT CASE(petsc_matrix%STORAGE_TYPE)
2899  CALL flagerror("Cannot get matrix locations for a block storage matrix.",err,error,*999)
2901  CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
2903  CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
2905  CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
2907  row_indices=>distributed_matrix%PETSC%ROW_INDICES
2908  column_indices=>distributed_matrix%PETSC%COLUMN_INDICES
2910  CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
2912  CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
2913  CASE DEFAULT
2914  local_error="The matrix storage type of "// &
2915  & trim(numbertovstring(petsc_matrix%STORAGE_TYPE,"*",err,error))//" is invalid."
2916  CALL flagerror(local_error,err,error,*999)
2917  END SELECT
2918  ELSE
2919  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
2920  ENDIF
2921  CASE DEFAULT
2922  local_error="The distributed matrix library type of "// &
2923  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
2924  CALL flagerror(local_error,err,error,*999)
2925  END SELECT
2926  ELSE
2927  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
2928  ENDIF
2929  ELSE
2930  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
2931  ENDIF
2932 
2933  exits("DISTRIBUTED_MATRIX_STORAGE_LOCATIONS_GET")
2934  RETURN
2935 999 errorsexits("DISTRIBUTED_MATRIX_STORAGE_LOCATIONS_GET",err,error)
2936  RETURN 1
2937  END SUBROUTINE distributed_matrix_storage_locations_get
2938 
2939  !
2940  !================================================================================================================================
2941  !
2942 
2944  SUBROUTINE distributed_matrix_storage_locations_set(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,ERR,ERROR,*)
2946  !Argument variables
2947  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
2948  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
2949  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
2950  INTEGER(INTG), INTENT(OUT) :: ERR
2951  TYPE(varying_string), INTENT(OUT) :: ERROR
2952  !Local Variables
2953  INTEGER(INTG) :: i,j,k,global_row_start,global_row_finish
2954  TYPE(distributed_matrix_cmiss_type), POINTER :: CMISS_MATRIX
2955  TYPE(distributed_matrix_petsc_type), POINTER :: PETSC_MATRIX
2956  TYPE(domain_mapping_type), POINTER :: ROW_DOMAIN_MAPPING,COLUMN_DOMAIN_MAPPING
2957  TYPE(varying_string) :: LOCAL_ERROR
2958 
2959  NULLIFY(cmiss_matrix)
2960  NULLIFY(petsc_matrix)
2961 
2962  enters("DISTRIBUTED_MATRIX_STORAGE_LOCATIONS_SET",err,error,*999)
2963 
2964  IF(ASSOCIATED(distributed_matrix)) THEN
2965  IF(distributed_matrix%MATRIX_FINISHED) THEN
2966  CALL flagerror("The distributed matrix has been finished.",err,error,*999)
2967  ELSE
2968  row_domain_mapping=>distributed_matrix%ROW_DOMAIN_MAPPING
2969  column_domain_mapping=>distributed_matrix%COLUMN_DOMAIN_MAPPING
2970  IF(ASSOCIATED(row_domain_mapping)) THEN
2971  IF(ASSOCIATED(column_domain_mapping)) THEN
2972  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
2974  cmiss_matrix=>distributed_matrix%CMISS
2975  IF(ASSOCIATED(cmiss_matrix)) THEN
2976  CALL matrix_storage_locations_set(cmiss_matrix%MATRIX,row_indices,column_indices,err,error,*999)
2977  ELSE
2978  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
2979  ENDIF
2981  petsc_matrix=>distributed_matrix%PETSC
2982  IF(ASSOCIATED(petsc_matrix)) THEN
2983  SELECT CASE(petsc_matrix%STORAGE_TYPE)
2985  !Do nothing
2987  CALL flagerror("Diagonal storage is not implemented for PETSc matrices.",err,error,*999)
2989  CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
2991  CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
2993  IF(SIZE(row_indices,1)==petsc_matrix%M+1) THEN
2994  IF(SIZE(column_indices,1)==petsc_matrix%NUMBER_NON_ZEROS) THEN
2995  IF(row_indices(1)==1) THEN
2996  !Check the row indices are correct
2997  IF(row_indices(petsc_matrix%M+1)==petsc_matrix%NUMBER_NON_ZEROS+1) THEN
2998  DO i=2,petsc_matrix%M+1
2999  IF(row_indices(i)<row_indices(i-1)) THEN
3000  local_error="Invalid row indices. Row "//trim(numbertovstring(i,"*",err,error))// &
3001  & " index number ("//trim(numbertovstring(row_indices(i),"*",err,error))// &
3002  & ") is less than row "//trim(numbertovstring(i-1,"*",err,error))//" index number ("// &
3003  & trim(numbertovstring(row_indices(i-1),"*",err,error))//")."
3004  CALL flagerror(local_error,err,error,*999)
3005  ENDIF
3006  ENDDO !i
3007  !Allocate the PETSc sparsity storage arrays
3008  ALLOCATE(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS(petsc_matrix%M),stat=err)
3009  IF(err/=0) CALL flagerror("Could not allocate PETSc matrix diagonal number of non zeros.",err,error,*999)
3010  petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS=0
3011  ALLOCATE(petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS(petsc_matrix%M),stat=err)
3012  IF(err/=0) CALL flagerror("Could not allocate PETSc matrix off diagonal number of non zeros.", &
3013  & err,error,*999)
3014  petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS=0
3015  ALLOCATE(petsc_matrix%ROW_INDICES(petsc_matrix%M+1),stat=err)
3016  IF(err/=0) CALL flagerror("Could not allocate PETSc matrix row indices.",err,error,*999)
3017  petsc_matrix%ROW_INDICES(1:petsc_matrix%M+1)=row_indices(1:petsc_matrix%M+1)
3018  ALLOCATE(petsc_matrix%COLUMN_INDICES(petsc_matrix%NUMBER_NON_ZEROS),stat=err)
3019  IF(err/=0) CALL flagerror("Could not allocate PETSc matrix column indices.",err,error,*999)
3020  petsc_matrix%COLUMN_INDICES(1:petsc_matrix%NUMBER_NON_ZEROS)= &
3021  & column_indices(1:petsc_matrix%NUMBER_NON_ZEROS)
3022  !Check the column indices are correct and calculate number of diagonal and off-diagonal columns
3023  global_row_start=row_domain_mapping%LOCAL_TO_GLOBAL_MAP(1)
3024  global_row_finish=row_domain_mapping%LOCAL_TO_GLOBAL_MAP(petsc_matrix%M)
3025  DO i=1,petsc_matrix%M
3026  DO j=row_indices(i),row_indices(i+1)-1
3027  k=column_indices(j)
3028  IF(k>0) THEN
3029  IF(k>petsc_matrix%GLOBAL_N) THEN
3030  local_error="Invalid column indices. Column index "//trim(numbertovstring(j,"*",err,error))// &
3031  & " ("//trim(numbertovstring(k,"*",err,error))// &
3032  & ") is greater than the number of columns ("// &
3033  & trim(numbertovstring(petsc_matrix%GLOBAL_N,"*",err,error))//")."
3034  CALL flagerror(local_error,err,error,*999)
3035  ENDIF
3036  IF(k>=global_row_start.AND.k<=global_row_finish) THEN
3037  petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS(i)=petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS(i)+1
3038  ELSE
3039  petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS(i)=petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS(i)+1
3040  ENDIF
3041  ELSE
3042  local_error="Invalid column indices. Column index "//trim(numbertovstring(j,"*",err,error))// &
3043  & " ("//trim(numbertovstring(k,"*",err,error))//") is less than zero."
3044  CALL flagerror(local_error,err,error,*999)
3045  ENDIF
3046  ENDDO !j
3047  !Enforce a place for the diagonal entry.
3048  IF(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS(i)==0) petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS(i)=1
3049  ENDDO !i
3050  IF(diagnostics3) THEN
3051  CALL write_string(diagnostic_output_type,"PETSc distributed matrix sparsity:",err,error,*999)
3052  CALL write_string_value(diagnostic_output_type," Storage type = ",petsc_matrix%STORAGE_TYPE, &
3053  & err,error,*999)
3054  CALL write_string_value(diagnostic_output_type," M = ",petsc_matrix%M,err,error,*999)
3055  CALL write_string_value(diagnostic_output_type," N = ",petsc_matrix%N,err,error,*999)
3056  CALL write_string_value(diagnostic_output_type," Global M = ",petsc_matrix%GLOBAL_M,err,error,*999)
3057  CALL write_string_value(diagnostic_output_type," Global N = ",petsc_matrix%GLOBAL_N,err,error,*999)
3058  CALL write_string_value(diagnostic_output_type," Number of non zeros = ",petsc_matrix% &
3059  & number_non_zeros,err,error,*999)
3060  CALL write_string_vector(diagnostic_output_type,1,1,petsc_matrix%M,8,8,petsc_matrix%&
3061  & diagonal_number_non_zeros,'(" Diagonal number non zeros :",8(X,I10))','(33X,8(X,I10))', &
3062  & err,error,*999)
3063  CALL write_string_vector(diagnostic_output_type,1,1,petsc_matrix%M,8,8,petsc_matrix%&
3064  & offdiagonal_number_non_zeros,'(" Off-diagonal number non zeros :",8(X,I10))','(33X,8(X,I10))', &
3065  & err,error,*999)
3066  CALL write_string_vector(diagnostic_output_type,1,1,petsc_matrix%M+1,8,8,petsc_matrix%&
3067  & row_indices,'(" Row indices :",8(X,I10))','(33X,8(X,I10))', &
3068  & err,error,*999)
3069  CALL write_string_vector(diagnostic_output_type,1,1,petsc_matrix%NUMBER_NON_ZEROS,8,8,petsc_matrix%&
3070  & column_indices,'(" Column indices :",8(X,I10))','(33X,8(X,I10))', &
3071  & err,error,*999)
3072  ENDIF
3073  ELSE
3074  local_error="Invalid row indices. The last row index ("// &
3075  & trim(numbertovstring(row_indices(petsc_matrix%M+1),"*",err,error))// &
3076  & ") does not equal the number of non-zeros + 1 ("// &
3077  & trim(numbertovstring(petsc_matrix%NUMBER_NON_ZEROS+1,"*",err,error))//")."
3078  CALL flagerror(local_error,err,error,*999)
3079  ENDIF
3080  ELSE
3081  local_error="Invalid row indices. The first row index ("// &
3082  & trim(numbertovstring(row_indices(1),"*",err,error))//") does not equal 1."
3083  CALL flagerror(local_error,err,error,*999)
3084  ENDIF
3085  ELSE
3086  local_error="The supplied number of column indices ("// &
3087  & trim(numbertovstring(SIZE(column_indices,1),"*",err,error))// &
3088  & ") does not match the number of non-zeros in the matrix ("// &
3089  & trim(numbertovstring(petsc_matrix%NUMBER_NON_ZEROS,"*",err,error))//")."
3090  CALL flagerror(local_error,err,error,*999)
3091  ENDIF
3092  ELSE
3093  local_error="The supplied number of row indices ("// &
3094  & trim(numbertovstring(SIZE(row_indices,1),"*",err,error))// &
3095  & ") does not match the number of rows in the matrix + 1 ("// &
3096  & trim(numbertovstring(petsc_matrix%M+1,"*",err,error))//")."
3097  CALL flagerror(local_error,err,error,*999)
3098  ENDIF
3100  CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
3102  CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
3103  CASE DEFAULT
3104  local_error="The specified matrix storage type of "// &
3105  & trim(numbertovstring(petsc_matrix%STORAGE_TYPE,"*",err,error))//" is invalid."
3106  CALL flagerror(local_error,err,error,*999)
3107  END SELECT
3108  ELSE
3109  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
3110  ENDIF
3111  CASE DEFAULT
3112  local_error="The distributed matrix library type of "// &
3113  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3114  CALL flagerror(local_error,err,error,*999)
3115  END SELECT
3116  ELSE
3117  CALL flagerror("Distributed matrix column domain mapping is not associated.",err,error,*999)
3118  ENDIF
3119  ELSE
3120  CALL flagerror("Distributed matrix row domain mapping is not associated.",err,error,*999)
3121  ENDIF
3122  ENDIF
3123  ELSE
3124  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3125  ENDIF
3126 
3127  exits("DISTRIBUTED_MATRIX_STORAGE_LOCATIONS_SET")
3128  RETURN
3129 999 IF(ASSOCIATED(petsc_matrix)) THEN
3130  IF(ALLOCATED(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS)) DEALLOCATE(petsc_matrix%DIAGONAL_NUMBER_NON_ZEROS)
3131  IF(ALLOCATED(petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS)) DEALLOCATE(petsc_matrix%OFFDIAGONAL_NUMBER_NON_ZEROS)
3132  ENDIF
3133  errorsexits("DISTRIBUTED_MATRIX_STORAGE_LOCATIONS_SET",err,error)
3134  RETURN 1
3135  END SUBROUTINE distributed_matrix_storage_locations_set
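  !Note: for compressed row storage this routine expects 1-based CSR input: row_indices
  !must have M+1 entries with row_indices(1)=1 and row_indices(M+1)=NUMBER_NON_ZEROS+1,
  !and column_indices must hold global column numbers in [1,GLOBAL_N].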
3136 
3137  !
3138  !================================================================================================================================
3139  !
3140 
3142  SUBROUTINE distributed_matrix_storage_type_get(DISTRIBUTED_MATRIX,STORAGE_TYPE,ERR,ERROR,*)
3144  !Argument variables
3145  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3146  INTEGER(INTG), INTENT(OUT) :: STORAGE_TYPE
3147  INTEGER(INTG), INTENT(OUT) :: ERR
3148  TYPE(varying_string), INTENT(OUT) :: ERROR
3149  !Local Variables
3150  TYPE(varying_string) :: LOCAL_ERROR
3151 
3152  enters("DISTRIBUTED_MATRIX_STORAGE_TYPE_GET",err,error,*999)
3153 
3154  IF(ASSOCIATED(distributed_matrix)) THEN
3155  IF(distributed_matrix%MATRIX_FINISHED) THEN
3156  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3157  CASE(distributed_matrix_vector_cmiss_type)
3158  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
3159  CALL matrix_storage_type_get(distributed_matrix%CMISS%MATRIX,storage_type,err,error,*999)
3160  ELSE
3161  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
3162  ENDIF
3163  CASE(distributed_matrix_vector_petsc_type)
3164  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
3165  storage_type=distributed_matrix%PETSC%STORAGE_TYPE
3166  ELSE
3167  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
3168  ENDIF
3169  CASE DEFAULT
3170  local_error="The distributed matrix library type of "// &
3171  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3172  CALL flagerror(local_error,err,error,*999)
3173  END SELECT
3174  ELSE
3175  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3176  ENDIF
3177  ELSE
3178  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3179  ENDIF
3180 
3181  exits("DISTRIBUTED_MATRIX_STORAGE_TYPE_GET")
3182  RETURN
3183 999 errorsexits("DISTRIBUTED_MATRIX_STORAGE_TYPE_GET",err,error)
3184  RETURN 1
3185  END SUBROUTINE distributed_matrix_storage_type_get
3186 
3187  !
3188  !================================================================================================================================
3189  !
3190 
3192  SUBROUTINE distributed_matrix_storage_type_set(DISTRIBUTED_MATRIX,STORAGE_TYPE,ERR,ERROR,*)
3194  !Argument variables
3195  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3196  INTEGER(INTG), INTENT(IN) :: STORAGE_TYPE
3197  INTEGER(INTG), INTENT(OUT) :: ERR
3198  TYPE(varying_string), INTENT(OUT) :: ERROR
3199  !Local Variables
3200  TYPE(varying_string) :: LOCAL_ERROR
3201 
3202  enters("DISTRIBUTED_MATRIX_STORAGE_TYPE_SET",err,error,*999)
3203 
3204  IF(ASSOCIATED(distributed_matrix)) THEN
3205  IF(distributed_matrix%MATRIX_FINISHED) THEN
3206  CALL flagerror("The distributed matrix has been finished.",err,error,*999)
3207  ELSE
3208  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3209  CASE(distributed_matrix_vector_cmiss_type)
3210  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
3211  CALL matrix_storage_type_set(distributed_matrix%CMISS%MATRIX,storage_type,err,error,*999)
3212  ELSE
3213  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
3214  ENDIF
3215  CASE(distributed_matrix_vector_petsc_type)
3216  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
3217  SELECT CASE(storage_type)
3218  CASE(distributed_matrix_block_storage_type)
3219  distributed_matrix%PETSC%STORAGE_TYPE=distributed_matrix_block_storage_type
3220  CASE(distributed_matrix_diagonal_storage_type)
3221  distributed_matrix%PETSC%STORAGE_TYPE=distributed_matrix_diagonal_storage_type
3222  CASE(distributed_matrix_column_major_storage_type)
3223  CALL flagerror("Column major storage is not implemented for PETSc matrices.",err,error,*999)
3224  CASE(distributed_matrix_row_major_storage_type)
3225  CALL flagerror("Row major storage is not implemented for PETSc matrices.",err,error,*999)
3226  CASE(distributed_matrix_compressed_row_storage_type)
3227  distributed_matrix%PETSC%STORAGE_TYPE=distributed_matrix_compressed_row_storage_type
3228  CASE(distributed_matrix_compressed_column_storage_type)
3229  CALL flagerror("Compressed column storage is not implemented for PETSc matrices.",err,error,*999)
3230  CASE(distributed_matrix_row_column_storage_type)
3231  CALL flagerror("Row column storage is not implemented for PETSc matrices.",err,error,*999)
3232  CASE DEFAULT
3233  local_error="The specified matrix storage type of "//trim(numbertovstring(storage_type,"*",err,error))// &
3234  & " is invalid."
3235  CALL flagerror(local_error,err,error,*999)
3236  END SELECT
3237  ELSE
3238  CALL flagerror("Distributed matrix PETSc is not implemented.",err,error,*999)
3239  ENDIF
3240  CASE DEFAULT
3241  local_error="The distributed matrix library type of "// &
3242  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3243  CALL flagerror(local_error,err,error,*999)
3244  END SELECT
3245  ENDIF
3246  ELSE
3247  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3248  ENDIF
3249 
3250  exits("DISTRIBUTED_MATRIX_STORAGE_TYPE_SET")
3251  RETURN
3252 999 errorsexits("DISTRIBUTED_MATRIX_STORAGE_TYPE_SET",err,error)
3253  RETURN 1
3254  END SUBROUTINE distributed_matrix_storage_type_set
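  !Note: the storage type can only be set before the matrix is finished; after
  !DISTRIBUTED_MATRIX_CREATE_FINISH this routine flags an error, so set the storage
  !type between the create start and create finish calls.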
3255 
3256  !
3257  !================================================================================================================================
3258  !
3259 
3261  SUBROUTINE distributed_matrix_update_finish(DISTRIBUTED_MATRIX,ERR,ERROR,*)
3263  !Argument variables
3264  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3265  INTEGER(INTG), INTENT(OUT) :: ERR
3266  TYPE(varying_string), INTENT(OUT) :: ERROR
3267  !Local Variables
3268  TYPE(varying_string) :: LOCAL_ERROR
3269 
3270  enters("DISTRIBUTED_MATRIX_UPDATE_FINISH",err,error,*999)
3271 
3272  IF(ASSOCIATED(distributed_matrix)) THEN
3273  IF(distributed_matrix%MATRIX_FINISHED) THEN
3274  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3275  CASE(distributed_matrix_vector_cmiss_type)
3276  !Do nothing for now.
3277  CASE(distributed_matrix_vector_petsc_type)
3278  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
3279  IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
3280  CALL petsc_matassemblyend(distributed_matrix%PETSC%OVERRIDE_MATRIX,petsc_mat_final_assembly,err,error,*999)
3281  ELSE
3282  CALL petsc_matassemblyend(distributed_matrix%PETSC%MATRIX,petsc_mat_final_assembly,err,error,*999)
3283  ENDIF
3284  ELSE
3285  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
3286  ENDIF
3287  CASE DEFAULT
3288  local_error="The distributed matrix library type of "// &
3289  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3290  CALL flagerror(local_error,err,error,*999)
3291  END SELECT
3292  ELSE
3293  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3294  ENDIF
3295  ELSE
3296  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3297  ENDIF
3298 
3299  exits("DISTRIBUTED_MATRIX_UPDATE_FINISH")
3300  RETURN
3301 999 errorsexits("DISTRIBUTED_MATRIX_UPDATE_FINISH",err,error)
3302  RETURN 1
3303  END SUBROUTINE distributed_matrix_update_finish
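  !For CMISS matrices the update is currently a no-op since the matrix data is held
  !locally; for PETSc matrices this completes the MatAssemblyBegin/MatAssemblyEnd pair
  !opened by DISTRIBUTED_MATRIX_UPDATE_START.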
3304 
3305  !
3306  !================================================================================================================================
3307  !
3308 
3310  SUBROUTINE distributed_matrix_update_isfinished(DISTRIBUTED_MATRIX,ISFINISHED,ERR,ERROR,*)
3312  !Argument variables
3313  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3314  LOGICAL, INTENT(OUT) :: ISFINISHED
3315  INTEGER(INTG), INTENT(OUT) :: ERR
3316  TYPE(varying_string), INTENT(OUT) :: ERROR
3317  !Local Variables
3318 
3319  enters("DISTRIBUTED_MATRIX_UPDATE_ISFINISHED",err,error,*999)
3320 
3321  isfinished=.false.
3322  IF(ASSOCIATED(distributed_matrix)) THEN
3323  IF(distributed_matrix%MATRIX_FINISHED) THEN
3324  !Do nothing for now.
3325  isfinished=.true.
3326  ELSE
3327  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3328  ENDIF
3329  ELSE
3330  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3331  ENDIF
3332 
3333  exits("DISTRIBUTED_MATRIX_UPDATE_ISFINISHED")
3334  RETURN
3335 999 errorsexits("DISTRIBUTED_MATRIX_UPDATE_ISFINISHED",err,error)
3336  RETURN 1
3337  END SUBROUTINE distributed_matrix_update_isfinished
3338 
3339  !
3340  !================================================================================================================================
3341  !
3342 
3344  SUBROUTINE distributed_matrix_update_waitfinished(DISTRIBUTED_MATRIX,ERR,ERROR,*)
3346  !Argument variables
3347  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3348  INTEGER(INTG), INTENT(OUT) :: ERR
3349  TYPE(varying_string), INTENT(OUT) :: ERROR
3350  !Local Variables
3351 
3352  enters("DISTRIBUTED_MATRIX_UPDATE_WAITFINISHED",err,error,*999)
3353 
3354  IF(ASSOCIATED(distributed_matrix)) THEN
3355  IF(distributed_matrix%MATRIX_FINISHED) THEN
3356  !Do nothing for now.
3357  ELSE
3358  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3359  ENDIF
3360  ELSE
3361  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3362  ENDIF
3363 
3364  exits("DISTRIBUTED_MATRIX_UPDATE_WAITFINISHED")
3365  RETURN
3366 999 errorsexits("DISTRIBUTED_MATRIX_UPDATE_WAITFINISHED",err,error)
3367  RETURN 1
3368  END SUBROUTINE distributed_matrix_update_waitfinished
3369 
3370  !
3371  !================================================================================================================================
3372  !
3373 
3375  SUBROUTINE distributed_matrix_update_start(DISTRIBUTED_MATRIX,ERR,ERROR,*)
3377  !Argument variables
3378  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3379  INTEGER(INTG), INTENT(OUT) :: ERR
3380  TYPE(varying_string), INTENT(OUT) :: ERROR
3381  !Local Variables
3382  TYPE(varying_string) :: LOCAL_ERROR
3383 
3384  enters("DISTRIBUTED_MATRIX_UPDATE_START",err,error,*999)
3385 
3386  IF(ASSOCIATED(distributed_matrix)) THEN
3387  IF(distributed_matrix%MATRIX_FINISHED) THEN
3388  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3389  CASE(distributed_matrix_vector_cmiss_type)
3390  !Do nothing for now.
3391  CASE(distributed_matrix_vector_petsc_type)
3392  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
3393  IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
3394  CALL petsc_matassemblybegin(distributed_matrix%PETSC%OVERRIDE_MATRIX,petsc_mat_final_assembly,err,error,*999)
3395  ELSE
3396  CALL petsc_matassemblybegin(distributed_matrix%PETSC%MATRIX,petsc_mat_final_assembly,err,error,*999)
3397  ENDIF
3398  ELSE
3399  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
3400  ENDIF
3401  CASE DEFAULT
3402  local_error="The distributed matrix library type of "// &
3403  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3404  CALL flagerror(local_error,err,error,*999)
3405  END SELECT
3406  ELSE
3407  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3408  ENDIF
3409  ELSE
3410  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3411  ENDIF
3412 
3413  exits("DISTRIBUTED_MATRIX_UPDATE_START")
3414  RETURN
3415 999 errorsexits("DISTRIBUTED_MATRIX_UPDATE_START",err,error)
3416  RETURN 1
3417  END SUBROUTINE distributed_matrix_update_start
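  !A typical assembly sequence, as a sketch only (generic interface names assumed from
  !this module's conventions; error handling abbreviated):
  !  CALL DISTRIBUTED_MATRIX_VALUES_ADD(matrix,rowIndices,columnIndices,values,err,error,*999)
  !  CALL DISTRIBUTED_MATRIX_UPDATE_START(matrix,err,error,*999)
  !  !...other local work can overlap the communication here...
  !  CALL DISTRIBUTED_MATRIX_UPDATE_FINISH(matrix,err,error,*999)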
3418 
3419  !
3420  !================================================================================================================================
3421  !
3422 
3424  SUBROUTINE distributed_matrix_values_add_intg(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
3426  !Argument variables
3427  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3428  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
3429  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
3430  INTEGER(INTG), INTENT(IN) :: VALUES(:)
3431  INTEGER(INTG), INTENT(OUT) :: ERR
3432  TYPE(varying_string), INTENT(OUT) :: ERROR
3433  !Local Variables
3434  TYPE(varying_string) :: LOCAL_ERROR
3435 
3436  enters("DISTRIBUTED_MATRIX_VALUES_ADD_INTG",err,error,*999)
3437 
3438  IF(ASSOCIATED(distributed_matrix)) THEN
3439  IF(distributed_matrix%MATRIX_FINISHED) THEN
3440  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3441  CASE(distributed_matrix_vector_cmiss_type)
3442  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
3443  CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
3444  ELSE
3445  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
3446  ENDIF
3448  CALL flagerror("Adding values to an integer PETSc distributed matrix is not implemented.",err,error,*999)
3449  CASE DEFAULT
3450  local_error="The distributed matrix library type of "// &
3451  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3452  CALL flagerror(local_error,err,error,*999)
3453  END SELECT
3454  ELSE
3455  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3456  ENDIF
3457  ELSE
3458  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3459  ENDIF
3460 
3461  exits("DISTRIBUTED_MATRIX_VALUES_ADD_INTG")
3462  RETURN
3463 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_INTG",err,error)
3464  RETURN 1
3465  END SUBROUTINE distributed_matrix_values_add_intg
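  !Note: the integer, single precision and logical add variants are only implemented
  !for CMISS matrices; a PETSc Mat stores PetscScalar entries (double precision reals
  !in the usual build), so only the double precision variants call through to PETSc.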
3466 
3467  !
3468  !================================================================================================================================
3469  !
3470 
3472  SUBROUTINE distributed_matrix_values_add_intg1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
3474  !Argument variables
3475  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3476  INTEGER(INTG), INTENT(IN) :: ROW_INDEX
3477  INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
3478  INTEGER(INTG), INTENT(IN) :: VALUE
3479  INTEGER(INTG), INTENT(OUT) :: ERR
3480  TYPE(varying_string), INTENT(OUT) :: ERROR
3481  !Local Variables
3482  TYPE(varying_string) :: LOCAL_ERROR
3483 
3484  enters("DISTRIBUTED_MATRIX_VALUES_ADD_INTG1",err,error,*999)
3485 
3486  IF(ASSOCIATED(distributed_matrix)) THEN
3487  IF(distributed_matrix%MATRIX_FINISHED) THEN
3488  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3489  CASE(distributed_matrix_vector_cmiss_type)
3490  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
3491  CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
3492  ELSE
3493  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
3494  ENDIF
3496  CALL flagerror("Adding values to an integer PETSc distributed matrix is not implemented.",err,error,*999)
3497  CASE DEFAULT
3498  local_error="The distributed matrix library type of "// &
3499  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3500  CALL flagerror(local_error,err,error,*999)
3501  END SELECT
3502  ELSE
3503  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3504  ENDIF
3505  ELSE
3506  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3507  ENDIF
3508 
3509  exits("DISTRIBUTED_MATRIX_VALUES_ADD_INTG1")
3510  RETURN
3511 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_INTG1",err,error)
3512  RETURN 1
3513  END SUBROUTINE distributed_matrix_values_add_intg1
3514 
3515  !
3516  !================================================================================================================================
3517  !
3518 
3520  SUBROUTINE distributed_matrix_values_add_intg2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
3522  !Argument variables
3523  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3524  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
3525  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
3526  INTEGER(INTG), INTENT(IN) :: VALUES(:,:)
3527  INTEGER(INTG), INTENT(OUT) :: ERR
3528  TYPE(varying_string), INTENT(OUT) :: ERROR
3529  !Local Variables
3530  TYPE(varying_string) :: LOCAL_ERROR
3531 
3532  enters("DISTRIBUTED_MATRIX_VALUES_ADD_INTG2",err,error,*999)
3533 
3534  IF(ASSOCIATED(distributed_matrix)) THEN
3535  IF(distributed_matrix%MATRIX_FINISHED) THEN
3536  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3537  CASE(distributed_matrix_vector_cmiss_type)
3538  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
3539  CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
3540  ELSE
3541  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
3542  ENDIF
3544  CALL flagerror("Adding values to an integer PETSc distributed matrix is not implemented.",err,error,*999)
3545  CASE DEFAULT
3546  local_error="The distributed matrix library type of "// &
3547  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3548  CALL flagerror(local_error,err,error,*999)
3549  END SELECT
3550  ELSE
3551  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3552  ENDIF
3553  ELSE
3554  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3555  ENDIF
3556 
3557  exits("DISTRIBUTED_MATRIX_VALUES_ADD_INTG2")
3558  RETURN
3559 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_INTG2",err,error)
3560  RETURN 1
3561  END SUBROUTINE distributed_matrix_values_add_intg2
3562 
3563  !
3564  !================================================================================================================================
3565  !
3566 
3568  SUBROUTINE distributed_matrix_values_add_sp(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
3570  !Argument variables
3571  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3572  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
3573  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
3574  REAL(SP), INTENT(IN) :: VALUES(:)
3575  INTEGER(INTG), INTENT(OUT) :: ERR
3576  TYPE(varying_string), INTENT(OUT) :: ERROR
3577  !Local Variables
3578  TYPE(varying_string) :: LOCAL_ERROR
3579 
3580  enters("DISTRIBUTED_MATRIX_VALUES_ADD_SP",err,error,*999)
3581 
3582  IF(ASSOCIATED(distributed_matrix)) THEN
3583  IF(distributed_matrix%MATRIX_FINISHED) THEN
3584  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3585  CASE(distributed_matrix_vector_cmiss_type)
3586  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
3587  CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
3588  ELSE
3589  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
3590  ENDIF
3592  CALL flagerror("Adding values to a single precision PETSc distributed matrix is not implemented.",err,error,*999)
3593  CASE DEFAULT
3594  local_error="The distributed matrix library type of "// &
3595  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3596  CALL flagerror(local_error,err,error,*999)
3597  END SELECT
3598  ELSE
3599  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3600  ENDIF
3601  ELSE
3602  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3603  ENDIF
3604 
3605  exits("DISTRIBUTED_MATRIX_VALUES_ADD_SP")
3606  RETURN
3607 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_SP",err,error)
3608  RETURN 1
3609  END SUBROUTINE distributed_matrix_values_add_sp
3610 
3611  !
3612  !================================================================================================================================
3613  !
3614 
3616  SUBROUTINE distributed_matrix_values_add_sp1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
3618  !Argument variables
3619  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3620  INTEGER(INTG), INTENT(IN) :: ROW_INDEX
3621  INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
3622  REAL(SP), INTENT(IN) :: VALUE
3623  INTEGER(INTG), INTENT(OUT) :: ERR
3624  TYPE(varying_string), INTENT(OUT) :: ERROR
3625  !Local Variables
3626  TYPE(varying_string) :: LOCAL_ERROR
3627 
3628  enters("DISTRIBUTED_MATRIX_VALUES_ADD_SP1",err,error,*999)
3629 
3630  IF(ASSOCIATED(distributed_matrix)) THEN
3631  IF(distributed_matrix%MATRIX_FINISHED) THEN
3632  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3633  CASE(distributed_matrix_vector_cmiss_type)
3634  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
3635  CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
3636  ELSE
3637  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
3638  ENDIF
3640  CALL flagerror("Adding values to a single precision PETSc distributed matrix is not implemented.",err,error,*999)
3641  CASE DEFAULT
3642  local_error="The distributed matrix library type of "// &
3643  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3644  CALL flagerror(local_error,err,error,*999)
3645  END SELECT
3646  ELSE
3647  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3648  ENDIF
3649  ELSE
3650  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3651  ENDIF
3652 
3653  exits("DISTRIBUTED_MATRIX_VALUES_ADD_SP1")
3654  RETURN
3655 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_SP1",err,error)
3656  RETURN 1
3657  END SUBROUTINE distributed_matrix_values_add_sp1
3658 
3659  !
3660  !================================================================================================================================
3661  !
3662 
3664  SUBROUTINE distributed_matrix_values_add_sp2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
3666  !Argument variables
3667  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3668  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
3669  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
3670  REAL(SP), INTENT(IN) :: VALUES(:,:)
3671  INTEGER(INTG), INTENT(OUT) :: ERR
3672  TYPE(varying_string), INTENT(OUT) :: ERROR
3673  !Local Variables
3674  TYPE(varying_string) :: LOCAL_ERROR
3675 
3676  enters("DISTRIBUTED_MATRIX_VALUES_ADD_SP2",err,error,*999)
3677 
3678  IF(ASSOCIATED(distributed_matrix)) THEN
3679  IF(distributed_matrix%MATRIX_FINISHED) THEN
3680  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3681  CASE(distributed_matrix_vector_cmiss_type)
3682  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
3683  CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
3684  ELSE
3685  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
3686  ENDIF
3688  CALL flagerror("Adding values to a single precision PETSc distributed matrix is not implemented.",err,error,*999)
3689  CASE DEFAULT
3690  local_error="The distributed matrix library type of "// &
3691  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3692  CALL flagerror(local_error,err,error,*999)
3693  END SELECT
3694  ELSE
3695  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3696  ENDIF
3697  ELSE
3698  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3699  ENDIF
3700 
3701  exits("DISTRIBUTED_MATRIX_VALUES_ADD_SP2")
3702  RETURN
3703 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_SP2",err,error)
3704  RETURN 1
3705  END SUBROUTINE distributed_matrix_values_add_sp2
3706 
3707  !
3708  !================================================================================================================================
3709  !
3710 
3712  SUBROUTINE distributed_matrix_values_add_dp(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
3714  !Argument variables
3715  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3716  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
3717  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
3718  REAL(DP), INTENT(IN) :: VALUES(:)
3719  INTEGER(INTG), INTENT(OUT) :: ERR
3720  TYPE(varying_string), INTENT(OUT) :: ERROR
3721  !Local Variables
3722  INTEGER(INTG) :: i
3723  TYPE(varying_string) :: LOCAL_ERROR
3724 
3725  enters("DISTRIBUTED_MATRIX_VALUES_ADD_DP",err,error,*999)
3726 
3727  IF(ASSOCIATED(distributed_matrix)) THEN
3728  IF(distributed_matrix%MATRIX_FINISHED) THEN
3729  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3730  CASE(distributed_matrix_vector_cmiss_type)
3731  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
3732  CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
3733  ELSE
3734  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
3735  ENDIF
3736  CASE(distributed_matrix_vector_petsc_type)
3737  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
3738  IF(SIZE(row_indices,1)==SIZE(values,1)) THEN
3739  IF(SIZE(column_indices,1)==SIZE(values,1)) THEN
3740  IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
3741  DO i=1,SIZE(row_indices,1)
3742  !Use global matrix row and column numbers
3743  CALL petsc_matsetvalues(distributed_matrix%PETSC%OVERRIDE_MATRIX,1,distributed_matrix%PETSC% &
3744  & global_row_numbers(row_indices(i:i)),1,column_indices(i:i)-1,values(i:i),petsc_add_values, &
3745  & err,error,*999) !PETSc uses 0 based indices
3746  ENDDO !i
3747  ELSE
3748  DO i=1,SIZE(row_indices,1)
3749  !Use global matrix row and column numbers
3750  CALL petsc_matsetvalues(distributed_matrix%PETSC%MATRIX,1,distributed_matrix%PETSC% &
3751  & global_row_numbers(row_indices(i:i)),1,column_indices(i:i)-1,values(i:i),petsc_add_values, &
3752  & err,error,*999) !PETSc uses 0 based indices
3753  ENDDO !i
3754  ENDIF
3755  ELSE
3756  local_error="The size of the column indices array ("// &
3757  & trim(numbertovstring(SIZE(column_indices,1),"*",err,error))// &
3758  & ") does not conform to the size of the values array ("// &
3759  & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
3760  CALL flagerror(local_error,err,error,*999)
3761  ENDIF
3762  ELSE
3763  local_error="The size of the row indices array ("// &
3764  & trim(numbertovstring(SIZE(row_indices,1),"*",err,error))// &
3765  & ") does not conform to the size of the values array ("// &
3766  & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
3767  CALL flagerror(local_error,err,error,*999)
3768  ENDIF
3769  ELSE
3770  CALL flagerror("The distributed matrix PETSc is not associated.",err,error,*999)
3771  ENDIF
3772  CASE DEFAULT
3773  local_error="The distributed matrix library type of "// &
3774  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3775  CALL flagerror(local_error,err,error,*999)
3776  END SELECT
3777  ELSE
3778  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3779  ENDIF
3780  ELSE
3781  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3782  ENDIF
3783 
3784  exits("DISTRIBUTED_MATRIX_VALUES_ADD_DP")
3785  RETURN
3786 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_DP",err,error)
3787  RETURN 1
3788  END SUBROUTINE distributed_matrix_values_add_dp
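  !The rank-1 double precision add loops over the entries one at a time: each local row
  !is translated to its global PETSc row via GLOBAL_ROW_NUMBERS, columns are shifted to
  !0-based indexing, and the i:i slices pass 1-element arrays to petsc_matsetvalues.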
3789 
3790  !
3791  !================================================================================================================================
3792  !
3793 
3795  SUBROUTINE distributed_matrix_values_add_dp1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
3797  !Argument variables
3798  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3799  INTEGER(INTG), INTENT(IN) :: ROW_INDEX
3800  INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
3801  REAL(DP), INTENT(IN) :: VALUE
3802  INTEGER(INTG), INTENT(OUT) :: ERR
3803  TYPE(varying_string), INTENT(OUT) :: ERROR
3804  !Local Variables
3807  TYPE(varying_string) :: LOCAL_ERROR
3808 
3809  enters("DISTRIBUTED_MATRIX_VALUES_ADD_DP1",err,error,*999)
3810 
3811  IF(ASSOCIATED(distributed_matrix)) THEN
3812  IF(distributed_matrix%MATRIX_FINISHED) THEN
3813  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3814  CASE(distributed_matrix_vector_cmiss_type)
3815  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
3816  CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
3817  ELSE
3818  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
3819  ENDIF
3820  CASE(distributed_matrix_vector_petsc_type)
3821  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
3822  !Use global matrix row and column numbers
3825  IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
3826  CALL petsc_matsetvalue(distributed_matrix%PETSC%OVERRIDE_MATRIX,distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS( &
3827  & row_index),column_index-1,VALUE,petsc_add_values,err,error,*999) !PETSc uses 0 based indices
3828  ELSE
3829  CALL petsc_matsetvalue(distributed_matrix%PETSC%MATRIX,distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS( &
3830  & row_index),column_index-1,VALUE,petsc_add_values,err,error,*999) !PETSc uses 0 based indices
3831  ENDIF
3832  ELSE
3833  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
3834  ENDIF
3835  CASE DEFAULT
3836  local_error="The distributed matrix library type of "// &
3837  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3838  CALL flagerror(local_error,err,error,*999)
3839  END SELECT
3840  ELSE
3841  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3842  ENDIF
3843  ELSE
3844  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3845  ENDIF
3846 
3847  exits("DISTRIBUTED_MATRIX_VALUES_ADD_DP1")
3848  RETURN
3849 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_DP1",err,error)
3850  RETURN 1
3851  END SUBROUTINE distributed_matrix_values_add_dp1
3852 
3853  !
3854  !================================================================================================================================
3855  !
3856 
3858  SUBROUTINE distributed_matrix_values_add_dp2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
3860  !Argument variables
3861  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3862  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
3863  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
3864  REAL(DP), INTENT(IN) :: VALUES(:,:)
3865  INTEGER(INTG), INTENT(OUT) :: ERR
3866  TYPE(varying_string), INTENT(OUT) :: ERROR
3867  !Local Variables
3868  INTEGER(INTG) :: GLOBAL_ROW_INDICES(size(row_indices)),i
3869  TYPE(varying_string) :: LOCAL_ERROR
3870 
3871  enters("DISTRIBUTED_MATRIX_VALUES_ADD_DP2",err,error,*999)
3872 
3873  IF(ASSOCIATED(distributed_matrix)) THEN
3874  IF(distributed_matrix%MATRIX_FINISHED) THEN
3875  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3876  CASE(distributed_matrix_vector_cmiss_type)
3877  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
3878  CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
3879  ELSE
3880  CALL flagerror("Distributed matrix CMISS is not associated",err,error,*999)
3881  ENDIF
3882  CASE(distributed_matrix_vector_petsc_type)
3883  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
3884  IF(SIZE(row_indices,1)==SIZE(values,1)) THEN
3885  IF(SIZE(column_indices,1)==SIZE(values,2)) THEN
3886  DO i=1,SIZE(row_indices,1)
3887  global_row_indices(i)=distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS(row_indices(i))
3888  ENDDO !i
3889  IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
3890  CALL petsc_matsetvalues(distributed_matrix%PETSC%OVERRIDE_MATRIX,SIZE(row_indices,1),global_row_indices, &
3891  & SIZE(column_indices,1),column_indices-1,values,petsc_add_values,err,error,*999) !PETSc uses 0 based indices
3892  ELSE
3893  CALL petsc_matsetvalues(distributed_matrix%PETSC%MATRIX,SIZE(row_indices,1),global_row_indices, &
3894  & SIZE(column_indices,1),column_indices-1,values,petsc_add_values,err,error,*999) !PETSc uses 0 based indices
3895  ENDIF
3896  ELSE
3897  local_error="The size of the column indices array ("// &
3898  & trim(numbertovstring(SIZE(column_indices,1),"*",err,error))// &
3899  & ") does not conform to the number of columns in the values array ("// &
3900  & trim(numbertovstring(SIZE(values,2),"*",err,error))//")."
3901  CALL flagerror(local_error,err,error,*999)
3902  ENDIF
3903  ELSE
3904  local_error="The size of the row indices array ("// &
3905  & trim(numbertovstring(SIZE(row_indices,1),"*",err,error))// &
3906  & ") does not conform to the number of rows in the values array ("// &
3907  & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
3908  CALL flagerror(local_error,err,error,*999)
3909  ENDIF
3910  ELSE
3911  CALL flagerror("The distributed matrix PETSc is not associated.",err,error,*999)
3912  ENDIF
3913  CASE DEFAULT
3914  local_error="The distributed matrix library type of "// &
3915  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3916  CALL flagerror(local_error,err,error,*999)
3917  END SELECT
3918  ELSE
3919  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3920  ENDIF
3921  ELSE
3922  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3923  ENDIF
3924 
3925  exits("DISTRIBUTED_MATRIX_VALUES_ADD_DP2")
3926  RETURN
3927 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_DP2",err,error)
3928  RETURN 1
3929  END SUBROUTINE distributed_matrix_values_add_dp2
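  !The rank-2 variant adds a dense block with a single petsc_matsetvalues call: values
  !must be SIZE(row_indices,1) by SIZE(column_indices,1), rows are mapped to global
  !numbers and columns shifted to 0-based indices.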
3930 
3931  !
3932  !================================================================================================================================
3933  !
3934 
3936  SUBROUTINE distributed_matrix_values_add_l(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
3938  !Argument variables
3939  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3940  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
3941  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
3942  LOGICAL, INTENT(IN) :: VALUES(:)
3943  INTEGER(INTG), INTENT(OUT) :: ERR
3944  TYPE(varying_string), INTENT(OUT) :: ERROR
3945  !Local Variables
3946  TYPE(varying_string) :: LOCAL_ERROR
3947 
3948  enters("DISTRIBUTED_MATRIX_VALUES_ADD_L",err,error,*999)
3949 
3950  IF(ASSOCIATED(distributed_matrix)) THEN
3951  IF(distributed_matrix%MATRIX_FINISHED) THEN
3952  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
3953  CASE(distributed_matrix_vector_cmiss_type)
3954  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
3955  CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
3956  ELSE
3957  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
3958  ENDIF
3960  CALL flagerror("Adding values to a logical PETSc distributed matrix is not implemented.",err,error,*999)
3961  CASE DEFAULT
3962  local_error="The distributed matrix library type of "// &
3963  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
3964  CALL flagerror(local_error,err,error,*999)
3965  END SELECT
3966  ELSE
3967  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
3968  ENDIF
3969  ELSE
3970  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
3971  ENDIF
3972 
3973  exits("DISTRIBUTED_MATRIX_VALUES_ADD_L")
3974  RETURN
3975 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_L",err,error)
3976  RETURN 1
3977  END SUBROUTINE distributed_matrix_values_add_l
3978 
3979  !
3980  !================================================================================================================================
3981  !
3982 
3984  SUBROUTINE distributed_matrix_values_add_l1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
3986  !Argument variables
3987  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
3988  INTEGER(INTG), INTENT(IN) :: ROW_INDEX
3989  INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
3990  LOGICAL, INTENT(IN) :: VALUE
3991  INTEGER(INTG), INTENT(OUT) :: ERR
3992  TYPE(varying_string), INTENT(OUT) :: ERROR
3993  !Local Variables
3994  TYPE(varying_string) :: LOCAL_ERROR
3995 
3996  enters("DISTRIBUTED_MATRIX_VALUES_ADD_L1",err,error,*999)
3997 
3998  IF(ASSOCIATED(distributed_matrix)) THEN
3999  IF(distributed_matrix%MATRIX_FINISHED) THEN
4000  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4001  CASE(distributed_matrix_vector_cmiss_type)
4002  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4003  CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
4004  ELSE
4005  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4006  ENDIF
4008  CALL flagerror("Adding values to a logical PETSc distributed matrix is not implemented.",err,error,*999)
4009  CASE DEFAULT
4010  local_error="The distributed matrix library type of "// &
4011  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4012  CALL flagerror(local_error,err,error,*999)
4013  END SELECT
4014  ELSE
4015  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4016  ENDIF
4017  ELSE
4018  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4019  ENDIF
4020 
4021  exits("DISTRIBUTED_MATRIX_VALUES_ADD_L1")
4022  RETURN
4023 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_L1",err,error)
4024  RETURN 1
4025  END SUBROUTINE distributed_matrix_values_add_l1
4026 
4027  !
4028  !================================================================================================================================
4029  !
4030 
4032  SUBROUTINE distributed_matrix_values_add_l2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
4034  !Argument variables
4035  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4036  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
4037  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
4038  LOGICAL, INTENT(IN) :: VALUES(:,:)
4039  INTEGER(INTG), INTENT(OUT) :: ERR
4040  TYPE(varying_string), INTENT(OUT) :: ERROR
4041  !Local Variables
4042  TYPE(varying_string) :: LOCAL_ERROR
4043 
4044  enters("DISTRIBUTED_MATRIX_VALUES_ADD_L2",err,error,*999)
4045 
4046  IF(ASSOCIATED(distributed_matrix)) THEN
4047  IF(distributed_matrix%MATRIX_FINISHED) THEN
4048  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4049  CASE(distributed_matrix_vector_cmiss_type)
4050  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4051  CALL matrix_values_add(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4052  ELSE
4053  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4054  ENDIF
4056  CALL flagerror("Adding values to a logical PETSc distributed matrix is not implemented.",err,error,*999)
4057  CASE DEFAULT
4058  local_error="The distributed matrix library type of "// &
4059  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4060  CALL flagerror(local_error,err,error,*999)
4061  END SELECT
4062  ELSE
4063  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4064  ENDIF
4065  ELSE
4066  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4067  ENDIF
4068 
4069  exits("DISTRIBUTED_MATRIX_VALUES_ADD_L2")
4070  RETURN
4071 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_ADD_L2",err,error)
4072  RETURN 1
4073  END SUBROUTINE distributed_matrix_values_add_l2
4074 
4075  !
4076  !================================================================================================================================
4077  !
4078 
4080  SUBROUTINE distributed_matrix_values_get_intg(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
4082  !Argument variables
4083  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4084  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
4085  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
4086  INTEGER(INTG), INTENT(OUT) :: VALUES(:)
4087  INTEGER(INTG), INTENT(OUT) :: ERR
4088  TYPE(varying_string), INTENT(OUT) :: ERROR
4089  !Local Variables
4090  TYPE(varying_string) :: LOCAL_ERROR
4091 
4092  enters("DISTRIBUTED_MATRIX_VALUES_GET_INTG",err,error,*999)
4093 
4094  IF(ASSOCIATED(distributed_matrix)) THEN
4095  IF(distributed_matrix%MATRIX_FINISHED) THEN
4096  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4097  CASE(distributed_matrix_vector_cmiss_type)
4098  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4099  CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4100  ELSE
4101  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4102  ENDIF
4104  CALL flagerror("Cannot get values for an integer PETSc distributed matrix.",err,error,*999)
4105  CASE DEFAULT
4106  local_error="The distributed matrix library type of "// &
4107  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4108  CALL flagerror(local_error,err,error,*999)
4109  END SELECT
4110  ELSE
4111  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4112  ENDIF
4113  ELSE
4114  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4115  ENDIF
4116 
4117  exits("DISTRIBUTED_MATRIX_VALUES_GET_INTG")
4118  RETURN
4119 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_INTG",err,error)
4120  RETURN 1
4121  END SUBROUTINE distributed_matrix_values_get_intg
4122 
4123  !
4124  !================================================================================================================================
4125  !
4126 
4128  SUBROUTINE distributed_matrix_values_get_intg1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
4130  !Argument variables
4131  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4132  INTEGER(INTG), INTENT(IN) :: ROW_INDEX
4133  INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
4134  INTEGER(INTG), INTENT(OUT) :: VALUE
4135  INTEGER(INTG), INTENT(OUT) :: ERR
4136  TYPE(varying_string), INTENT(OUT) :: ERROR
4137  !Local Variables
4138  TYPE(varying_string) :: LOCAL_ERROR
4139 
4140  enters("DISTRIBUTED_MATRIX_VALUES_GET_INTG1",err,error,*999)
4141 
4142  IF(ASSOCIATED(distributed_matrix)) THEN
4143  IF(distributed_matrix%MATRIX_FINISHED) THEN
4144  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4145  CASE(distributed_matrix_vector_cmiss_type)
4146  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4147  CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
4148  ELSE
4149  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4150  ENDIF
4152  CALL flagerror("Cannot get values for an integer PETSc distributed matrix.",err,error,*999)
4153  CASE DEFAULT
4154  local_error="The distributed matrix library type of "// &
4155  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4156  CALL flagerror(local_error,err,error,*999)
4157  END SELECT
4158  ELSE
4159  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4160  ENDIF
4161  ELSE
4162  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4163  ENDIF
4164 
4165  exits("DISTRIBUTED_MATRIX_VALUES_GET_INTG1")
4166  RETURN
4167 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_INTG1",err,error)
4168  RETURN 1
4169  END SUBROUTINE distributed_matrix_values_get_intg1
4170 
4171  !
4172  !================================================================================================================================
4173  !
4174 
4176  SUBROUTINE distributed_matrix_values_get_intg2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
4178  !Argument variables
4179  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4180  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
4181  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
4182  INTEGER(INTG), INTENT(OUT) :: VALUES(:,:)
4183  INTEGER(INTG), INTENT(OUT) :: ERR
4184  TYPE(varying_string), INTENT(OUT) :: ERROR
4185  !Local Variables
4186  TYPE(varying_string) :: LOCAL_ERROR
4187 
4188  enters("DISTRIBUTED_MATRIX_VALUES_GET_INTG2",err,error,*999)
4189 
4190  IF(ASSOCIATED(distributed_matrix)) THEN
4191  IF(distributed_matrix%MATRIX_FINISHED) THEN
4192  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4193  CASE(distributed_matrix_vector_cmiss_type)
4194  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4195  CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4196  ELSE
4197  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4198  ENDIF
4200  CALL flagerror("Cannot get values for an integer PETSc distributed matrix.",err,error,*999)
4201  CASE DEFAULT
4202  local_error="The distributed matrix library type of "// &
4203  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4204  CALL flagerror(local_error,err,error,*999)
4205  END SELECT
4206  ELSE
4207  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4208  ENDIF
4209  ELSE
4210  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4211  ENDIF
4212 
4213  exits("DISTRIBUTED_MATRIX_VALUES_GET_INTG2")
4214  RETURN
4215 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_INTG2",err,error)
4216  RETURN 1
4217  END SUBROUTINE distributed_matrix_values_get_intg2
4218 
4219  !
4220  !================================================================================================================================
4221  !
4222 
4224  SUBROUTINE distributed_matrix_values_get_sp(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
4226  !Argument variables
4227  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4228  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
4229  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
4230  REAL(SP), INTENT(OUT) :: VALUES(:)
4231  INTEGER(INTG), INTENT(OUT) :: ERR
4232  TYPE(varying_string), INTENT(OUT) :: ERROR
4233  !Local Variables
4234  TYPE(varying_string) :: LOCAL_ERROR
4235 
4236  enters("DISTRIBUTED_MATRIX_VALUES_GET_SP",err,error,*999)
4237 
4238  IF(ASSOCIATED(distributed_matrix)) THEN
4239  IF(distributed_matrix%MATRIX_FINISHED) THEN
4240  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4241  CASE(distributed_matrix_vector_cmiss_type)
4242  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4243  CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4244  ELSE
4245  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4246  ENDIF
4248  CALL flagerror("Cannot get values for a single precision PETSc distributed matrix.",err,error,*999)
4249  CASE DEFAULT
4250  local_error="The distributed matrix library type of "// &
4251  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4252  CALL flagerror(local_error,err,error,*999)
4253  END SELECT
4254  ELSE
4255  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4256  ENDIF
4257  ELSE
4258  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4259  ENDIF
4260 
4261  exits("DISTRIBUTED_MATRIX_VALUES_GET_SP")
4262  RETURN
4263 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_SP",err,error)
4264  RETURN 1
4265  END SUBROUTINE distributed_matrix_values_get_sp
4266 
4267  !
4268  !================================================================================================================================
4269  !
4270 
4272  SUBROUTINE distributed_matrix_values_get_sp1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
4274  !Argument variables
4275  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4276  INTEGER(INTG), INTENT(IN) :: ROW_INDEX
4277  INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
4278  REAL(SP), INTENT(OUT) :: VALUE
4279  INTEGER(INTG), INTENT(OUT) :: ERR
4280  TYPE(varying_string), INTENT(OUT) :: ERROR
4281  !Local Variables
4282  TYPE(varying_string) :: LOCAL_ERROR
4283 
4284  enters("DISTRIBUTED_MATRIX_VALUES_GET_SP1",err,error,*999)
4285 
4286  IF(ASSOCIATED(distributed_matrix)) THEN
4287  IF(distributed_matrix%MATRIX_FINISHED) THEN
4288  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4289  CASE(distributed_matrix_vector_cmiss_type)
4290  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4291  CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
4292  ELSE
4293  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4294  ENDIF
4296  CALL flagerror("Cannot get values for a single precision PETSc distributed matrix.",err,error,*999)
4297  CASE DEFAULT
4298  local_error="The distributed matrix library type of "// &
4299  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4300  CALL flagerror(local_error,err,error,*999)
4301  END SELECT
4302  ELSE
4303  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4304  ENDIF
4305  ELSE
4306  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4307  ENDIF
4308 
4309  exits("DISTRIBUTED_MATRIX_VALUES_GET_SP1")
4310  RETURN
4311 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_SP1",err,error)
4312  RETURN 1
4313  END SUBROUTINE distributed_matrix_values_get_sp1
4314 
4315  !
4316  !================================================================================================================================
4317  !
4318 
4320  SUBROUTINE distributed_matrix_values_get_sp2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
4322  !Argument variables
4323  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4324  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
4325  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
4326  REAL(SP), INTENT(OUT) :: VALUES(:,:)
4327  INTEGER(INTG), INTENT(OUT) :: ERR
4328  TYPE(varying_string), INTENT(OUT) :: ERROR
4329  !Local Variables
4330  TYPE(varying_string) :: LOCAL_ERROR
4331 
4332  enters("DISTRIBUTED_MATRIX_VALUES_GET_SP2",err,error,*999)
4333 
4334  IF(ASSOCIATED(distributed_matrix)) THEN
4335  IF(distributed_matrix%MATRIX_FINISHED) THEN
4336  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4337  CASE(distributed_matrix_vector_cmiss_type)
4338  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4339  CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4340  ELSE
4341  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4342  ENDIF
4344  CALL flagerror("Cannot get values for a single precision PETSc distributed matrix.",err,error,*999)
4345  CASE DEFAULT
4346  local_error="The distributed matrix library type of "// &
4347  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4348  CALL flagerror(local_error,err,error,*999)
4349  END SELECT
4350  ELSE
4351  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4352  ENDIF
4353  ELSE
4354  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4355  ENDIF
4356 
4357  exits("DISTRIBUTED_MATRIX_VALUES_GET_SP2")
4358  RETURN
4359 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_SP2",err,error)
4360  RETURN 1
4361  END SUBROUTINE distributed_matrix_values_get_sp2
4362 
4363  !
4364  !================================================================================================================================
4365  !
4366 
4368  SUBROUTINE distributed_matrix_values_get_dp(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
4370  !Argument variables
4371  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4372  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
4373  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
4374  REAL(DP), INTENT(OUT) :: VALUES(:)
4375  INTEGER(INTG), INTENT(OUT) :: ERR
4376  TYPE(varying_string), INTENT(OUT) :: ERROR
4377  !Local Variables
4378  INTEGER(INTG) :: i
4379  TYPE(varying_string) :: LOCAL_ERROR
4380 
4381  enters("DISTRIBUTED_MATRIX_VALUES_GET_DP",err,error,*999)
4382 
4383  IF(ASSOCIATED(distributed_matrix)) THEN
4384  IF(distributed_matrix%MATRIX_FINISHED) THEN
4385  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4386  CASE(distributed_matrix_vector_cmiss_type)
4387  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4388  CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4389  ELSE
4390  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4391  ENDIF
4392  CASE(distributed_matrix_vector_petsc_type)
4393  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
4394  IF(SIZE(row_indices,1)==SIZE(values,1)) THEN
4395  IF(SIZE(column_indices,1)==SIZE(values,1)) THEN
4396  IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
4397  DO i=1,SIZE(row_indices,1)
4398  CALL petsc_matgetvalues(distributed_matrix%PETSC%OVERRIDE_MATRIX,1,distributed_matrix%PETSC% &
4399  & global_row_numbers(row_indices(i:i)),1,column_indices(i:i)-1,values(i:i), &
4400  & err,error,*999) !PETSc uses 0 based indices
4401  ENDDO !i
4402  ELSE
4403  DO i=1,SIZE(row_indices,1)
4404  CALL petsc_matgetvalues(distributed_matrix%PETSC%MATRIX,1,distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS( &
4405  & row_indices(i:i)),1,column_indices(i:i)-1,values(i:i),err,error,*999) !PETSc uses 0 based indices
4406  ENDDO !i
4407  ENDIF
4408  ELSE
4409  local_error="The size of the column indices array ("// &
4410  & trim(numbertovstring(SIZE(column_indices,1),"*",err,error))// &
4411  & ") does not conform to the size of the values array ("// &
4412  & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
4413  CALL flagerror(local_error,err,error,*999)
4414  ENDIF
4415  ELSE
4416  local_error="The size of the row indices array ("// &
4417  & trim(numbertovstring(SIZE(row_indices,1),"*",err,error))// &
4418  & ") does not conform to the size of the values array ("// &
4419  & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
4420  CALL flagerror(local_error,err,error,*999)
4421  ENDIF
4422  ELSE
4423  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
4424  ENDIF
4425  CASE DEFAULT
4426  local_error="The distributed matrix library type of "// &
4427  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4428  CALL flagerror(local_error,err,error,*999)
4429  END SELECT
4430  ELSE
4431  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4432  ENDIF
4433  ELSE
4434  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4435  ENDIF
4436 
4437  exits("DISTRIBUTED_MATRIX_VALUES_GET_DP")
4438  RETURN
4439 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_DP",err,error)
4440  RETURN 1
4441  END SUBROUTINE distributed_matrix_values_get_dp
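  !Note: PETSc's MatGetValues can only retrieve entries from rows owned by the calling
  !process, so the row indices supplied here must refer to local rows of the
  !distributed matrix.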
4442 
4443  !
4444  !================================================================================================================================
4445  !
4446 
4448  SUBROUTINE distributed_matrix_values_get_dp1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
4450  !Argument variables
4451  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4452  INTEGER(INTG), INTENT(IN) :: ROW_INDEX
4453  INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
4454  REAL(DP), INTENT(OUT) :: VALUE
4455  INTEGER(INTG), INTENT(OUT) :: ERR
4456  TYPE(varying_string), INTENT(OUT) :: ERROR
4457  !Local Variables
4458  INTEGER(INTG) :: COLUMN_INDICES(1)
4459  REAL(DP) :: VALUES(1)
4460  TYPE(varying_string) :: LOCAL_ERROR
4461 
4462  enters("DISTRIBUTED_MATRIX_VALUES_GET_DP1",err,error,*999)
4463 
4464  IF(ASSOCIATED(distributed_matrix)) THEN
4465  IF(distributed_matrix%MATRIX_FINISHED) THEN
4466  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4467  CASE(distributed_matrix_vector_cmiss_type)
4468  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4469  CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
4470  ELSE
4471  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4472  ENDIF
4473  CASE(distributed_matrix_vector_petsc_type)
4474  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
4475  column_indices(1)=column_index-1
4476  IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
4477  CALL petsc_matgetvalues(distributed_matrix%PETSC%OVERRIDE_MATRIX,1,distributed_matrix%PETSC% &
4478  & global_row_numbers(row_index),1,column_indices,values,err,error,*999) !PETSc uses 0 based indices
4479  ELSE
4480  CALL petsc_matgetvalues(distributed_matrix%PETSC%MATRIX,1,distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS(row_index), &
4481  & 1,column_indices,values,err,error,*999) !PETSc uses 0 based indices
4482  ENDIF
4483  VALUE=values(1)
4484  ELSE
4485  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
4486  ENDIF
4487  CASE DEFAULT
4488  local_error="The distributed matrix library type of "// &
4489  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4490  CALL flagerror(local_error,err,error,*999)
4491  END SELECT
4492  ELSE
4493  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4494  ENDIF
4495  ELSE
4496  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4497  ENDIF
4498 
4499  exits("DISTRIBUTED_MATRIX_VALUES_GET_DP1")
4500  RETURN
4501 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_DP1",err,error)
4502  RETURN 1
4503  END SUBROUTINE distributed_matrix_values_get_dp1
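  !
  ! Editorial note: a minimal usage sketch for the single-value getter above (not part of
  ! the source file). The matrix name is hypothetical. User indices are 1-based; the PETSc
  ! branch converts to 0-based indexing internally via GLOBAL_ROW_NUMBERS and COLUMN_INDEX-1.
  !
  !   REAL(DP) :: matrixValue
  !   CALL distributed_matrix_values_get_dp1(aMatrix,3,5,matrixValue,err,error,*999) !matrixValue = A(3,5)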
4504 
4505  !
4507  !================================================================================================================================
4508  !
4509 
4511  SUBROUTINE distributed_matrix_values_get_dp2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
4513  !Argument variables
4514  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4515  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
4516  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
4517  REAL(DP), INTENT(OUT) :: VALUES(:,:)
4518  INTEGER(INTG), INTENT(OUT) :: ERR
4519  TYPE(varying_string), INTENT(OUT) :: ERROR
4520  !Local Variables
4521  INTEGER(INTG) :: GLOBAL_ROW_INDICES(size(row_indices,1)),i
4522  TYPE(varying_string) :: LOCAL_ERROR
4523 
4524  enters("DISTRIBUTED_MATRIX_VALUES_GET_DP2",err,error,*999)
4525 
4526  IF(ASSOCIATED(distributed_matrix)) THEN
4527  IF(distributed_matrix%MATRIX_FINISHED) THEN
4528  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4529  CASE(distributed_matrix_vector_cmiss_type)
4530  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4531  CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4532  ELSE
4533  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4534  ENDIF
4535  CASE(distributed_matrix_vector_petsc_type)
4537  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
4538  IF(SIZE(row_indices,1)==SIZE(values,1)) THEN
4539  IF(SIZE(column_indices,1)==SIZE(values,2)) THEN
4540  DO i=1,SIZE(row_indices,1)
4541  global_row_indices(i)=distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS(row_indices(i))
4542  ENDDO !i
4543  IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
4544  CALL petsc_matgetvalues(distributed_matrix%PETSC%OVERRIDE_MATRIX,SIZE(row_indices,1),global_row_indices, &
4545  & SIZE(column_indices,1),column_indices-1,values,err,error,*999) !PETSc uses 0 based indices
4546  ELSE
4547  CALL petsc_matgetvalues(distributed_matrix%PETSC%MATRIX,SIZE(row_indices,1),global_row_indices, &
4548  & SIZE(column_indices,1),column_indices-1,values,err,error,*999) !PETSc uses 0 based indices
4549  ENDIF
4550  ELSE
4551  local_error="The size of the column indices array ("// &
4552  & trim(numbertovstring(SIZE(column_indices,1),"*",err,error))// &
4553  & ") does not conform to the number of columns in the values array ("// &
4554  & trim(numbertovstring(SIZE(values,2),"*",err,error))//")."
4555  CALL flagerror(local_error,err,error,*999)
4556  ENDIF
4557  ELSE
4558  local_error="The size of the row indices array ("// &
4559  & trim(numbertovstring(SIZE(row_indices,1),"*",err,error))// &
4560  & ") does not conform to the number of rows in the values array ("// &
4561  & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
4562  CALL flagerror(local_error,err,error,*999)
4563  ENDIF
4564  ELSE
4565  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
4566  ENDIF
4567  CASE DEFAULT
4568  local_error="The distributed matrix library type of "// &
4569  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4570  CALL flagerror(local_error,err,error,*999)
4571  END SELECT
4572  ELSE
4573  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4574  ENDIF
4575  ELSE
4576  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4577  ENDIF
4578 
4579  exits("DISTRIBUTED_MATRIX_VALUES_GET_DP2")
4580  RETURN
4581 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_DP2",err,error)
4582  RETURN 1
4583  END SUBROUTINE distributed_matrix_values_get_dp2
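  !
  ! Editorial note: a hedged usage sketch (not part of the source file). The rank-2 getter
  ! above returns the full block at the intersection of ROW_INDICES and COLUMN_INDICES, so
  ! VALUES must be dimensioned SIZE(ROW_INDICES,1) by SIZE(COLUMN_INDICES,1). Names are
  ! hypothetical.
  !
  !   REAL(DP) :: block(2,3)
  !   CALL distributed_matrix_values_get_dp2(aMatrix,(/1,2/),(/4,5,6/),block,err,error,*999)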
4584 
4585  !
4586  !================================================================================================================================
4587  !
4588 
4590  SUBROUTINE distributed_matrix_values_get_l(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
4592  !Argument variables
4593  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4594  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
4595  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
4596  LOGICAL, INTENT(OUT) :: VALUES(:)
4597  INTEGER(INTG), INTENT(OUT) :: ERR
4598  TYPE(varying_string), INTENT(OUT) :: ERROR
4599  !Local Variables
4600  TYPE(varying_string) :: LOCAL_ERROR
4601 
4602  enters("DISTRIBUTED_MATRIX_VALUES_GET_L",err,error,*999)
4603 
4604  IF(ASSOCIATED(distributed_matrix)) THEN
4605  IF(distributed_matrix%MATRIX_FINISHED) THEN
4606  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4607  CASE(distributed_matrix_vector_cmiss_type)
4608  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4609  CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4610  ELSE
4611  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4612  ENDIF
4613  CASE(distributed_matrix_vector_petsc_type)
4614  CALL flagerror("Cannot get values for a logical PETSc distributed matrix.",err,error,*999)
4615  CASE DEFAULT
4616  local_error="The distributed matrix library type of "// &
4617  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4618  CALL flagerror(local_error,err,error,*999)
4619  END SELECT
4620  ELSE
4621  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4622  ENDIF
4623  ELSE
4624  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4625  ENDIF
4626 
4627  exits("DISTRIBUTED_MATRIX_VALUES_GET_L")
4628  RETURN
4629 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_L",err,error)
4630  RETURN 1
4631  END SUBROUTINE distributed_matrix_values_get_l
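  !
  ! Editorial note (not part of the source file): as the PETSc branches in these routines
  ! show, integer, single precision and logical distributed matrices are only usable with
  ! the CMISS library type; every PETSc branch for those data types flags an error. A hedged
  ! sketch with hypothetical names, using the element-wise rank-1 logical getter above:
  !
  !   LOGICAL :: flags(2)
  !   CALL distributed_matrix_values_get_l(aCmissMatrix,(/1,2/),(/3,4/),flags,err,error,*999)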
4632 
4633  !
4634  !================================================================================================================================
4635  !
4636 
4638  SUBROUTINE distributed_matrix_values_get_l1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
4640  !Argument variables
4641  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4642  INTEGER(INTG), INTENT(IN) :: ROW_INDEX
4643  INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
4644  LOGICAL, INTENT(OUT) :: VALUE
4645  INTEGER(INTG), INTENT(OUT) :: ERR
4646  TYPE(varying_string), INTENT(OUT) :: ERROR
4647  !Local Variables
4648  TYPE(varying_string) :: LOCAL_ERROR
4649 
4650  enters("DISTRIBUTED_MATRIX_VALUES_GET_L1",err,error,*999)
4651 
4652  IF(ASSOCIATED(distributed_matrix)) THEN
4653  IF(distributed_matrix%MATRIX_FINISHED) THEN
4654  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4655  CASE(distributed_matrix_vector_cmiss_type)
4656  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4657  CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
4658  ELSE
4659  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4660  ENDIF
4661  CASE(distributed_matrix_vector_petsc_type)
4662  CALL flagerror("Cannot get values for a logical PETSc distributed matrix.",err,error,*999)
4663  CASE DEFAULT
4664  local_error="The distributed matrix library type of "// &
4665  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4666  CALL flagerror(local_error,err,error,*999)
4667  END SELECT
4668  ELSE
4669  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4670  ENDIF
4671  ELSE
4672  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4673  ENDIF
4674 
4675  exits("DISTRIBUTED_MATRIX_VALUES_GET_L1")
4676  RETURN
4677 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_L1",err,error)
4678  RETURN 1
4679  END SUBROUTINE distributed_matrix_values_get_l1
4680 
4681  !
4682  !================================================================================================================================
4683  !
4684 
4686  SUBROUTINE distributed_matrix_values_get_l2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
4688  !Argument variables
4689  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4690  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
4691  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
4692  LOGICAL, INTENT(OUT) :: VALUES(:,:)
4693  INTEGER(INTG), INTENT(OUT) :: ERR
4694  TYPE(varying_string), INTENT(OUT) :: ERROR
4695  !Local Variables
4696  TYPE(varying_string) :: LOCAL_ERROR
4697 
4698  enters("DISTRIBUTED_MATRIX_VALUES_GET_L2",err,error,*999)
4699 
4700  IF(ASSOCIATED(distributed_matrix)) THEN
4701  IF(distributed_matrix%MATRIX_FINISHED) THEN
4702  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4703  CASE(distributed_matrix_vector_cmiss_type)
4704  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4705  CALL matrix_values_get(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4706  ELSE
4707  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4708  ENDIF
4709  CASE(distributed_matrix_vector_petsc_type)
4710  CALL flagerror("Cannot get values for a logical PETSc distributed matrix.",err,error,*999)
4711  CASE DEFAULT
4712  local_error="The distributed matrix library type of "// &
4713  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4714  CALL flagerror(local_error,err,error,*999)
4715  END SELECT
4716  ELSE
4717  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4718  ENDIF
4719  ELSE
4720  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4721  ENDIF
4722 
4723  exits("DISTRIBUTED_MATRIX_VALUES_GET_L2")
4724  RETURN
4725 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_GET_L2",err,error)
4726  RETURN 1
4727  END SUBROUTINE distributed_matrix_values_get_l2
4728 
4729  !
4730  !================================================================================================================================
4731  !
4732 
4734  SUBROUTINE distributed_matrix_values_set_intg(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
4736  !Argument variables
4737  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4738  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
4739  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
4740  INTEGER(INTG), INTENT(IN) :: VALUES(:)
4741  INTEGER(INTG), INTENT(OUT) :: ERR
4742  TYPE(varying_string), INTENT(OUT) :: ERROR
4743  !Local Variables
4744  TYPE(varying_string) :: LOCAL_ERROR
4745 
4746  enters("DISTRIBUTED_MATRIX_VALUES_SET_INTG",err,error,*999)
4747 
4748  IF(ASSOCIATED(distributed_matrix)) THEN
4749  IF(distributed_matrix%MATRIX_FINISHED) THEN
4750  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4751  CASE(distributed_matrix_vector_cmiss_type)
4752  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4753  CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4754  ELSE
4755  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4756  ENDIF
4757  CASE(distributed_matrix_vector_petsc_type)
4758  CALL flagerror("Cannot set values for an integer PETSc distributed matrix.",err,error,*999)
4759  CASE DEFAULT
4760  local_error="The distributed matrix library type of "// &
4761  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4762  CALL flagerror(local_error,err,error,*999)
4763  END SELECT
4764  ELSE
4765  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4766  ENDIF
4767  ELSE
4768  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4769  ENDIF
4770 
4771  exits("DISTRIBUTED_MATRIX_VALUES_SET_INTG")
4772  RETURN
4773 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_SET_INTG",err,error)
4774  RETURN 1
4775  END SUBROUTINE distributed_matrix_values_set_intg
4776 
4777  !
4778  !================================================================================================================================
4779  !
4780 
4782  SUBROUTINE distributed_matrix_values_set_intg1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
4784  !Argument variables
4785  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4786  INTEGER(INTG), INTENT(IN) :: ROW_INDEX
4787  INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
4788  INTEGER(INTG), INTENT(IN) :: VALUE
4789  INTEGER(INTG), INTENT(OUT) :: ERR
4790  TYPE(varying_string), INTENT(OUT) :: ERROR
4791  !Local Variables
4792  TYPE(varying_string) :: LOCAL_ERROR
4793 
4794  enters("DISTRIBUTED_MATRIX_VALUES_SET_INTG1",err,error,*999)
4795 
4796  IF(ASSOCIATED(distributed_matrix)) THEN
4797  IF(distributed_matrix%MATRIX_FINISHED) THEN
4798  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4799  CASE(distributed_matrix_vector_cmiss_type)
4800  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4801  CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
4802  ELSE
4803  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4804  ENDIF
4805  CASE(distributed_matrix_vector_petsc_type)
4806  CALL flagerror("Cannot set values for an integer PETSc distributed matrix.",err,error,*999)
4807  CASE DEFAULT
4808  local_error="The distributed matrix library type of "// &
4809  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4810  CALL flagerror(local_error,err,error,*999)
4811  END SELECT
4812  ELSE
4813  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4814  ENDIF
4815  ELSE
4816  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4817  ENDIF
4818 
4819  exits("DISTRIBUTED_MATRIX_VALUES_SET_INTG1")
4820  RETURN
4821 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_SET_INTG1",err,error)
4822  RETURN 1
4823  END SUBROUTINE distributed_matrix_values_set_intg1
4824 
4825  !
4826  !================================================================================================================================
4827  !
4828 
4830  SUBROUTINE distributed_matrix_values_set_intg2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
4832  !Argument variables
4833  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4834  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
4835  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
4836  INTEGER(INTG), INTENT(IN) :: VALUES(:,:)
4837  INTEGER(INTG), INTENT(OUT) :: ERR
4838  TYPE(varying_string), INTENT(OUT) :: ERROR
4839  !Local Variables
4840  TYPE(varying_string) :: LOCAL_ERROR
4841 
4842  enters("DISTRIBUTED_MATRIX_VALUES_SET_INTG2",err,error,*999)
4843 
4844  IF(ASSOCIATED(distributed_matrix)) THEN
4845  IF(distributed_matrix%MATRIX_FINISHED) THEN
4846  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4847  CASE(distributed_matrix_vector_cmiss_type)
4848  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4849  CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4850  ELSE
4851  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4852  ENDIF
4853  CASE(distributed_matrix_vector_petsc_type)
4854  CALL flagerror("Cannot set values for an integer PETSc distributed matrix.",err,error,*999)
4855  CASE DEFAULT
4856  local_error="The distributed matrix library type of "// &
4857  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4858  CALL flagerror(local_error,err,error,*999)
4859  END SELECT
4860  ELSE
4861  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4862  ENDIF
4863  ELSE
4864  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4865  ENDIF
4866 
4867  exits("DISTRIBUTED_MATRIX_VALUES_SET_INTG2")
4868  RETURN
4869 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_SET_INTG2",err,error)
4870  RETURN 1
4871  END SUBROUTINE distributed_matrix_values_set_intg2
4872 
4873  !
4874  !================================================================================================================================
4875  !
4876 
4878  SUBROUTINE distributed_matrix_values_set_sp(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
4880  !Argument variables
4881  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4882  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
4883  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
4884  REAL(SP), INTENT(IN) :: VALUES(:)
4885  INTEGER(INTG), INTENT(OUT) :: ERR
4886  TYPE(varying_string), INTENT(OUT) :: ERROR
4887  !Local Variables
4888  TYPE(varying_string) :: LOCAL_ERROR
4889 
4890  enters("DISTRIBUTED_MATRIX_VALUES_SET_SP",err,error,*999)
4891 
4892  IF(ASSOCIATED(distributed_matrix)) THEN
4893  IF(distributed_matrix%MATRIX_FINISHED) THEN
4894  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4895  CASE(distributed_matrix_vector_cmiss_type)
4896  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4897  CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4898  ELSE
4899  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4900  ENDIF
4901  CASE(distributed_matrix_vector_petsc_type)
4902  CALL flagerror("Cannot set values for a single precision PETSc distributed matrix.",err,error,*999)
4903  CASE DEFAULT
4904  local_error="The distributed matrix library type of "// &
4905  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4906  CALL flagerror(local_error,err,error,*999)
4907  END SELECT
4908  ELSE
4909  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4910  ENDIF
4911  ELSE
4912  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4913  ENDIF
4914 
4915  exits("DISTRIBUTED_MATRIX_VALUES_SET_SP")
4916  RETURN
4917 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_SET_SP",err,error)
4918  RETURN 1
4919  END SUBROUTINE distributed_matrix_values_set_sp
4920 
4921  !
4922  !================================================================================================================================
4923  !
4924 
4926  SUBROUTINE distributed_matrix_values_set_sp1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
4928  !Argument variables
4929  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4930  INTEGER(INTG), INTENT(IN) :: ROW_INDEX
4931  INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
4932  REAL(SP), INTENT(IN) :: VALUE
4933  INTEGER(INTG), INTENT(OUT) :: ERR
4934  TYPE(varying_string), INTENT(OUT) :: ERROR
4935  !Local Variables
4936  TYPE(varying_string) :: LOCAL_ERROR
4937 
4938  enters("DISTRIBUTED_MATRIX_VALUES_SET_SP1",err,error,*999)
4939 
4940  IF(ASSOCIATED(distributed_matrix)) THEN
4941  IF(distributed_matrix%MATRIX_FINISHED) THEN
4942  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4943  CASE(distributed_matrix_vector_cmiss_type)
4944  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4945  CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
4946  ELSE
4947  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4948  ENDIF
4949  CASE(distributed_matrix_vector_petsc_type)
4950  CALL flagerror("Cannot set values for a single precision PETSc distributed matrix.",err,error,*999)
4951  CASE DEFAULT
4952  local_error="The distributed matrix library type of "// &
4953  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
4954  CALL flagerror(local_error,err,error,*999)
4955  END SELECT
4956  ELSE
4957  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
4958  ENDIF
4959  ELSE
4960  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
4961  ENDIF
4962 
4963  exits("DISTRIBUTED_MATRIX_VALUES_SET_SP1")
4964  RETURN
4965 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_SET_SP1",err,error)
4966  RETURN 1
4967  END SUBROUTINE distributed_matrix_values_set_sp1
4968 
4969  !
4970  !================================================================================================================================
4971  !
4972 
4974  SUBROUTINE distributed_matrix_values_set_sp2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
4976  !Argument variables
4977  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
4978  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
4979  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
4980  REAL(SP), INTENT(IN) :: VALUES(:,:)
4981  INTEGER(INTG), INTENT(OUT) :: ERR
4982  TYPE(varying_string), INTENT(OUT) :: ERROR
4983  !Local Variables
4984  TYPE(varying_string) :: LOCAL_ERROR
4985 
4986  enters("DISTRIBUTED_MATRIX_VALUES_SET_SP2",err,error,*999)
4987 
4988  IF(ASSOCIATED(distributed_matrix)) THEN
4989  IF(distributed_matrix%MATRIX_FINISHED) THEN
4990  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
4991  CASE(distributed_matrix_vector_cmiss_type)
4992  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
4993  CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
4994  ELSE
4995  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
4996  ENDIF
4997  CASE(distributed_matrix_vector_petsc_type)
4998  CALL flagerror("Cannot set values for a single precision PETSc distributed matrix.",err,error,*999)
4999  CASE DEFAULT
5000  local_error="The distributed matrix library type of "// &
5001  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
5002  CALL flagerror(local_error,err,error,*999)
5003  END SELECT
5004  ELSE
5005  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
5006  ENDIF
5007  ELSE
5008  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
5009  ENDIF
5010 
5011  exits("DISTRIBUTED_MATRIX_VALUES_SET_SP2")
5012  RETURN
5013 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_SET_SP2",err,error)
5014  RETURN 1
5015  END SUBROUTINE distributed_matrix_values_set_sp2
5016 
5017  !
5018  !================================================================================================================================
5019  !
5020 
5022  SUBROUTINE distributed_matrix_values_set_dp(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
5024  !Argument variables
5025  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
5026  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
5027  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
5028  REAL(DP), INTENT(IN) :: VALUES(:)
5029  INTEGER(INTG), INTENT(OUT) :: ERR
5030  TYPE(varying_string), INTENT(OUT) :: ERROR
5031  !Local Variables
5032  INTEGER(INTG) :: i
5033  TYPE(varying_string) :: LOCAL_ERROR
5034 
5035  enters("DISTRIBUTED_MATRIX_VALUES_SET_DP",err,error,*999)
5036 
5037  IF(ASSOCIATED(distributed_matrix)) THEN
5038  IF(distributed_matrix%MATRIX_FINISHED) THEN
5039  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
5040  CASE(distributed_matrix_vector_cmiss_type)
5041  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
5042  CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
5043  ELSE
5044  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
5045  ENDIF
5046  CASE(distributed_matrix_vector_petsc_type)
5047  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
5048  IF(SIZE(row_indices,1)==SIZE(values,1)) THEN
5049  IF(SIZE(column_indices,1)==SIZE(values,1)) THEN
5050  IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
5051  DO i=1,SIZE(row_indices,1)
5052  CALL petsc_matsetvalues(distributed_matrix%PETSC%OVERRIDE_MATRIX,1,distributed_matrix%PETSC% &
5053  & global_row_numbers(row_indices(i:i)),1,column_indices(i:i)-1,values(i:i),petsc_insert_values, &
5054  & err,error,*999) !0 based indices
5055  ENDDO !i
5056  ELSE
5057  DO i=1,SIZE(row_indices,1)
5058  CALL petsc_matsetvalues(distributed_matrix%PETSC%MATRIX,1,distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS( &
5059  & row_indices(i:i)),1,column_indices(i:i)-1,values(i:i),petsc_insert_values,err,error,*999) !0 based indices
5060  ENDDO !i
5061  ENDIF
5062  ELSE
5063  local_error="The size of the column indices array ("// &
5064  & trim(numbertovstring(SIZE(column_indices,1),"*",err,error))// &
5065  & ") does not conform to the size of the values array ("// &
5066  & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
5067  CALL flagerror(local_error,err,error,*999)
5068  ENDIF
5069  ELSE
5070  local_error="The size of the row indices array ("// &
5071  & trim(numbertovstring(SIZE(row_indices,1),"*",err,error))// &
5072  & ") does not conform to the size of the values array ("// &
5073  & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
5074  CALL flagerror(local_error,err,error,*999)
5075  ENDIF
5076  ELSE
5077  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
5078  ENDIF
5079  CASE DEFAULT
5080  local_error="The distributed matrix library type of "// &
5081  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
5082  CALL flagerror(local_error,err,error,*999)
5083  END SELECT
5084  ELSE
5085  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
5086  ENDIF
5087  ELSE
5088  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
5089  ENDIF
5090 
5091  exits("DISTRIBUTED_MATRIX_VALUES_SET_DP")
5092  RETURN
5093 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_SET_DP",err,error)
5094  RETURN 1
5095  END SUBROUTINE distributed_matrix_values_set_dp
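  !
  ! Editorial note: a hedged sketch of the element-wise semantics of the rank-1 setter
  ! above (not part of the source file). VALUES(i) is inserted at position
  ! (ROW_INDICES(i),COLUMN_INDICES(i)), one petsc_matsetvalues call per entry on the
  ! PETSc path. Names are hypothetical.
  !
  !   !Set the first three diagonal entries: A(1,1)=1.0, A(2,2)=2.0, A(3,3)=3.0
  !   CALL distributed_matrix_values_set_dp(aMatrix,(/1,2,3/),(/1,2,3/), &
  !     & (/1.0_DP,2.0_DP,3.0_DP/),err,error,*999)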
5096 
5097  !
5098  !================================================================================================================================
5099  !
5100 
5102  SUBROUTINE distributed_matrix_values_set_dp1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
5104  !Argument variables
5105  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
5106  INTEGER(INTG), INTENT(IN) :: ROW_INDEX
5107  INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
5108  REAL(DP), INTENT(IN) :: VALUE
5109  INTEGER(INTG), INTENT(OUT) :: ERR
5110  TYPE(varying_string), INTENT(OUT) :: ERROR
5111  !Local Variables
5112  TYPE(varying_string) :: LOCAL_ERROR
5113 
5114  enters("DISTRIBUTED_MATRIX_VALUES_SET_DP1",err,error,*999)
5115 
5116  IF(ASSOCIATED(distributed_matrix)) THEN
5117  IF(distributed_matrix%MATRIX_FINISHED) THEN
5118  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
5119  CASE(distributed_matrix_vector_cmiss_type)
5120  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
5121  CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
5122  ELSE
5123  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
5124  ENDIF
5125  CASE(distributed_matrix_vector_petsc_type)
5126  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
5127  IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
5128  CALL petsc_matsetvalues(distributed_matrix%PETSC%OVERRIDE_MATRIX,1,distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS( &
5129  & row_index),1,(/column_index-1/),(/VALUE/),petsc_insert_values,err,error,*999) !PETSc uses 0 based indices
5130  ELSE
5131  CALL petsc_matsetvalues(distributed_matrix%PETSC%MATRIX,1,distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS(row_index), &
5132  & 1,(/column_index-1/),(/VALUE/),petsc_insert_values,err,error,*999) !PETSc uses 0 based indices
5133  ENDIF
5134  ELSE
5135  CALL flagerror("Distributed matrix PETSc is not associated.",err,error,*999)
5136  ENDIF
5137  CASE DEFAULT
5138  local_error="The distributed matrix library type of "// &
5139  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
5140  CALL flagerror(local_error,err,error,*999)
5141  END SELECT
5142  ELSE
5143  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
5144  ENDIF
5145  ELSE
5146  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
5147  ENDIF
5148 
5149  exits("DISTRIBUTED_MATRIX_VALUES_SET_DP1")
5150  RETURN
5151 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_SET_DP1",err,error)
5152  RETURN 1
5153  END SUBROUTINE distributed_matrix_values_set_dp1
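  !
  ! Editorial note (not part of the source file): a minimal sketch for the single-value
  ! setter above; with the PETSc library type the value is inserted (petsc_insert_values)
  ! rather than accumulated. Names are hypothetical.
  !
  !   CALL distributed_matrix_values_set_dp1(aMatrix,rowNumber,columnNumber,0.0_DP,err,error,*999)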
5154 
5155  !
5156  !================================================================================================================================
5157  !
5158 
5160  SUBROUTINE distributed_matrix_values_set_dp2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
5162  !Argument variables
5163  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
5164  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
5165  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
5166  REAL(DP), INTENT(IN) :: VALUES(:,:)
5167  INTEGER(INTG), INTENT(OUT) :: ERR
5168  TYPE(varying_string), INTENT(OUT) :: ERROR
5169  !Local Variables
5170  INTEGER(INTG) :: GLOBAL_ROW_INDICES(size(row_indices,1)),i
5171  TYPE(varying_string) :: LOCAL_ERROR
5172 
5173  enters("DISTRIBUTED_MATRIX_VALUES_SET_DP2",err,error,*999)
5174 
5175  IF(ASSOCIATED(distributed_matrix)) THEN
5176  IF(distributed_matrix%MATRIX_FINISHED) THEN
5177  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
5178  CASE(distributed_matrix_vector_cmiss_type)
5179  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
5180  CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
5181  ELSE
5182  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
5183  ENDIF
5184  CASE(distributed_matrix_vector_petsc_type)
5185  IF(ASSOCIATED(distributed_matrix%PETSC)) THEN
5186  IF(SIZE(row_indices,1)==SIZE(values,1)) THEN
5187  IF(SIZE(column_indices,1)==SIZE(values,2)) THEN
5188  DO i=1,SIZE(row_indices,1)
5189  global_row_indices(i)=distributed_matrix%PETSC%GLOBAL_ROW_NUMBERS(row_indices(i))
5190  ENDDO !i
5191  IF(distributed_matrix%PETSC%USE_OVERRIDE_MATRIX) THEN
5192  CALL petsc_matsetvalues(distributed_matrix%PETSC%OVERRIDE_MATRIX,SIZE(row_indices,1),global_row_indices, &
5193  & SIZE(column_indices,1),column_indices-1,values,petsc_insert_values,err,error,*999) !PETSc uses 0 based indices
5194  ELSE
5195  CALL petsc_matsetvalues(distributed_matrix%PETSC%MATRIX,SIZE(row_indices,1),global_row_indices, &
5196  & SIZE(column_indices,1),column_indices-1,values,petsc_insert_values,err,error,*999) !PETSc uses 0 based indices
5197  ENDIF
5198  ELSE
5199  local_error="The size of the column indices array ("// &
5200  & trim(numbertovstring(SIZE(column_indices,1),"*",err,error))// &
5201  & ") does not conform to the number of columns in the values array ("// &
5202  & trim(numbertovstring(SIZE(values,2),"*",err,error))//")."
5203  CALL flagerror(local_error,err,error,*999)
5204  ENDIF
5205  ELSE
5206  local_error="The size of the row indices array ("// &
5207  & trim(numbertovstring(SIZE(row_indices,1),"*",err,error))// &
5208  & ") does not conform to the number of rows in the values array ("// &
5209  & trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
5210  CALL flagerror(local_error,err,error,*999)
5211  ENDIF
5212  ELSE
5213  CALL flagerror("The distributed matrix PETSc is not associated.",err,error,*999)
5214  ENDIF
5215  CASE DEFAULT
5216  local_error="The distributed matrix library type of "// &
5217  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
5218  CALL flagerror(local_error,err,error,*999)
5219  END SELECT
5220  ELSE
5221  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
5222  ENDIF
5223  ELSE
5224  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
5225  ENDIF
5226 
5227  exits("DISTRIBUTED_MATRIX_VALUES_SET_DP2")
5228  RETURN
5229 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_SET_DP2",err,error)
5230  RETURN 1
5231  END SUBROUTINE distributed_matrix_values_set_dp2
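  !
  ! Editorial note (not part of the source file): the rank-2 setter above writes a dense
  ! SIZE(ROW_INDICES,1) by SIZE(COLUMN_INDICES,1) block in a single call, which suits
  ! writing an element matrix into the global matrix; note that values are inserted, not
  ! accumulated. Names below are hypothetical.
  !
  !   REAL(DP) :: elementMatrix(2,2)
  !   elementMatrix=RESHAPE((/1.0_DP,-1.0_DP,-1.0_DP,1.0_DP/),(/2,2/))
  !   CALL distributed_matrix_values_set_dp2(aMatrix,elementRows,elementColumns, &
  !     & elementMatrix,err,error,*999)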
5232 
5233  !
5234  !================================================================================================================================
5235  !
5236 
5238  SUBROUTINE distributed_matrix_values_set_l(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
5240  !Argument variables
5241  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
5242  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
5243  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
5244  LOGICAL, INTENT(IN) :: VALUES(:)
5245  INTEGER(INTG), INTENT(OUT) :: ERR
5246  TYPE(varying_string), INTENT(OUT) :: ERROR
5247  !Local Variables
5248  TYPE(varying_string) :: LOCAL_ERROR
5249 
5250  enters("DISTRIBUTED_MATRIX_VALUES_SET_L",err,error,*999)
5251 
5252  IF(ASSOCIATED(distributed_matrix)) THEN
5253  IF(distributed_matrix%MATRIX_FINISHED) THEN
5254  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
5255  CASE(distributed_matrix_vector_cmiss_type)
5256  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
5257  CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
5258  ELSE
5259  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
5260  ENDIF
5261  CASE(distributed_matrix_vector_petsc_type)
5262  CALL flagerror("Cannot set values for a logical PETSc distributed matrix.",err,error,*999)
5263  CASE DEFAULT
5264  local_error="The distributed matrix library type of "// &
5265  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
5266  CALL flagerror(local_error,err,error,*999)
5267  END SELECT
5268  ELSE
5269  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
5270  ENDIF
5271  ELSE
5272  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
5273  ENDIF
5274 
5275  exits("DISTRIBUTED_MATRIX_VALUES_SET_L")
5276  RETURN
5277 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_SET_L",err,error)
5278  RETURN 1
5279  END SUBROUTINE distributed_matrix_values_set_l
5280 
5281  !
5282  !================================================================================================================================
5283  !
5284 
5286  SUBROUTINE distributed_matrix_values_set_l1(DISTRIBUTED_MATRIX,ROW_INDEX,COLUMN_INDEX,VALUE,ERR,ERROR,*)
5288  !Argument variables
5289  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
5290  INTEGER(INTG), INTENT(IN) :: ROW_INDEX
5291  INTEGER(INTG), INTENT(IN) :: COLUMN_INDEX
5292  LOGICAL, INTENT(IN) :: VALUE
5293  INTEGER(INTG), INTENT(OUT) :: ERR
5294  TYPE(varying_string), INTENT(OUT) :: ERROR
5295  !Local Variables
5296  TYPE(varying_string) :: LOCAL_ERROR
5297 
5298  enters("DISTRIBUTED_MATRIX_VALUES_SET_L1",err,error,*999)
5299 
5300  IF(ASSOCIATED(distributed_matrix)) THEN
5301  IF(distributed_matrix%MATRIX_FINISHED) THEN
5302  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
5303  CASE(distributed_matrix_vector_cmiss_type)
5304  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
5305  CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_index,column_index,VALUE,err,error,*999)
5306  ELSE
5307  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
5308  ENDIF
5309  CASE(distributed_matrix_vector_petsc_type)
5310  CALL flagerror("Cannot set values for a logical PETSc distributed matrix.",err,error,*999)
5311  CASE DEFAULT
5312  local_error="The distributed matrix library type of "// &
5313  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
5314  CALL flagerror(local_error,err,error,*999)
5315  END SELECT
5316  ELSE
5317  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
5318  ENDIF
5319  ELSE
5320  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
5321  ENDIF
5322 
5323  exits("DISTRIBUTED_MATRIX_VALUES_SET_L1")
5324  RETURN
5325 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_SET_L1",err,error)
5326  RETURN 1
5327  END SUBROUTINE distributed_matrix_values_set_l1
5328 
5329  !
5330  !================================================================================================================================
5331  !
5332 
5334  SUBROUTINE distributed_matrix_values_set_l2(DISTRIBUTED_MATRIX,ROW_INDICES,COLUMN_INDICES,VALUES,ERR,ERROR,*)
5336  !Argument variables
5337  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
5338  INTEGER(INTG), INTENT(IN) :: ROW_INDICES(:)
5339  INTEGER(INTG), INTENT(IN) :: COLUMN_INDICES(:)
5340  LOGICAL, INTENT(IN) :: VALUES(:,:)
5341  INTEGER(INTG), INTENT(OUT) :: ERR
5342  TYPE(varying_string), INTENT(OUT) :: ERROR
5343  !Local Variables
5344  TYPE(varying_string) :: LOCAL_ERROR
5345 
5346  enters("DISTRIBUTED_MATRIX_VALUES_SET_L2",err,error,*999)
5347 
5348  IF(ASSOCIATED(distributed_matrix)) THEN
5349  IF(distributed_matrix%MATRIX_FINISHED) THEN
5350  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
5351  CASE(distributed_matrix_vector_cmiss_type)
5352  IF(ASSOCIATED(distributed_matrix%CMISS)) THEN
5353  CALL matrix_values_set(distributed_matrix%CMISS%MATRIX,row_indices,column_indices,values,err,error,*999)
5354  ELSE
5355  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
5356  ENDIF
5357  CASE(distributed_matrix_vector_petsc_type)
5358  CALL flagerror("Cannot set values for a logical PETSc distributed matrix.",err,error,*999)
5359  CASE DEFAULT
5360  local_error="The distributed matrix library type of "// &
5361  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
5362  CALL flagerror(local_error,err,error,*999)
5363  END SELECT
5364  ELSE
5365  CALL flagerror("The distributed matrix has not been finished.",err,error,*999)
5366  ENDIF
5367  ELSE
5368  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
5369  ENDIF
5370 
5371  exits("DISTRIBUTED_MATRIX_VALUES_SET_L2")
5372  RETURN
5373 999 errorsexits("DISTRIBUTED_MATRIX_VALUES_SET_L2",err,error)
5374  RETURN 1
5375  END SUBROUTINE distributed_matrix_values_set_l2
5376 
5377  !
5378  !================================================================================================================================
5379  !
5380 
5385  SUBROUTINE distributed_matrix_by_vector_add(ROW_SELECTION_TYPE,ALPHA,DISTRIBUTED_MATRIX,DISTRIBUTED_VECTOR,DISTRIBUTED_PRODUCT, &
5386  & err,error,*)
5388  !Argument variables
5389  INTEGER(INTG), INTENT(IN) :: ROW_SELECTION_TYPE
5390  REAL(DP), INTENT(IN) :: ALPHA
5391  TYPE(distributed_matrix_type), POINTER :: DISTRIBUTED_MATRIX
5392  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
5393  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_PRODUCT
5394  INTEGER(INTG), INTENT(OUT) :: ERR
5395  TYPE(varying_string), INTENT(OUT) :: ERROR
5396  !Local Variables
5397  INTEGER(INTG) :: column_idx,local_column,global_column,NUMBER_OF_COLUMNS,NUMBER_OF_ROWS,row,row_idx
5398  REAL(DP) :: SUM
5399  TYPE(distributed_matrix_cmiss_type), POINTER :: CMISS_MATRIX
5400  TYPE(distributed_vector_cmiss_type), POINTER :: CMISS_VECTOR,CMISS_PRODUCT
5401  TYPE(domain_mapping_type), POINTER :: ROW_MAPPING,COLUMN_MAPPING
5402  TYPE(matrix_type), POINTER :: MATRIX
5403  TYPE(varying_string) :: LOCAL_ERROR
5404 
5405  enters("DISTRIBUTED_MATRIX_BY_VECTOR_ADD",err,error,*999)
5406 
5407  IF(abs(alpha)>zero_tolerance) THEN
5408  IF(ASSOCIATED(distributed_matrix)) THEN
5409  IF(distributed_matrix%MATRIX_FINISHED) THEN
5410  IF(ASSOCIATED(distributed_vector)) THEN
5411  IF(distributed_vector%VECTOR_FINISHED) THEN
5412  IF(ASSOCIATED(distributed_product)) THEN
5413  IF(distributed_product%VECTOR_FINISHED) THEN
5414  IF(distributed_matrix%LIBRARY_TYPE==distributed_vector%LIBRARY_TYPE) THEN
5415  IF(distributed_matrix%LIBRARY_TYPE==distributed_product%LIBRARY_TYPE) THEN
5416  column_mapping=>distributed_matrix%COLUMN_DOMAIN_MAPPING
5417  IF(ASSOCIATED(column_mapping)) THEN
5418  row_mapping=>distributed_matrix%ROW_DOMAIN_MAPPING
5419  IF(ASSOCIATED(row_mapping)) THEN
5420  IF(ASSOCIATED(column_mapping,distributed_vector%DOMAIN_MAPPING)) THEN
5421  IF(ASSOCIATED(row_mapping,distributed_product%DOMAIN_MAPPING)) THEN
5422  SELECT CASE(distributed_matrix%LIBRARY_TYPE)
5423  CASE(distributed_matrix_vector_cmiss_type)
5424  cmiss_matrix=>distributed_matrix%CMISS
5425  IF(ASSOCIATED(cmiss_matrix)) THEN
5426  matrix=>cmiss_matrix%MATRIX
5427  IF(ASSOCIATED(matrix)) THEN
5428  cmiss_vector=>distributed_vector%CMISS
5429  IF(ASSOCIATED(cmiss_vector)) THEN
5430  cmiss_product=>distributed_product%CMISS
5431  IF(ASSOCIATED(cmiss_product)) THEN
5432  SELECT CASE(row_selection_type)
5433  CASE(distributed_matrix_vector_include_ghosts_type)
5434  number_of_rows=row_mapping%TOTAL_NUMBER_OF_LOCAL
5435  CASE(distributed_matrix_vector_no_ghosts_type)
5436  number_of_rows=row_mapping%NUMBER_OF_LOCAL
5437  CASE DEFAULT
5438  local_error="The row selection type of "// &
5439  & trim(numbertovstring(row_selection_type,"*",err,error))//" is invalid."
5440  CALL flagerror(local_error,err,error,*999)
5441  END SELECT
5442  number_of_columns=column_mapping%NUMBER_OF_GLOBAL
5443  IF(matrix%DATA_TYPE==distributed_vector%DATA_TYPE) THEN
5444  IF(matrix%DATA_TYPE==distributed_product%DATA_TYPE) THEN
5445  SELECT CASE(matrix%DATA_TYPE)
5446  CASE(matrix_vector_intg_type)
5447  CALL flagerror("Not implemented.",err,error,*999)
5448  CASE(matrix_vector_sp_type)
5449  CALL flagerror("Not implemented.",err,error,*999)
5450  CASE(matrix_vector_dp_type)
5451  SELECT CASE(matrix%STORAGE_TYPE)
5452  CASE(matrix_block_storage_type)
5453  DO row=1,number_of_rows
5454  sum=0.0_dp
5455  DO local_column=1,column_mapping%TOTAL_NUMBER_OF_LOCAL
5456  global_column=column_mapping%LOCAL_TO_GLOBAL_MAP(local_column)
5457  sum=sum+matrix%DATA_DP(row+(global_column-1)*matrix%M)* &
5458  & cmiss_vector%DATA_DP(local_column)
5459  ENDDO !local_column
5460  cmiss_product%DATA_DP(row)=cmiss_product%DATA_DP(row)+(alpha*sum)
5461  ENDDO !row
5462  CASE(matrix_diagonal_storage_type)
5463  DO row=1,number_of_rows
5464  sum=matrix%DATA_DP(row)*cmiss_vector%DATA_DP(row)
5465  cmiss_product%DATA_DP(row)=cmiss_product%DATA_DP(row)+(alpha*sum)
5466  ENDDO !row
5467  CASE(matrix_column_major_storage_type)
5468  DO row=1,number_of_rows
5469  sum=0.0_dp
5470  DO local_column=1,column_mapping%TOTAL_NUMBER_OF_LOCAL
5471  global_column=column_mapping%LOCAL_TO_GLOBAL_MAP(local_column)
5472  sum=sum+matrix%DATA_DP(row+(global_column-1)*matrix%MAX_M)* &
5473  & cmiss_vector%DATA_DP(local_column)
5474  ENDDO !local_column
5475  cmiss_product%DATA_DP(row)=cmiss_product%DATA_DP(row)+(alpha*sum)
5476  ENDDO !row
5477  CASE(matrix_row_major_storage_type)
5478  DO row=1,number_of_rows
5479  sum=0.0_dp
5480  DO local_column=1,column_mapping%TOTAL_NUMBER_OF_LOCAL
5481  global_column=column_mapping%LOCAL_TO_GLOBAL_MAP(local_column)
5482  sum=sum+matrix%DATA_DP((row-1)*matrix%MAX_N+global_column)* &
5483  & cmiss_vector%DATA_DP(local_column)
5484  ENDDO !local_column
5485  cmiss_product%DATA_DP(row)=cmiss_product%DATA_DP(row)+(alpha*sum)
5486  ENDDO !row
5487  CASE(matrix_compressed_row_storage_type)
5488  DO row=1,number_of_rows
5489  sum=0.0_dp
5490  DO column_idx=matrix%ROW_INDICES(row),matrix%ROW_INDICES(row+1)-1
5491  global_column=matrix%COLUMN_INDICES(column_idx)
5492  !This rank's global to local mappings are stored in the first position
5493  local_column=column_mapping%GLOBAL_TO_LOCAL_MAP(global_column)%LOCAL_NUMBER(1)
5494  sum=sum+matrix%DATA_DP(column_idx)* &
5495  & cmiss_vector%DATA_DP(local_column)
5496  ENDDO !column_idx
5497  cmiss_product%DATA_DP(row)=cmiss_product%DATA_DP(row)+(alpha*sum)
5498  ENDDO !row
5499  CASE(matrix_compressed_column_storage_type)
5500  DO column_idx=1,number_of_columns
5501  DO row_idx=matrix%COLUMN_INDICES(column_idx),matrix%COLUMN_INDICES(column_idx+1)-1
5502  row=matrix%ROW_INDICES(row_idx)
5503  local_column=column_mapping%GLOBAL_TO_LOCAL_MAP(column_idx)%LOCAL_NUMBER(1)
5504  sum=matrix%DATA_DP(row_idx)* &
5505  & cmiss_vector%DATA_DP(local_column)
5506  cmiss_product%DATA_DP(row)=cmiss_product%DATA_DP(row)+(alpha*sum)
5507  ENDDO !row_idx
5508  ENDDO !column_idx
5509  CASE(matrix_row_column_storage_type)
5510  CALL flagerror("Not implemented.",err,error,*999)
5511  CASE DEFAULT
5512  local_error="The matrix storage type of "// &
5513  & trim(numbertovstring(matrix%STORAGE_TYPE,"*",err,error))//" is invalid."
5514 
5515  CALL flagerror(local_error,err,error,*999)
5516  END SELECT
5517  CASE(matrix_vector_l_type)
5518  CALL flagerror("Not implemented.",err,error,*999)
5519  CASE DEFAULT
5520  local_error="The distributed matrix vector data type of "// &
5521  & trim(numbertovstring(matrix%DATA_TYPE,"*",err,error))//" is invalid."
5522  CALL flagerror(local_error,err,error,*999)
5523  END SELECT
5524  ELSE
5525  local_error="The distributed product vector data type of "// &
5526  & trim(numbertovstring(distributed_product%DATA_TYPE,"*",err,error))// &
5527  & " does not match the distributed matrix data type of "// &
5528  & trim(numbertovstring(matrix%DATA_TYPE,"*",err,error))//"."
5529  CALL flagerror(local_error,err,error,*999)
5530  ENDIF
5531  ELSE
5532  local_error="The distributed vector data type of "// &
5533  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
5534  & " does not match the distributed matrix data type of "// &
5535  & trim(numbertovstring(matrix%DATA_TYPE,"*",err,error))//"."
5536  CALL flagerror(local_error,err,error,*999)
5537  ENDIF
5538  ELSE
5539  CALL flagerror("Distributed product CMISS vector is not associated.",err,error,*999)
5540  ENDIF
5541  ELSE
5542  CALL flagerror("Distributed vector CMISS vector is not associated.",err,error,*999)
5543  ENDIF
5544  ELSE
5545  CALL flagerror("Distributed matrix CMISS matrix is not associated.",err,error,*999)
5546  ENDIF
5547  ELSE
5548  CALL flagerror("Distributed matrix CMISS is not associated.",err,error,*999)
5549  ENDIF
5550  CASE(distributed_matrix_vector_petsc_type)
5551  CALL flagerror("Not implemented.",err,error,*999)
5552  CASE DEFAULT
5553  local_error="The distributed matrix library type of "// &
5554  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//" is invalid."
5555  CALL flagerror(local_error,err,error,*999)
5556  END SELECT
5557  ELSE
5558  CALL flagerror("The distributed matrix and the distributed product vector have different "// &
5559  & "domain mappings.",err,error,*999)
5560  ENDIF
5561  ELSE
5562  CALL flagerror("The distributed matrix and the distributed vector have different domain mappings.", &
5563  & err,error,*999)
5564  ENDIF
5565  ELSE
5566  CALL flagerror("The distributed matrix row domain mapping is not associated.",err,error,*999)
5567  ENDIF
5568  ELSE
5569  CALL flagerror("The distributed matrix column domain mapping is not associated.",err,error,*999)
5570  ENDIF
5571  ELSE
5572  local_error="The distributed product vector library type of "// &
5573  & trim(numbertovstring(distributed_product%LIBRARY_TYPE,"*",err,error))// &
5574  & " does not match the distributed matrix library type of "// &
5575  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//"."
5576  CALL flagerror(local_error,err,error,*999)
5577  ENDIF
5578  ELSE
5579  local_error="The distributed vector library type of "// &
5580  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))// &
5581  & " does not match the distributed matrix library type of "// &
5582  & trim(numbertovstring(distributed_matrix%LIBRARY_TYPE,"*",err,error))//"."
5583  CALL flagerror(local_error,err,error,*999)
5584  ENDIF
5585  ELSE
5586  CALL flagerror("The distributed product vector has not been finished.",err,error,*999)
5587  ENDIF
5588  ELSE
5589  CALL flagerror("The distributed product vector is not associated.",err,error,*999)
5590  ENDIF
5591  ELSE
5592  CALL flagerror("Distributed vector has not been finished.",err,error,*999)
5593  ENDIF
5594  ELSE
5595  CALL flagerror("Distributed vector is not associated.",err,error,*999)
5596  ENDIF
5597  ELSE
5598  CALL flagerror("Distributed matrix has not been finished.",err,error,*999)
5599  ENDIF
5600  ELSE
5601  CALL flagerror("Distributed matrix is not associated.",err,error,*999)
5602  ENDIF
5603  ENDIF
5604  exits("DISTRIBUTED_MATRIX_BY_VECTOR_ADD")
5605  RETURN
5606 999 errorsexits("DISTRIBUTED_MATRIX_BY_VECTOR_ADD",err,error)
5607  RETURN 1
5608  END SUBROUTINE distributed_matrix_by_vector_add
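  !
  ! Editorial note (not part of the source file): the routine above accumulates
  ! DISTRIBUTED_PRODUCT = DISTRIBUTED_PRODUCT + ALPHA*DISTRIBUTED_MATRIX*DISTRIBUTED_VECTOR.
  ! It is a no-op when |ALPHA| is below zero_tolerance, and only the CMISS double precision
  ! path is implemented. A hedged sketch with hypothetical names:
  !
  !   !y = y + A*x over the local (non-ghost) rows
  !   CALL distributed_matrix_by_vector_add(distributed_matrix_vector_no_ghosts_type, &
  !     & 1.0_DP,aMatrix,xVector,yVector,err,error,*999)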
5609 
5610  !
5611  !================================================================================================================================
5612  !
5613 
5615  SUBROUTINE distributed_vector_all_values_set_intg(DISTRIBUTED_VECTOR,VALUE,ERR,ERROR,*)
5617  !Argument variables
5618  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
5619  INTEGER(INTG), INTENT(IN) :: VALUE
5620  INTEGER(INTG), INTENT(OUT) :: ERR
5621  TYPE(varying_string), INTENT(OUT) :: ERROR
5622  !Local variables
5623  TYPE(varying_string) :: LOCAL_ERROR
5624 
5625  enters("DISTRIBUTED_VECTOR_ALL_VALUES_SET_INTG",err,error,*999)
5626 
5627  IF(ASSOCIATED(distributed_vector)) THEN
5628  IF(distributed_vector%VECTOR_FINISHED) THEN
5629  IF(distributed_vector%DATA_TYPE==matrix_vector_intg_type) THEN
5630  SELECT CASE(distributed_vector%LIBRARY_TYPE)
5631  CASE(distributed_matrix_vector_cmiss_type)
5632  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
5633  distributed_vector%CMISS%DATA_INTG=VALUE
5634  ELSE
5635  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
5636  ENDIF
5637  CASE(distributed_matrix_vector_petsc_type)
5638  CALL flagerror("Cannot set values for an integer PETSc distributed vector.",err,error,*999)
5639  CASE DEFAULT
5640  local_error="The distributed vector library type of "// &
5641  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
5642  CALL flagerror(local_error,err,error,*999)
5643  END SELECT
5644  ELSE
5645  local_error="The data type of "//trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
5646  & " does not correspond to the integer data type of the given value."
5647  CALL flagerror(local_error,err,error,*999)
5648  ENDIF
5649  ELSE
5650  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
5651  ENDIF
5652  ELSE
5653  CALL flagerror("Distributed vector is not associated.",err,error,*999)
5654  ENDIF
5655 
5656  exits("DISTRIBUTED_VECTOR_ALL_VALUES_SET_INTG")
5657  RETURN
5658 999 errorsexits("DISTRIBUTED_VECTOR_ALL_VALUES_SET_INTG",err,error)
5659  RETURN 1
5660  END SUBROUTINE distributed_vector_all_values_set_intg
5661 
5662  !
5663  !================================================================================================================================
5664  !
5665 
5667  SUBROUTINE distributed_vector_all_values_set_sp(DISTRIBUTED_VECTOR,VALUE,ERR,ERROR,*)
5669  !Argument variables
5670  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
5671  REAL(SP), INTENT(IN) :: VALUE
5672  INTEGER(INTG), INTENT(OUT) :: ERR
5673  TYPE(varying_string), INTENT(OUT) :: ERROR
5674  !Local variables
5675  TYPE(varying_string) :: LOCAL_ERROR
5676 
5677  enters("DISTRIBUTED_VECTOR_ALL_VALUES_SET_SP",err,error,*999)
5678 
5679  IF(ASSOCIATED(distributed_vector)) THEN
5680  IF(distributed_vector%VECTOR_FINISHED) THEN
5681  IF(distributed_vector%DATA_TYPE==matrix_vector_sp_type) THEN
5682  SELECT CASE(distributed_vector%LIBRARY_TYPE)
5683  CASE(distributed_matrix_vector_cmiss_type)
5684  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
5685  distributed_vector%CMISS%DATA_SP=VALUE
5686  ELSE
5687  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
5688  ENDIF
5689  CASE(distributed_matrix_vector_petsc_type)
5690  CALL flagerror("Cannot set values for a single precision PETSc distributed vector.",err,error,*999)
5691  CASE DEFAULT
5692  local_error="The distributed vector library type of "// &
5693  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
5694  CALL flagerror(local_error,err,error,*999)
5695  END SELECT
5696  ELSE
5697  local_error="The data type of "//trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
5698  & " does not correspond to the single precision data type of the given value."
5699  CALL flagerror(local_error,err,error,*999)
5700  ENDIF
5701  ELSE
5702  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
5703  ENDIF
5704  ELSE
5705  CALL flagerror("Distributed vector is not associated.",err,error,*999)
5706  ENDIF
5707 
5708  exits("DISTRIBUTED_VECTOR_ALL_VALUES_SET_SP")
5709  RETURN
5710 999 errorsexits("DISTRIBUTED_VECTOR_ALL_VALUES_SET_SP",err,error)
5711  RETURN 1
5712  END SUBROUTINE distributed_vector_all_values_set_sp
5713 
5714  !
5715  !================================================================================================================================
5716  !
5717 
5719  SUBROUTINE distributed_vector_all_values_set_dp(DISTRIBUTED_VECTOR,VALUE,ERR,ERROR,*)
5721  !Argument variables
5722  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
5723  REAL(DP), INTENT(IN) :: VALUE
5724  INTEGER(INTG), INTENT(OUT) :: ERR
5725  TYPE(varying_string), INTENT(OUT) :: ERROR
5726  !Local variables
5727  TYPE(varying_string) :: LOCAL_ERROR
5728 
5729  enters("DISTRIBUTED_VECTOR_ALL_VALUES_SET_DP",err,error,*999)
5730 
5731  IF(ASSOCIATED(distributed_vector)) THEN
5732  IF(distributed_vector%VECTOR_FINISHED) THEN
5733  IF(distributed_vector%DATA_TYPE==matrix_vector_dp_type) THEN
5734  SELECT CASE(distributed_vector%LIBRARY_TYPE)
5735  CASE(distributed_matrix_vector_cmiss_type)
5736  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
5737  distributed_vector%CMISS%DATA_DP=VALUE
5738  ELSE
5739  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
5740  ENDIF
5741  CASE(distributed_matrix_vector_petsc_type)
5742  IF(ASSOCIATED(distributed_vector%PETSC)) THEN
5743  IF(distributed_vector%PETSC%USE_OVERRIDE_VECTOR) THEN
5744  CALL petsc_vecset(distributed_vector%PETSC%OVERRIDE_VECTOR,VALUE,err,error,*999)
5745  ELSE
5746  CALL petsc_vecset(distributed_vector%PETSC%VECTOR,VALUE,err,error,*999)
5747  ENDIF
5748  ELSE
5749  CALL flagerror("Distributed vector PETSc is not associated.",err,error,*999)
5750  ENDIF
5751  CASE DEFAULT
5752  local_error="The distributed vector library type of "// &
5753  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
5754  CALL flagerror(local_error,err,error,*999)
5755  END SELECT
5756  ELSE
5757  local_error="The data type of "//trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
5758  & " does not correspond to the double precision data type of the given value."
5759  CALL flagerror(local_error,err,error,*999)
5760  ENDIF
5761  ELSE
5762  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
5763  ENDIF
5764  ELSE
5765  CALL flagerror("Distributed vector is not associated.",err,error,*999)
5766  ENDIF
5767 
5768  exits("DISTRIBUTED_VECTOR_ALL_VALUES_SET_DP")
5769  RETURN
5770 999 errorsexits("DISTRIBUTED_VECTOR_ALL_VALUES_SET_DP",err,error)
5771  RETURN 1
5772  END SUBROUTINE distributed_vector_all_values_set_dp
5773 
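  !
  ! Editorial note (not part of the source file): the double precision all-values setter is
  ! the only all-values variant with a PETSc implementation (via petsc_vecset); a common use
  ! is zeroing a vector before assembly. The vector name is hypothetical.
  !
  !   CALL distributed_vector_all_values_set_dp(rhsVector,0.0_DP,err,error,*999)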
5774  !
5775  !================================================================================================================================
5776  !
5777 
5779  SUBROUTINE distributed_vector_all_values_set_l(DISTRIBUTED_VECTOR,VALUE,ERR,ERROR,*)
5781  !Argument variables
5782  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
5783  LOGICAL, INTENT(IN) :: VALUE
5784  INTEGER(INTG), INTENT(OUT) :: ERR
5785  TYPE(varying_string), INTENT(OUT) :: ERROR
5786  !Local variables
5787  TYPE(varying_string) :: LOCAL_ERROR
5788 
5789  enters("DISTRIBUTED_VECTOR_ALL_VALUES_SET_L",err,error,*999)
5790 
5791  IF(ASSOCIATED(distributed_vector)) THEN
5792  IF(distributed_vector%VECTOR_FINISHED) THEN
5793  IF(distributed_vector%DATA_TYPE==matrix_vector_l_type) THEN
5794  SELECT CASE(distributed_vector%LIBRARY_TYPE)
5795  CASE(distributed_matrix_vector_cmiss_type)
5796  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
5797  distributed_vector%CMISS%DATA_L=VALUE
5798  ELSE
5799  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
5800  ENDIF
5801  CASE(distributed_matrix_vector_petsc_type)
5802  CALL flagerror("Cannot set values for a logical PETSc distributed vector.",err,error,*999)
5803  CASE DEFAULT
5804  local_error="The distributed vector library type of "// &
5805  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
5806  CALL flagerror(local_error,err,error,*999)
5807  END SELECT
5808  ELSE
5809  local_error="The data type of "//trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
5810  & " does not correspond to the logical data type of the given value."
5811  CALL flagerror(local_error,err,error,*999)
5812  ENDIF
5813  ELSE
5814  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
5815  ENDIF
5816  ELSE
5817  CALL flagerror("Distributed vector is not associated.",err,error,*999)
5818  ENDIF
5819 
5820  exits("DISTRIBUTED_VECTOR_ALL_VALUES_SET_L")
5821  RETURN
5822 999 errorsexits("DISTRIBUTED_VECTOR_ALL_VALUES_SET_L",err,error)
5823  RETURN 1
5824  END SUBROUTINE distributed_vector_all_values_set_l
5825 
5826  !
5827  !================================================================================================================================
5828  !
5829 
5831  SUBROUTINE distributed_vector_copy_intg(FROM_VECTOR,TO_VECTOR,ALPHA,ERR,ERROR,*)
5833  !Argument variables
5834  TYPE(distributed_vector_type), POINTER :: FROM_VECTOR
5835  TYPE(distributed_vector_type), POINTER :: TO_VECTOR
5836  INTEGER(INTG), INTENT(IN) :: ALPHA
5837  INTEGER(INTG), INTENT(OUT) :: ERR
5838  TYPE(varying_string), INTENT(OUT) :: ERROR
5839  !Local Variables
5840  TYPE(varying_string) :: LOCAL_ERROR
5841 
5842  enters("DISTRIBUTED_VECTOR_COPY_INTG",err,error,*999)
5843 
5844  IF(ASSOCIATED(from_vector)) THEN
5845  IF(from_vector%VECTOR_FINISHED) THEN
5846  IF(ASSOCIATED(to_vector)) THEN
5847  IF(to_vector%VECTOR_FINISHED) THEN
5848  IF(from_vector%DATA_TYPE==to_vector%DATA_TYPE) THEN
5849  IF(from_vector%DATA_TYPE==distributed_matrix_vector_intg_type) THEN
5850  IF(from_vector%LIBRARY_TYPE==to_vector%LIBRARY_TYPE) THEN
5851  !Vectors are of the same library type
5852  SELECT CASE(from_vector%LIBRARY_TYPE)
5853  CASE(distributed_matrix_vector_cmiss_type)
5854  IF(ASSOCIATED(from_vector%CMISS)) THEN
5855  IF(ASSOCIATED(to_vector%CMISS)) THEN
5856  IF(ASSOCIATED(from_vector%DOMAIN_MAPPING,to_vector%DOMAIN_MAPPING)) THEN
5857  to_vector%CMISS%DATA_INTG(1:to_vector%CMISS%N)=alpha*from_vector%CMISS%DATA_INTG(1:from_vector%CMISS%N)
5858  ELSE
5859  CALL flagerror("The from vector does not have the same domain mapping as the to vector.",err,error,*999)
5860  ENDIF
5861  ELSE
5862  CALL flagerror("To vector CMISS is not associated.",err,error,*999)
5863  ENDIF
5864  ELSE
5865  CALL flagerror("From vector CMISS is not associated.",err,error,*999)
5866  ENDIF
5867  CASE(distributed_matrix_vector_petsc_type)
5868  CALL flagerror("Cannot copy a vector from an integer PETSc distributed vector.",err,error,*999)
5869  CASE DEFAULT
5870  local_error="The from vector library type of "// &
5871  & trim(numbertovstring(from_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
5872  CALL flagerror(local_error,err,error,*999)
5873  END SELECT
5874  ELSE
5875  !Vectors are of from different library types
5876  CALL flagerror("Not implemented.",err,error,*999)
5877  ENDIF
5878  ELSE
5879  local_error="The from vector data type of "//trim(numbertovstring(from_vector%DATA_TYPE,"*",err,error))// &
5880  & " does not match the integer data type of the supplied alpha value."
5881  CALL flagerror(local_error,err,error,*999)
5882  ENDIF
5883  ELSE
5884  local_error="The from vector data type of "// &
5885  & trim(numbertovstring(from_vector%DATA_TYPE,"*",err,error))// &
5886  & " does not match the to vector data type of "// &
5887  & trim(numbertovstring(to_vector%DATA_TYPE,"*",err,error))//"."
5888  CALL flagerror(local_error,err,error,*999)
5889  ENDIF
5890  ELSE
5891  CALL flagerror("To vector has not been finished.",err,error,*999)
5892  ENDIF
5893  ELSE
5894  CALL flagerror("To vector is not associated.",err,error,*999)
5895  ENDIF
5896  ELSE
5897  CALL flagerror("From vector has not been finished.",err,error,*999)
5898  ENDIF
5899  ELSE
5900  CALL flagerror("From vector is not associated.",err,error,*999)
5901  ENDIF
5902 
5903  exits("DISTRIBUTED_VECTOR_COPY_INTG")
5904  RETURN
5905 999 errorsexits("DISTRIBUTED_VECTOR_COPY_INTG",err,error)
5906  RETURN 1
5907 
5908  END SUBROUTINE distributed_vector_copy_intg
5909 
5910  !
5911  !================================================================================================================================
5912  !
5913 
5915  SUBROUTINE distributed_vector_copy_dp(FROM_VECTOR,TO_VECTOR,ALPHA,ERR,ERROR,*)
5917  !Argument variables
5918  TYPE(distributed_vector_type), POINTER :: FROM_VECTOR
5919  TYPE(distributed_vector_type), POINTER :: TO_VECTOR
5920  REAL(DP), INTENT(IN) :: ALPHA
5921  INTEGER(INTG), INTENT(OUT) :: ERR
5922  TYPE(varying_string), INTENT(OUT) :: ERROR
5923  !Local Variables
5924  TYPE(varying_string) :: LOCAL_ERROR
5925 
5926  enters("DISTRIBUTED_VECTOR_COPY_DP",err,error,*999)
5927 
5928  IF(ASSOCIATED(from_vector)) THEN
5929  IF(from_vector%VECTOR_FINISHED) THEN
5930  IF(ASSOCIATED(to_vector)) THEN
5931  IF(to_vector%VECTOR_FINISHED) THEN
5932  IF(from_vector%DATA_TYPE==to_vector%DATA_TYPE) THEN
5933  IF(from_vector%DATA_TYPE==distributed_matrix_vector_dp_type) THEN
5934  IF(from_vector%LIBRARY_TYPE==to_vector%LIBRARY_TYPE) THEN
5935  !Vectors are of the same library type
5936  SELECT CASE(from_vector%LIBRARY_TYPE)
5937  CASE(distributed_matrix_vector_cmiss_type)
5938  IF(ASSOCIATED(from_vector%CMISS)) THEN
5939  IF(ASSOCIATED(to_vector%CMISS)) THEN
5940  IF(ASSOCIATED(from_vector%DOMAIN_MAPPING,to_vector%DOMAIN_MAPPING)) THEN
5941  to_vector%CMISS%DATA_DP(1:to_vector%CMISS%N)=alpha*from_vector%CMISS%DATA_DP(1:from_vector%CMISS%N)
5942  ELSE
5943  CALL flagerror("The from vector does not have the same domain mapping as the to vector.",err,error,*999)
5944  ENDIF
5945  ELSE
5946  CALL flagerror("To vector CMISS is not associated.",err,error,*999)
5947  ENDIF
5948  ELSE
5949  CALL flagerror("From vector CMISS is not associated.",err,error,*999)
5950  ENDIF
5951  CASE(distributed_matrix_vector_petsc_type)
5952  IF(ASSOCIATED(from_vector%PETSC)) THEN
5953  IF(ASSOCIATED(to_vector%PETSC)) THEN
5954  IF(from_vector%PETSC%USE_OVERRIDE_VECTOR) THEN
5955  IF(to_vector%PETSC%USE_OVERRIDE_VECTOR) THEN
5956  CALL petsc_veccopy(from_vector%PETSC%OVERRIDE_VECTOR,to_vector%PETSC%OVERRIDE_VECTOR,err,error,*999)
5957  CALL petsc_vecscale(to_vector%PETSC%OVERRIDE_VECTOR,alpha,err,error,*999)
5958  ELSE
5959  CALL petsc_veccopy(from_vector%PETSC%OVERRIDE_VECTOR,to_vector%PETSC%VECTOR,err,error,*999)
5960  CALL petsc_vecscale(to_vector%PETSC%VECTOR,alpha,err,error,*999)
5961  ENDIF
5962  ELSE
5963  IF(to_vector%PETSC%USE_OVERRIDE_VECTOR) THEN
5964  CALL petsc_veccopy(from_vector%PETSC%VECTOR,to_vector%PETSC%OVERRIDE_VECTOR,err,error,*999)
5965  CALL petsc_vecscale(to_vector%PETSC%OVERRIDE_VECTOR,alpha,err,error,*999)
5966  ELSE
5967  CALL petsc_veccopy(from_vector%PETSC%VECTOR,to_vector%PETSC%VECTOR,err,error,*999)
5968  CALL petsc_vecscale(to_vector%PETSC%VECTOR,alpha,err,error,*999)
5969  ENDIF
5970  ENDIF
5971  ELSE
5972  CALL flagerror("To vector PETSc is not associated.",err,error,*999)
5973  ENDIF
5974  ELSE
5975  CALL flagerror("From vector PETSc is not associated.",err,error,*999)
5976  ENDIF
5977  CASE DEFAULT
5978  local_error="The from vector library type of "// &
5979  & trim(numbertovstring(from_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
5980  CALL flagerror(local_error,err,error,*999)
5981  END SELECT
5982  ELSE
5983  !Vectors are of different library types
5984  CALL flagerror("Not implemented.",err,error,*999)
5985  ENDIF
5986  ELSE
5987  local_error="The from vector data type of "//trim(numbertovstring(from_vector%DATA_TYPE,"*",err,error))// &
5988  & " does not match the double precision data type of the supplied alpha value."
5989  CALL flagerror(local_error,err,error,*999)
5990  ENDIF
5991  ELSE
5992  local_error="The from vector data type of "// &
5993  & trim(numbertovstring(from_vector%DATA_TYPE,"*",err,error))// &
5994  & " does not match the to vector data type of "// &
5995  & trim(numbertovstring(to_vector%DATA_TYPE,"*",err,error))//"."
5996  CALL flagerror(local_error,err,error,*999)
5997  ENDIF
5998  ELSE
5999  CALL flagerror("To vector has not been finished.",err,error,*999)
6000  ENDIF
6001  ELSE
6002  CALL flagerror("To vector is not associated.",err,error,*999)
6003  ENDIF
6004  ELSE
6005  CALL flagerror("From vector has not been finished.",err,error,*999)
6006  ENDIF
6007  ELSE
6008  CALL flagerror("From vector is not associated.",err,error,*999)
6009  ENDIF
6010 
6011  exits("DISTRIBUTED_VECTOR_COPY_DP")
6012  RETURN
6013 999 errorsexits("DISTRIBUTED_VECTOR_COPY_DP",err,error)
6014  RETURN 1
6015 
6016  END SUBROUTINE distributed_vector_copy_dp
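  !Illustrative sketch (not part of the original source): forming B = 2*A for
  !two finished double precision vectors that share a domain mapping, by
  !calling the specific routine above directly.
  SUBROUTINE example_scaled_copy(A,B,ERR,ERROR,*)
    TYPE(distributed_vector_type), POINTER :: A !<assumed finished and double precision
    TYPE(distributed_vector_type), POINTER :: B !<assumed finished, same mapping, library and data type as A
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(varying_string), INTENT(OUT) :: ERROR
    CALL distributed_vector_copy_dp(A,B,2.0_DP,err,error,*999)
    RETURN
999 RETURN 1
  END SUBROUTINE example_scaled_copy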
6017 
6018  !
6019  !================================================================================================================================
6020  !
6021 
6023  SUBROUTINE distributed_vector_copy_sp(FROM_VECTOR,TO_VECTOR,ALPHA,ERR,ERROR,*)
6025  !Argument variables
6026  TYPE(distributed_vector_type), POINTER :: FROM_VECTOR
6027  TYPE(distributed_vector_type), POINTER :: TO_VECTOR
6028  REAL(SP), INTENT(IN) :: ALPHA
6029  INTEGER(INTG), INTENT(OUT) :: ERR
6030  TYPE(varying_string), INTENT(OUT) :: ERROR
6031  !Local Variables
6032  TYPE(varying_string) :: LOCAL_ERROR
6033 
6034  enters("DISTRIBUTED_VECTOR_COPY_SP",err,error,*999)
6035 
6036  IF(ASSOCIATED(from_vector)) THEN
6037  IF(from_vector%VECTOR_FINISHED) THEN
6038  IF(ASSOCIATED(to_vector)) THEN
6039  IF(to_vector%VECTOR_FINISHED) THEN
6040  IF(from_vector%DATA_TYPE==to_vector%DATA_TYPE) THEN
6041  IF(from_vector%DATA_TYPE==distributed_matrix_vector_sp_type) THEN
6042  IF(from_vector%LIBRARY_TYPE==to_vector%LIBRARY_TYPE) THEN
6043  !Vectors are of the same library type
6044  SELECT CASE(from_vector%LIBRARY_TYPE)
6045  CASE(distributed_matrix_vector_cmiss_type)
6046  IF(ASSOCIATED(from_vector%CMISS)) THEN
6047  IF(ASSOCIATED(to_vector%CMISS)) THEN
6048  IF(ASSOCIATED(from_vector%DOMAIN_MAPPING,to_vector%DOMAIN_MAPPING)) THEN
6049  to_vector%CMISS%DATA_SP(1:to_vector%CMISS%N)=alpha*from_vector%CMISS%DATA_SP(1:from_vector%CMISS%N)
6050  ELSE
6051  CALL flagerror("The from vector does not have the same domain mapping as the to vector.",err,error,*999)
6052  ENDIF
6053  ELSE
6054  CALL flagerror("To vector CMISS is not associated.",err,error,*999)
6055  ENDIF
6056  ELSE
6057  CALL flagerror("From vector CMISS is not associated.",err,error,*999)
6058  ENDIF
6059  CASE(distributed_matrix_vector_petsc_type)
6060  CALL flagerror("Cannot copy a vector for a single precision PETSc distributed vector.",err,error,*999)
6061  CASE DEFAULT
6062  local_error="The from vector library type of "// &
6063  & trim(numbertovstring(from_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
6064  CALL flagerror(local_error,err,error,*999)
6065  END SELECT
6066  ELSE
6067  !Vectors are of different library types
6068  CALL flagerror("Not implemented.",err,error,*999)
6069  ENDIF
6070  ELSE
6071  local_error="The from vector data type of "//trim(numbertovstring(from_vector%DATA_TYPE,"*",err,error))// &
6072  & " does not match the single precision data type of the supplied alpha value."
6073  CALL flagerror(local_error,err,error,*999)
6074  ENDIF
6075  ELSE
6076  local_error="The from vector data type of "// &
6077  & trim(numbertovstring(from_vector%DATA_TYPE,"*",err,error))// &
6078  & " does not match the to vector data type of "// &
6079  & trim(numbertovstring(to_vector%DATA_TYPE,"*",err,error))//"."
6080  CALL flagerror(local_error,err,error,*999)
6081  ENDIF
6082  ELSE
6083  CALL flagerror("To vector has not been finished.",err,error,*999)
6084  ENDIF
6085  ELSE
6086  CALL flagerror("To vector is not associated.",err,error,*999)
6087  ENDIF
6088  ELSE
6089  CALL flagerror("From vector has not been finished.",err,error,*999)
6090  ENDIF
6091  ELSE
6092  CALL flagerror("From vector is not associated.",err,error,*999)
6093  ENDIF
6094 
6095  exits("DISTRIBUTED_VECTOR_COPY_SP")
6096  RETURN
6097 999 errorsexits("DISTRIBUTED_VECTOR_COPY_SP",err,error)
6098  RETURN 1
6099 
6100  END SUBROUTINE distributed_vector_copy_sp
6101 
6102  !
6103  !================================================================================================================================
6104  !
6105 
6107  SUBROUTINE distributed_vector_copy_l(FROM_VECTOR,TO_VECTOR,ALPHA,ERR,ERROR,*)
6109  !Argument variables
6110  TYPE(distributed_vector_type), POINTER :: FROM_VECTOR
6111  TYPE(distributed_vector_type), POINTER :: TO_VECTOR
6112  LOGICAL, INTENT(IN) :: ALPHA
6113  INTEGER(INTG), INTENT(OUT) :: ERR
6114  TYPE(varying_string), INTENT(OUT) :: ERROR
6115  !Local Variables
6116  TYPE(varying_string) :: LOCAL_ERROR
6117 
6118  enters("DISTRIBUTED_VECTOR_COPY_L",err,error,*999)
6119 
6120  IF(ASSOCIATED(from_vector)) THEN
6121  IF(from_vector%VECTOR_FINISHED) THEN
6122  IF(ASSOCIATED(to_vector)) THEN
6123  IF(to_vector%VECTOR_FINISHED) THEN
6124  IF(from_vector%DATA_TYPE==to_vector%DATA_TYPE) THEN
6125  IF(from_vector%DATA_TYPE==distributed_matrix_vector_l_type) THEN
6126  IF(from_vector%LIBRARY_TYPE==to_vector%LIBRARY_TYPE) THEN
6127  !Vectors are of the same library type
6128  SELECT CASE(from_vector%LIBRARY_TYPE)
6129  CASE(distributed_matrix_vector_cmiss_type)
6130  IF(ASSOCIATED(from_vector%CMISS)) THEN
6131  IF(ASSOCIATED(to_vector%CMISS)) THEN
6132  IF(ASSOCIATED(from_vector%DOMAIN_MAPPING,to_vector%DOMAIN_MAPPING)) THEN
6133  to_vector%CMISS%DATA_L(1:to_vector%CMISS%N)=alpha.AND.from_vector%CMISS%DATA_L(1:from_vector%CMISS%N)
6134  ELSE
6135  CALL flagerror("The from vector does not have the same domain mapping as the to vector.",err,error,*999)
6136  ENDIF
6137  ELSE
6138  CALL flagerror("To vector CMISS is not associated.",err,error,*999)
6139  ENDIF
6140  ELSE
6141  CALL flagerror("From vector CMISS is not associated.",err,error,*999)
6142  ENDIF
6143  CASE(distributed_matrix_vector_petsc_type)
6144  CALL flagerror("Cannot copy a vector for a logical PETSc distributed vector.",err,error,*999)
6145  CASE DEFAULT
6146  local_error="The from vector library type of "// &
6147  & trim(numbertovstring(from_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
6148  CALL flagerror(local_error,err,error,*999)
6149  END SELECT
6150  ELSE
6151  !Vectors are of different library types
6152  CALL flagerror("Not implemented.",err,error,*999)
6153  ENDIF
6154  ELSE
6155  local_error="The from vector data type of "//trim(numbertovstring(from_vector%DATA_TYPE,"*",err,error))// &
6156  & " does not match the logical data type of the supplied alpha value."
6157  CALL flagerror(local_error,err,error,*999)
6158  ENDIF
6159  ELSE
6160  local_error="The from vector data type of "// &
6161  & trim(numbertovstring(from_vector%DATA_TYPE,"*",err,error))// &
6162  & " does not match the to vector data type of "// &
6163  & trim(numbertovstring(to_vector%DATA_TYPE,"*",err,error))//"."
6164  CALL flagerror(local_error,err,error,*999)
6165  ENDIF
6166  ELSE
6167  CALL flagerror("To vector has not been finished.",err,error,*999)
6168  ENDIF
6169  ELSE
6170  CALL flagerror("To vector is not associated.",err,error,*999)
6171  ENDIF
6172  ELSE
6173  CALL flagerror("From vector has not been finished.",err,error,*999)
6174  ENDIF
6175  ELSE
6176  CALL flagerror("From vector is not associated.",err,error,*999)
6177  ENDIF
6178 
6179  exits("DISTRIBUTED_VECTOR_COPY_L")
6180  RETURN
6181 999 errorsexits("DISTRIBUTED_VECTOR_COPY_L",err,error)
6182  RETURN 1
6183 
6184  END SUBROUTINE distributed_vector_copy_l
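  !Note the semantics of the logical variant above: ALPHA acts as a mask,
  !i.e. TO(i) = ALPHA .AND. FROM(i), so ALPHA=.TRUE. copies FROM_VECTOR
  !verbatim while ALPHA=.FALSE. sets every entry of TO_VECTOR to .FALSE..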
6185 
6186  !
6187  !================================================================================================================================
6188  !
6189 
6191  SUBROUTINE distributed_vector_cmiss_finalise(CMISS_VECTOR,ERR,ERROR,*)
6193  !Argument variables
6194  TYPE(distributed_vector_cmiss_type), POINTER :: CMISS_VECTOR
6195  INTEGER(INTG), INTENT(OUT) :: ERR
6196  TYPE(varying_string), INTENT(OUT) :: ERROR
6197  !Local Variables
6198  INTEGER(INTG) :: domain_idx
6199 
6200  enters("DISTRIBUTED_VECTOR_CMISS_FINALISE",err,error,*999)
6201 
6202  IF(ASSOCIATED(cmiss_vector)) THEN
6203  IF(ALLOCATED(cmiss_vector%DATA_INTG)) DEALLOCATE(cmiss_vector%DATA_INTG)
6204  IF(ALLOCATED(cmiss_vector%DATA_SP)) DEALLOCATE(cmiss_vector%DATA_SP)
6205  IF(ALLOCATED(cmiss_vector%DATA_DP)) DEALLOCATE(cmiss_vector%DATA_DP)
6206  IF(ALLOCATED(cmiss_vector%DATA_L)) DEALLOCATE(cmiss_vector%DATA_L)
6207  IF(ALLOCATED(cmiss_vector%TRANSFERS)) THEN
6208  DO domain_idx=1,SIZE(cmiss_vector%TRANSFERS)
6209  CALL distributed_vector_cmiss_transfer_finalise(cmiss_vector,domain_idx,err,error,*999)
6210  ENDDO !domain_idx
6211  DEALLOCATE(cmiss_vector%TRANSFERS)
6212  ENDIF
6213  DEALLOCATE(cmiss_vector)
6214  ENDIF
6215 
6216  exits("DISTRIBUTED_VECTOR_CMISS_FINALISE")
6217  RETURN
6218 999 errorsexits("DISTRIBUTED_VECTOR_CMISS_FINALISE",err,error)
6219  RETURN 1
6220  END SUBROUTINE distributed_vector_cmiss_finalise
6221 
6222  !
6223  !================================================================================================================================
6224  !
6225 
6227  SUBROUTINE distributed_vector_cmiss_initialise(DISTRIBUTED_VECTOR,ERR,ERROR,*)
6229  !Argument variables
6230  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
6231  INTEGER(INTG), INTENT(OUT) :: ERR
6232  TYPE(varying_string), INTENT(OUT) :: ERROR
6233  !Local Variables
6234  INTEGER(INTG) :: DUMMY_ERR
6235  TYPE(varying_string) :: DUMMY_ERROR,LOCAL_ERROR
6236 
6237  enters("DISTRIBUTED_VECTOR_CMISS_INITIALISE",err,error,*998)
6238 
6239  IF(ASSOCIATED(distributed_vector)) THEN
6240  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
6241  CALL flagerror("CMISS is already associated for this distributed vector.",err,error,*998)
6242  ELSE
6243  IF(ASSOCIATED(distributed_vector%DOMAIN_MAPPING)) THEN
6244  ALLOCATE(distributed_vector%CMISS,stat=err)
6245  IF(err/=0) CALL flagerror("Could not allocate CMISS distributed vector.",err,error,*999)
6246  distributed_vector%CMISS%DISTRIBUTED_VECTOR=>distributed_vector
6247  distributed_vector%LIBRARY_TYPE=distributed_matrix_vector_cmiss_type
6248  !Set the defaults
6249  distributed_vector%CMISS%BASE_TAG_NUMBER=0
6250  SELECT CASE(distributed_vector%GHOSTING_TYPE)
6251  CASE(distributed_matrix_vector_include_ghosts_type)
6252  distributed_vector%CMISS%N=distributed_vector%DOMAIN_MAPPING%TOTAL_NUMBER_OF_LOCAL
6253  CASE(distributed_matrix_vector_no_ghosts_type)
6254  distributed_vector%CMISS%N=distributed_vector%DOMAIN_MAPPING%NUMBER_OF_LOCAL
6255  CASE DEFAULT
6256  local_error="The distributed vector ghosting type of "// &
6257  & trim(numbertovstring(distributed_vector%GHOSTING_TYPE,"*",err,error))//" is invalid."
6258  CALL flagerror(local_error,err,error,*999)
6259  END SELECT
6260  distributed_vector%CMISS%DATA_SIZE=0
6261  ELSE
6262  CALL flagerror("Distributed vector domain mapping is not associated.",err,error,*998)
6263  ENDIF
6264  ENDIF
6265  ELSE
6266  CALL flagerror("Distributed vector is not associated.",err,error,*998)
6267  ENDIF
6268 
6269  exits("DISTRIBUTED_VECTOR_CMISS_INITIALISE")
6270  RETURN
6271 999 IF(ASSOCIATED(distributed_vector%CMISS)) &
6272  & CALL distributed_vector_cmiss_finalise(distributed_vector%CMISS,dummy_err,dummy_error,*998)
6273 998 errorsexits("DISTRIBUTED_VECTOR_CMISS_INITIALISE",err,error)
6274  RETURN 1
6275  END SUBROUTINE distributed_vector_cmiss_initialise
6276 
6277  !
6278  !================================================================================================================================
6279  !
6280 
6282  SUBROUTINE distributed_vector_cmiss_create_finish(CMISS_VECTOR,ERR,ERROR,*)
6284  !Argument variables
6285  TYPE(distributed_vector_cmiss_type), POINTER :: CMISS_VECTOR
6286  INTEGER(INTG), INTENT(OUT) :: ERR
6287  TYPE(varying_string), INTENT(OUT) :: ERROR
6288  !Local Variables
6289  INTEGER(INTG) :: domain_idx,domain_idx2,domain_no,DUMMY_ERR,my_computational_node_number
6290  LOGICAL :: FOUND
6291  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
6292  TYPE(domain_mapping_type), POINTER :: DOMAIN_MAPPING
6293  TYPE(varying_string) :: DUMMY_ERROR,LOCAL_ERROR
6294 
6295  enters("DISTRIBUTED_VECTOR_CMISS_CREATE_FINISH",err,error,*999)
6296 
6297  IF(ASSOCIATED(cmiss_vector)) THEN
6298  distributed_vector=>cmiss_vector%DISTRIBUTED_VECTOR
6299  IF(ASSOCIATED(distributed_vector)) THEN
6300  domain_mapping=>distributed_vector%DOMAIN_MAPPING
6301  IF(ASSOCIATED(domain_mapping)) THEN
6302  cmiss_vector%DATA_SIZE=cmiss_vector%N
6303  SELECT CASE(distributed_vector%DATA_TYPE)
6304  CASE(matrix_vector_intg_type)
6305  ALLOCATE(cmiss_vector%DATA_INTG(cmiss_vector%DATA_SIZE),stat=err)
6306  IF(err/=0) CALL flagerror("Could not allocate CMISS distributed vector integer data.",err,error,*999)
6307  CASE(matrix_vector_sp_type)
6308  ALLOCATE(cmiss_vector%DATA_SP(cmiss_vector%DATA_SIZE),stat=err)
6309  IF(err/=0) CALL flagerror("Could not allocate CMISS distributed vector single precision data.",err,error,*999)
6310  CASE(matrix_vector_dp_type)
6311  ALLOCATE(cmiss_vector%DATA_DP(cmiss_vector%DATA_SIZE),stat=err)
6312  IF(err/=0) CALL flagerror("Could not allocate CMISS distributed vector double precision data.",err,error,*999)
6313  CASE(matrix_vector_l_type)
6314  ALLOCATE(cmiss_vector%DATA_L(cmiss_vector%DATA_SIZE),stat=err)
6315  IF(err/=0) CALL flagerror("Could not allocate CMISS distributed vector logical data.",err,error,*999)
6316  CASE DEFAULT
6317  local_error="The distributed vector data type of "// &
6318  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))//" is invalid."
6319  CALL flagerror(local_error,err,error,*999)
6320  END SELECT
6321  cmiss_vector%BASE_TAG_NUMBER=distributed_data_id
6322  IF(domain_mapping%NUMBER_OF_DOMAINS==1) THEN
6323  distributed_data_id=distributed_data_id+1
6324  ELSE
6325  distributed_data_id=distributed_data_id+ &
6326  & domain_mapping%ADJACENT_DOMAINS_PTR(domain_mapping%NUMBER_OF_DOMAINS)
6327  END IF
6328  IF(domain_mapping%NUMBER_OF_ADJACENT_DOMAINS>0) THEN
6329  my_computational_node_number=computational_node_number_get(err,error)
6330  IF(err/=0) GOTO 999
6331  IF(distributed_vector%GHOSTING_TYPE==distributed_matrix_vector_include_ghosts_type) THEN
6332  ALLOCATE(cmiss_vector%TRANSFERS(domain_mapping%NUMBER_OF_ADJACENT_DOMAINS),stat=err)
6333  IF(err/=0) CALL flagerror("Could not allocate CMISS distributed vector transfer buffers.",err,error,*999)
6334  DO domain_idx=1,domain_mapping%NUMBER_OF_ADJACENT_DOMAINS
6335  CALL distributedvector_cmisstransferinitialise(cmiss_vector,domain_idx,err,error,*999)
6336  cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_SIZE=domain_mapping%ADJACENT_DOMAINS(domain_idx)% &
6337  & number_of_send_ghosts
6338  cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_SIZE= &
6339  & domain_mapping%ADJACENT_DOMAINS(domain_idx)%NUMBER_OF_RECEIVE_GHOSTS
6340  cmiss_vector%TRANSFERS(domain_idx)%DATA_TYPE=distributed_vector%DATA_TYPE
6341  cmiss_vector%TRANSFERS(domain_idx)%SEND_TAG_NUMBER=cmiss_vector%BASE_TAG_NUMBER + &
6342  & domain_mapping%ADJACENT_DOMAINS_PTR(my_computational_node_number)+domain_idx-1
6343  domain_no=domain_mapping%ADJACENT_DOMAINS(domain_idx)%DOMAIN_NUMBER
6344  found=.false.
6345  DO domain_idx2=domain_mapping%ADJACENT_DOMAINS_PTR(domain_no),domain_mapping%ADJACENT_DOMAINS_PTR(domain_no+1)-1
6346  IF(domain_mapping%ADJACENT_DOMAINS_LIST(domain_idx2)==my_computational_node_number) THEN
6347  found=.true.
6348  EXIT
6349  ENDIF
6350  ENDDO !domain_idx2
6351  IF(found) THEN
6352  domain_idx2=domain_idx2-domain_mapping%ADJACENT_DOMAINS_PTR(domain_no)+1
6353  cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_TAG_NUMBER=cmiss_vector%BASE_TAG_NUMBER + &
6354  & domain_mapping%ADJACENT_DOMAINS_PTR(domain_no)+domain_idx2-1
6355  ELSE
6356  CALL flagerror("Could not find domain to set the receive tag number.",err,error,*999)
6357  ENDIF
6358  SELECT CASE(distributed_vector%DATA_TYPE)
6359  CASE(matrix_vector_intg_type)
6360  ALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_INTG(cmiss_vector%TRANSFERS(domain_idx)% &
6361  & send_buffer_size),stat=err)
6362  IF(err/=0) CALL flagerror("Could not allocate distributed vector send integer transfer buffer.",err,error,*999)
6363  ALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_INTG(cmiss_vector%TRANSFERS(domain_idx)% &
6364  & receive_buffer_size),stat=err)
6365  IF(err/=0) CALL flagerror("Could not allocate distributed vector receive integer transfer buffer.", &
6366  & err,error,*999)
6367  CASE(matrix_vector_sp_type)
6368  ALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_SP(cmiss_vector%TRANSFERS(domain_idx)% &
6369  & send_buffer_size),stat=err)
6370  IF(err/=0) CALL flagerror("Could not allocate distributed vector send single precision transfer buffer.", &
6371  & err,error,*999)
6372  ALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_SP(cmiss_vector%TRANSFERS(domain_idx)% &
6373  & receive_buffer_size),stat=err)
6374  IF(err/=0) CALL flagerror("Could not allocate distributed vector receive single precision transfer buffer.", &
6375  & err,error,*999)
6376  CASE(matrix_vector_dp_type)
6377  ALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_DP(cmiss_vector%TRANSFERS(domain_idx)% &
6378  & send_buffer_size),stat=err)
6379  IF(err/=0) CALL flagerror("Could not allocate distributed vector send double precision transfer buffer.", &
6380  & err,error,*999)
6381  ALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_DP(cmiss_vector%TRANSFERS(domain_idx)% &
6382  & receive_buffer_size),stat=err)
6383  IF(err/=0) CALL flagerror("Could not allocate distributed vector receive double precision transfer buffer.", &
6384  & err,error,*999)
6385  CASE(matrix_vector_l_type)
6386  ALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_L(cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_SIZE), &
6387  & stat=err)
6388  IF(err/=0) CALL flagerror("Could not allocate distributed vector send logical transfer buffer.",err,error,*999)
6389  ALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_L(cmiss_vector%TRANSFERS(domain_idx)% &
6390  & receive_buffer_size),stat=err)
6391  IF(err/=0) CALL flagerror("Could not allocate distributed vector receive logical transfer buffer.", &
6392  & err,error,*999)
6393  CASE DEFAULT
6394  local_error="The distributed vector data type of "// &
6395  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))//" is invalid."
6396  CALL flagerror(local_error,err,error,*999)
6397  END SELECT
6398  ENDDO !domain_idx
6399  ENDIF
6400  ENDIF
6401  ELSE
6402  CALL flagerror("CMISS vector distributed vector domain mapping is not associated.",err,error,*999)
6403  ENDIF
6404  ELSE
6405  CALL flagerror("CMISS vector distributed vector is not associated.",err,error,*999)
6406  ENDIF
6407  ELSE
6408  CALL flagerror("CMISS vector is not associated.",err,error,*999)
6409  ENDIF
6410 
6411  exits("DISTRIBUTED_VECTOR_CMISS_CREATE_FINISH")
6412  RETURN
6413 999 CALL distributed_vector_cmiss_finalise(cmiss_vector,dummy_err,dummy_error,*998)
6414 998 errorsexits("DISTRIBUTED_VECTOR_CMISS_CREATE_FINISH",err,error)
6415  RETURN 1
6416  END SUBROUTINE distributed_vector_cmiss_create_finish
6417 
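  !A worked example of the tag arithmetic above (illustrative numbers only):
  !take three ranks that are all mutually adjacent, so each rank has two
  !neighbours and ADJACENT_DOMAINS_PTR(0:3)=(/1,3,5,7/), and write
  !BASE_TAG_NUMBER as B. Rank 0 then sends with tags B+1 and B+2, rank 1 with
  !B+3 and B+4, and rank 2 with B+5 and B+6. When rank 1 posts its receive
  !from rank 0, it locates itself in rank 0's adjacency list (the first
  !entry), giving receive tag B+1, which is exactly the tag rank 0 used to
  !send to rank 1. Every ordered (sender,receiver) pair therefore gets a
  !unique tag for this vector, and DISTRIBUTED_DATA_ID is advanced past the
  !whole tag block so the next distributed vector cannot reuse any of them.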
6418  !
6419  !================================================================================================================================
6420  !
6421 
6423  SUBROUTINE distributed_vector_create_finish(DISTRIBUTED_VECTOR,ERR,ERROR,*)
6425  !Argument variables
6426  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
6427  INTEGER(INTG), INTENT(OUT) :: ERR
6428  TYPE(varying_string), INTENT(OUT) :: ERROR
6429  !Local Variables
6430  INTEGER(INTG) :: DUMMY_ERR
6431  TYPE(varying_string) :: DUMMY_ERROR,LOCAL_ERROR
6432 
6433  enters("DISTRIBUTED_VECTOR_CREATE_FINISH",err,error,*999)
6434 
6435  IF(ASSOCIATED(distributed_vector)) THEN
6436  IF(distributed_vector%VECTOR_FINISHED) THEN
6437  CALL flagerror("The distributed vector has already been finished.",err,error,*999)
6438  ELSE
6439  SELECT CASE(distributed_vector%LIBRARY_TYPE)
6440  CASE(distributed_matrix_vector_cmiss_type)
6441  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
6442  CALL distributed_vector_cmiss_create_finish(distributed_vector%CMISS,err,error,*999)
6443  ELSE
6444  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
6445  ENDIF
6446  CASE(distributed_matrix_vector_petsc_type)
6447  IF(ASSOCIATED(distributed_vector%PETSC)) THEN
6448  CALL distributed_vector_petsc_create_finish(distributed_vector%PETSC,err,error,*999)
6449  ELSE
6450  CALL flagerror("Distributed vector PETSc is not associated.",err,error,*999)
6451  ENDIF
6452  CASE DEFAULT
6453  local_error="The distributed vector library type of "// &
6454  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
6455  CALL flagerror(local_error,err,error,*999)
6456  END SELECT
6457  distributed_vector%VECTOR_FINISHED=.true.
6458  ENDIF
6459  ENDIF
6460 
6461  exits("DISTRIBUTED_VECTOR_CREATE_FINISH")
6462  RETURN
6463 999 IF(ASSOCIATED(distributed_vector)) CALL distributed_vector_finalise(distributed_vector,dummy_err,dummy_error,*998)
6465 998 errorsexits("DISTRIBUTED_VECTOR_CREATE_FINISH",err,error)
6466  RETURN 1
6467  END SUBROUTINE distributed_vector_create_finish
6468 
6469  !
6470  !================================================================================================================================
6471  !
6472 
6474  SUBROUTINE distributed_vector_create_start(DOMAIN_MAPPING,DISTRIBUTED_VECTOR,ERR,ERROR,*)
6476  !Argument variables
6477  TYPE(domain_mapping_type), POINTER :: DOMAIN_MAPPING
6478  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
6479  INTEGER(INTG), INTENT(OUT) :: ERR
6480  TYPE(varying_string), INTENT(OUT) :: ERROR
6481  !Local Variables
6482  INTEGER(INTG) :: DUMMY_ERR
6483  TYPE(varying_string) :: DUMMY_ERROR
6484 
6485  enters("DISTRIBUTED_VECTOR_CREATE_START",err,error,*998)
6486 
6487  IF(ASSOCIATED(domain_mapping)) THEN
6488  IF(ASSOCIATED(distributed_vector)) THEN
6489  CALL flagerror("Distributed vector is already associated.",err,error,*998)
6490  ELSE
6491  CALL distributed_vector_initialise(domain_mapping,distributed_vector,err,error,*999)
6492  !Set the default values
6493  ENDIF
6494  ELSE
6495  CALL flagerror("Domain mapping is not associated.",err,error,*998)
6496  ENDIF
6497 
6498  exits("DISTRIBUTED_VECTOR_CREATE_START")
6499  RETURN
6500 999 CALL distributed_vector_finalise(distributed_vector,dummy_err,dummy_error,*998)
6501 998 errorsexits("DISTRIBUTED_VECTOR_CREATE_START",err,error)
6502  RETURN 1
6503  END SUBROUTINE distributed_vector_create_start
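  !Illustrative lifecycle sketch (not part of the original source): creating a
  !CMISS-backed integer vector without ghost entries. All routine and constant
  !names below are from this module; DOMAIN_MAP is assumed to be a finished
  !domain mapping and VEC an unassociated pointer on entry.
  SUBROUTINE example_create_no_ghosts(DOMAIN_MAP,VEC,ERR,ERROR,*)
    TYPE(domain_mapping_type), POINTER :: DOMAIN_MAP
    TYPE(distributed_vector_type), POINTER :: VEC
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(varying_string), INTENT(OUT) :: ERROR
    CALL distributed_vector_create_start(DOMAIN_MAP,VEC,err,error,*999)
    CALL distributed_vector_library_type_set(VEC,distributed_matrix_vector_cmiss_type,err,error,*999)
    CALL distributed_vector_data_type_set(VEC,matrix_vector_intg_type,err,error,*999)
    CALL distributed_vector_ghosting_type_set(VEC,distributed_matrix_vector_no_ghosts_type,err,error,*999)
    CALL distributed_vector_create_finish(VEC,err,error,*999) !allocates DATA_INTG(1:N)
    RETURN
999 RETURN 1
  END SUBROUTINE example_create_no_ghosts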
6504 
6505  !
6506  !================================================================================================================================
6507  !
6508 
6510  SUBROUTINE distributedvector_datatypeget(vector,dataType,err,error,*)
6512  !Argument variables
6513  TYPE(distributed_vector_type), POINTER :: vector
6514  INTEGER(INTG), INTENT(OUT) :: dataType
6515  INTEGER(INTG), INTENT(OUT) :: err
6516  TYPE(varying_string), INTENT(OUT) :: error
6517 
6518  enters("DistributedVector_DataTypeGet",err,error,*999)
6519 
6520  IF(ASSOCIATED(vector)) THEN
6521  IF(.NOT.vector%vector_finished) THEN
6522  CALL flagerror("The vector has not been finished.",err,error,*999)
6523  ELSE
6524  datatype=vector%data_type
6525  END IF
6526  ELSE
6527  CALL flagerror("Distributed vector is not associated.",err,error,*999)
6528  END IF
6529 
6530  exits("DistributedVector_DataTypeGet")
6531  RETURN
6532 999 errorsexits("DistributedVector_DataTypeGet",err,error)
6533  RETURN 1
6534  END SUBROUTINE distributedvector_datatypeget
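  !Illustrative sketch (not part of the original source): querying a finished
  !vector's data type before deciding which typed accessor to call.
  SUBROUTINE example_query_data_type(VEC,ERR,ERROR,*)
    TYPE(distributed_vector_type), POINTER :: VEC
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(varying_string), INTENT(OUT) :: ERROR
    INTEGER(INTG) :: dataType
    CALL distributedvector_datatypeget(VEC,dataType,err,error,*999)
    IF(dataType==matrix_vector_dp_type) THEN
      !safe to use distributed_vector_data_get_dp/distributed_vector_data_restore_dp
    ENDIF
    RETURN
999 RETURN 1
  END SUBROUTINE example_query_data_type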
6535 
6536  !
6537  !================================================================================================================================
6538  !
6539 
6541  SUBROUTINE distributed_vector_data_type_set(DISTRIBUTED_VECTOR,DATA_TYPE,ERR,ERROR,*)
6543  !Argument variables
6544  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
6545  INTEGER(INTG), INTENT(IN) :: DATA_TYPE
6546  INTEGER(INTG), INTENT(OUT) :: ERR
6547  TYPE(varying_string), INTENT(OUT) :: ERROR
6548  !Local Variables
6549  TYPE(varying_string) :: LOCAL_ERROR
6550 
6551  enters("DISTRIBUTED_VECTOR_DATA_TYPE_SET",err,error,*999)
6552 
6553  IF(ASSOCIATED(distributed_vector)) THEN
6554  IF(distributed_vector%VECTOR_FINISHED) THEN
6555  CALL flagerror("The distributed vector has already been finished.",err,error,*999)
6556  ELSE
6557  SELECT CASE(distributed_vector%LIBRARY_TYPE)
6558  CASE(distributed_matrix_vector_cmiss_type)
6559  SELECT CASE(data_type)
6560  CASE(matrix_vector_intg_type)
6561  distributed_vector%DATA_TYPE=matrix_vector_intg_type
6562  CASE(matrix_vector_sp_type)
6563  distributed_vector%DATA_TYPE=matrix_vector_sp_type
6564  CASE(matrix_vector_dp_type)
6565  distributed_vector%DATA_TYPE=matrix_vector_dp_type
6566  CASE(matrix_vector_l_type)
6567  distributed_vector%DATA_TYPE=matrix_vector_l_type
6568  CASE DEFAULT
6569  local_error="The distributed data type of "//trim(numbertovstring(data_type,"*",err,error))//" is invalid."
6570  CALL flagerror(local_error,err,error,*999)
6571  END SELECT
6572  CASE(distributed_matrix_vector_petsc_type)
6573  SELECT CASE(data_type)
6574  CASE(matrix_vector_intg_type)
6575  CALL flagerror("An integer distributed PETSc vector is not implemented.",err,error,*999)
6576  CASE(matrix_vector_sp_type)
6577  CALL flagerror("A single precision distributed PETSc vector is not implemented.",err,error,*999)
6578  CASE(matrix_vector_dp_type)
6579  distributed_vector%DATA_TYPE=matrix_vector_dp_type
6580  CASE(matrix_vector_l_type)
6581  CALL flagerror("A logical distributed PETSc vector is not implemented.",err,error,*999)
6582  CASE DEFAULT
6583  local_error="The distributed data type of "//trim(numbertovstring(data_type,"*",err,error))//" is invalid."
6584  CALL flagerror(local_error,err,error,*999)
6585  END SELECT
6586  CASE DEFAULT
6587  local_error="The distributed vector library type of "// &
6588  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
6589  CALL flagerror(local_error,err,error,*999)
6590  END SELECT
6591  ENDIF
6592  ELSE
6593  CALL flagerror("Distributed vector is not associated.",err,error,*999)
6594  ENDIF
6595 
6596  exits("DISTRIBUTED_VECTOR_DATA_TYPE_SET")
6597  RETURN
6598 999 errorsexits("DISTRIBUTED_VECTOR_DATA_TYPE_SET",err,error)
6599  RETURN 1
6600  END SUBROUTINE distributed_vector_data_type_set
6601 
6602  !
6603  !================================================================================================================================
6604  !
6605 
6607  SUBROUTINE distributed_vector_destroy(DISTRIBUTED_VECTOR,ERR,ERROR,*)
6609  !Argument variables
6610  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
6611  INTEGER(INTG), INTENT(OUT) :: ERR
6612  TYPE(varying_string), INTENT(OUT) :: ERROR
6613  !Local Variables
6614 
6615  enters("DISTRIBUTED_VECTOR_DESTROY",err,error,*999)
6616 
6617  IF(ASSOCIATED(distributed_vector)) THEN
6618  CALL distributed_vector_finalise(distributed_vector,err,error,*999)
6619  ELSE
6620  CALL flagerror("Distributed vector is not associated.",err,error,*999)
6621  ENDIF
6622 
6623  exits("DISTRIBUTED_VECTOR_DESTROY")
6624  RETURN
6625 999 errorsexits("DISTRIBUTED_VECTOR_DESTROY",err,error)
6626  RETURN 1
6627  END SUBROUTINE distributed_vector_destroy
6628 
6629  !
6630  !================================================================================================================================
6631  !
6632 
6634  SUBROUTINE distributed_vector_duplicate(DISTRIBUTED_VECTOR,NEW_DISTRIBUTED_VECTOR,ERR,ERROR,*)
6636  !Argument variables
6637  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
6638  TYPE(distributed_vector_type), POINTER :: NEW_DISTRIBUTED_VECTOR
6639  INTEGER(INTG), INTENT(OUT) :: ERR
6640  TYPE(varying_string), INTENT(OUT) :: ERROR
6641  !Local Variables
6642  INTEGER(INTG) :: DUMMY_ERR
6643  TYPE(varying_string) :: DUMMY_ERROR
6644 
6645  enters("DISTRIBUTED_VECTOR_DUPLICATE",err,error,*998)
6646 
6647  IF(ASSOCIATED(distributed_vector)) THEN
6648  IF(ASSOCIATED(new_distributed_vector)) THEN
6649  CALL flagerror("New distributed vector is already associated.",err,error,*998)
6650  ELSE
6651  CALL distributed_vector_create_start(distributed_vector%DOMAIN_MAPPING,new_distributed_vector,err,error,*999)
6652  CALL distributed_vector_library_type_set(new_distributed_vector,distributed_vector%LIBRARY_TYPE,err,error,*999)
6653  CALL distributed_vector_data_type_set(new_distributed_vector,distributed_vector%DATA_TYPE,err,error,*999)
6654  CALL distributed_vector_create_finish(new_distributed_vector,err,error,*999)
6655  ENDIF
6656  ELSE
6657  CALL flagerror("Distributed vector is not associated.",err,error,*998)
6658  ENDIF
6659 
6660  exits("DISTRIBUTED_VECTOR_DUPLICATE")
6661  RETURN
6662 999 CALL distributed_vector_finalise(new_distributed_vector,dummy_err,dummy_error,*998)
6663 998 errorsexits("DISTRIBUTED_VECTOR_DUPLICATE",err,error)
6664  RETURN 1
6665  END SUBROUTINE distributed_vector_duplicate
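  !Illustrative sketch (not part of the original source): duplicating a
  !finished double precision vector and filling the duplicate with a
  !half-scale copy of the original. The duplicate inherits the domain mapping,
  !library type and data type of the original, so the copy is valid.
  SUBROUTINE example_duplicate_and_scale(A,B,ERR,ERROR,*)
    TYPE(distributed_vector_type), POINTER :: A !<assumed finished and double precision
    TYPE(distributed_vector_type), POINTER :: B !<assumed unassociated on entry; on exit holds 0.5*A
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(varying_string), INTENT(OUT) :: ERROR
    CALL distributed_vector_duplicate(A,B,err,error,*999)
    CALL distributed_vector_copy_dp(A,B,0.5_DP,err,error,*999)
    RETURN
999 RETURN 1
  END SUBROUTINE example_duplicate_and_scale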
6666 
6667  !
6668  !================================================================================================================================
6669  !
6670 
6672  SUBROUTINE distributed_vector_finalise(DISTRIBUTED_VECTOR,ERR,ERROR,*)
6674  !Argument variables
6675  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
6676  INTEGER(INTG), INTENT(OUT) :: ERR
6677  TYPE(varying_string), INTENT(OUT) :: ERROR
6678  !Local Variables
6679 
6680  enters("DISTRIBUTED_VECTOR_FINALISE",err,error,*999)
6681 
6682  IF(ASSOCIATED(distributed_vector)) THEN
6683  CALL distributed_vector_cmiss_finalise(distributed_vector%CMISS,err,error,*999)
6684  CALL distributed_vector_petsc_finalise(distributed_vector%PETSC,err,error,*999)
6685  DEALLOCATE(distributed_vector)
6686  ENDIF
6687 
6688  exits("DISTRIBUTED_VECTOR_FINALISE")
6689  RETURN
6690 999 errorsexits("DISTRIBUTED_VECTOR_FINALISE",err,error)
6691  RETURN 1
6692  END SUBROUTINE distributed_vector_finalise
6693 
6694  !
6695  !================================================================================================================================
6696  !
6697 
6699  SUBROUTINE distributed_vector_initialise(DOMAIN_MAPPING,DISTRIBUTED_VECTOR,ERR,ERROR,*)
6701  !Argument variables
6702  TYPE(domain_mapping_type), POINTER :: DOMAIN_MAPPING
6703  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
6704  INTEGER(INTG), INTENT(OUT) :: ERR
6705  TYPE(varying_string), INTENT(OUT) :: ERROR
6706  !Local Variables
6707  INTEGER(INTG) :: DUMMY_ERR
6708  TYPE(varying_string) :: DUMMY_ERROR
6709 
6710  enters("DISTRIBUTED_VECTOR_INITIALISE",err,error,*998)
6711 
6712  IF(ASSOCIATED(domain_mapping)) THEN
6713  IF(ASSOCIATED(distributed_vector)) THEN
6714  CALL flagerror("Distributed vector is already associated.",err,error,*998)
6715  ELSE
6716  ALLOCATE(distributed_vector,stat=err)
6717  IF(err/=0) CALL flagerror("Could not allocate the distributed vector.",err,error,*999)
6718  distributed_vector%VECTOR_FINISHED=.false.
6719  distributed_vector%LIBRARY_TYPE=0
6720  distributed_vector%GHOSTING_TYPE=distributed_matrix_vector_include_ghosts_type
6721  distributed_vector%DOMAIN_MAPPING=>domain_mapping
6722  distributed_vector%DATA_TYPE=matrix_vector_dp_type
6723  NULLIFY(distributed_vector%CMISS)
6724  NULLIFY(distributed_vector%PETSC)
6725  CALL distributed_vector_cmiss_initialise(distributed_vector,err,error,*999)
6726  ENDIF
6727  ELSE
6728  CALL flagerror("Domain mapping is not associated.",err,error,*998)
6729  ENDIF
6730 
6731  exits("DISTRIBUTED_VECTOR_INITIALISE")
6732  RETURN
6733 999 CALL distributed_vector_finalise(distributed_vector,dummy_err,dummy_error,*998)
6734 998 errorsexits("DISTRIBUTED_VECTOR_INITIALISE",err,error)
6735  RETURN 1
6736  END SUBROUTINE distributed_vector_initialise
6737 
6738  !
6739  !================================================================================================================================
6740  !
6741 
6743  SUBROUTINE distributed_vector_data_get_intg(DISTRIBUTED_VECTOR,DATA,ERR,ERROR,*)
6745  !Argument variables
6746  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
6747  INTEGER(INTG), POINTER :: DATA(:)
6748  INTEGER(INTG), INTENT(OUT) :: ERR
6749  TYPE(varying_string), INTENT(OUT) :: ERROR
6750  !Local Variables
6751  TYPE(varying_string) :: LOCAL_ERROR
6752 
6753  enters("DISTRIBUTED_VECTOR_DATA_GET_INTG",err,error,*999)
6754 
6755  IF(ASSOCIATED(distributed_vector)) THEN
6756  IF(ASSOCIATED(data)) THEN
6757  CALL flagerror("Data is already associated.",err,error,*999)
6758  ELSE
6759  NULLIFY(data)
6760  IF(distributed_vector%VECTOR_FINISHED) THEN
6761  IF(distributed_vector%DATA_TYPE==matrix_vector_intg_type) THEN
6762  SELECT CASE(distributed_vector%LIBRARY_TYPE)
6763  CASE(distributed_matrix_vector_cmiss_type)
6764  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
6765  data=>distributed_vector%CMISS%DATA_INTG
6766  ELSE
6767  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
6768  ENDIF
6769  CASE(distributed_matrix_vector_petsc_type)
6770  CALL flagerror("Cannot get data for an integer PETSc distributed vector.",err,error,*999)
6771  CASE DEFAULT
6772  local_error="The distributed vector library type of "// &
6773  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
6774  CALL flagerror(local_error,err,error,*999)
6775  END SELECT
6776  ELSE
6777  local_error="The distributed data type of "// &
6778  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
6779  & " does not correspond to the integer data type of the requested values."
6780  CALL flagerror(local_error,err,error,*999)
6781  ENDIF
6782  ELSE
6783  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
6784  ENDIF
6785  ENDIF
6786  ELSE
6787  CALL flagerror("Distributed vector is not associated.",err,error,*999)
6788  ENDIF
6789 
6790  exits("DISTRIBUTED_VECTOR_DATA_GET_INTG")
6791  RETURN
6792 999 errorsexits("DISTRIBUTED_VECTOR_DATA_GET_INTG",err,error)
6793  RETURN 1
6794  END SUBROUTINE distributed_vector_data_get_intg
6795 
6796  !
6797  !================================================================================================================================
6798  !
6799 
6801  SUBROUTINE distributed_vector_data_get_sp(DISTRIBUTED_VECTOR,DATA,ERR,ERROR,*)
6803  !Argument variables
6804  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
6805  REAL(SP), POINTER :: DATA(:)
6806  INTEGER(INTG), INTENT(OUT) :: ERR
6807  TYPE(varying_string), INTENT(OUT) :: ERROR
6808  !Local Variables
6809  TYPE(varying_string) :: LOCAL_ERROR
6810 
6811  enters("DISTRIBUTED_VECTOR_DATA_GET_SP",err,error,*999)
6812 
6813  IF(ASSOCIATED(distributed_vector)) THEN
6814  IF(ASSOCIATED(data)) THEN
6815  CALL flagerror("Data is already associated.",err,error,*999)
6816  ELSE
6817  NULLIFY(data)
6818  IF(distributed_vector%VECTOR_FINISHED) THEN
6819  IF(distributed_vector%DATA_TYPE==matrix_vector_sp_type) THEN
6820  SELECT CASE(distributed_vector%LIBRARY_TYPE)
6821  CASE(distributed_matrix_vector_cmiss_type)
6822  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
6823  data=>distributed_vector%CMISS%DATA_SP
6824  ELSE
6825  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
6826  ENDIF
6827  CASE(distributed_matrix_vector_petsc_type)
6828  CALL flagerror("Cannot get data for a single precision PETSc distributed vector.",err,error,*999)
6829  CASE DEFAULT
6830  local_error="The distributed vector library type of "// &
6831  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
6832  CALL flagerror(local_error,err,error,*999)
6833  END SELECT
6834  ELSE
6835  local_error="The distributed data type of "// &
6836  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
6837  & " does not correspond to the single precision data type of the requested values."
6838  CALL flagerror(local_error,err,error,*999)
6839  ENDIF
6840  ELSE
6841  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
6842  ENDIF
6843  ENDIF
6844  ELSE
6845  CALL flagerror("Distributed vector is not associated.",err,error,*999)
6846  ENDIF
6847 
6848  exits("DISTRIBUTED_VECTOR_DATA_GET_SP")
6849  RETURN
6850 999 errorsexits("DISTRIBUTED_VECTOR_DATA_GET_SP",err,error)
6851  RETURN 1
6852  END SUBROUTINE distributed_vector_data_get_sp
6853 
6854  !
6855  !================================================================================================================================
6856  !
6857 
6859  SUBROUTINE distributed_vector_data_get_dp(DISTRIBUTED_VECTOR,DATA,ERR,ERROR,*)
6861  !Argument variables
6862  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
6863  REAL(DP), POINTER :: DATA(:)
6864  INTEGER(INTG), INTENT(OUT) :: ERR
6865  TYPE(varying_string), INTENT(OUT) :: ERROR
6866  !Local Variables
6867  TYPE(varying_string) :: LOCAL_ERROR
6868 
6869  enters("DISTRIBUTED_VECTOR_DATA_GET_DP",err,error,*999)
6870 
6871  IF(ASSOCIATED(distributed_vector)) THEN
6872  IF(ASSOCIATED(data)) THEN
6873  CALL flagerror("Data is already associated.",err,error,*999)
6874  ELSE
6875  NULLIFY(data)
6876  IF(distributed_vector%VECTOR_FINISHED) THEN
6877  IF(distributed_vector%DATA_TYPE==matrix_vector_dp_type) THEN
6878  SELECT CASE(distributed_vector%LIBRARY_TYPE)
6879  CASE(distributed_matrix_vector_cmiss_type)
6880  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
6881  data=>distributed_vector%CMISS%DATA_DP
6882  ELSE
6883  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
6884  ENDIF
6885  CASE(distributed_matrix_vector_petsc_type)
6886  IF(ASSOCIATED(distributed_vector%PETSC)) THEN
6887  IF(distributed_vector%PETSC%USE_OVERRIDE_VECTOR) THEN
6888  CALL petsc_vecgetarrayreadf90(distributed_vector%PETSC%OVERRIDE_VECTOR,DATA,err,error,*999)
6889  ELSE
6890  CALL petsc_vecgetarrayreadf90(distributed_vector%PETSC%VECTOR,DATA,err,error,*999)
6891  ENDIF
6892  ELSE
6893  CALL flagerror("Distributed vector PETSc is not associated.",err,error,*999)
6894  ENDIF
6895  CASE DEFAULT
6896  local_error="The distributed vector library type of "// &
6897  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
6898  CALL flagerror(local_error,err,error,*999)
6899  END SELECT
6900  ELSE
6901  local_error="The distributed data type of "// &
6902  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
6903  & " does not correspond to the double precision data type of the requested values."
6904  CALL flagerror(local_error,err,error,*999)
6905  ENDIF
6906  ELSE
6907  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
6908  ENDIF
6909  ENDIF
6910  ELSE
6911  CALL flagerror("Distributed vector is not associated.",err,error,*999)
6912  ENDIF
6913 
6914  exits("DISTRIBUTED_VECTOR_DATA_GET_DP")
6915  RETURN
6916 999 errorsexits("DISTRIBUTED_VECTOR_DATA_GET_DP",err,error)
6917  RETURN 1
6918  END SUBROUTINE distributed_vector_data_get_dp
6919 
6920  !
6921  !================================================================================================================================
6922  !
6923 
6925  SUBROUTINE distributed_vector_data_get_l(DISTRIBUTED_VECTOR,DATA,ERR,ERROR,*)
6927  !Argument variables
6928  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
6929  LOGICAL, POINTER :: DATA(:)
6930  INTEGER(INTG), INTENT(OUT) :: ERR
6931  TYPE(varying_string), INTENT(OUT) :: ERROR
6932  !Local Variables
6933  TYPE(varying_string) :: LOCAL_ERROR
6934 
6935  enters("DISTRIBUTED_VECTOR_DATA_GET_L",err,error,*999)
6936 
6937  IF(ASSOCIATED(distributed_vector)) THEN
6938  IF(ASSOCIATED(data)) THEN
6939  CALL flagerror("Data is already associated.",err,error,*999)
6940  ELSE
6941  NULLIFY(data)
6942  IF(distributed_vector%VECTOR_FINISHED) THEN
6943  IF(distributed_vector%DATA_TYPE==matrix_vector_l_type) THEN
6944  SELECT CASE(distributed_vector%LIBRARY_TYPE)
6945  CASE(distributed_matrix_vector_cmiss_type)
6946  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
6947  data=>distributed_vector%CMISS%DATA_L
6948  ELSE
6949  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
6950  ENDIF
6951  CASE(distributed_matrix_vector_petsc_type)
6952  CALL flagerror("Cannot get data for a logical PETSc distributed vector.",err,error,*999)
6953  CASE DEFAULT
6954  local_error="The distributed vector library type of "// &
6955  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
6956  CALL flagerror(local_error,err,error,*999)
6957  END SELECT
6958  ELSE
6959  local_error="The distributed data type of "//trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
6960  & " does not correspond to the logical data type of the requested values."
6961  CALL flagerror(local_error,err,error,*999)
6962  ENDIF
6963  ELSE
6964  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
6965  ENDIF
6966  ENDIF
6967  ELSE
6968  CALL flagerror("Distributed vector is not associated.",err,error,*999)
6969  ENDIF
6970 
6971  exits("DISTRIBUTED_VECTOR_DATA_GET_L")
6972  RETURN
6973 999 errorsexits("DISTRIBUTED_VECTOR_DATA_GET_L",err,error)
6974  RETURN 1
6975  END SUBROUTINE distributed_vector_data_get_l
6976 
6977  !
6978  !================================================================================================================================
6979  !
6980 
6982  SUBROUTINE distributed_vector_data_restore_intg(DISTRIBUTED_VECTOR,DATA,ERR,ERROR,*)
6984  !Argument variables
6985  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
6986  INTEGER(INTG), POINTER :: DATA(:)
6987  INTEGER(INTG), INTENT(OUT) :: ERR
6988  TYPE(varying_string), INTENT(OUT) :: ERROR
6989  !Local Variables
6990  TYPE(varying_string) :: LOCAL_ERROR
6991 
6992  enters("DISTRIBUTED_VECTOR_DATA_RESTORE_INTG",err,error,*999)
6993 
6994  IF(ASSOCIATED(distributed_vector)) THEN
6995  IF(ASSOCIATED(data)) THEN
6996  IF(distributed_vector%VECTOR_FINISHED) THEN
6997  SELECT CASE(distributed_vector%LIBRARY_TYPE)
6998  CASE(distributed_matrix_vector_cmiss_type)
6999  NULLIFY(data)
7000  CASE(distributed_matrix_vector_petsc_type)
7001  CALL flagerror("Cannot restore data for an integer PETSc distributed vector.",err,error,*999)
7002  CASE DEFAULT
7003  local_error="The distributed vector library type of "// &
7004  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
7005  CALL flagerror(local_error,err,error,*999)
7006  END SELECT
7007  ELSE
7008  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
7009  ENDIF
7010  ELSE
7011  CALL flagerror("Data is not associated.",err,error,*999)
7012  ENDIF
7013  ELSE
7014  CALL flagerror("Distributed vector is not associated.",err,error,*999)
7015  ENDIF
7016 
7017  exits("DISTRIBUTED_VECTOR_DATA_RESTORE_INTG")
7018  RETURN
7019 999 errorsexits("DISTRIBUTED_VECTOR_DATA_RESTORE_INTG",err,error)
7020  RETURN 1
7021  END SUBROUTINE distributed_vector_data_restore_intg
7022 
7023  !
7024  !================================================================================================================================
7025  !
7026 
7028  SUBROUTINE distributed_vector_data_restore_sp(DISTRIBUTED_VECTOR,DATA,ERR,ERROR,*)
7030  !Argument variables
7031  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
7032  REAL(SP), POINTER :: DATA(:)
7033  INTEGER(INTG), INTENT(OUT) :: ERR
7034  TYPE(varying_string), INTENT(OUT) :: ERROR
7035  !Local Variables
7036  TYPE(varying_string) :: LOCAL_ERROR
7037 
7038  enters("DISTRIBUTED_VECTOR_DATA_RESTORE_SP",err,error,*999)
7039 
7040  IF(ASSOCIATED(distributed_vector)) THEN
7041  IF(ASSOCIATED(data)) THEN
7042  IF(distributed_vector%VECTOR_FINISHED) THEN
7043  SELECT CASE(distributed_vector%LIBRARY_TYPE)
7044  CASE(distributed_matrix_vector_cmiss_type)
7045  NULLIFY(data)
7046  CASE(distributed_matrix_vector_petsc_type)
7047  CALL flagerror("Cannot restore data for a single precision PETSc distributed vector.",err,error,*999)
7048  CASE DEFAULT
7049  local_error="The distributed vector library type of "// &
7050  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
7051  CALL flagerror(local_error,err,error,*999)
7052  END SELECT
7053  ELSE
7054  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
7055  ENDIF
7056  ELSE
7057  CALL flagerror("Data is not associated.",err,error,*999)
7058  ENDIF
7059  ELSE
7060  CALL flagerror("Distributed vector is not associated.",err,error,*999)
7061  ENDIF
7062 
7063  exits("DISTRIBUTED_VECTOR_DATA_RESTORE_SP")
7064  RETURN
7065 999 errorsexits("DISTRIBUTED_VECTOR_DATA_RESTORE_SP",err,error)
7066  RETURN 1
7067  END SUBROUTINE distributed_vector_data_restore_sp
7068 
7069  !
7070  !================================================================================================================================
7071  !
7072 
7074  SUBROUTINE distributed_vector_data_restore_dp(DISTRIBUTED_VECTOR,DATA,ERR,ERROR,*)
7076  !Argument variables
7077  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
7078  REAL(DP), POINTER :: DATA(:)
7079  INTEGER(INTG), INTENT(OUT) :: ERR
7080  TYPE(varying_string), INTENT(OUT) :: ERROR
7081  !Local Variables
7082  TYPE(varying_string) :: LOCAL_ERROR
7083 
7084  enters("DISTRIBUTED_VECTOR_DATA_RESTORE_DP",err,error,*999)
7085 
7086  IF(ASSOCIATED(distributed_vector)) THEN
7087  IF(ASSOCIATED(data)) THEN
7088  IF(distributed_vector%VECTOR_FINISHED) THEN
7089  SELECT CASE(distributed_vector%LIBRARY_TYPE)
7090  CASE(distributed_matrix_vector_cmiss_type)
7091  NULLIFY(data)
7092  CASE(distributed_matrix_vector_petsc_type)
7093  IF(ASSOCIATED(distributed_vector%PETSC)) THEN
7094  IF(distributed_vector%PETSC%USE_OVERRIDE_VECTOR) THEN
7095  CALL petsc_vecrestorearrayreadf90(distributed_vector%PETSC%OVERRIDE_VECTOR,DATA,err,error,*999)
7096  ELSE
7097  CALL petsc_vecrestorearrayreadf90(distributed_vector%PETSC%VECTOR,DATA,err,error,*999)
7098  ENDIF
7099  ELSE
7100  CALL flagerror("Distributed vector PETSc is not associated.",err,error,*999)
7101  ENDIF
7102  CASE DEFAULT
7103  local_error="The distributed vector library type of "// &
7104  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
7105  CALL flagerror(local_error,err,error,*999)
7106  END SELECT
7107  ELSE
7108  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
7109  ENDIF
7110  ELSE
7111  CALL flagerror("Data is not associated.",err,error,*999)
7112  ENDIF
7113  ELSE
7114  CALL flagerror("Distributed vector is not associated.",err,error,*999)
7115  ENDIF
7116 
7117  exits("DISTRIBUTED_VECTOR_DATA_RESTORE_DP")
7118  RETURN
7119 999 errorsexits("DISTRIBUTED_VECTOR_DATA_RESTORE_DP",err,error)
7120  RETURN 1
7121  END SUBROUTINE distributed_vector_data_restore_dp
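  !Illustrative sketch (not part of the original source): read-only access to
  !the local values of a finished double precision vector. Every data-get must
  !be paired with a data-restore; for the PETSc backend this brackets the
  !petsc_vecgetarrayreadf90/petsc_vecrestorearrayreadf90 calls seen above on
  !the (possibly overridden) PETSc vector.
  SUBROUTINE example_sum_local_values(VEC,LOCAL_SUM,ERR,ERROR,*)
    TYPE(distributed_vector_type), POINTER :: VEC
    REAL(DP), INTENT(OUT) :: LOCAL_SUM
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(varying_string), INTENT(OUT) :: ERROR
    REAL(DP), POINTER :: values(:)
    NULLIFY(values) !data_get flags an error if the pointer is already associated
    CALL distributed_vector_data_get_dp(VEC,values,err,error,*999)
    LOCAL_SUM=SUM(values)
    CALL distributed_vector_data_restore_dp(VEC,values,err,error,*999)
    RETURN
999 RETURN 1
  END SUBROUTINE example_sum_local_values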
7122 
7123  !
7124  !================================================================================================================================
7125  !
7126 
7128  SUBROUTINE distributed_vector_data_restore_l(DISTRIBUTED_VECTOR,DATA,ERR,ERROR,*)
7130  !Argument variables
7131  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
7132  LOGICAL, POINTER :: DATA(:)
7133  INTEGER(INTG), INTENT(OUT) :: ERR
7134  TYPE(varying_string), INTENT(OUT) :: ERROR
7135  !Local Variables
7136  TYPE(varying_string) :: LOCAL_ERROR
7137 
7138  enters("DISTRIBUTED_VECTOR_DATA_RESTORE_L",err,error,*999)
7139 
7140  IF(ASSOCIATED(distributed_vector)) THEN
7141  IF(ASSOCIATED(data)) THEN
7142  IF(distributed_vector%VECTOR_FINISHED) THEN
7143  SELECT CASE(distributed_vector%LIBRARY_TYPE)
7144  CASE(distributed_matrix_vector_cmiss_type)
7145  NULLIFY(data)
7146  CASE(distributed_matrix_vector_petsc_type)
7147  CALL flagerror("Cannot restore data for a logical PETSc distributed vector.",err,error,*999)
7148  CASE DEFAULT
7149  local_error="The distributed vector library type of "// &
7150  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
7151  CALL flagerror(local_error,err,error,*999)
7152  END SELECT
7153  ELSE
7154  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
7155  ENDIF
7156  ELSE
7157  CALL flagerror("Data is not associated.",err,error,*999)
7158  ENDIF
7159  ELSE
7160  CALL flagerror("Distributed vector is not associated.",err,error,*999)
7161  ENDIF
7162 
7163  exits("DISTRIBUTED_VECTOR_DATA_RESTORE_L")
7164  RETURN
7165 999 errorsexits("DISTRIBUTED_VECTOR_DATA_RESTORE_L",err,error)
7166  RETURN 1
7167  END SUBROUTINE distributed_vector_data_restore_l
7168 
7169  !
7170  !================================================================================================================================
7171  !
7172 
7174  SUBROUTINE distributed_vector_ghosting_type_set(DISTRIBUTED_VECTOR,GHOSTING_TYPE,ERR,ERROR,*)
7176  !Argument variables
7177  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
7178  INTEGER(INTG), INTENT(IN) :: GHOSTING_TYPE
7179  INTEGER(INTG), INTENT(OUT) :: ERR
7180  TYPE(varying_string), INTENT(OUT) :: ERROR
7181  !Local Variables
7182  TYPE(varying_string) :: LOCAL_ERROR
7183 
7184  enters("DISTRIBUTED_VECTOR_GHOSTING_TYPE_SET",err,error,*999)
7185 
7186  IF(ASSOCIATED(distributed_vector)) THEN
7187  IF(distributed_vector%VECTOR_FINISHED) THEN
7188  CALL flagerror("The distributed vector has already been finished.",err,error,*999)
7189  ELSE
7190  IF(ASSOCIATED(distributed_vector%DOMAIN_MAPPING)) THEN
7191  SELECT CASE(distributed_vector%LIBRARY_TYPE)
7192  CASE(distributed_matrix_vector_cmiss_type)
7193  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
7194  SELECT CASE(ghosting_type)
7195  CASE(distributed_matrix_vector_include_ghosts_type)
7196  distributed_vector%CMISS%N=distributed_vector%DOMAIN_MAPPING%TOTAL_NUMBER_OF_LOCAL
7197  CASE(distributed_matrix_vector_no_ghosts_type)
7198  distributed_vector%CMISS%N=distributed_vector%DOMAIN_MAPPING%NUMBER_OF_LOCAL
7199  CASE DEFAULT
7200  local_error="The given ghosting type of "//trim(numbertovstring(ghosting_type,"*",err,error))//" is invalid."
7201  CALL flagerror(local_error,err,error,*999)
7202  END SELECT
7203  ELSE
7204  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
7205  ENDIF
7206  CASE(distributed_matrix_vector_petsc_type)
7207  IF(ASSOCIATED(distributed_vector%PETSC)) THEN
7208  SELECT CASE(ghosting_type)
7209  CASE(distributed_matrix_vector_include_ghosts_type)
7210  distributed_vector%PETSC%N=distributed_vector%DOMAIN_MAPPING%TOTAL_NUMBER_OF_LOCAL
7211  CASE(distributed_matrix_vector_no_ghosts_type)
7212  distributed_vector%PETSC%N=distributed_vector%DOMAIN_MAPPING%NUMBER_OF_LOCAL
7213  CASE DEFAULT
7214  local_error="The given ghosting type of "//trim(numbertovstring(ghosting_type,"*",err,error))//" is invalid."
7215  CALL flagerror(local_error,err,error,*999)
7216  END SELECT
7217  ELSE
7218  CALL flagerror("Distributed vector PETSc is not associated.",err,error,*999)
7219  ENDIF
7220  CASE DEFAULT
7221  local_error="The distributed vector library type of "// &
7222  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
7223  CALL flagerror(local_error,err,error,*999)
7224  END SELECT
7225  distributed_vector%GHOSTING_TYPE=ghosting_type
7226  ELSE
7227  CALL flagerror("Distributed vector domain mapping is not associated.",err,error,*999)
7228  ENDIF
7229  ENDIF
7230  ELSE
7231  CALL flagerror("Distributed vector is not associated.",err,error,*999)
7232  ENDIF
7233 
7234  exits("DISTRIBUTED_VECTOR_GHOSTING_TYPE_SET")
7235  RETURN
7236 999 errorsexits("DISTRIBUTED_VECTOR_GHOSTING_TYPE_SET",err,error)
7237  RETURN 1
7238  END SUBROUTINE distributed_vector_ghosting_type_set
7239 
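  !For example (illustrative numbers only): with NUMBER_OF_LOCAL=100 owned
  !entries and 12 ghosts, TOTAL_NUMBER_OF_LOCAL=112, so an include-ghosts
  !vector stores 112 local values while a no-ghosts vector stores only the
  !100 it owns and takes no part in the ghost update transfers set up in
  !distributed_vector_cmiss_create_finish.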
7240  !
7241  !================================================================================================================================
7242  !
7243 
7245  SUBROUTINE distributed_vector_library_type_set(DISTRIBUTED_VECTOR,LIBRARY_TYPE,ERR,ERROR,*)
7247  !Argument variables
7248  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
7249  INTEGER(INTG), INTENT(IN) :: LIBRARY_TYPE
7250  INTEGER(INTG), INTENT(OUT) :: ERR
7251  TYPE(varying_string), INTENT(OUT) :: ERROR
7252  !Local Variables
7253  INTEGER(INTG) :: DUMMY_ERR,OLD_LIBRARY_TYPE
7254  TYPE(varying_string) :: DUMMY_ERROR,LOCAL_ERROR
7255 
7256  enters("DISTRIBUTED_VECTOR_LIBRARY_TYPE_SET",err,error,*998)
7257 
7258  IF(ASSOCIATED(distributed_vector)) THEN
7259  IF(distributed_vector%VECTOR_FINISHED) THEN
7260  CALL flagerror("The distributed vector has already been finished.",err,error,*998)
7261  ELSE
7262  old_library_type=distributed_vector%LIBRARY_TYPE
7263  IF(library_type/=old_library_type) THEN
7264  !Initialise the new library type
7265  SELECT CASE(library_type)
7266  CASE(distributed_matrix_vector_cmiss_type)
7267  CALL distributed_vector_cmiss_initialise(distributed_vector,err,error,*999)
7268  CASE(distributed_matrix_vector_petsc_type)
7269  CALL distributed_vector_petsc_initialise(distributed_vector,err,error,*999)
7270  CASE DEFAULT
7271  local_error="The distributed vector library type of "//trim(numbertovstring(library_type,"*",err,error))// &
7272  & " is invalid."
7273  CALL flagerror(local_error,err,error,*999)
7274  END SELECT
7275  !Finalise the old library type
7276  SELECT CASE(old_library_type)
7277  CASE(distributed_matrix_vector_cmiss_type)
7278  CALL distributed_vector_cmiss_finalise(distributed_vector%CMISS,err,error,*999)
7279  CASE(distributed_matrix_vector_petsc_type)
7280  CALL distributed_vector_petsc_finalise(distributed_vector%PETSC,err,error,*999)
7281  CASE DEFAULT
7282  local_error="The distributed vector library type of "// &
7283  & trim(numbertovstring(old_library_type,"*",err,error))//" is invalid."
7284  CALL flagerror(local_error,err,error,*999)
7285  END SELECT
7286  distributed_vector%LIBRARY_TYPE=library_type
7287  ENDIF
7288  ENDIF
7289  ELSE
7290  CALL flagerror("Distributed vector is not associated.",err,error,*998)
7291  ENDIF
7292 
7293  exits("DISTRIBUTED_VECTOR_LIBRARY_TYPE_SET")
7294  RETURN
7295 999 SELECT CASE(library_type)
7296  CASE(distributed_matrix_vector_cmiss_type)
7297  CALL distributed_vector_cmiss_finalise(distributed_vector%CMISS,dummy_err,dummy_error,*998)
7298  CASE(distributed_matrix_vector_petsc_type)
7299  CALL distributed_vector_petsc_finalise(distributed_vector%PETSC,dummy_err,dummy_error,*998)
7300  END SELECT
7301 998 errorsexits("DISTRIBUTED_VECTOR_LIBRARY_TYPE_SET",err,error)
7302  RETURN 1
7303  END SUBROUTINE distributed_vector_library_type_set
7304 
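  !Illustrative sketch (not part of the original source): selecting the PETSc
  !backend for a vector before finishing it. Note from the code above that the
  !new backend is initialised before the old one is finalised, so a failure
  !part-way through cannot leave the vector with no backend, and that a PETSc
  !distributed vector supports only the (default) double precision data type.
  SUBROUTINE example_use_petsc_backend(DOMAIN_MAP,VEC,ERR,ERROR,*)
    TYPE(domain_mapping_type), POINTER :: DOMAIN_MAP
    TYPE(distributed_vector_type), POINTER :: VEC
    INTEGER(INTG), INTENT(OUT) :: ERR
    TYPE(varying_string), INTENT(OUT) :: ERROR
    CALL distributed_vector_create_start(DOMAIN_MAP,VEC,err,error,*999)
    CALL distributed_vector_library_type_set(VEC,distributed_matrix_vector_petsc_type,err,error,*999)
    CALL distributed_vector_create_finish(VEC,err,error,*999)
    RETURN
999 RETURN 1
  END SUBROUTINE example_use_petsc_backend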
7305  !
7306  !================================================================================================================================
7307  !
7308 
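7309  !>Outputs the contents of a distributed vector to the output stream given by ID.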
7310  SUBROUTINE distributed_vector_output(ID,DISTRIBUTED_VECTOR,ERR,ERROR,*)
7312  !Argument variables
7313  INTEGER(INTG), INTENT(IN) :: ID
7314  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
7315  INTEGER(INTG), INTENT(OUT) :: ERR
7316  TYPE(varying_string), INTENT(OUT) :: ERROR
7317  !Local Variables
7318  REAL(DP), POINTER :: VECTOR(:)
7319  TYPE(varying_string) :: LOCAL_ERROR
7320 
7321  enters("DISTRIBUTED_VECTOR_OUTPUT",err,error,*999)
7322 
7323  IF(ASSOCIATED(distributed_vector)) THEN
7324  IF(distributed_vector%VECTOR_FINISHED) THEN
7325  SELECT CASE(distributed_vector%LIBRARY_TYPE)
7326  CASE(distributed_matrix_vector_cmiss_type)
7327  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
7328  SELECT CASE(distributed_vector%DATA_TYPE)
7329  CASE(matrix_vector_intg_type)
7330  CALL write_string_vector(id,1,1,distributed_vector%CMISS%N,8,8,distributed_vector%CMISS%DATA_INTG, &
7331  & '("Vector(:) :",8(X,I13))','(20X,8(X,I13))',err,error,*999)
7332  CASE(matrix_vector_sp_type)
7333  CALL write_string_vector(id,1,1,distributed_vector%CMISS%N,8,8,distributed_vector%CMISS%DATA_SP, &
7334  & '("Vector(:) :",8(X,E13.6))','(20X,8(X,E13.6))',err,error,*999)
7335  CASE(matrix_vector_dp_type)
7336  CALL write_string_vector(id,1,1,distributed_vector%CMISS%N,8,8,distributed_vector%CMISS%DATA_DP, &
7337  & '("Vector(:) :",8(X,E13.6))','(20X,8(X,E13.6))',err,error,*999)
7338  CASE(matrix_vector_l_type)
7339  CALL write_string_vector(id,1,1,distributed_vector%CMISS%N,8,8,distributed_vector%CMISS%DATA_L, &
7340  & '("Vector(:) :",8(X,L13))','(20X,8(X,L13))',err,error,*999)
7341  CASE DEFAULT
7342  local_error="The distributed vector data type of "// &
7343  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))//" is invalid."
7344  CALL flagerror(local_error,err,error,*999)
7345  END SELECT
7346  ELSE
7347  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
7348  ENDIF
7349  CASE(distributed_matrix_vector_petsc_type)
7350  IF(ASSOCIATED(distributed_vector%PETSC)) THEN
7351  NULLIFY(vector)
7352  IF(distributed_vector%PETSC%USE_OVERRIDE_VECTOR) THEN
7353  CALL petsc_vecgetarrayreadf90(distributed_vector%PETSC%OVERRIDE_VECTOR,vector,err,error,*999)
7354  ELSE
7355  CALL petsc_vecgetarrayreadf90(distributed_vector%PETSC%VECTOR,vector,err,error,*999)
7356  ENDIF
7357  CALL write_string_vector(id,1,1,distributed_vector%PETSC%N,8,8,vector, &
7358  & '("Vector(:) :",8(X,E13.6))','(20X,8(X,E13.6))',err,error,*999)
7359  IF(distributed_vector%PETSC%USE_OVERRIDE_VECTOR) THEN
7360  CALL petsc_vecrestorearrayreadf90(distributed_vector%PETSC%OVERRIDE_VECTOR,vector,err,error,*999)
7361  ELSE
7362  CALL petsc_vecrestorearrayreadf90(distributed_vector%PETSC%VECTOR,vector,err,error,*999)
7363  ENDIF
7364  ELSE
7365  CALL flagerror("Distributed vector PETSc is not associated.",err,error,*999)
7366  ENDIF
7367  CASE DEFAULT
7368  local_error="The distributed vector library type of "// &
7369  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
7370  CALL flagerror(local_error,err,error,*999)
7371  END SELECT
7372  ELSE
7373  CALL flagerror("Distributed vector has not been finished.",err,error,*999)
7374  ENDIF
7375  ELSE
7376  CALL flagerror("Distributed vector is not associated.",err,error,*999)
7377  ENDIF
7378 
7379  exits("DISTRIBUTED_VECTOR_OUTPUT")
7380  RETURN
7381 999 errorsexits("DISTRIBUTED_VECTOR_OUTPUT",err,error)
7382  RETURN 1
7383  END SUBROUTINE distributed_vector_output
7384 
7385  !
7386  !================================================================================================================================
7387  !
7388 
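7389  !>Turns on the override vector, so that the given PETSc vector is used in place of the standard vector.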
7390  SUBROUTINE distributed_vector_override_set_on(DISTRIBUTED_VECTOR,OVERRIDE_VECTOR,ERR,ERROR,*)
7392  !Argument variables
7393  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
7394  TYPE(petscvectype), INTENT(IN) :: OVERRIDE_VECTOR
7395  INTEGER(INTG), INTENT(OUT) :: ERR
7396  TYPE(varying_string), INTENT(OUT) :: ERROR
7397  !Local Variables
7398  TYPE(varying_string) :: LOCAL_ERROR
7399 
7400  enters("DISTRIBUTED_VECTOR_OVERRIDE_SET_ON",err,error,*999)
7401 
7402  IF(ASSOCIATED(distributed_vector)) THEN
7403  IF(distributed_vector%VECTOR_FINISHED) THEN
7404  SELECT CASE(distributed_vector%LIBRARY_TYPE)
7405  CASE(distributed_matrix_vector_cmiss_type)
7406  CALL flagerror("Not implemented.",err,error,*999)
7407  CASE(distributed_matrix_vector_petsc_type)
7408  IF(ASSOCIATED(distributed_vector%PETSC)) THEN
7409  distributed_vector%PETSC%USE_OVERRIDE_VECTOR=.true.
7410  distributed_vector%PETSC%OVERRIDE_VECTOR=override_vector
7411  ELSE
7412  CALL flagerror("Distributed vector PETSc is not associated.",err,error,*999)
7413  ENDIF
7414  CASE DEFAULT
7415  local_error="The distributed vector library type of "// &
7416  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
7417  CALL flagerror(local_error,err,error,*999)
7418  END SELECT
7419  ELSE
7420  CALL flagerror("Distributed vector has not been finished.",err,error,*999)
7421  ENDIF
7422  ELSE
7423  CALL flagerror("Distributed vector is not associated.",err,error,*999)
7424  ENDIF
7425 
7426  exits("DISTRIBUTED_VECTOR_OVERRIDE_SET_ON")
7427  RETURN
7428 999 errorsexits("DISTRIBUTED_VECTOR_OVERRIDE_SET_ON",err,error)
7429  RETURN 1
7430  END SUBROUTINE distributed_vector_override_set_on
7431 
7432  !
7433  !================================================================================================================================
7434  !
7435 
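7436  !>Turns off the override vector, so that the standard vector is used again.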
7437  SUBROUTINE distributed_vector_override_set_off(DISTRIBUTED_VECTOR,ERR,ERROR,*)
7439  !Argument variables
7440  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
7441  INTEGER(INTG), INTENT(OUT) :: ERR
7442  TYPE(varying_string), INTENT(OUT) :: ERROR
7443  !Local Variables
7444  TYPE(varying_string) :: LOCAL_ERROR
7445 
7446  enters("DISTRIBUTED_VECTOR_OVERRIDE_SET_OFF",err,error,*999)
7447 
7448  IF(ASSOCIATED(distributed_vector)) THEN
7449  IF(distributed_vector%VECTOR_FINISHED) THEN
7450  SELECT CASE(distributed_vector%LIBRARY_TYPE)
7451  CASE(distributed_matrix_vector_cmiss_type)
7452  CALL flagerror("Not implemented.",err,error,*999)
7453  CASE(distributed_matrix_vector_petsc_type)
7454  IF(ASSOCIATED(distributed_vector%PETSC)) THEN
7455  distributed_vector%PETSC%USE_OVERRIDE_VECTOR=.false.
7456  CALL petsc_vecinitialise(distributed_vector%PETSC%OVERRIDE_VECTOR,err,error,*999)
7457  ELSE
7458  CALL flagerror("Distributed vector PETSc is not associated.",err,error,*999)
7459  ENDIF
7460  CASE DEFAULT
7461  local_error="The distributed vector library type of "// &
7462  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
7463  CALL flagerror(local_error,err,error,*999)
7464  END SELECT
7465  ELSE
7466  CALL flagerror("Distributed vector has not been finished.",err,error,*999)
7467  ENDIF
7468  ELSE
7469  CALL flagerror("Distributed vector is not associated.",err,error,*999)
7470  ENDIF
7471 
7472  exits("DISTRIBUTED_VECTOR_OVERRIDE_SET_OFF")
7473  RETURN
7474 999 errorsexits("DISTRIBUTED_VECTOR_OVERRIDE_SET_OFF",err,error)
7475  RETURN 1
7476  END SUBROUTINE distributed_vector_override_set_off
7477 
7478  !
7479  !================================================================================================================================
7480  !
7481 
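7482  !>Finishes the creation of a PETSc distributed vector: creates the PETSc vector and sets up the local-to-global mappings.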
7483  SUBROUTINE distributed_vector_petsc_create_finish(PETSC_VECTOR,ERR,ERROR,*)
7485  !Argument variables
7486  TYPE(distributed_vector_petsc_type), POINTER :: PETSC_VECTOR
7487  INTEGER(INTG), INTENT(OUT) :: ERR
7488  TYPE(varying_string), INTENT(OUT) :: ERROR
7489  !Local Variables
7490  INTEGER(INTG) :: DUMMY_ERR,i
7491  INTEGER(INTG), ALLOCATABLE :: GLOBAL_NUMBERS(:)
7492  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
7493  TYPE(domain_mapping_type), POINTER :: DOMAIN_MAPPING
7494  TYPE(varying_string) :: DUMMY_ERROR
7495 
7496  enters("DISTRIBUTED_VECTOR_PETSC_CREATE_FINISH",err,error,*998)
7497 
7498  IF(ASSOCIATED(petsc_vector)) THEN
7499  distributed_vector=>petsc_vector%DISTRIBUTED_VECTOR
7500  IF(ASSOCIATED(distributed_vector)) THEN
7501  domain_mapping=>distributed_vector%DOMAIN_MAPPING
7502  IF(ASSOCIATED(domain_mapping)) THEN
7503  !Create the PETSc vector
7504  petsc_vector%DATA_SIZE=petsc_vector%N
7505  CALL petsc_veccreatempi(computational_environment%MPI_COMM,petsc_vector%N,petsc_vector%GLOBAL_N,petsc_vector%VECTOR, &
7506  & err,error,*999)
7507  !Set up the Local to Global Mappings
7508  DO i=1,petsc_vector%N
7509  petsc_vector%GLOBAL_NUMBERS(i)=domain_mapping%LOCAL_TO_GLOBAL_MAP(i)-1
7510  ENDDO !i
7511  ELSE
7512  CALL flagerror("PETSc vector distributed vector domain mapping is not associated.",err,error,*999)
7513  ENDIF
7514  ENDIF
7515  ELSE
7516  CALL flagerror("PETSc vector is not associated.",err,error,*998)
7517  ENDIF
7518 
7519  exits("DISTRIBUTED_VECTOR_PETSC_CREATE_FINISH")
7520  RETURN
7521 999 IF(ALLOCATED(global_numbers)) DEALLOCATE(global_numbers)
7522  CALL distributed_vector_petsc_finalise(petsc_vector,dummy_err,dummy_error,*998)
7523 998 errorsexits("DISTRIBUTED_VECTOR_PETSC_CREATE_FINISH",err,error)
7524  RETURN 1
7525  END SUBROUTINE distributed_vector_petsc_create_finish
7526 
7527  !
7528  !================================================================================================================================
7529  !
7530 
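7531  !>Finalises a PETSc distributed vector and deallocates all memory.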
7532  SUBROUTINE distributed_vector_petsc_finalise(PETSC_VECTOR,ERR,ERROR,*)
7534  !Argument variables
7535  TYPE(distributed_vector_petsc_type), POINTER :: PETSC_VECTOR
7536  INTEGER(INTG), INTENT(OUT) :: ERR
7537  TYPE(varying_string), INTENT(OUT) :: ERROR
7538  !Local Variables
7539 
7540  enters("DISTRIBUTED_VECTOR_PETSC_FINALISE",err,error,*999)
7541 
7542  IF(ASSOCIATED(petsc_vector)) THEN
7543  IF(ALLOCATED(petsc_vector%GLOBAL_NUMBERS)) DEALLOCATE(petsc_vector%GLOBAL_NUMBERS)
7544  CALL petsc_vecfinalise(petsc_vector%VECTOR,err,error,*999)
7545  CALL petsc_vecfinalise(petsc_vector%OVERRIDE_VECTOR,err,error,*999)
7546  DEALLOCATE(petsc_vector)
7547  ENDIF
7548 
7549  exits("DISTRIBUTED_VECTOR_PETSC_FINALISE")
7550  RETURN
7551 999 errorsexits("DISTRIBUTED_VECTOR_PETSC_FINALISE",err,error)
7552  RETURN 1
7553  END SUBROUTINE distributed_vector_petsc_finalise
7554 
7555  !
7556  !================================================================================================================================
7557  !
7558 
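7559  !>Initialises the PETSc data structures for a distributed vector.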
7560  SUBROUTINE distributed_vector_petsc_initialise(DISTRIBUTED_VECTOR,ERR,ERROR,*)
7562  !Argument variables
7563  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
7564  INTEGER(INTG), INTENT(OUT) :: ERR
7565  TYPE(varying_string), INTENT(OUT) :: ERROR
7566  !Local Variables
7567  INTEGER(INTG) :: DUMMY_ERR
7568  TYPE(varying_string) :: DUMMY_ERROR,LOCAL_ERROR
7569 
7570  enters("DISTRIBUTED_VECTOR_PETSC_INITIALISE",err,error,*998)
7571 
7572  IF(ASSOCIATED(distributed_vector)) THEN
7573  IF(ASSOCIATED(distributed_vector%PETSC)) THEN
7574  CALL flagerror("PETSc is already associated for this distributed vector.",err,error,*998)
7575  ELSE
7576  IF(ASSOCIATED(distributed_vector%DOMAIN_MAPPING)) THEN
7577  ALLOCATE(distributed_vector%PETSC,stat=err)
7578  IF(err/=0) CALL flagerror("Could not allocate PETSc distributed vector.",err,error,*999)
7579  distributed_vector%PETSC%DISTRIBUTED_VECTOR=>distributed_vector
7580  distributed_vector%LIBRARY_TYPE=distributed_matrix_vector_petsc_type
7581  !Set the defaults
7582  SELECT CASE(distributed_vector%GHOSTING_TYPE)
7583  CASE(distributed_matrix_vector_include_ghosts_type)
7584  distributed_vector%PETSC%N=distributed_vector%DOMAIN_MAPPING%TOTAL_NUMBER_OF_LOCAL
7585  CASE(distributed_matrix_vector_no_ghosts_type)
7586  distributed_vector%PETSC%N=distributed_vector%DOMAIN_MAPPING%NUMBER_OF_LOCAL
7587  CASE DEFAULT
7588  local_error="The distributed vector ghosting type of "// &
7589  & trim(numbertovstring(distributed_vector%GHOSTING_TYPE,"*",err,error))//" is invalid."
7590  CALL flagerror(local_error,err,error,*999)
7591  END SELECT
7592  distributed_vector%PETSC%GLOBAL_N=distributed_vector%DOMAIN_MAPPING%NUMBER_OF_GLOBAL
7593  ALLOCATE(distributed_vector%PETSC%GLOBAL_NUMBERS(distributed_vector%PETSC%N),stat=err)
7594  IF(err/=0) CALL flagerror("Could not allocate PETSc distributed vector global numbers.",err,error,*999)
7595  distributed_vector%PETSC%USE_OVERRIDE_VECTOR=.false.
7596  CALL petsc_vecinitialise(distributed_vector%PETSC%VECTOR,err,error,*999)
7597  CALL petsc_vecinitialise(distributed_vector%PETSC%OVERRIDE_VECTOR,err,error,*999)
7598  ELSE
7599  CALL flagerror("Distributed vector domain mapping is not associated.",err,error,*998)
7600  ENDIF
7601  ENDIF
7602  ELSE
7603  CALL flagerror("Distributed vector is not associated.",err,error,*998)
7604  ENDIF
7605 
7606  exits("DISTRIBUTED_VECTOR_PETSC_INITIALISE")
7607  RETURN
7608 999 IF(ASSOCIATED(distributed_vector%PETSC)) &
7609  & CALL distributed_vector_petsc_finalise(distributed_vector%PETSC,dummy_err,dummy_error,*998)
7610 998 errorsexits("DISTRIBUTED_VECTOR_PETSC_INITIALISE",err,error)
7611  RETURN 1
7612  END SUBROUTINE distributed_vector_petsc_initialise
7613 
7614  !
7615  !================================================================================================================================
7616  !
7617 
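7618  !>Finalises the transfer information for the given domain index of a CMISS distributed vector and deallocates the transfer buffers.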
7619  SUBROUTINE distributed_vector_cmiss_transfer_finalise(CMISS_VECTOR,domain_idx,ERR,ERROR,*)
7621  !Argument variables
7622  TYPE(distributed_vector_cmiss_type), POINTER :: CMISS_VECTOR
7623  INTEGER(INTG), INTENT(IN) :: domain_idx
7624  INTEGER(INTG), INTENT(OUT) :: ERR
7625  TYPE(varying_string), INTENT(OUT) :: ERROR
7626  !Local Variables
7627  TYPE(varying_string) :: LOCAL_ERROR
7628 
7629  enters("DISTRIBUTED_VECTOR_CMISS_TRANSFER_FINALISE",err,error,*999)
7630 
7631  IF(ASSOCIATED(cmiss_vector)) THEN
7632  IF(ALLOCATED(cmiss_vector%TRANSFERS)) THEN
7633  IF(domain_idx>0.AND.domain_idx<=SIZE(cmiss_vector%TRANSFERS,1)) THEN
7634  NULLIFY(cmiss_vector%TRANSFERS(domain_idx)%CMISS_VECTOR)
7635  cmiss_vector%TRANSFERS(domain_idx)%DATA_TYPE=0
7636  cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_TAG_NUMBER=-1
7637  cmiss_vector%TRANSFERS(domain_idx)%SEND_TAG_NUMBER=-1
7638  cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_SIZE=0
7639  cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_SIZE=0
7640  cmiss_vector%TRANSFERS(domain_idx)%MPI_SEND_REQUEST=mpi_request_null
7641  cmiss_vector%TRANSFERS(domain_idx)%MPI_RECEIVE_REQUEST=mpi_request_null
7642  IF(ALLOCATED(cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_INTG)) &
7643  & DEALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_INTG)
7644  IF(ALLOCATED(cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_SP)) &
7645  & DEALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_SP)
7646  IF(ALLOCATED(cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_DP)) &
7647  & DEALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_DP)
7648  IF(ALLOCATED(cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_L)) &
7649  & DEALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_L)
7650  IF(ALLOCATED(cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_INTG)) &
7651  & DEALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_INTG)
7652  IF(ALLOCATED(cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_SP)) &
7653  & DEALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_SP)
7654  IF(ALLOCATED(cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_DP)) &
7655  & DEALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_DP)
7656  IF(ALLOCATED(cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_L)) &
7657  & DEALLOCATE(cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_L)
7658  ELSE
7659  local_error="The domain index of "//trim(numbertovstring(domain_idx,"*",err,error))// &
7660  & " is invalid. It must be between 1 and "//trim(numbertovstring(SIZE(cmiss_vector%TRANSFERS,1),"*",err,error))//"."
7661  CALL flagerror(local_error,err,error,*999)
7662  ENDIF
7663  ENDIF
7664  ENDIF
7665 
7666  exits("DISTRIBUTED_VECTOR_CMISS_TRANSFER_FINALISE")
7667  RETURN
7668 999 errorsexits("DISTRIBUTED_VECTOR_CMISS_TRANSFER_FINALISE",err,error)
7669  RETURN 1
7670  END SUBROUTINE distributed_vector_cmiss_transfer_finalise
7671 
7672  !
7673  !================================================================================================================================
7674  !
7675 
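7676  !>Initialises the transfer information for the given domain index of a CMISS distributed vector.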
7677  SUBROUTINE distributedvector_cmisstransferinitialise(CMISS_VECTOR,domain_idx,ERR,ERROR,*)
7679  !Argument variables
7680  TYPE(distributed_vector_cmiss_type), POINTER :: CMISS_VECTOR
7681  INTEGER(INTG), INTENT(IN) :: domain_idx
7682  INTEGER(INTG), INTENT(OUT) :: ERR
7683  TYPE(varying_string), INTENT(OUT) :: ERROR
7684  !Local Variables
7685  TYPE(varying_string) :: LOCAL_ERROR
7686 
7687  enters("DistributedVector_CmissTransferInitialise",err,error,*999)
7688 
7689  IF(ASSOCIATED(cmiss_vector)) THEN
7690  IF(ALLOCATED(cmiss_vector%TRANSFERS)) THEN
7691  IF(domain_idx>0.AND.domain_idx<=SIZE(cmiss_vector%TRANSFERS,1)) THEN
7692  cmiss_vector%TRANSFERS(domain_idx)%CMISS_VECTOR=>cmiss_vector
7693  cmiss_vector%TRANSFERS(domain_idx)%DATA_TYPE=0
7694  cmiss_vector%TRANSFERS(domain_idx)%SEND_BUFFER_SIZE=0
7695  cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_BUFFER_SIZE=0
7696  cmiss_vector%TRANSFERS(domain_idx)%SEND_TAG_NUMBER=-1
7697  cmiss_vector%TRANSFERS(domain_idx)%RECEIVE_TAG_NUMBER=-1
7698  cmiss_vector%TRANSFERS(domain_idx)%MPI_SEND_REQUEST=mpi_request_null
7699  cmiss_vector%TRANSFERS(domain_idx)%MPI_RECEIVE_REQUEST=mpi_request_null
7700  ELSE
7701  local_error="The domain index of "//trim(numbertovstring(domain_idx,"*",err,error))// &
7702  & " is invalid. It must be between 1 and "// &
7703  & trim(numbertovstring(SIZE(cmiss_vector%TRANSFERS,1),"*",err,error))//"."
7704  CALL flagerror(local_error,err,error,*999)
7705  ENDIF
7706  ELSE
7707  CALL flagerror("CMISS vector transfers is not allocated.",err,error,*999)
7708  ENDIF
7709  ELSE
7710  CALL flagerror("CMISS vector is not associated.",err,error,*999)
7711  ENDIF
7712 
7713  exits("DistributedVector_CmissTransferInitialise")
7714  RETURN
7715 999 errorsexits("DistributedVector_CmissTransferInitialise",err,error)
7716  RETURN 1
7717  END SUBROUTINE distributedvector_cmisstransferinitialise
7718 
7719  !
7720  !================================================================================================================================
7721  !
7722 
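7723  !>Finishes the update (ghost transfer) of a distributed vector: waits for the transfers to complete and copies the receive buffers into the ghost positions.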
7724  SUBROUTINE distributed_vector_update_finish(DISTRIBUTED_VECTOR,ERR,ERROR,*)
7726  !Argument variables
7727  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
7728  INTEGER(INTG), INTENT(OUT) :: ERR
7729  TYPE(varying_string), INTENT(OUT) :: ERROR
7730  !Local Variables
7731  INTEGER(INTG) :: domain_idx,i,NUMBER_OF_COMPUTATIONAL_NODES
7732  TYPE(varying_string) :: LOCAL_ERROR
7733 
7734  enters("DISTRIBUTED_VECTOR_UPDATE_FINISH",err,error,*999)
7735 
7736  IF(ASSOCIATED(distributed_vector)) THEN
7737  IF(distributed_vector%VECTOR_FINISHED) THEN
7738  SELECT CASE(distributed_vector%LIBRARY_TYPE)
7739  CASE(distributed_matrix_vector_cmiss_type)
7740  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
7741  IF(ASSOCIATED(distributed_vector%DOMAIN_MAPPING)) THEN
7742  number_of_computational_nodes=computational_nodes_number_get(err,error)
7743  IF(err/=0) GOTO 999
7744  IF(number_of_computational_nodes>1) THEN
7745  CALL distributed_vector_update_waitfinished(distributed_vector,err,error,*999)
7746  !Copy the receive buffers back to the ghost positions in the data vector
7747  DO domain_idx=1,distributed_vector%DOMAIN_MAPPING%NUMBER_OF_ADJACENT_DOMAINS
7748  SELECT CASE(distributed_vector%DATA_TYPE)
7749  CASE(matrix_vector_intg_type)
7750  DO i=1,distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)%NUMBER_OF_RECEIVE_GHOSTS
7751  distributed_vector%CMISS%DATA_INTG(distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)% &
7752  & local_ghost_receive_indices(i))=distributed_vector%CMISS%TRANSFERS(domain_idx)%RECEIVE_BUFFER_INTG(i)
7753  ENDDO !i
7754  CASE(matrix_vector_sp_type)
7755  DO i=1,distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)%NUMBER_OF_RECEIVE_GHOSTS
7756  distributed_vector%CMISS%DATA_SP(distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)% &
7757  & local_ghost_receive_indices(i))=distributed_vector%CMISS%TRANSFERS(domain_idx)%RECEIVE_BUFFER_SP(i)
7758  ENDDO !i
7759  CASE(matrix_vector_dp_type)
7760  DO i=1,distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)%NUMBER_OF_RECEIVE_GHOSTS
7761  distributed_vector%CMISS%DATA_DP(distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)% &
7762  & local_ghost_receive_indices(i))=distributed_vector%CMISS%TRANSFERS(domain_idx)%RECEIVE_BUFFER_DP(i)
7763  ENDDO !i
7764  CASE(matrix_vector_l_type)
7765  DO i=1,distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)%NUMBER_OF_RECEIVE_GHOSTS
7766  distributed_vector%CMISS%DATA_L(distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)% &
7767  & local_ghost_receive_indices(i))=distributed_vector%CMISS%TRANSFERS(domain_idx)%RECEIVE_BUFFER_L(i)
7768  ENDDO !i
7769  CASE DEFAULT
7770  local_error="The distributed vector data type of "// &
7771  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))//" is invalid."
7772  CALL flagerror(local_error,err,error,*999)
7773  END SELECT
7774  ENDDO !domain_idx
7775  ENDIF
7776  ELSE
7777  CALL flagerror("Distributed vector domain mapping is not associated.",err,error,*999)
7778  ENDIF
7779  ELSE
7780  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
7781  ENDIF
7782  CASE(distributed_matrix_vector_petsc_type)
7783  IF(ASSOCIATED(distributed_vector%PETSC)) THEN
7784  IF(distributed_vector%PETSC%USE_OVERRIDE_VECTOR) THEN
7785  CALL petsc_vecassemblyend(distributed_vector%PETSC%OVERRIDE_VECTOR,err,error,*999)
7786  ELSE
7787  CALL petsc_vecassemblyend(distributed_vector%PETSC%VECTOR,err,error,*999)
7788  ENDIF
7789  ELSE
7790  CALL flagerror("Distributed vector PETSc is not associated.",err,error,*999)
7791  ENDIF
7792  CASE DEFAULT
7793  local_error="The distributed vector library type of "// &
7794  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
7795  CALL flagerror(local_error,err,error,*999)
7796  END SELECT
7797  ELSE
7798  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
7799  ENDIF
7800  ELSE
7801  CALL flagerror("Distributed vector is not associated.",err,error,*999)
7802  ENDIF
7803 
7804  IF(diagnostics1) THEN
7805  SELECT CASE(distributed_vector%LIBRARY_TYPE)
7806  CASE(distributed_matrix_vector_cmiss_type)
7807  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
7808  CALL write_string(diagnostic_output_type,"Distributed vector :",err,error,*999)
7809  CALL write_string_value(diagnostic_output_type," Data type = ",distributed_vector%DATA_TYPE,err,error,*999)
7810  CALL write_string_value(diagnostic_output_type," Base tag number = ",distributed_vector%CMISS%BASE_TAG_NUMBER, &
7811  & err,error,*999)
7812  CALL write_string_value(diagnostic_output_type," Number of adjacent domains = ",distributed_vector%DOMAIN_MAPPING% &
7813  & number_of_adjacent_domains,err,error,*999)
7814  DO domain_idx=1,distributed_vector%DOMAIN_MAPPING%NUMBER_OF_ADJACENT_DOMAINS
7815  CALL write_string_value(diagnostic_output_type," Domain idx = ",domain_idx,err,error,*999)
7816  CALL write_string_value(diagnostic_output_type," Domain number = ",distributed_vector%DOMAIN_MAPPING% &
7817  & adjacent_domains(domain_idx)%DOMAIN_NUMBER,err,error,*999)
7818  CALL write_string_value(diagnostic_output_type," Receive tag number = ",distributed_vector%CMISS% &
7819  & transfers(domain_idx)%RECEIVE_TAG_NUMBER,err,error,*999)
7820  CALL write_string_value(diagnostic_output_type," Send tag number = ",distributed_vector%CMISS% &
7821  & transfers(domain_idx)%SEND_TAG_NUMBER,err,error,*999)
7822  CALL write_string_value(diagnostic_output_type," MPI send request = ",distributed_vector%CMISS% &
7823  & transfers(domain_idx)%MPI_SEND_REQUEST,err,error,*999)
7824  CALL write_string_value(diagnostic_output_type," MPI receive request = ",distributed_vector%CMISS% &
7825  & transfers(domain_idx)%MPI_RECEIVE_REQUEST,err,error,*999)
7826  ENDDO !domain_idx
7827  CALL write_string_value(diagnostic_output_type," Data size = ",distributed_vector%CMISS%DATA_SIZE,err,error,*999)
7828  SELECT CASE(distributed_vector%DATA_TYPE)
7829  CASE(matrix_vector_intg_type)
7830  CALL write_string_vector(diagnostic_output_type,1,1,distributed_vector%CMISS%DATA_SIZE,5,5,distributed_vector%CMISS% &
7831  & data_intg,'(" Data :",5(X,I13))','(8X,5(X,I13))',err,error,*999)
7832  CASE(matrix_vector_sp_type)
7833  CALL write_string_vector(diagnostic_output_type,1,1,distributed_vector%CMISS%DATA_SIZE,5,5,distributed_vector%CMISS% &
7834  & data_sp,'(" Data :",5(X,E13.6))','(8X,5(X,E13.6))',err,error,*999)
7835  CASE(matrix_vector_dp_type)
7836  CALL write_string_vector(diagnostic_output_type,1,1,distributed_vector%CMISS%DATA_SIZE,5,5,distributed_vector%CMISS% &
7837  & data_dp,'(" Data :",5(X,E13.6))','(8X,5(X,E13.6))',err,error,*999)
7838  CASE(matrix_vector_l_type)
7839  CALL write_string_vector(diagnostic_output_type,1,1,distributed_vector%CMISS%DATA_SIZE,8,8,distributed_vector%CMISS% &
7840  & data_l,'(" Data :",8(X,L))','(8X,8(X,L))',err,error,*999)
7841  CASE DEFAULT
7842  local_error="The distributed vector data type of "// &
7843  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))//" is invalid."
7844  CALL flagerror(local_error,err,error,*999)
7845  END SELECT
7846  ELSE
7847  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
7848  ENDIF
7849  CASE(distributed_matrix_vector_petsc_type)
7850  !Do nothing
7851  CASE DEFAULT
7852  local_error="The distributed vector library type of "// &
7853  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
7854  CALL flagerror(local_error,err,error,*999)
7855  END SELECT
7856  ENDIF
7857 
7858  exits("DISTRIBUTED_VECTOR_UPDATE_FINISH")
7859  RETURN
7860 999 errorsexits("DISTRIBUTED_VECTOR_UPDATE_FINISH",err,error)
7861  RETURN 1
7862  END SUBROUTINE distributed_vector_update_finish
7863 
7864  !
7865  !================================================================================================================================
7866  !
7867 
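7868  !>Tests whether the update (ghost transfer) of a distributed vector has finished.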
7869  SUBROUTINE distributed_vector_update_isfinished(DISTRIBUTED_VECTOR,ISFINISHED,ERR,ERROR,*)
7871  !Argument variables
7872  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
7873  LOGICAL, INTENT(OUT) :: ISFINISHED
7874  INTEGER(INTG), INTENT(OUT) :: ERR
7875  TYPE(varying_string), INTENT(OUT) :: ERROR
7876  !Local Variables
7877  INTEGER(INTG) :: domain_idx
7878  INTEGER(INTG) :: MPI_IERROR,STATUS(mpi_status_size)
7879  TYPE(varying_string) :: LOCAL_ERROR
7880 
7881  enters("DISTRIBUTED_VECTOR_UPDATE_ISFINISHED",err,error,*999)
7882 
7883  isfinished=.false.
7884  IF(ASSOCIATED(distributed_vector)) THEN
7885  IF(distributed_vector%VECTOR_FINISHED) THEN
7886  SELECT CASE(distributed_vector%LIBRARY_TYPE)
7887  CASE(distributed_matrix_vector_cmiss_type)
7888  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
7889  IF(ASSOCIATED(distributed_vector%DOMAIN_MAPPING)) THEN
7890 !!TODO: Use MPI_TESTALL and store the request handles in one big array.
7891  DO domain_idx=1,distributed_vector%DOMAIN_MAPPING%NUMBER_OF_ADJACENT_DOMAINS
7892  CALL mpi_test(distributed_vector%CMISS%TRANSFERS(domain_idx)%MPI_RECEIVE_REQUEST,isfinished,status,mpi_ierror)
7893  CALL mpi_error_check("MPI_TEST",mpi_ierror,err,error,*999)
7894  IF(.NOT.isfinished) EXIT
7895  !CALL MPI_TEST(DISTRIBUTED_VECTOR%TRANSFERS(domain_idx)%MPI_SEND_REQUEST,ISFINISHED,STATUS,MPI_IERROR)
7896  !CALL MPI_ERROR_CHECK("MPI_TEST",MPI_IERROR,ERR,ERROR,*999)
7897  !IF(.NOT.ISFINISHED) EXIT
7898  ENDDO !domain_idx
7899  ELSE
7900  CALL flagerror("Distributed vector domain mapping is not associated.",err,error,*999)
7901  ENDIF
7902  ELSE
7903  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
7904  ENDIF
7905  CASE(distributed_matrix_vector_petsc_type)
7906  CALL flagerror("Cannot test if an update has finished for a PETSc distributed vector.",err,error,*999)
7907  CASE DEFAULT
7908  local_error="The distributed vector library type of "// &
7909  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
7910  CALL flagerror(local_error,err,error,*999)
7911  END SELECT
7912  ELSE
7913  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
7914  ENDIF
7915  ELSE
7916  CALL flagerror("Distributed vector is not associated.",err,error,*999)
7917  ENDIF
7918 
7919  exits("DISTRIBUTED_VECTOR_UPDATE_ISFINISHED")
7920  RETURN
7921 999 errorsexits("DISTRIBUTED_VECTOR_UPDATE_ISFINISHED",err,error)
7922  RETURN 1
7923  END SUBROUTINE distributed_vector_update_isfinished
7924 
7925  !
7926  !================================================================================================================================
7927  !
7928 
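7929  !>Waits until the update (ghost transfer) of a distributed vector has finished.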
7930  SUBROUTINE distributed_vector_update_waitfinished(DISTRIBUTED_VECTOR,ERR,ERROR,*)
7932  !Argument variables
7933  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
7934  INTEGER(INTG), INTENT(OUT) :: ERR
7935  TYPE(varying_string), INTENT(OUT) :: ERROR
7936  !Local Variables
7937  INTEGER(INTG) :: domain_idx
7938  INTEGER(INTG) :: MPI_IERROR,STATUS(mpi_status_size)
7939  TYPE(varying_string) :: LOCAL_ERROR
7940 
7941  enters("DISTRIBUTED_VECTOR_UPDATE_WAITFINISHED",err,error,*999)
7942 
7943  IF(ASSOCIATED(distributed_vector)) THEN
7944  IF(distributed_vector%VECTOR_FINISHED) THEN
7945  SELECT CASE(distributed_vector%LIBRARY_TYPE)
7946  CASE(distributed_matrix_vector_cmiss_type)
7947  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
7948  IF(ASSOCIATED(distributed_vector%DOMAIN_MAPPING)) THEN
7949 !!TODO: Use MPI_WAITALL and store the request handles in one big array.
7950  DO domain_idx=1,distributed_vector%DOMAIN_MAPPING%NUMBER_OF_ADJACENT_DOMAINS
7951  CALL mpi_wait(distributed_vector%CMISS%TRANSFERS(domain_idx)%MPI_RECEIVE_REQUEST,status,mpi_ierror)
7952  CALL mpi_error_check("MPI_WAIT",mpi_ierror,err,error,*999)
7953  ENDDO !domain_idx
7954  ELSE
7955  CALL flagerror("Distributed vector domain mapping is not associated.",err,error,*999)
7956  ENDIF
7957  ELSE
7958  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
7959  ENDIF
7960  CASE(distributed_matrix_vector_petsc_type)
7961  CALL flagerror("Cannot wait for an update to finish for a PETSc distributed vector.",err,error,*999)
7962  CASE DEFAULT
7963  local_error="The distributed vector library type of "// &
7964  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
7965  CALL flagerror(local_error,err,error,*999)
7966  END SELECT
7967  ELSE
7968  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
7969  ENDIF
7970  ELSE
7971  CALL flagerror("Distributed vector is not associated.",err,error,*999)
7972  ENDIF
7973 
7974  exits("DISTRIBUTED_VECTOR_UPDATE_WAITFINISHED")
7975  RETURN
7976 999 errorsexits("DISTRIBUTED_VECTOR_UPDATE_WAITFINISHED",err,error)
7977  RETURN 1
7978  END SUBROUTINE distributed_vector_update_waitfinished
7979 
7980  !
7981  !================================================================================================================================
7982  !
7983 
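7984  !>Starts the update (ghost transfer) of a distributed vector: fills the send buffers and posts the non-blocking MPI receives and sends.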
7985  SUBROUTINE distributed_vector_update_start(DISTRIBUTED_VECTOR,ERR,ERROR,*)
7987  !Argument variables
7988  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
7989  INTEGER(INTG), INTENT(OUT) :: ERR
7990  TYPE(varying_string), INTENT(OUT) :: ERROR
7991  !Local Variables
7992  INTEGER(INTG) :: domain_idx,i,MPI_IERROR,NUMBER_OF_COMPUTATIONAL_NODES
7993  TYPE(varying_string) :: LOCAL_ERROR
7994 
7995  enters("DISTRIBUTED_VECTOR_UPDATE_START",err,error,*999)
7996 
7997  IF(ASSOCIATED(distributed_vector)) THEN
7998  IF(distributed_vector%VECTOR_FINISHED) THEN
7999  SELECT CASE(distributed_vector%LIBRARY_TYPE)
8000  CASE(distributed_matrix_vector_cmiss_type)
8001  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
8002  IF(ASSOCIATED(distributed_vector%DOMAIN_MAPPING)) THEN
8003  number_of_computational_nodes=computational_nodes_number_get(err,error)
8004  IF(err/=0) GOTO 999
8005  IF(number_of_computational_nodes>1) THEN
8006  IF(distributed_vector%DOMAIN_MAPPING%NUMBER_OF_ADJACENT_DOMAINS>0) THEN
8007  !Fill in the send buffers with the send ghost values
8008  DO domain_idx=1,distributed_vector%DOMAIN_MAPPING%NUMBER_OF_ADJACENT_DOMAINS
8009  SELECT CASE(distributed_vector%DATA_TYPE)
8010  CASE(matrix_vector_intg_type)
8011  DO i=1,distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)%NUMBER_OF_SEND_GHOSTS
8012  distributed_vector%CMISS%TRANSFERS(domain_idx)%SEND_BUFFER_INTG(i)= &
8013  & distributed_vector%CMISS%DATA_INTG(distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)% &
8014  & local_ghost_send_indices(i))
8015  ENDDO !i
8016  CASE(matrix_vector_sp_type)
8017  DO i=1,distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)%NUMBER_OF_SEND_GHOSTS
8018  distributed_vector%CMISS%TRANSFERS(domain_idx)%SEND_BUFFER_SP(i)= &
8019  & distributed_vector%CMISS%DATA_SP(distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)% &
8020  & local_ghost_send_indices(i))
8021  ENDDO !i
8022  CASE(matrix_vector_dp_type)
8023  DO i=1,distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)%NUMBER_OF_SEND_GHOSTS
8024  distributed_vector%CMISS%TRANSFERS(domain_idx)%SEND_BUFFER_DP(i)= &
8025  & distributed_vector%CMISS%DATA_DP(distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)% &
8026  & local_ghost_send_indices(i))
8027  ENDDO !i
8028  CASE(matrix_vector_l_type)
8029  DO i=1,distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)%NUMBER_OF_SEND_GHOSTS
8030  distributed_vector%CMISS%TRANSFERS(domain_idx)%SEND_BUFFER_L(i)= &
8031  & distributed_vector%CMISS%DATA_L(distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)% &
8032  & local_ghost_send_indices(i))
8033  ENDDO !i
8034  CASE DEFAULT
8035  local_error="The distributed vector data type of "// &
8036  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))//" is invalid."
8037  CALL flagerror(local_error,err,error,*999)
8038  END SELECT
8039  ENDDO !domain_idx
8040  !Post all the receive calls first and then the send calls.
8041  DO domain_idx=1,distributed_vector%DOMAIN_MAPPING%NUMBER_OF_ADJACENT_DOMAINS
8042  SELECT CASE(distributed_vector%DATA_TYPE)
8043  CASE(matrix_vector_intg_type)
8044  CALL mpi_irecv(distributed_vector%CMISS%TRANSFERS(domain_idx)%RECEIVE_BUFFER_INTG, &
8045  & distributed_vector%CMISS%TRANSFERS(domain_idx)%RECEIVE_BUFFER_SIZE,mpi_integer, &
8046  & distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)%DOMAIN_NUMBER, &
8047  & distributed_vector%CMISS%TRANSFERS(domain_idx)%RECEIVE_TAG_NUMBER,computational_environment%MPI_COMM, &
8048  & distributed_vector%CMISS%TRANSFERS(domain_idx)%MPI_RECEIVE_REQUEST,mpi_ierror)
8049  CALL mpi_error_check("MPI_IRECV",mpi_ierror,err,error,*999)
8050  IF(diagnostics5) THEN
8051  CALL write_string(diagnostic_output_type,"MPI IRECV call posted:",err,error,*999)
8052  CALL write_string_value(diagnostic_output_type," Receive count = ",distributed_vector% &
8053  & cmiss%TRANSFERS(domain_idx)%RECEIVE_BUFFER_SIZE,err,error,*999)
8054  CALL write_string_value(diagnostic_output_type," Receive datatype = ",mpi_integer,err,error,*999)
8055  CALL write_string_value(diagnostic_output_type," Receive source = ",distributed_vector%DOMAIN_MAPPING% &
8056  & adjacent_domains(domain_idx)%DOMAIN_NUMBER,err,error,*999)
8057  CALL write_string_value(diagnostic_output_type," Receive tag = ",distributed_vector% &
8058  & cmiss%TRANSFERS(domain_idx)%RECEIVE_TAG_NUMBER,err,error,*999)
8059  CALL write_string_value(diagnostic_output_type," Receive comm = ",computational_environment%MPI_COMM, &
8060  & err,error,*999)
8061  CALL write_string_value(diagnostic_output_type," Receive request = ",distributed_vector% &
8062  & cmiss%TRANSFERS(domain_idx)%MPI_RECEIVE_REQUEST,err,error,*999)
8063  ENDIF
8064  CASE(matrix_vector_sp_type)
8065  CALL mpi_irecv(distributed_vector%CMISS%TRANSFERS(domain_idx)%RECEIVE_BUFFER_SP, &
8066  & distributed_vector%CMISS%TRANSFERS(domain_idx)%RECEIVE_BUFFER_SIZE,mpi_real, &
8067  & distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)%DOMAIN_NUMBER, &
8068  & distributed_vector%CMISS%TRANSFERS(domain_idx)%RECEIVE_TAG_NUMBER,computational_environment%MPI_COMM, &
8069  & distributed_vector%CMISS%TRANSFERS(domain_idx)%MPI_RECEIVE_REQUEST,mpi_ierror)
8070  CALL mpi_error_check("MPI_IRECV",mpi_ierror,err,error,*999)
8071  IF(diagnostics5) THEN
8072  CALL write_string(diagnostic_output_type,"MPI IRECV call posted:",err,error,*999)
8073  CALL write_string_value(diagnostic_output_type," Receive count = ",distributed_vector% &
8074  & cmiss%TRANSFERS(domain_idx)%RECEIVE_BUFFER_SIZE,err,error,*999)
8075  CALL write_string_value(diagnostic_output_type," Receive datatype = ",mpi_real,err,error,*999)
8076  CALL write_string_value(diagnostic_output_type," Receive source = ",distributed_vector%DOMAIN_MAPPING% &
8077  & adjacent_domains(domain_idx)%DOMAIN_NUMBER,err,error,*999)
8078  CALL write_string_value(diagnostic_output_type," Receive tag = ",distributed_vector% &
8079  & cmiss%TRANSFERS(domain_idx)%RECEIVE_TAG_NUMBER,err,error,*999)
8080  CALL write_string_value(diagnostic_output_type," Receive comm = ",computational_environment%MPI_COMM, &
8081  & err,error,*999)
8082  CALL write_string_value(diagnostic_output_type," Receive request = ",distributed_vector% &
8083  & cmiss%TRANSFERS(domain_idx)%MPI_RECEIVE_REQUEST,err,error,*999)
8084  ENDIF
8085  CASE(matrix_vector_dp_type)
8086  CALL mpi_irecv(distributed_vector%CMISS%TRANSFERS(domain_idx)%RECEIVE_BUFFER_DP, &
8087  & distributed_vector%CMISS%TRANSFERS(domain_idx)%RECEIVE_BUFFER_SIZE,mpi_double_precision, &
8088  & distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)%DOMAIN_NUMBER, &
8089  & distributed_vector%CMISS%TRANSFERS(domain_idx)%RECEIVE_TAG_NUMBER,computational_environment%MPI_COMM, &
8090  & distributed_vector%CMISS%TRANSFERS(domain_idx)%MPI_RECEIVE_REQUEST,mpi_ierror)
8091  CALL mpi_error_check("MPI_IRECV",mpi_ierror,err,error,*999)
8092  IF(diagnostics5) THEN
8093  CALL write_string(diagnostic_output_type,"MPI IRECV call posted:",err,error,*999)
8094  CALL write_string_value(diagnostic_output_type," Receive count = ",distributed_vector% &
8095  & cmiss%TRANSFERS(domain_idx)%RECEIVE_BUFFER_SIZE,err,error,*999)
8096  CALL write_string_value(diagnostic_output_type," Receive datatype = ",mpi_double_precision,err,error,*999)
8097  CALL write_string_value(diagnostic_output_type," Receive source = ",distributed_vector%DOMAIN_MAPPING% &
8098  & adjacent_domains(domain_idx)%DOMAIN_NUMBER,err,error,*999)
8099  CALL write_string_value(diagnostic_output_type," Receive tag = ",distributed_vector% &
8100  & cmiss%TRANSFERS(domain_idx)%RECEIVE_TAG_NUMBER,err,error,*999)
8101  CALL write_string_value(diagnostic_output_type," Receive comm = ",computational_environment%MPI_COMM, &
8102  & err,error,*999)
8103  CALL write_string_value(diagnostic_output_type," Receive request = ",distributed_vector% &
8104  & cmiss%TRANSFERS(domain_idx)%MPI_RECEIVE_REQUEST,err,error,*999)
8105  ENDIF
8106  CASE(matrix_vector_l_type)
8107  CALL mpi_irecv(distributed_vector%CMISS%TRANSFERS(domain_idx)%RECEIVE_BUFFER_L, &
8108  & distributed_vector%CMISS%TRANSFERS(domain_idx)%RECEIVE_BUFFER_SIZE,mpi_logical, &
8109  & distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)%DOMAIN_NUMBER, &
8110  & distributed_vector%CMISS%TRANSFERS(domain_idx)%RECEIVE_TAG_NUMBER,computational_environment%MPI_COMM, &
8111  & distributed_vector%CMISS%TRANSFERS(domain_idx)%MPI_RECEIVE_REQUEST,mpi_ierror)
8112  CALL mpi_error_check("MPI_IRECV",mpi_ierror,err,error,*999)
8113  IF(diagnostics5) THEN
8114  CALL write_string(diagnostic_output_type,"MPI IRECV call posted:",err,error,*999)
8115  CALL write_string_value(diagnostic_output_type," Receive count = ",distributed_vector% &
8116  & cmiss%TRANSFERS(domain_idx)%RECEIVE_BUFFER_SIZE,err,error,*999)
8117  CALL write_string_value(diagnostic_output_type," Receive datatype = ",mpi_logical,err,error,*999)
8118  CALL write_string_value(diagnostic_output_type," Receive source = ",distributed_vector%DOMAIN_MAPPING% &
8119  & adjacent_domains(domain_idx)%DOMAIN_NUMBER,err,error,*999)
8120  CALL write_string_value(diagnostic_output_type," Receive tag = ",distributed_vector% &
8121  & cmiss%TRANSFERS(domain_idx)%RECEIVE_TAG_NUMBER,err,error,*999)
8122  CALL write_string_value(diagnostic_output_type," Receive comm = ",computational_environment%MPI_COMM, &
8123  & err,error,*999)
8124  CALL write_string_value(diagnostic_output_type," Receive request = ",distributed_vector% &
8125  & cmiss%TRANSFERS(domain_idx)%MPI_RECEIVE_REQUEST,err,error,*999)
8126  ENDIF
8127  CASE DEFAULT
8128  local_error="The distributed vector data type of "// &
8129  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))//" is invalid."
8130  CALL flagerror(local_error,err,error,*999)
8131  END SELECT
8132  ENDDO !domain_idx
8133  !Post all the send calls.
8134  DO domain_idx=1,distributed_vector%DOMAIN_MAPPING%NUMBER_OF_ADJACENT_DOMAINS
8135  SELECT CASE(distributed_vector%DATA_TYPE)
8136  CASE(matrix_vector_intg_type)
8137  CALL mpi_isend(distributed_vector%CMISS%TRANSFERS(domain_idx)%SEND_BUFFER_INTG, &
8138  & distributed_vector%CMISS%TRANSFERS(domain_idx)%SEND_BUFFER_SIZE,mpi_integer, &
8139  & distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)%DOMAIN_NUMBER, &
8140  & distributed_vector%CMISS%TRANSFERS(domain_idx)%SEND_TAG_NUMBER,computational_environment%MPI_COMM, &
8141  & distributed_vector%CMISS%TRANSFERS(domain_idx)%MPI_SEND_REQUEST,mpi_ierror)
8142  CALL mpi_error_check("MPI_ISEND",mpi_ierror,err,error,*999)
8143  IF(diagnostics5) THEN
8144  CALL write_string(diagnostic_output_type,"MPI ISEND call posted:",err,error,*999)
8145  CALL write_string_value(diagnostic_output_type," Send count = ",distributed_vector% &
8146  & cmiss%TRANSFERS(domain_idx)%SEND_BUFFER_SIZE,err,error,*999)
8147  CALL write_string_value(diagnostic_output_type," Send datatype = ",mpi_integer,err,error,*999)
8148  CALL write_string_value(diagnostic_output_type," Send dest = ",distributed_vector%DOMAIN_MAPPING% &
8149  & adjacent_domains(domain_idx)%DOMAIN_NUMBER,err,error,*999)
8150  CALL write_string_value(diagnostic_output_type," Send tag = ",distributed_vector% &
8151  & cmiss%TRANSFERS(domain_idx)%SEND_TAG_NUMBER,err,error,*999)
8152  CALL write_string_value(diagnostic_output_type," Send comm = ",computational_environment%MPI_COMM, &
8153  & err,error,*999)
8154  CALL write_string_value(diagnostic_output_type," Send request = ",distributed_vector% &
8155  & cmiss%TRANSFERS(domain_idx)%MPI_SEND_REQUEST,err,error,*999)
8156  ENDIF
8157  CASE(matrix_vector_sp_type)
8158  CALL mpi_isend(distributed_vector%CMISS%TRANSFERS(domain_idx)%SEND_BUFFER_SP, &
8159  & distributed_vector%CMISS%TRANSFERS(domain_idx)%SEND_BUFFER_SIZE,mpi_real, &
8160  & distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)%DOMAIN_NUMBER, &
8161  & distributed_vector%CMISS%TRANSFERS(domain_idx)%SEND_TAG_NUMBER,computational_environment%MPI_COMM, &
8162  & distributed_vector%CMISS%TRANSFERS(domain_idx)%MPI_SEND_REQUEST,mpi_ierror)
8163  CALL mpi_error_check("MPI_ISEND",mpi_ierror,err,error,*999)
8164  IF(diagnostics5) THEN
8165  CALL write_string(diagnostic_output_type,"MPI ISEND call posted:",err,error,*999)
8166  CALL write_string_value(diagnostic_output_type," Send count = ",distributed_vector% &
8167  & cmiss%TRANSFERS(domain_idx)%SEND_BUFFER_SIZE,err,error,*999)
8168  CALL write_string_value(diagnostic_output_type," Send datatype = ",mpi_real,err,error,*999)
8169  CALL write_string_value(diagnostic_output_type," Send dest = ",distributed_vector%DOMAIN_MAPPING% &
8170  & adjacent_domains(domain_idx)%DOMAIN_NUMBER,err,error,*999)
8171  CALL write_string_value(diagnostic_output_type," Send tag = ",distributed_vector% &
8172  & cmiss%TRANSFERS(domain_idx)%SEND_TAG_NUMBER,err,error,*999)
8173  CALL write_string_value(diagnostic_output_type," Send comm = ",computational_environment%MPI_COMM, &
8174  & err,error,*999)
8175  CALL write_string_value(diagnostic_output_type," Send request = ",distributed_vector% &
8176  & cmiss%TRANSFERS(domain_idx)%MPI_SEND_REQUEST,err,error,*999)
8177  ENDIF
8178  CASE(matrix_vector_dp_type)
8179  CALL mpi_isend(distributed_vector%CMISS%TRANSFERS(domain_idx)%SEND_BUFFER_DP, &
8180  & distributed_vector%CMISS%TRANSFERS(domain_idx)%SEND_BUFFER_SIZE,mpi_double_precision, &
8181  & distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)%DOMAIN_NUMBER, &
8182  & distributed_vector%CMISS%TRANSFERS(domain_idx)%SEND_TAG_NUMBER,computational_environment%MPI_COMM, &
8183  & distributed_vector%CMISS%TRANSFERS(domain_idx)%MPI_SEND_REQUEST,mpi_ierror)
8184  CALL mpi_error_check("MPI_ISEND",mpi_ierror,err,error,*999)
8185  IF(diagnostics5) THEN
8186  CALL write_string(diagnostic_output_type,"MPI ISEND call posted:",err,error,*999)
8187  CALL write_string_value(diagnostic_output_type," Send count = ",distributed_vector% &
8188  & cmiss%TRANSFERS(domain_idx)%SEND_BUFFER_SIZE,err,error,*999)
8189  CALL write_string_value(diagnostic_output_type," Send datatype = ",mpi_double_precision,err,error,*999)
8190  CALL write_string_value(diagnostic_output_type," Send dest = ",distributed_vector%DOMAIN_MAPPING% &
8191  & adjacent_domains(domain_idx)%DOMAIN_NUMBER,err,error,*999)
8192  CALL write_string_value(diagnostic_output_type," Send tag = ",distributed_vector% &
8193  & cmiss%TRANSFERS(domain_idx)%SEND_TAG_NUMBER,err,error,*999)
8194  CALL write_string_value(diagnostic_output_type," Send comm = ",computational_environment%MPI_COMM, &
8195  & err,error,*999)
8196  CALL write_string_value(diagnostic_output_type," Send request = ",distributed_vector% &
8197  & cmiss%TRANSFERS(domain_idx)%MPI_SEND_REQUEST,err,error,*999)
8198  ENDIF
8199  CASE(matrix_vector_l_type)
8200  CALL mpi_isend(distributed_vector%CMISS%TRANSFERS(domain_idx)%SEND_BUFFER_L, &
8201  & distributed_vector%CMISS%TRANSFERS(domain_idx)%SEND_BUFFER_SIZE,mpi_logical, &
8202  & distributed_vector%DOMAIN_MAPPING%ADJACENT_DOMAINS(domain_idx)%DOMAIN_NUMBER, &
8203  & distributed_vector%CMISS%TRANSFERS(domain_idx)%SEND_TAG_NUMBER,computational_environment%MPI_COMM, &
8204  & distributed_vector%CMISS%TRANSFERS(domain_idx)%MPI_SEND_REQUEST,mpi_ierror)
8205  CALL mpi_error_check("MPI_ISEND",mpi_ierror,err,error,*999)
8206  IF(diagnostics5) THEN
8207  CALL write_string(diagnostic_output_type,"MPI ISEND call posted:",err,error,*999)
8208  CALL write_string_value(diagnostic_output_type," Send count = ",distributed_vector% &
8209  & cmiss%TRANSFERS(domain_idx)%SEND_BUFFER_SIZE,err,error,*999)
8210  CALL write_string_value(diagnostic_output_type," Send datatype = ",mpi_logical,err,error,*999)
8211  CALL write_string_value(diagnostic_output_type," Send dest = ",distributed_vector%DOMAIN_MAPPING% &
8212  & adjacent_domains(domain_idx)%DOMAIN_NUMBER,err,error,*999)
8213  CALL write_string_value(diagnostic_output_type," Send tag = ",distributed_vector% &
8214  & cmiss%TRANSFERS(domain_idx)%SEND_TAG_NUMBER,err,error,*999)
8215  CALL write_string_value(diagnostic_output_type," Send comm = ",computational_environment%MPI_COMM, &
8216  & err,error,*999)
8217  CALL write_string_value(diagnostic_output_type," Send request = ",distributed_vector% &
8218  & cmiss%TRANSFERS(domain_idx)%MPI_SEND_REQUEST,err,error,*999)
8219  ENDIF
8220  CASE DEFAULT
8221  local_error="The distributed vector data type of "// &
8222  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))//" is invalid."
8223  CALL flagerror(local_error,err,error,*999)
8224  END SELECT
8225  ENDDO !domain_idx
8226  ENDIF
8227  ENDIF
8228  ELSE
8229  CALL flagerror("Domain mapping is not associated for the distributed vector.",err,error,*999)
8230  ENDIF
8231  ELSE
8232  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
8233  ENDIF
8234  CASE(distributed_matrix_vector_petsc_type)
8235  IF(ASSOCIATED(distributed_vector%PETSC)) THEN
8236  IF(distributed_vector%PETSC%USE_OVERRIDE_VECTOR) THEN
8237  CALL petsc_vecassemblybegin(distributed_vector%PETSC%OVERRIDE_VECTOR,err,error,*999)
8238  ELSE
8239  CALL petsc_vecassemblybegin(distributed_vector%PETSC%VECTOR,err,error,*999)
8240  ENDIF
8241  ELSE
8242  CALL flagerror("Distributed vector PETSc is not associated.",err,error,*999)
8243  ENDIF
8244  CASE DEFAULT
8245  local_error="The distributed vector library type of "// &
8246  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
8247  CALL flagerror(local_error,err,error,*999)
8248  END SELECT
8249  ELSE
8250  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
8251  ENDIF
8252  ELSE
8253  CALL flagerror("Distributed vector is not associated.",err,error,*999)
8254  ENDIF
8255 
8256  IF(diagnostics1) THEN
8257  SELECT CASE(distributed_vector%LIBRARY_TYPE)
8258  CASE(distributed_matrix_vector_cmiss_type)
8259  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
8260  CALL write_string(diagnostic_output_type,"Distributed vector :",err,error,*999)
8261  CALL write_string_value(diagnostic_output_type," Data type = ",distributed_vector%DATA_TYPE,err,error,*999)
8262  CALL write_string_value(diagnostic_output_type," Base tag number = ",distributed_vector%CMISS%BASE_TAG_NUMBER, &
8263  & err,error,*999)
8264  CALL write_string_value(diagnostic_output_type," Number of adjacent domains = ",distributed_vector%DOMAIN_MAPPING% &
8265  & number_of_adjacent_domains,err,error,*999)
8266  DO domain_idx=1,distributed_vector%DOMAIN_MAPPING%NUMBER_OF_ADJACENT_DOMAINS
8267  CALL write_string_value(diagnostic_output_type," Domain idx = ",domain_idx,err,error,*999)
8268  CALL write_string_value(diagnostic_output_type," Domain number = ",distributed_vector%DOMAIN_MAPPING% &
8269  & adjacent_domains(domain_idx)%DOMAIN_NUMBER,err,error,*999)
8270  CALL write_string_value(diagnostic_output_type," Receive tag number = ",distributed_vector% &
8271  & cmiss%TRANSFERS(domain_idx)%RECEIVE_TAG_NUMBER,err,error,*999)
8272  CALL write_string_value(diagnostic_output_type," Send tag number = ",distributed_vector% &
8273  & cmiss%TRANSFERS(domain_idx)%SEND_TAG_NUMBER,err,error,*999)
8274  CALL write_string_value(diagnostic_output_type," MPI send request = ",distributed_vector% &
8275  & cmiss%TRANSFERS(domain_idx)%MPI_SEND_REQUEST,err,error,*999)
8276  CALL write_string_value(diagnostic_output_type," MPI receive request = ",distributed_vector% &
8277  & cmiss%TRANSFERS(domain_idx)%MPI_RECEIVE_REQUEST,err,error,*999)
8278  ENDDO !domain_idx
8279  CALL write_string_value(diagnostic_output_type," Data size = ",distributed_vector%CMISS%DATA_SIZE,err,error,*999)
8280  SELECT CASE(distributed_vector%DATA_TYPE)
8281  CASE(matrix_vector_intg_type)
8282  CALL write_string_vector(diagnostic_output_type,1,1,distributed_vector%CMISS%DATA_SIZE,5,5,distributed_vector%CMISS% &
8283  & data_intg,'(" Data :",5(X,I13))','(8X,5(X,I13))',err,error,*999)
8284  CASE(matrix_vector_sp_type)
8285  CALL write_string_vector(diagnostic_output_type,1,1,distributed_vector%CMISS%DATA_SIZE,5,5,distributed_vector%CMISS% &
8286  & data_sp,'(" Data :",5(X,E13.6))','(8X,5(X,E13.6))',err,error,*999)
8287  CASE(matrix_vector_dp_type)
8288  CALL write_string_vector(diagnostic_output_type,1,1,distributed_vector%CMISS%DATA_SIZE,5,5,distributed_vector%CMISS% &
8289  & data_dp,'(" Data :",5(X,E13.6))','(8X,5(X,E13.6))',err,error,*999)
8290  CASE(matrix_vector_l_type)
8291  CALL write_string_vector(diagnostic_output_type,1,1,distributed_vector%CMISS%DATA_SIZE,8,8,distributed_vector%CMISS% &
8292  & data_l,'(" Data :",8(X,L))','(8X,8(X,L))',err,error,*999)
8293  CASE DEFAULT
8294  local_error="The distributed vector data type of "// &
8295  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))//" is invalid."
8296  CALL flagerror(local_error,err,error,*999)
8297  END SELECT
8298  ELSE
8299  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
8300  ENDIF
8301  CASE(distributed_matrix_vector_petsc_type)
8302  !Do nothing
8303  CASE DEFAULT
8304  local_error="The distributed vector library type of "// &
8305  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
8306  CALL flagerror(local_error,err,error,*999)
8307  END SELECT
8308  ENDIF
8309 
8310  exits("DISTRIBUTED_VECTOR_UPDATE_START")
8311  RETURN
8312 999 errorsexits("DISTRIBUTED_VECTOR_UPDATE_START",err,error)
8313  RETURN 1
8314  END SUBROUTINE distributed_vector_update_start
8315 
8316  !
8317  !================================================================================================================================
8318  !
8319 
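8320  !>Calculates the L2 norm of the local part of a distributed vector (CMISS library type, double precision only).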
8321  SUBROUTINE distributedvector_l2norm(distributedVector,norm,err,error,*)
8323  !Argument variables
8324  TYPE(distributed_vector_type), INTENT(IN), POINTER :: distributedVector
8325  REAL(DP), INTENT(OUT) :: norm
8326  INTEGER(INTG), INTENT(OUT) :: err
8327  TYPE(varying_string), INTENT(OUT) :: error
8328  !Local Variables
8329  INTEGER(INTG) :: i
8330  TYPE(varying_string) :: localError
8331 
8332  enters("DistributedVector_L2Norm",err,error,*999)
8333 
8334  IF(ASSOCIATED(distributedvector)) THEN
8335  IF(distributedvector%VECTOR_FINISHED) THEN
8336  SELECT CASE(distributedvector%LIBRARY_TYPE)
8337  CASE(distributed_matrix_vector_cmiss_type)
8338  SELECT CASE(distributedvector%DATA_TYPE)
8339  CASE(matrix_vector_dp_type)
8340  IF(ASSOCIATED(distributedvector%CMISS)) THEN
8341  norm=0.0_dp
8342  DO i=1,distributedvector%CMISS%DATA_SIZE
8343  norm=norm+(distributedvector%CMISS%DATA_DP(i)**2)
8344  ENDDO !i
8345  norm=sqrt(norm)
8346  ELSE
8347  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
8348  ENDIF
8349  CASE(matrix_vector_sp_type)
8350  CALL flagerror("Not implemented.",err,error,*999)
8351  CASE(matrix_vector_intg_type)
8352  CALL flagerror("Not implemented.",err,error,*999)
8353  CASE(matrix_vector_l_type)
8354  CALL flagerror("Not implemented.",err,error,*999)
8355  CASE DEFAULT
8356  localerror="The distributed data type of "// &
8357  & trim(numbertovstring(distributedvector%DATA_TYPE,"*",err,error))// &
8358  & " is invalid."
8359  CALL flagerror(localerror,err,error,*999)
8360  END SELECT
8361  CASE(distributed_matrix_vector_petsc_type)
8362  CALL flagerror("Cannot calculate the norm for a PETSc distributed vector.",err,error,*999)
8363  CASE DEFAULT
8364  localerror="The distributed vector library type of "// &
8365  & trim(numbertovstring(distributedvector%LIBRARY_TYPE,"*",err,error))//" is invalid."
8366  CALL flagerror(localerror,err,error,*999)
8367  END SELECT
8368  ELSE
8369  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
8370  ENDIF
8371  ELSE
8372  CALL flagerror("Distributed vector is not associated.",err,error,*999)
8373  ENDIF
8374 
8375  exits("DistributedVector_L2Norm")
8376  RETURN
8377 999 errorsexits("DistributedVector_L2Norm",err,error)
8378  RETURN 1
8379  END SUBROUTINE distributedvector_l2norm
8380 
8381  !
8382  !================================================================================================================================
8383  !
8384 
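8385  !>Calculates the dot product of two integer distributed vectors on this computational node.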
8386  SUBROUTINE distributedvector_vecdotintg(distributedVectorA,distributedVectorB,dotProduct,err,error,*)
8388  !Argument variables
8389  TYPE(distributed_vector_type), INTENT(IN), POINTER :: distributedVectorA
8390  TYPE(distributed_vector_type), INTENT(IN), POINTER :: distributedVectorB
8391  INTEGER(INTG), INTENT(OUT) :: dotProduct
8392  INTEGER(INTG), INTENT(OUT) :: err
8393  TYPE(varying_string), INTENT(OUT) :: error
8394  !Local Variables
8395  INTEGER(INTG) :: dataTypeA,dataTypeB,i
8396  TYPE(varying_string) :: localError
8397 
8398  enters("DistributedVector_VecDotIntg",err,error,*999)
8399 
8400  IF(ASSOCIATED(distributedvectora) .AND. ASSOCIATED(distributedvectorb)) THEN
8401  IF(distributedvectora%VECTOR_FINISHED .AND. distributedvectorb%VECTOR_FINISHED) THEN
8402  IF (distributedvectora%LIBRARY_TYPE==distributedvectorb%LIBRARY_TYPE) THEN
8403  CALL distributedvector_datatypeget(distributedvectora,datatypea,err,error,*999)
8404  CALL distributedvector_datatypeget(distributedvectorb,datatypeb,err,error,*999)
8405  IF(datatypea==datatypeb) THEN
8406  SELECT CASE(distributedvectora%LIBRARY_TYPE)
8407  CASE(distributed_matrix_vector_cmiss_type)
8408  IF(ASSOCIATED(distributedvectora%CMISS)) THEN
8409  IF(distributedvectora%CMISS%DATA_SIZE==distributedvectorb%CMISS%DATA_SIZE) THEN
8410  IF(distributedvectora%DATA_TYPE==matrix_vector_intg_type) THEN
8411  dotproduct=0
8412  DO i=1,distributedvectora%CMISS%DATA_SIZE
8413  dotproduct=dotproduct+(distributedvectora%CMISS%DATA_INTG(i)*distributedvectorb%CMISS%DATA_INTG(i))
8414  ENDDO !i
8415  ELSE
8416  CALL flagerror("Input distributed vector data type does not match output.",err,error,*999)
8417  ENDIF
8418  ELSE
8419  CALL flagerror("The distributed vectors do not have the same size.",err,error,*999)
8420  ENDIF
8421  ELSE
8422  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
8423  ENDIF
8424  CASE(distributed_matrix_vector_petsc_type)
8425  CALL flagerror("The PETSc distributed vector is double precision; the dot product should be computed as double precision.",err,error,*999)
8426  CASE DEFAULT
8427  localerror="The distributed vector library type of "// &
8428  & trim(numbertovstring(distributedvectora%LIBRARY_TYPE,"*",err,error))//" is invalid."
8429  CALL flagerror(localerror,err,error,*999)
8430  END SELECT
8431  ELSE
8432  CALL flagerror("The distributed vectors do not have the same data type.",err,error,*999)
8433  ENDIF
8434  ELSE
8435  CALL flagerror("The distributed vectors do not have the same library type.",err,error,*999)
8436  ENDIF
8437  ELSE
8438  CALL flagerror("The distributed vectors have not been finished.",err,error,*999)
8439  ENDIF
8440  ELSE
8441  CALL flagerror("One of the distributed vectors is not associated.",err,error,*999)
8442  ENDIF
8443 
8444  exits("DistributedVector_VecDotIntg")
8445  RETURN
8446 999 errorsexits("DistributedVector_VecDotIntg",err,error)
8447  RETURN 1
8448  END SUBROUTINE distributedvector_vecdotintg
8449 
8450  !
8451  !================================================================================================================================
8452  !
8453 
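8454  !>Calculates the dot product of two single-precision real distributed vectors on this computational node.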
8455  SUBROUTINE distributedvector_vecdotsp(distributedVectorA,distributedVectorB,dotProduct,err,error,*)
8457  !Argument variables
8458  TYPE(distributed_vector_type), INTENT(IN), POINTER :: distributedVectorA
8459  TYPE(distributed_vector_type), INTENT(IN), POINTER :: distributedVectorB
8460  REAL(SP), INTENT(OUT) :: dotProduct
8461  INTEGER(INTG), INTENT(OUT) :: err
8462  TYPE(varying_string), INTENT(OUT) :: error
8463  !Local Variables
8464  INTEGER(INTG) :: dataTypeA,dataTypeB,i
8465  TYPE(varying_string) :: localError
8466 
8467  enters("DistributedVector_VecDotSp",err,error,*999)
8468 
8469  IF(ASSOCIATED(distributedvectora) .AND. ASSOCIATED(distributedvectorb)) THEN
8470  IF(distributedvectora%VECTOR_FINISHED .AND. distributedvectorb%VECTOR_FINISHED) THEN
8471  IF (distributedvectora%LIBRARY_TYPE==distributedvectorb%LIBRARY_TYPE) THEN
8472  CALL distributedvector_datatypeget(distributedvectora,datatypea,err,error,*999)
8473  CALL distributedvector_datatypeget(distributedvectorb,datatypeb,err,error,*999)
8474  IF(datatypea==datatypeb) THEN
8475  SELECT CASE(distributedvectora%LIBRARY_TYPE)
8476  CASE(distributed_matrix_vector_cmiss_type)
8477  IF(ASSOCIATED(distributedvectora%CMISS)) THEN
8478  IF(distributedvectora%CMISS%DATA_SIZE==distributedvectorb%CMISS%DATA_SIZE) THEN
8479  IF(distributedvectora%DATA_TYPE==matrix_vector_sp_type) THEN
8480  dotproduct=0.0_sp
8481  DO i=1,distributedvectora%CMISS%DATA_SIZE
8482  dotproduct=dotproduct+(distributedvectora%CMISS%DATA_SP(i)*distributedvectorb%CMISS%DATA_SP(i))
8483  ENDDO !i
8484  ELSE
8485  CALL flagerror("Input distributed vector data type does not match output.",err,error,*999)
8486  ENDIF
8487  ELSE
8488  CALL flagerror("The distributed vectors do not have the same size.",err,error,*999)
8489  ENDIF
8490  ELSE
8491  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
8492  ENDIF
8493  CASE(distributed_matrix_vector_petsc_type)
8494  CALL flagerror("Cannot compute a single precision dot product for a PETSc distributed vector. PETSc vectors are double precision.",err,error,*999)
8495  CASE DEFAULT
8496  localerror="The distributed vector library type of "// &
8497  & trim(numbertovstring(distributedvectora%LIBRARY_TYPE,"*",err,error))//" is invalid."
8498  CALL flagerror(localerror,err,error,*999)
8499  END SELECT
8500  ELSE
8501  CALL flagerror("The distributed vectors do not have the same data type.",err,error,*999)
8502  ENDIF
8503  ELSE
8504  CALL flagerror("The distributed vectors do not have the same library type.",err,error,*999)
8505  ENDIF
8506  ELSE
8507  CALL flagerror("One or both of the distributed vectors have not been finished.",err,error,*999)
8508  ENDIF
8509  ELSE
8510  CALL flagerror("One or both of the distributed vectors are not associated.",err,error,*999)
8511  ENDIF
8512 
8513  exits("DistributedVector_VecDotSp")
8514  RETURN
8515 999 errorsexits("DistributedVector_VecDotSp",err,error)
8516  RETURN 1
8517  END SUBROUTINE distributedvector_vecdotsp
8518 
8519  !
8520  !================================================================================================================================
8521  !
8522 
8523  !>Calculates the dot product of two distributed double precision vectors on this computational node.
8524  SUBROUTINE distributedvector_vecdotdp(distributedVectorA,distributedVectorB,dotProduct,err,error,*)
8526  !Argument variables
8527  TYPE(distributed_vector_type), INTENT(IN), POINTER :: distributedVectorA
8528  TYPE(distributed_vector_type), INTENT(IN), POINTER :: distributedVectorB
8529  REAL(DP), INTENT(OUT) :: dotProduct
8530  INTEGER(INTG), INTENT(OUT) :: err
8531  TYPE(varying_string), INTENT(OUT) :: error
8532  !Local Variables
8533  INTEGER(INTG) :: dataTypeA,dataTypeB,i
8534  TYPE(varying_string) :: localError
8535 
8536  enters("DistributedVector_VecDotDp",err,error,*999)
8537 
8538  IF(ASSOCIATED(distributedvectora) .AND. ASSOCIATED(distributedvectorb)) THEN
8539  IF(distributedvectora%VECTOR_FINISHED .AND. distributedvectorb%VECTOR_FINISHED) THEN
8540  IF (distributedvectora%LIBRARY_TYPE==distributedvectorb%LIBRARY_TYPE) THEN
8541  CALL distributedvector_datatypeget(distributedvectora,datatypea,err,error,*999)
8542  CALL distributedvector_datatypeget(distributedvectorb,datatypeb,err,error,*999)
8543  IF(datatypea==datatypeb) THEN
8544  SELECT CASE(distributedvectora%LIBRARY_TYPE)
8545  CASE(distributed_matrix_vector_cmiss_type)
8546  IF(ASSOCIATED(distributedvectora%CMISS)) THEN
8547  IF(distributedvectora%CMISS%DATA_SIZE==distributedvectorb%CMISS%DATA_SIZE) THEN
8548  IF(distributedvectora%DATA_TYPE==matrix_vector_dp_type) THEN
8549  dotproduct=0.0_dp
8550  DO i=1,distributedvectora%CMISS%DATA_SIZE
8551  dotproduct=dotproduct+(distributedvectora%CMISS%DATA_DP(i)*distributedvectorb%CMISS%DATA_DP(i))
8552  ENDDO !i
8553  ELSE
8554  CALL flagerror("Input distributed vector data type does not match output.",err,error,*999)
8555  ENDIF
8556  ELSE
8557  CALL flagerror("The distributed vectors do not have the same size.",err,error,*999)
8558  ENDIF
8559  ELSE
8560  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
8561  ENDIF
8562  CASE(distributed_matrix_vector_petsc_type)
8563  IF(ASSOCIATED(distributedvectora%PETSC)) THEN
8564  CALL petsc_vecdot(distributedvectora%PETSC%VECTOR,distributedvectorb%PETSC%VECTOR, &
8565  & dotproduct,err,error,*999)
8566  ELSE
8567  CALL flagerror("Distributed vector PETSC is not associated.",err,error,*999)
8568  ENDIF
8569  CASE DEFAULT
8570  localerror="The distributed vector library type of "// &
8571  & trim(numbertovstring(distributedvectora%LIBRARY_TYPE,"*",err,error))//" is invalid."
8572  CALL flagerror(localerror,err,error,*999)
8573  END SELECT
8574  ELSE
8575  CALL flagerror("The distributed vectors do not have the same data type.",err,error,*999)
8576  ENDIF
8577  ELSE
8578  CALL flagerror("The distributed vectors do not have the same library type.",err,error,*999)
8579  ENDIF
8580  ELSE
8581  CALL flagerror("One or both of the distributed vectors have not been finished.",err,error,*999)
8582  ENDIF
8583  ELSE
8584  CALL flagerror("One or both of the distributed vectors are not associated.",err,error,*999)
8585  ENDIF
8586 
8587  exits("DistributedVector_VecDotDp")
8588  RETURN
8589 999 errorsexits("DistributedVector_VecDotDp",err,error)
8590  RETURN 1
8591  END SUBROUTINE distributedvector_vecdotdp
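 ! Editorial example (not part of the original source): a minimal sketch of calling the
 ! double precision dot product above. Note that the CMISS branch loops over the whole
 ! local data array (including any ghost entries) and performs no inter-rank reduction,
 ! so it returns the dot product on this computational node only, while the PETSc branch
 ! defers to petsc_vecdot (PETSc's VecDot, a collective operation in standard PETSc).
 ! The caller below and its names are illustrative only.
 !
 !   SUBROUTINE exampleVecDot(u,v,uDotV,err,error,*)
 !     TYPE(distributed_vector_type), POINTER :: u !<A finished DP distributed vector
 !     TYPE(distributed_vector_type), POINTER :: v !<A finished DP distributed vector
 !     REAL(DP), INTENT(OUT) :: uDotV
 !     INTEGER(INTG), INTENT(OUT) :: err
 !     TYPE(varying_string), INTENT(OUT) :: error
 !     CALL distributedvector_vecdotdp(u,v,uDotV,err,error,*999)
 !     RETURN
 ! 999 RETURN 1
 !   END SUBROUTINE exampleVecDot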
8592 
8593  !
8594  !================================================================================================================================
8595  !
8596 
8597  !>Adds values to a distributed integer vector.
8598  SUBROUTINE distributed_vector_values_add_intg(DISTRIBUTED_VECTOR,INDICES,VALUES,ERR,ERROR,*)
8600  !Argument variables
8601  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
8602  INTEGER(INTG), INTENT(IN) :: INDICES(:)
8603  INTEGER(INTG), INTENT(IN) :: VALUES(:)
8604  INTEGER(INTG), INTENT(OUT) :: ERR
8605  TYPE(varying_string), INTENT(OUT) :: ERROR
8606  !Local Variables
8607  INTEGER(INTG) :: i
8608  TYPE(varying_string) :: LOCAL_ERROR
8609 
8610  enters("DISTRIBUTED_VECTOR_VALUES_ADD_INTG",err,error,*999)
8611 
8612  IF(ASSOCIATED(distributed_vector)) THEN
8613  IF(distributed_vector%VECTOR_FINISHED) THEN
8614  IF(SIZE(indices,1)==SIZE(values,1)) THEN
8615  IF(distributed_vector%DATA_TYPE==matrix_vector_intg_type) THEN
8616  SELECT CASE(distributed_vector%LIBRARY_TYPE)
8617  CASE(distributed_matrix_vector_cmiss_type)
8618  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
8619  DO i=1,SIZE(indices,1)
8620  !Allow all values to be added until the dof mappings are fixed. Ghost values that are added will not be propagated.
8621  IF(indices(i)>0.AND.indices(i)<=distributed_vector%CMISS%DATA_SIZE) THEN
8622  distributed_vector%CMISS%DATA_INTG(indices(i))=distributed_vector%CMISS%DATA_INTG(indices(i))+values(i)
8623  ELSE
8624  local_error="Index "//trim(numbertovstring(indices(i),"*",err,error))// &
8625  & " is invalid. The index must be between 1 and "// &
8626  & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
8627  CALL flagerror(local_error,err,error,*999)
8628  ENDIF
8629  ENDDO !i
8630  ELSE
8631  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
8632  ENDIF
8633  CASE(distributed_matrix_vector_petsc_type)
8634  CALL flagerror("Cannot add values for an integer PETSc distributed vector.",err,error,*999)
8635  CASE DEFAULT
8636  local_error="The distributed vector library type of "// &
8637  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
8638  CALL flagerror(local_error,err,error,*999)
8639  END SELECT
8640  ELSE
8641  local_error="The distributed data type of "// &
8642  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
8643  & " does not correspond to the integer data type of the given values."
8644  CALL flagerror(local_error,err,error,*999)
8645  ENDIF
8646  ELSE
8647  local_error="The size of the indices array ("//trim(numbertovstring(SIZE(indices,1),"*",err,error))// &
8648  & ") does not conform to the size of the values array ("//trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
8649  CALL flagerror(local_error,err,error,*999)
8650  ENDIF
8651  ELSE
8652  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
8653  ENDIF
8654  ELSE
8655  CALL flagerror("Distributed vector is not associated.",err,error,*999)
8656  ENDIF
8657 
8658  exits("DISTRIBUTED_VECTOR_VALUES_ADD_INTG")
8659  RETURN
8660 999 errorsexits("DISTRIBUTED_VECTOR_VALUES_ADD_INTG",err,error)
8661  RETURN 1
8662  END SUBROUTINE distributed_vector_values_add_intg
8663 
8664  !
8665  !================================================================================================================================
8666  !
8667 
8668  !>Adds one value to a distributed integer vector.
8669  SUBROUTINE distributed_vector_values_add_intg1(DISTRIBUTED_VECTOR,INDEX,VALUE,ERR,ERROR,*)
8671  !Argument variables
8672  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
8673  INTEGER(INTG), INTENT(IN) :: INDEX
8674  INTEGER(INTG), INTENT(IN) :: VALUE
8675  INTEGER(INTG), INTENT(OUT) :: ERR
8676  TYPE(varying_string), INTENT(OUT) :: ERROR
8677  !Local Variables
8678  TYPE(varying_string) :: LOCAL_ERROR
8679 
8680  enters("DISTRIBUTED_VECTOR_VALUES_ADD_INTG1",err,error,*999)
8681 
8682  IF(ASSOCIATED(distributed_vector)) THEN
8683  IF(distributed_vector%VECTOR_FINISHED) THEN
8684  IF(distributed_vector%DATA_TYPE==matrix_vector_intg_type) THEN
8685  SELECT CASE(distributed_vector%LIBRARY_TYPE)
8686  CASE(distributed_matrix_vector_cmiss_type)
8687  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
8688  !Allow all values to be added until the dof mappings are fixed. Ghost values that are added will not be propagated.
8689  IF(index>0.AND.index<=distributed_vector%CMISS%DATA_SIZE) THEN
8690  distributed_vector%CMISS%DATA_INTG(index)=distributed_vector%CMISS%DATA_INTG(index)+VALUE
8691  ELSE
8692  local_error="Index "//trim(numbertovstring(index,"*",err,error))// &
8693  & " is invalid. The index must be between 1 and "// &
8694  & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
8695  CALL flagerror(local_error,err,error,*999)
8696  ENDIF
8697  ELSE
8698  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
8699  ENDIF
8700  CASE(distributed_matrix_vector_petsc_type)
8701  CALL flagerror("Cannot add values for an integer PETSc distributed vector.",err,error,*999)
8702  CASE DEFAULT
8703  local_error="The distributed vector library type of "// &
8704  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
8705  CALL flagerror(local_error,err,error,*999)
8706  END SELECT
8707  ELSE
8708  local_error="The distributed data type of "// &
8709  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
8710  & " does not correspond to the integer data type of the given value."
8711  CALL flagerror(local_error,err,error,*999)
8712  ENDIF
8713  ELSE
8714  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
8715  ENDIF
8716  ELSE
8717  CALL flagerror("Distributed vector is not associated.",err,error,*999)
8718  ENDIF
8719 
8720  exits("DISTRIBUTED_VECTOR_VALUES_ADD_INTG1")
8721  RETURN
8722 999 errorsexits("DISTRIBUTED_VECTOR_VALUES_ADD_INTG1",err,error)
8723  RETURN 1
8724  END SUBROUTINE distributed_vector_values_add_intg1
8725 
8726  !
8727  !================================================================================================================================
8728  !
8729 
8730  !>Adds values to a distributed single precision vector.
8731  SUBROUTINE distributed_vector_values_add_sp(DISTRIBUTED_VECTOR,INDICES,VALUES,ERR,ERROR,*)
8733  !Argument variables
8734  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
8735  INTEGER(INTG), INTENT(IN) :: INDICES(:)
8736  REAL(SP), INTENT(IN) :: VALUES(:)
8737  INTEGER(INTG), INTENT(OUT) :: ERR
8738  TYPE(varying_string), INTENT(OUT) :: ERROR
8739  !Local Variables
8740  INTEGER(INTG) :: i
8741  TYPE(varying_string) :: LOCAL_ERROR
8742 
8743  enters("DISTRIBUTED_VECTOR_VALUES_ADD_SP",err,error,*999)
8744 
8745  IF(ASSOCIATED(distributed_vector)) THEN
8746  IF(distributed_vector%VECTOR_FINISHED) THEN
8747  IF(SIZE(indices,1)==SIZE(values,1)) THEN
8748  IF(distributed_vector%DATA_TYPE==matrix_vector_sp_type) THEN
8749  SELECT CASE(distributed_vector%LIBRARY_TYPE)
8750  CASE(distributed_matrix_vector_cmiss_type)
8751  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
8752  DO i=1,SIZE(indices,1)
8753  !Allow all values to be added until the dof mappings are fixed. Ghost values that are added will not be propagated.
8754  IF(indices(i)>0.AND.indices(i)<=distributed_vector%CMISS%DATA_SIZE) THEN
8755  distributed_vector%CMISS%DATA_SP(indices(i))=distributed_vector%CMISS%DATA_SP(indices(i))+values(i)
8756  ELSE
8757  local_error="Index "//trim(numbertovstring(indices(i),"*",err,error))// &
8758  & " is invalid. The index must be between 1 and "// &
8759  & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
8760  CALL flagerror(local_error,err,error,*999)
8761  ENDIF
8762  ENDDO !i
8763  ELSE
8764  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
8765  ENDIF
8766  CASE(distributed_matrix_vector_petsc_type)
8767  CALL flagerror("Cannot add values for a single precision PETSc distributed vector.",err,error,*999)
8768  CASE DEFAULT
8769  local_error="The distributed vector library type of "// &
8770  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
8771  CALL flagerror(local_error,err,error,*999)
8772  END SELECT
8773  ELSE
8774  local_error="The distributed data type of "// &
8775  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
8776  & " does not correspond to the single precision data type of the given values."
8777  CALL flagerror(local_error,err,error,*999)
8778  ENDIF
8779  ELSE
8780  local_error="The size of the indices array ("//trim(numbertovstring(SIZE(indices,1),"*",err,error))// &
8781  & ") does not conform to the size of the values array ("//trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
8782  CALL flagerror(local_error,err,error,*999)
8783  ENDIF
8784  ELSE
8785  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
8786  ENDIF
8787  ELSE
8788  CALL flagerror("Distributed vector is not associated.",err,error,*999)
8789  ENDIF
8790 
8791  exits("DISTRIBUTED_VECTOR_VALUES_ADD_SP")
8792  RETURN
8793 999 errorsexits("DISTRIBUTED_VECTOR_VALUES_ADD_SP",err,error)
8794  RETURN 1
8795  END SUBROUTINE distributed_vector_values_add_sp
8796 
8797  !
8798  !================================================================================================================================
8799  !
8800 
8801  !>Adds one value to a distributed single precision vector.
8802  SUBROUTINE distributed_vector_values_add_sp1(DISTRIBUTED_VECTOR,INDEX,VALUE,ERR,ERROR,*)
8804  !Argument variables
8805  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
8806  INTEGER(INTG), INTENT(IN) :: INDEX
8807  REAL(SP), INTENT(IN) :: VALUE
8808  INTEGER(INTG), INTENT(OUT) :: ERR
8809  TYPE(varying_string), INTENT(OUT) :: ERROR
8810  !Local Variables
8811  TYPE(varying_string) :: LOCAL_ERROR
8812 
8813  enters("DISTRIBUTED_VECTOR_VALUES_ADD_SP1",err,error,*999)
8814 
8815  IF(ASSOCIATED(distributed_vector)) THEN
8816  IF(distributed_vector%VECTOR_FINISHED) THEN
8817  IF(distributed_vector%DATA_TYPE==matrix_vector_sp_type) THEN
8818  SELECT CASE(distributed_vector%LIBRARY_TYPE)
8819  CASE(distributed_matrix_vector_cmiss_type)
8820  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
8821  !Allow all values to be added until the dof mappings are fixed. Ghost values that are added will not be propagated.
8822  IF(index>0.AND.index<=distributed_vector%CMISS%DATA_SIZE) THEN
8823  distributed_vector%CMISS%DATA_SP(index)=distributed_vector%CMISS%DATA_SP(index)+VALUE
8824  ELSE
8825  local_error="Index "//trim(numbertovstring(index,"*",err,error))// &
8826  & " is invalid. The index must be between 1 and "// &
8827  & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
8828  CALL flagerror(local_error,err,error,*999)
8829  ENDIF
8830  ELSE
8831  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
8832  ENDIF
8833  CASE(distributed_matrix_vector_petsc_type)
8834  CALL flagerror("Cannot add values for a single precision PETSc distributed vector.",err,error,*999)
8835  CASE DEFAULT
8836  local_error="The distributed vector library type of "// &
8837  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
8838  CALL flagerror(local_error,err,error,*999)
8839  END SELECT
8840  ELSE
8841  local_error="The distributed data type of "// &
8842  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
8843  & " does not correspond to the single precision data type of the given value."
8844  CALL flagerror(local_error,err,error,*999)
8845  ENDIF
8846  ELSE
8847  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
8848  ENDIF
8849  ELSE
8850  CALL flagerror("Distributed vector is not associated.",err,error,*999)
8851  ENDIF
8852 
8853  exits("DISTRIBUTED_VECTOR_VALUES_ADD_SP1")
8854  RETURN
8855 999 errorsexits("DISTRIBUTED_VECTOR_VALUES_ADD_SP1",err,error)
8856  RETURN 1
8857  END SUBROUTINE distributed_vector_values_add_sp1
8858 
8859  !
8860  !================================================================================================================================
8861  !
8862 
8863  !>Adds values to a distributed double precision vector.
8864  SUBROUTINE distributed_vector_values_add_dp(DISTRIBUTED_VECTOR,INDICES,VALUES,ERR,ERROR,*)
8866  !Argument variables
8867  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
8868  INTEGER(INTG), INTENT(IN) :: INDICES(:)
8869  REAL(DP), INTENT(IN) :: VALUES(:)
8870  INTEGER(INTG), INTENT(OUT) :: ERR
8871  TYPE(varying_string), INTENT(OUT) :: ERROR
8872  !Local Variables
8873  INTEGER(INTG) :: i
8874  TYPE(varying_string) :: LOCAL_ERROR
8875 
8876  enters("DISTRIBUTED_VECTOR_VALUES_ADD_DP",err,error,*999)
8877 
8878  IF(ASSOCIATED(distributed_vector)) THEN
8879  IF(distributed_vector%VECTOR_FINISHED) THEN
8880  IF(SIZE(indices,1)==SIZE(values,1)) THEN
8881  IF(distributed_vector%DATA_TYPE==matrix_vector_dp_type) THEN
8882  SELECT CASE(distributed_vector%LIBRARY_TYPE)
8883  CASE(distributed_matrix_vector_cmiss_type)
8884  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
8885  DO i=1,SIZE(indices,1)
8886  !Allow all values to be added until the dof mappings are fixed. Ghost values that are added will not be propagated.
8887  IF(indices(i)>0.AND.indices(i)<=distributed_vector%CMISS%DATA_SIZE) THEN
8888  distributed_vector%CMISS%DATA_DP(indices(i))=distributed_vector%CMISS%DATA_DP(indices(i))+values(i)
8889  ELSE
8890  local_error="Index "//trim(numbertovstring(indices(i),"*",err,error))// &
8891  & " is invalid. The index must be between 1 and "// &
8892  & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
8893  CALL flagerror(local_error,err,error,*999)
8894  ENDIF
8895  ENDDO !i
8896  ELSE
8897  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
8898  ENDIF
8899  CASE(distributed_matrix_vector_petsc_type)
8900  IF(ASSOCIATED(distributed_vector%PETSC)) THEN
8901  IF(distributed_vector%PETSC%USE_OVERRIDE_VECTOR) THEN
8902  CALL petsc_vecsetvalues(distributed_vector%PETSC%OVERRIDE_VECTOR,SIZE(indices,1),distributed_vector%PETSC% &
8903  & global_numbers(indices),values,petsc_add_values,err,error,*999)
8904  ELSE
8905  CALL petsc_vecsetvalues(distributed_vector%PETSC%VECTOR,SIZE(indices,1),distributed_vector%PETSC% &
8906  & global_numbers(indices),values,petsc_add_values,err,error,*999)
8907  ENDIF
8908  ELSE
8909  CALL flagerror("Distributed vector PETSc is not associated.",err,error,*999)
8910  ENDIF
8911  CASE DEFAULT
8912  local_error="The distributed vector library type of "// &
8913  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
8914  CALL flagerror(local_error,err,error,*999)
8915  END SELECT
8916  ELSE
8917  local_error="The distributed data type of "// &
8918  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
8919  & " does not correspond to the double precision data type of the given values."
8920  CALL flagerror(local_error,err,error,*999)
8921  ENDIF
8922  ELSE
8923  local_error="The size of the indices array ("//trim(numbertovstring(SIZE(indices,1),"*",err,error))// &
8924  & ") does not conform to the size of the values array ("//trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
8925  CALL flagerror(local_error,err,error,*999)
8926  ENDIF
8927  ELSE
8928  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
8929  ENDIF
8930  ELSE
8931  CALL flagerror("Distributed vector is not associated.",err,error,*999)
8932  ENDIF
8933 
8934  exits("DISTRIBUTED_VECTOR_VALUES_ADD_DP")
8935  RETURN
8936 999 errorsexits("DISTRIBUTED_VECTOR_VALUES_ADD_DP",err,error)
8937  RETURN 1
8938  END SUBROUTINE distributed_vector_values_add_dp
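 ! Editorial note (not part of the original source): for the CMISS library type the
 ! additions above are applied immediately to the local data array, whereas for the
 ! PETSc library type petsc_vecsetvalues with petsc_add_values only stages the
 ! contributions against the vector's GLOBAL_NUMBERS; in standard PETSc usage the
 ! vector must then be assembled (a step performed elsewhere, outside this section)
 ! before the values can be read back. A minimal sketch of a batched add, with
 ! illustrative names, assuming vector points at a finished DP distributed vector:
 !
 !   INTEGER(INTG) :: rowIndices(3)
 !   REAL(DP) :: rowValues(3)
 !   rowIndices=[1,2,3]
 !   rowValues=[0.5_DP,1.0_DP,1.5_DP]
 !   CALL distributed_vector_values_add_dp(vector,rowIndices,rowValues,err,error,*999)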
8939 
8940  !
8941  !================================================================================================================================
8942  !
8943 
8944  !>Adds one value to a distributed double precision vector.
8945  SUBROUTINE distributed_vector_values_add_dp1(DISTRIBUTED_VECTOR,INDEX,VALUE,ERR,ERROR,*)
8947  !Argument variables
8948  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
8949  INTEGER(INTG), INTENT(IN) :: INDEX
8950  REAL(DP), INTENT(IN) :: VALUE
8951  INTEGER(INTG), INTENT(OUT) :: ERR
8952  TYPE(varying_string), INTENT(OUT) :: ERROR
8953  !Local Variables
8954  REAL(DP) :: PETSC_VALUE(1)
8955  TYPE(varying_string) :: LOCAL_ERROR
8956 
8957  enters("DISTRIBUTED_VECTOR_VALUES_ADD_DP1",err,error,*999)
8958 
8959  IF(ASSOCIATED(distributed_vector)) THEN
8960  IF(distributed_vector%VECTOR_FINISHED) THEN
8961  IF(distributed_vector%DATA_TYPE==matrix_vector_dp_type) THEN
8962  SELECT CASE(distributed_vector%LIBRARY_TYPE)
8963  CASE(distributed_matrix_vector_cmiss_type)
8964  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
8965  !Allow all values to be added until the dof mappings are fixed. Ghost values that are added will not be propagated.
8966  IF(index>0.AND.index<=distributed_vector%CMISS%DATA_SIZE) THEN
8967  distributed_vector%CMISS%DATA_DP(index)=distributed_vector%CMISS%DATA_DP(index)+VALUE
8968  ELSE
8969  local_error="Index "//trim(numbertovstring(index,"*",err,error))// &
8970  & " is invalid. The index must be between 1 and "// &
8971  & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
8972  CALL flagerror(local_error,err,error,*999)
8973  ENDIF
8974  ELSE
8975  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
8976  ENDIF
8977  CASE(distributed_matrix_vector_petsc_type)
8978  IF(ASSOCIATED(distributed_vector%PETSC)) THEN
8979  petsc_value(1)=VALUE
8980  IF(distributed_vector%PETSC%USE_OVERRIDE_VECTOR) THEN
8981  CALL petsc_vecsetvalues(distributed_vector%PETSC%OVERRIDE_VECTOR,1,distributed_vector%PETSC%GLOBAL_NUMBERS(index), &
8982  & petsc_value,petsc_add_values,err,error,*999)
8983  ELSE
8984  CALL petsc_vecsetvalues(distributed_vector%PETSC%VECTOR,1,distributed_vector%PETSC%GLOBAL_NUMBERS(index), &
8985  & petsc_value,petsc_add_values,err,error,*999)
8986  ENDIF
8987  ELSE
8988  CALL flagerror("Distributed vector PETSc is not associated.",err,error,*999)
8989  ENDIF
8990  CASE DEFAULT
8991  local_error="The distributed vector library type of "// &
8992  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
8993  CALL flagerror(local_error,err,error,*999)
8994  END SELECT
8995  ELSE
8996  local_error="The distributed data type of "// &
8997  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
8998  & " does not correspond to the double precision data type of the given value."
8999  CALL flagerror(local_error,err,error,*999)
9000  ENDIF
9001  ELSE
9002  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
9003  ENDIF
9004  ELSE
9005  CALL flagerror("Distributed vector is not associated.",err,error,*999)
9006  ENDIF
9007 
9008  exits("DISTRIBUTED_VECTOR_VALUES_ADD_DP1")
9009  RETURN
9010 999 errorsexits("DISTRIBUTED_VECTOR_VALUES_ADD_DP1",err,error)
9011  RETURN 1
9012  END SUBROUTINE distributed_vector_values_add_dp1
9013 
9014  !
9015  !================================================================================================================================
9016  !
9017 
9018  !>Adds values to a distributed logical vector.
9019  SUBROUTINE distributed_vector_values_add_l(DISTRIBUTED_VECTOR,INDICES,VALUES,ERR,ERROR,*)
9021  !Argument variables
9022  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
9023  INTEGER(INTG), INTENT(IN) :: INDICES(:)
9024  LOGICAL, INTENT(IN) :: VALUES(:)
9025  INTEGER(INTG), INTENT(OUT) :: ERR
9026  TYPE(varying_string), INTENT(OUT) :: ERROR
9027  !Local Variables
9028  INTEGER(INTG) :: i
9029  TYPE(varying_string) :: LOCAL_ERROR
9030 
9031  enters("DISTRIBUTED_VECTOR_VALUES_ADD_L",err,error,*999)
9032 
9033  IF(ASSOCIATED(distributed_vector)) THEN
9034  IF(distributed_vector%VECTOR_FINISHED) THEN
9035  IF(SIZE(indices,1)==SIZE(values,1)) THEN
9036  IF(distributed_vector%DATA_TYPE==matrix_vector_l_type) THEN
9037  SELECT CASE(distributed_vector%LIBRARY_TYPE)
9038  CASE(distributed_matrix_vector_cmiss_type)
9039  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
9040  DO i=1,SIZE(indices,1)
9041  !Allow all values to be added until the dof mappings are fixed. Ghost values that are added will not be propagated.
9042  IF(indices(i)>0.AND.indices(i)<=distributed_vector%CMISS%DATA_SIZE) THEN
9043  distributed_vector%CMISS%DATA_L(indices(i))=distributed_vector%CMISS%DATA_L(indices(i)).OR.values(i)
9044  ELSE
9045  local_error="Index "//trim(numbertovstring(indices(i),"*",err,error))// &
9046  & " is invalid. The index must be between 1 and "// &
9047  & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
9048  CALL flagerror(local_error,err,error,*999)
9049  ENDIF
9050  ENDDO !i
9051  ELSE
9052  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
9053  ENDIF
9054  CASE(distributed_matrix_vector_petsc_type)
9055  CALL flagerror("Cannot add values for a logical PETSc distributed vector.",err,error,*999)
9056  CASE DEFAULT
9057  local_error="The distributed vector library type of "// &
9058  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
9059  CALL flagerror(local_error,err,error,*999)
9060  END SELECT
9061  ELSE
9062  local_error="The distributed data type of "// &
9063  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
9064  & " does not correspond to the logical data type of the given values."
9065  CALL flagerror(local_error,err,error,*999)
9066  ENDIF
9067  ELSE
9068  local_error="The size of the indices array ("//trim(numbertovstring(SIZE(indices,1),"*",err,error))// &
9069  & ") does not conform to the size of the values array ("//trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
9070  CALL flagerror(local_error,err,error,*999)
9071  ENDIF
9072  ELSE
9073  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
9074  ENDIF
9075  ELSE
9076  CALL flagerror("Distributed vector is not associated.",err,error,*999)
9077  ENDIF
9078 
9079  exits("DISTRIBUTED_VECTOR_VALUES_ADD_L")
9080  RETURN
9081 999 errorsexits("DISTRIBUTED_VECTOR_VALUES_ADD_L",err,error)
9082  RETURN 1
9083  END SUBROUTINE distributed_vector_values_add_l
9084 
9085  !
9086  !================================================================================================================================
9087  !
9088 
9089  !>Adds one value to a distributed logical vector.
9090  SUBROUTINE distributed_vector_values_add_l1(DISTRIBUTED_VECTOR,INDEX,VALUE,ERR,ERROR,*)
9092  !Argument variables
9093  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
9094  INTEGER(INTG), INTENT(IN) :: INDEX
9095  LOGICAL, INTENT(IN) :: VALUE
9096  INTEGER(INTG), INTENT(OUT) :: ERR
9097  TYPE(varying_string), INTENT(OUT) :: ERROR
9098  !Local Variables
9099  TYPE(varying_string) :: LOCAL_ERROR
9100 
9101  enters("DISTRIBUTED_VECTOR_VALUES_ADD_L1",err,error,*999)
9102 
9103  IF(ASSOCIATED(distributed_vector)) THEN
9104  IF(distributed_vector%VECTOR_FINISHED) THEN
9105  IF(distributed_vector%DATA_TYPE==matrix_vector_l_type) THEN
9106  SELECT CASE(distributed_vector%LIBRARY_TYPE)
9107  CASE(distributed_matrix_vector_cmiss_type)
9108  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
9109  !Allow all values to be added until the dof mappings are fixed. Ghost values that are added will not be propagated.
9110  IF(index>0.AND.index<=distributed_vector%CMISS%DATA_SIZE) THEN
9111  distributed_vector%CMISS%DATA_L(index)=distributed_vector%CMISS%DATA_L(index).OR.VALUE
9112  ELSE
9113  local_error="Index "//trim(numbertovstring(index,"*",err,error))// &
9114  & " is invalid. The index must be between 1 and "// &
9115  & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
9116  CALL flagerror(local_error,err,error,*999)
9117  ENDIF
9118  ELSE
9119  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
9120  ENDIF
9121  CASE(distributed_matrix_vector_petsc_type)
9122  CALL flagerror("Cannot add values for a logical PETSc distributed vector.",err,error,*999)
9123  CASE DEFAULT
9124  local_error="The distributed vector library type of "// &
9125  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
9126  CALL flagerror(local_error,err,error,*999)
9127  END SELECT
9128  ELSE
9129  local_error="The distributed data type of "// &
9130  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
9131  & " does not correspond to the logical data type of the given value."
9132  CALL flagerror(local_error,err,error,*999)
9133  ENDIF
9134  ELSE
9135  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
9136  ENDIF
9137  ELSE
9138  CALL flagerror("Distributed vector is not associated.",err,error,*999)
9139  ENDIF
9140 
9141  exits("DISTRIBUTED_VECTOR_VALUES_ADD_L1")
9142  RETURN
9143 999 errorsexits("DISTRIBUTED_VECTOR_VALUES_ADD_L1",err,error)
9144  RETURN 1
9145  END SUBROUTINE distributed_vector_values_add_l1
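 ! Editorial note (not part of the original source): for logical vectors the "add"
 ! above is a logical OR with the stored entry, so adding .TRUE. latches an entry to
 ! .TRUE. and adding .FALSE. leaves it unchanged. A minimal sketch, assuming
 ! flagsVector points at a finished logical CMISS distributed vector (the name is
 ! illustrative only):
 !
 !   CALL distributed_vector_values_add_l1(flagsVector,7,.TRUE.,err,error,*999) !DATA_L(7) becomes DATA_L(7).OR..TRUE.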
9146 
9147  !
9148  !================================================================================================================================
9149  !
9150 
9151  !>Gets values in a distributed integer vector.
9152  SUBROUTINE distributed_vector_values_get_intg(DISTRIBUTED_VECTOR,INDICES,VALUES,ERR,ERROR,*)
9154  !Argument variables
9155  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
9156  INTEGER(INTG), INTENT(IN) :: INDICES(:)
9157  INTEGER(INTG), INTENT(OUT) :: VALUES(:)
9158  INTEGER(INTG), INTENT(OUT) :: ERR
9159  TYPE(varying_string), INTENT(OUT) :: ERROR
9160  !Local Variables
9161  INTEGER(INTG) :: i
9162  TYPE(varying_string) :: LOCAL_ERROR
9163 
9164  enters("DISTRIBUTED_VECTOR_VALUES_GET_INTG",err,error,*999)
9165 
9166  IF(ASSOCIATED(distributed_vector)) THEN
9167  IF(distributed_vector%VECTOR_FINISHED) THEN
9168  IF(SIZE(indices,1)==SIZE(values,1)) THEN
9169  IF(distributed_vector%DATA_TYPE==matrix_vector_intg_type) THEN
9170  SELECT CASE(distributed_vector%LIBRARY_TYPE)
9171  CASE(distributed_matrix_vector_cmiss_type)
9172  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
9173  DO i=1,SIZE(indices,1)
9174  IF(indices(i)>0.AND.indices(i)<=distributed_vector%CMISS%DATA_SIZE) THEN
9175  values(i)=distributed_vector%CMISS%DATA_INTG(indices(i))
9176  ELSE
9177  local_error="Index "//trim(numbertovstring(indices(i),"*",err,error))// &
9178  & " is invalid. The index must be between 1 and "// &
9180  & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
9181  CALL flagerror(local_error,err,error,*999)
9182  ENDIF
9183  ENDDO !i
9184  ELSE
9185  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
9186  ENDIF
9187  CASE(distributed_matrix_vector_petsc_type)
9188  CALL flagerror("Cannot get values for an integer PETSc distributed vector.",err,error,*999)
9189  CASE DEFAULT
9190  local_error="The distributed vector library type of "// &
9191  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
9192  CALL flagerror(local_error,err,error,*999)
9193  END SELECT
9194  ELSE
9195  local_error="The distributed data type of "// &
9196  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
9197  & " does not correspond to the integer data type of the given values."
9198  CALL flagerror(local_error,err,error,*999)
9199  ENDIF
9200  ELSE
9201  local_error="The size of the indices array ("//trim(numbertovstring(SIZE(indices,1),"*",err,error))// &
9202  & ") does not conform to the size of the values array ("//trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
9203  CALL flagerror(local_error,err,error,*999)
9204  ENDIF
9205  ELSE
9206  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
9207  ENDIF
9208  ELSE
9209  CALL flagerror("Distributed vector is not associated.",err,error,*999)
9210  ENDIF
9211 
9212  exits("DISTRIBUTED_VECTOR_VALUES_GET_INTG")
9213  RETURN
9214 999 errorsexits("DISTRIBUTED_VECTOR_VALUES_GET_INTG",err,error)
9215  RETURN 1
9216  END SUBROUTINE distributed_vector_values_get_intg
9217 
9218  !
9219  !================================================================================================================================
9220  !
9221 
9222  !>Gets one value in a distributed integer vector.
9223  SUBROUTINE distributed_vector_values_get_intg1(DISTRIBUTED_VECTOR,INDEX,VALUE,ERR,ERROR,*)
9225  !Argument variables
9226  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
9227  INTEGER(INTG), INTENT(IN) :: INDEX
9228  INTEGER(INTG), INTENT(OUT) :: VALUE
9229  INTEGER(INTG), INTENT(OUT) :: ERR
9230  TYPE(varying_string), INTENT(OUT) :: ERROR
9231  !Local Variables
9232  TYPE(varying_string) :: LOCAL_ERROR
9233 
9234  enters("DISTRIBUTED_VECTOR_VALUES_GET_INTG1",err,error,*999)
9235 
9236  IF(ASSOCIATED(distributed_vector)) THEN
9237  IF(distributed_vector%VECTOR_FINISHED) THEN
9238  IF(distributed_vector%DATA_TYPE==matrix_vector_intg_type) THEN
9239  SELECT CASE(distributed_vector%LIBRARY_TYPE)
9240  CASE(distributed_matrix_vector_cmiss_type)
9241  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
9242  IF(index>0.AND.index<=distributed_vector%CMISS%DATA_SIZE) THEN
9243  VALUE=distributed_vector%CMISS%DATA_INTG(index)
9244  ELSE
9245  local_error="Index "//trim(numbertovstring(index,"*",err,error))// &
9246  & " is invalid. The index must be between 1 and "// &
9247  & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
9248  CALL flagerror(local_error,err,error,*999)
9249  ENDIF
9250  ELSE
9251  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
9252  ENDIF
9253  CASE(distributed_matrix_vector_petsc_type)
9254  CALL flagerror("Cannot get values for an integer PETSc distributed vector.",err,error,*999)
9255  CASE DEFAULT
9256  local_error="The distributed vector library type of "// &
9257  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
9258  CALL flagerror(local_error,err,error,*999)
9259  END SELECT
9260  ELSE
9261  local_error="The distributed data type of "// &
9262  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
9263  & " does not correspond to the integer data type of the given value."
9264  CALL flagerror(local_error,err,error,*999)
9265  ENDIF
9266  ELSE
9267  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
9268  ENDIF
9269  ELSE
9270  CALL flagerror("Distributed vector is not associated.",err,error,*999)
9271  ENDIF
9272 
9273  exits("DISTRIBUTED_VECTOR_VALUES_GET_INTG1")
9274  RETURN
9275 999 errorsexits("DISTRIBUTED_VECTOR_VALUES_GET_INTG1",err,error)
9276  RETURN 1
9277  END SUBROUTINE distributed_vector_values_get_intg1
9278 
9279  !
9280  !================================================================================================================================
9281  !
9282 
9283  !>Gets values in a distributed single precision vector.
9284  SUBROUTINE distributed_vector_values_get_sp(DISTRIBUTED_VECTOR,INDICES,VALUES,ERR,ERROR,*)
9286  !Argument variables
9287  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
9288  INTEGER(INTG), INTENT(IN) :: INDICES(:)
9289  REAL(SP), INTENT(OUT) :: VALUES(:)
9290  INTEGER(INTG), INTENT(OUT) :: ERR
9291  TYPE(varying_string), INTENT(OUT) :: ERROR
9292  !Local Variables
9293  INTEGER(INTG) :: i
9294  TYPE(varying_string) :: LOCAL_ERROR
9295 
9296  enters("DISTRIBUTED_VECTOR_VALUES_GET_SP",err,error,*999)
9297 
9298  IF(ASSOCIATED(distributed_vector)) THEN
9299  IF(distributed_vector%VECTOR_FINISHED) THEN
9300  IF(SIZE(indices,1)==SIZE(values,1)) THEN
9301  IF(distributed_vector%DATA_TYPE==matrix_vector_sp_type) THEN
9302  SELECT CASE(distributed_vector%LIBRARY_TYPE)
9303  CASE(distributed_matrix_vector_cmiss_type)
9304  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
9305  DO i=1,SIZE(indices,1)
9306  IF(indices(i)>0.AND.indices(i)<=distributed_vector%CMISS%DATA_SIZE) THEN
9307  values(i)=distributed_vector%CMISS%DATA_SP(indices(i))
9308  ELSE
9309  local_error="Index "//trim(numbertovstring(indices(i),"*",err,error))// &
9310  & " is invalid. The index must be between 1 and "// &
9311  & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
9312  CALL flagerror(local_error,err,error,*999)
9313  ENDIF
9314  ENDDO !i
9315  ELSE
9316  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
9317  ENDIF
9318  CASE(distributed_matrix_vector_petsc_type)
9319  CALL flagerror("Cannot get values for a single precision PETSc distributed vector.",err,error,*999)
9320  CASE DEFAULT
9321  local_error="The distributed vector library type of "// &
9322  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
9323  CALL flagerror(local_error,err,error,*999)
9324  END SELECT
9325  ELSE
9326  local_error="The distributed data type of "// &
9327  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
9328  & " does not correspond to the single precision data type of the given values."
9329  CALL flagerror(local_error,err,error,*999)
9330  ENDIF
9331  ELSE
9332  local_error="The size of the indices array ("//trim(numbertovstring(SIZE(indices,1),"*",err,error))// &
9333  & ") does not conform to the size of the values array ("//trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
9334  CALL flagerror(local_error,err,error,*999)
9335  ENDIF
9336  ELSE
9337  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
9338  ENDIF
9339  ELSE
9340  CALL flagerror("Distributed vector is not associated.",err,error,*999)
9341  ENDIF
9342 
9343  exits("DISTRIBUTED_VECTOR_VALUES_GET_SP")
9344  RETURN
9345 999 errorsexits("DISTRIBUTED_VECTOR_VALUES_GET_SP",err,error)
9346  RETURN 1
9347  END SUBROUTINE distributed_vector_values_get_sp
9348 
9349  !
9350  !================================================================================================================================
9351  !
9352 
9353  !>Gets one value in a distributed single precision vector.
9354  SUBROUTINE distributed_vector_values_get_sp1(DISTRIBUTED_VECTOR,INDEX,VALUE,ERR,ERROR,*)
9356  !Argument variables
9357  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
9358  INTEGER(INTG), INTENT(IN) :: INDEX
9359  REAL(SP), INTENT(OUT) :: VALUE
9360  INTEGER(INTG), INTENT(OUT) :: ERR
9361  TYPE(varying_string), INTENT(OUT) :: ERROR
9362  !Local Variables
9363  TYPE(varying_string) :: LOCAL_ERROR
9364 
9365  enters("DISTRIBUTED_VECTOR_VALUES_GET_SP1",err,error,*999)
9366 
9367  IF(ASSOCIATED(distributed_vector)) THEN
9368  IF(distributed_vector%VECTOR_FINISHED) THEN
9369  IF(distributed_vector%DATA_TYPE==matrix_vector_sp_type) THEN
9370  SELECT CASE(distributed_vector%LIBRARY_TYPE)
9371  CASE(distributed_matrix_vector_cmiss_type)
9372  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
9373  IF(index>0.AND.index<=distributed_vector%CMISS%DATA_SIZE) THEN
9374  VALUE=distributed_vector%CMISS%DATA_SP(index)
9375  ELSE
9376  local_error="Index "//trim(numbertovstring(index,"*",err,error))// &
9377  & " is invalid. The index must be between 1 and "// &
9378  & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
9379  CALL flagerror(local_error,err,error,*999)
9380  ENDIF
9381  ELSE
9382  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
9383  ENDIF
9384  CASE(distributed_matrix_vector_petsc_type)
9385  CALL flagerror("Cannot get values for a single precision PETSc distributed vector.",err,error,*999)
9386  CASE DEFAULT
9387  local_error="The distributed vector library type of "// &
9388  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
9389  CALL flagerror(local_error,err,error,*999)
9390  END SELECT
9391  ELSE
9392  local_error="The distributed data type of "// &
9393  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
9394  & " does not correspond to the single precision data type of the given value."
9395  CALL flagerror(local_error,err,error,*999)
9396  ENDIF
9397  ELSE
9398  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
9399  ENDIF
9400  ELSE
9401  CALL flagerror("Distributed vector is not associated.",err,error,*999)
9402  ENDIF
9403 
9404  exits("DISTRIBUTED_VECTOR_VALUES_GET_SP1")
9405  RETURN
9406 999 errorsexits("DISTRIBUTED_VECTOR_VALUES_GET_SP1",err,error)
9407  RETURN 1
9408  END SUBROUTINE distributed_vector_values_get_sp1
9409 
9410  !
9411  !================================================================================================================================
9412  !
9413 
9414  !>Gets values in a distributed double precision vector.
9415  SUBROUTINE distributed_vector_values_get_dp(DISTRIBUTED_VECTOR,INDICES,VALUES,ERR,ERROR,*)
9417  !Argument variables
9418  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
9419  INTEGER(INTG), INTENT(IN) :: INDICES(:)
9420  REAL(DP), INTENT(OUT) :: VALUES(:)
9421  INTEGER(INTG), INTENT(OUT) :: ERR
9422  TYPE(varying_string), INTENT(OUT) :: ERROR
9423  !Local Variables
9424  INTEGER(INTG) :: i,PETSC_INDICES(size(indices,1))
9425  TYPE(varying_string) :: LOCAL_ERROR
9426 
9427  enters("DISTRIBUTED_VECTOR_VALUES_GET_DP",err,error,*999)
9428 
9429  IF(ASSOCIATED(distributed_vector)) THEN
9430  IF(distributed_vector%VECTOR_FINISHED) THEN
9431  IF(SIZE(indices,1)==SIZE(values,1)) THEN
9432  IF(distributed_vector%DATA_TYPE==matrix_vector_dp_type) THEN
9433  SELECT CASE(distributed_vector%LIBRARY_TYPE)
9434  CASE(distributed_matrix_vector_cmiss_type)
9435  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
9436  DO i=1,SIZE(indices,1)
9437  IF(indices(i)>0.AND.indices(i)<=distributed_vector%CMISS%DATA_SIZE) THEN
9438  values(i)=distributed_vector%CMISS%DATA_DP(indices(i))
9439  ELSE
9440  local_error="Index "//trim(numbertovstring(indices(i),"*",err,error))// &
9441  & " is invalid. The index must be between 1 and "// &
9442  & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
9443  CALL flagerror(local_error,err,error,*999)
9444  ENDIF
9445  ENDDO !i
9446  ELSE
9447  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
9448  ENDIF
9449  CASE(distributed_matrix_vector_petsc_type)
9450  IF(ASSOCIATED(distributed_vector%PETSC)) THEN
9451  DO i=1,SIZE(indices,1)
9452  petsc_indices(i)=distributed_vector%DOMAIN_MAPPING%LOCAL_TO_GLOBAL_MAP(indices(i))-1 !PETSc uses global 0-based indices
9453  ENDDO !i
9454  IF(distributed_vector%PETSC%USE_OVERRIDE_VECTOR) THEN
9455  CALL petsc_vecgetvalues(distributed_vector%PETSC%OVERRIDE_VECTOR,SIZE(indices,1),petsc_indices,values, &
9456  & err,error,*999)
9457  ELSE
9458  CALL petsc_vecgetvalues(distributed_vector%PETSC%VECTOR,SIZE(indices,1),petsc_indices,values,err,error,*999)
9459  ENDIF
9460  ELSE
9461  CALL flagerror("Distributed vector PETSc is not associated.",err,error,*999)
9462  ENDIF
9463  CASE DEFAULT
9464  local_error="The distributed vector library type of "// &
9465  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
9466  CALL flagerror(local_error,err,error,*999)
9467  END SELECT
9468  ELSE
9469  local_error="The distributed data type of "// &
9470  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
9471  & " does not correspond to the double precision data type of the given values."
9472  CALL flagerror(local_error,err,error,*999)
9473  ENDIF
9474  ELSE
9475  local_error="The size of the indices array ("//trim(numbertovstring(SIZE(indices,1),"*",err,error))// &
9476  & ") does not conform to the size of the values array ("//trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
9477  CALL flagerror(local_error,err,error,*999)
9478  ENDIF
9479  ELSE
9480  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
9481  ENDIF
9482  ELSE
9483  CALL flagerror("Distributed vector is not associated.",err,error,*999)
9484  ENDIF
9485 
9486  exits("DISTRIBUTED_VECTOR_VALUES_GET_DP")
9487  RETURN
9488 999 errorsexits("DISTRIBUTED_VECTOR_VALUES_GET_DP",err,error)
9489  RETURN 1
9490  END SUBROUTINE distributed_vector_values_get_dp
9491 
9492  !
9493  !================================================================================================================================
9494  !
9495 
9496  !>Gets one value in a distributed double precision vector.
9497  SUBROUTINE distributed_vector_values_get_dp1(DISTRIBUTED_VECTOR,INDEX,VALUE,ERR,ERROR,*)
9499  !Argument variables
9500  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
9501  INTEGER(INTG), INTENT(IN) :: INDEX
9502  REAL(DP), INTENT(OUT) :: VALUE
9503  INTEGER(INTG), INTENT(OUT) :: ERR
9504  TYPE(varying_string), INTENT(OUT) :: ERROR
9505  !Local Variables
9506  INTEGER(INTG) :: PETSC_INDEX(1)
9507  REAL(DP) :: PETSC_VALUE(1)
9508  TYPE(varying_string) :: LOCAL_ERROR
9509 
9510  enters("DISTRIBUTED_VECTOR_VALUES_GET_DP1",err,error,*999)
9511 
9512  IF(ASSOCIATED(distributed_vector)) THEN
9513  IF(distributed_vector%VECTOR_FINISHED) THEN
9514  IF(distributed_vector%DATA_TYPE==matrix_vector_dp_type) THEN
9515  SELECT CASE(distributed_vector%LIBRARY_TYPE)
9516  CASE(distributed_matrix_vector_cmiss_type)
9517  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
9518  IF(index>0.AND.index<=distributed_vector%CMISS%DATA_SIZE) THEN
9519  VALUE=distributed_vector%CMISS%DATA_DP(index)
9520  ELSE
9521  local_error="Index "//trim(numbertovstring(index,"*",err,error))// &
9522  & " is invalid. The index must be between 1 and "// &
9523  & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
9524  CALL flagerror(local_error,err,error,*999)
9525  ENDIF
9526  ELSE
9527  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
9528  ENDIF
9529  CASE(distributed_matrix_vector_petsc_type)
9530  IF(ASSOCIATED(distributed_vector%PETSC)) THEN
9531  petsc_index=distributed_vector%DOMAIN_MAPPING%LOCAL_TO_GLOBAL_MAP(index)-1 !PETSc uses global 0-based indices
9532  IF(distributed_vector%PETSC%USE_OVERRIDE_VECTOR) THEN
9533  CALL petsc_vecgetvalues(distributed_vector%PETSC%OVERRIDE_VECTOR,1,petsc_index,petsc_value,err,error,*999)
9534  ELSE
9535  CALL petsc_vecgetvalues(distributed_vector%PETSC%VECTOR,1,petsc_index,petsc_value,err,error,*999)
9536  ENDIF
9537  VALUE=petsc_value(1)
9538  ELSE
9539  CALL flagerror("Distributed vector PETSc is not associated.",err,error,*999)
9540  ENDIF
9541  CASE DEFAULT
9542  local_error="The distributed vector library type of "// &
9543  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
9544  CALL flagerror(local_error,err,error,*999)
9545  END SELECT
9546  ELSE
9547  local_error="The distributed data type of "// &
9548  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
9549  & " does not correspond to the double precision data type of the given value."
9550  CALL flagerror(local_error,err,error,*999)
9551  ENDIF
9552  ELSE
9553  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
9554  ENDIF
9555  ELSE
9556  CALL flagerror("Distributed vector is not associated.",err,error,*999)
9557  ENDIF
9558 
9559  exits("DISTRIBUTED_VECTOR_VALUES_GET_DP1")
9560  RETURN
9561 999 errorsexits("DISTRIBUTED_VECTOR_VALUES_GET_DP1",err,error)
9562  RETURN 1
9563  END SUBROUTINE distributed_vector_values_get_dp1
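 ! Editorial note (not part of the original source): the PETSc get branches above
 ! convert a 1-based local index into PETSc's 0-based global numbering through the
 ! domain mapping, i.e. petscIndex = LOCAL_TO_GLOBAL_MAP(index)-1. For example, if
 ! LOCAL_TO_GLOBAL_MAP(5) were 42, local index 5 would be read from PETSc global
 ! position 41 (the map value 42 is illustrative only).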
9564 
9565  !
9566  !================================================================================================================================
9567  !
9568 
9569  !>Gets values in a distributed logical vector.
9570  SUBROUTINE distributed_vector_values_get_l(DISTRIBUTED_VECTOR,INDICES,VALUES,ERR,ERROR,*)
9572  !Argument variables
9573  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
9574  INTEGER(INTG), INTENT(IN) :: INDICES(:)
9575  LOGICAL, INTENT(OUT) :: VALUES(:)
9576  INTEGER(INTG), INTENT(OUT) :: ERR
9577  TYPE(varying_string), INTENT(OUT) :: ERROR
9578  !Local Variables
9579  INTEGER(INTG) :: i
9580  TYPE(varying_string) :: LOCAL_ERROR
9581 
9582  enters("DISTRIBUTED_VECTOR_VALUES_GET_L",err,error,*999)
9583 
9584  IF(ASSOCIATED(distributed_vector)) THEN
9585  IF(distributed_vector%VECTOR_FINISHED) THEN
9586  IF(SIZE(indices,1)==SIZE(values,1)) THEN
9587  IF(distributed_vector%DATA_TYPE==matrix_vector_l_type) THEN
9588  SELECT CASE(distributed_vector%LIBRARY_TYPE)
9589  CASE(distributed_matrix_vector_cmiss_type)
9590  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
9591  DO i=1,SIZE(indices,1)
9592  IF(indices(i)>0.AND.indices(i)<=distributed_vector%CMISS%DATA_SIZE) THEN
9593  values(i)=distributed_vector%CMISS%DATA_L(indices(i))
9594  ELSE
9595  local_error="Index "//trim(numbertovstring(indices(i),"*",err,error))// &
9596  & " is invalid. The index must be between 1 and "// &
9597  & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
9598  CALL flagerror(local_error,err,error,*999)
9599  ENDIF
9600  ENDDO !i
9601  ELSE
9602  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
9603  ENDIF
9604  CASE(distributed_matrix_vector_petsc_type)
9605  CALL flagerror("Cannot get values for a logical PETSc distributed vector.",err,error,*999)
9606  CASE DEFAULT
9607  local_error="The distributed vector library type of "// &
9608  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
9609  CALL flagerror(local_error,err,error,*999)
9610  END SELECT
9611  ELSE
9612  local_error="The distributed data type of "// &
9613  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
9614  & " does not correspond to the logical data type of the given values."
9615  CALL flagerror(local_error,err,error,*999)
9616  ENDIF
9617  ELSE
9618  local_error="The size of the indices array ("//trim(numbertovstring(SIZE(indices,1),"*",err,error))// &
9619  & ") does not conform to the size of the values array ("//trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
9620  CALL flagerror(local_error,err,error,*999)
9621  ENDIF
9622  ELSE
9623  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
9624  ENDIF
9625  ELSE
9626  CALL flagerror("Distributed vector is not associated.",err,error,*999)
9627  ENDIF
9628 
9629  exits("DISTRIBUTED_VECTOR_VALUES_GET_L")
9630  RETURN
9631 999 errorsexits("DISTRIBUTED_VECTOR_VALUES_GET_L",err,error)
9632  RETURN 1
9633  END SUBROUTINE distributed_vector_values_get_l
9634 
9635  !
9636  !================================================================================================================================
9637  !
9638 
9639  !>Gets one value in a distributed logical vector.
9640  SUBROUTINE distributed_vector_values_get_l1(DISTRIBUTED_VECTOR,INDEX,VALUE,ERR,ERROR,*)
9642  !Argument variables
9643  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
9644  INTEGER(INTG), INTENT(IN) :: INDEX
9645  LOGICAL, INTENT(OUT) :: VALUE
9646  INTEGER(INTG), INTENT(OUT) :: ERR
9647  TYPE(varying_string), INTENT(OUT) :: ERROR
9648  !Local Variables
9649  TYPE(varying_string) :: LOCAL_ERROR
9650 
9651  enters("DISTRIBUTED_VECTOR_VALUES_GET_L1",err,error,*999)
9652 
9653  IF(ASSOCIATED(distributed_vector)) THEN
9654  IF(distributed_vector%VECTOR_FINISHED) THEN
9655  IF(distributed_vector%DATA_TYPE==matrix_vector_l_type) THEN
9656  SELECT CASE(distributed_vector%LIBRARY_TYPE)
9657  CASE(distributed_matrix_vector_cmiss_type)
9658  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
9659  IF(index>0.AND.index<=distributed_vector%CMISS%DATA_SIZE) THEN
9660  VALUE=distributed_vector%CMISS%DATA_L(index)
9661  ELSE
9662  local_error="Index "//trim(numbertovstring(index,"*",err,error))// &
9663  & " is invalid. The index must be between 1 and "// &
9664  & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
9665  CALL flagerror(local_error,err,error,*999)
9666  ENDIF
9667  ELSE
9668  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
9669  ENDIF
9670  CASE(distributed_matrix_vector_petsc_type)
9671  CALL flagerror("Cannot get values for a logical PETSc distributed vector.",err,error,*999)
9672  CASE DEFAULT
9673  local_error="The distributed vector library type of "// &
9674  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
9675  CALL flagerror(local_error,err,error,*999)
9676  END SELECT
9677  ELSE
9678  local_error="The distributed data type of "// &
9679  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
9680  & " does not correspond to the logical data type of the given value."
9681  CALL flagerror(local_error,err,error,*999)
9682  ENDIF
9683  ELSE
9684  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
9685  ENDIF
9686  ELSE
9687  CALL flagerror("Distributed vector is not associated.",err,error,*999)
9688  ENDIF
9689 
9690  exits("DISTRIBUTED_VECTOR_VALUES_GET_L1")
9691  RETURN
9692 999 errorsexits("DISTRIBUTED_VECTOR_VALUES_GET_L1",err,error)
9693  RETURN 1
9694  END SUBROUTINE distributed_vector_values_get_l1
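 ! Editorial example (not part of the original source): a minimal sketch of a batched
 ! get on a CMISS-type integer distributed vector. The indices and values arrays must
 ! have the same length and the indices are 1-based local numbers bounded by the
 ! vector's DATA_SIZE; the names below are illustrative only.
 !
 !   INTEGER(INTG) :: getIndices(3),getValues(3)
 !   getIndices=[1,2,3]
 !   CALL distributed_vector_values_get_intg(intgVector,getIndices,getValues,err,error,*999)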
9695 
9696  !
9697  !================================================================================================================================
9698  !
9699 
9700  !>Sets values in a distributed integer vector.
9701  SUBROUTINE distributed_vector_values_set_intg(DISTRIBUTED_VECTOR,INDICES,VALUES,ERR,ERROR,*)
9703  !Argument variables
9704  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
9705  INTEGER(INTG), INTENT(IN) :: INDICES(:)
9706  INTEGER(INTG), INTENT(IN) :: VALUES(:)
9707  INTEGER(INTG), INTENT(OUT) :: ERR
9708  TYPE(varying_string), INTENT(OUT) :: ERROR
9709  !Local Variables
9710  INTEGER(INTG) :: i
9711  TYPE(varying_string) :: LOCAL_ERROR
9712 
9713  enters("DISTRIBUTED_VECTOR_VALUES_SET_INTG",err,error,*999)
9714 
9715  IF(ASSOCIATED(distributed_vector)) THEN
9716  IF(distributed_vector%VECTOR_FINISHED) THEN
9717  IF(SIZE(indices,1)==SIZE(values,1)) THEN
9718  IF(distributed_vector%DATA_TYPE==matrix_vector_intg_type) THEN
9719  SELECT CASE(distributed_vector%LIBRARY_TYPE)
9720  CASE(distributed_matrix_vector_cmiss_type)
9721  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
9722  DO i=1,SIZE(indices,1)
9723  !Allow all values to be set until the dof mappings are fixed. Ghost values that are set will not be propagated.
9724  IF(indices(i)>0.AND.indices(i)<=distributed_vector%CMISS%DATA_SIZE) THEN
9725  distributed_vector%CMISS%DATA_INTG(indices(i))=values(i)
9726  ELSE
9727  local_error="Index "//trim(numbertovstring(indices(i),"*",err,error))// &
9728  & " is invalid. The index must be between 1 and "// &
9729  & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
9730  CALL flagerror(local_error,err,error,*999)
9731  ENDIF
9732  ENDDO !i
9733  ELSE
9734  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
9735  ENDIF
9736  CASE(distributed_matrix_vector_petsc_type)
9737  CALL flagerror("Cannot set values for an integer PETSc distributed vector.",err,error,*999)
9738  CASE DEFAULT
9739  local_error="The distributed vector library type of "// &
9740  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
9741  CALL flagerror(local_error,err,error,*999)
9742  END SELECT
9743  ELSE
9744  local_error="The distributed data type of "// &
9745  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
9746  & " does not correspond to the integer data type of the given values."
9747  CALL flagerror(local_error,err,error,*999)
9748  ENDIF
9749  ELSE
9750  local_error="The size of the indices array ("//trim(numbertovstring(SIZE(indices,1),"*",err,error))// &
9751  & ") does not conform to the size of the values array ("//trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
9752  CALL flagerror(local_error,err,error,*999)
9753  ENDIF
9754  ELSE
9755  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
9756  ENDIF
9757  ELSE
9758  CALL flagerror("Distributed vector is not associated.",err,error,*999)
9759  ENDIF
9760 
9761  exits("DISTRIBUTED_VECTOR_VALUES_SET_INTG")
9762  RETURN
9763 999 errorsexits("DISTRIBUTED_VECTOR_VALUES_SET_INTG",err,error)
9764  RETURN 1
9765  END SUBROUTINE distributed_vector_values_set_intg
9766 
9767  !
9768  !================================================================================================================================
9769  !
9770 
9771  !>Sets one value in a distributed integer vector.
9772  SUBROUTINE distributed_vector_values_set_intg1(DISTRIBUTED_VECTOR,INDEX,VALUE,ERR,ERROR,*)
9774  !Argument variables
9775  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
9776  INTEGER(INTG), INTENT(IN) :: INDEX
9777  INTEGER(INTG), INTENT(IN) :: VALUE
9778  INTEGER(INTG), INTENT(OUT) :: ERR
9779  TYPE(varying_string), INTENT(OUT) :: ERROR
9780  !Local Variables
9781  TYPE(varying_string) :: LOCAL_ERROR
9782 
9783  enters("DISTRIBUTED_VECTOR_VALUES_SET_INTG1",err,error,*999)
9784 
9785  IF(ASSOCIATED(distributed_vector)) THEN
9786  IF(distributed_vector%VECTOR_FINISHED) THEN
9787  IF(distributed_vector%DATA_TYPE==matrix_vector_intg_type) THEN
9788  SELECT CASE(distributed_vector%LIBRARY_TYPE)
9789  CASE(distributed_matrix_vector_cmiss_type)
9790  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
9791  !Allow all values to be set until the dof mappings are fixed. Ghost values that are set will not be propagated.
9792  IF(index>0.AND.index<=distributed_vector%CMISS%DATA_SIZE) THEN
9793  distributed_vector%CMISS%DATA_INTG(index)=VALUE
9794  ELSE
9795  local_error="Index "//trim(numbertovstring(index,"*",err,error))// &
9796  & " is invalid. The index must be between 1 and "// &
9797  & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
9798  CALL flagerror(local_error,err,error,*999)
9799  ENDIF
9800  ELSE
9801  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
9802  ENDIF
9804  CALL flagerror("Cannot set values for an integer PETSc distributed vector.",err,error,*999)
9805  CASE DEFAULT
9806  local_error="The distributed vector library type of "// &
9807  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
9808  CALL flagerror(local_error,err,error,*999)
9809  END SELECT
9810  ELSE
9811  local_error="The distributed data type of "// &
9812  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
9813  & " does not correspond to the integer data type of the given value."
9814  CALL flagerror(local_error,err,error,*999)
9815  ENDIF
9816  ELSE
9817  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
9818  ENDIF
9819  ELSE
9820  CALL flagerror("Distributed vector is not associated.",err,error,*999)
9821  ENDIF
9822 
9823  exits("DISTRIBUTED_VECTOR_VALUES_SET_INTG1")
9824  RETURN
9825 999 errorsexits("DISTRIBUTED_VECTOR_VALUES_SET_INTG1",err,error)
9826  RETURN 1
9827  END SUBROUTINE distributed_vector_values_set_intg1
9828 
9829  !
9830  !================================================================================================================================
9831  !
9832 
9834  SUBROUTINE distributed_vector_values_set_sp(DISTRIBUTED_VECTOR,INDICES,VALUES,ERR,ERROR,*)
9836  !Argument variables
9837  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
9838  INTEGER(INTG), INTENT(IN) :: INDICES(:)
9839  REAL(SP), INTENT(IN) :: VALUES(:)
9840  INTEGER(INTG), INTENT(OUT) :: ERR
9841  TYPE(varying_string), INTENT(OUT) :: ERROR
9842  !Local Variables
9843  INTEGER(INTG) :: i
9844  TYPE(varying_string) :: LOCAL_ERROR
9845 
9846  enters("DISTRIBUTED_VECTOR_VALUES_SET_SP",err,error,*999)
9847 
9848  IF(ASSOCIATED(distributed_vector)) THEN
9849  IF(distributed_vector%VECTOR_FINISHED) THEN
9850  IF(SIZE(indices,1)==SIZE(values,1)) THEN
9851  IF(distributed_vector%DATA_TYPE==matrix_vector_sp_type) THEN
9852  SELECT CASE(distributed_vector%LIBRARY_TYPE)
9853  CASE(distributed_matrix_vector_cmiss_type)
9854  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
9855  DO i=1,SIZE(indices,1)
9856  !Allow all values set until dof mappings fixed. Ghost values that are set will not be propagated
9857  IF(indices(i)>0.AND.indices(i)<=distributed_vector%CMISS%DATA_SIZE) THEN
9858  distributed_vector%CMISS%DATA_SP(indices(i))=values(i)
9859  ELSE
9860  local_error="Index "//trim(numbertovstring(indices(i),"*",err,error))// &
9861  & " is invalid. The index must be between 1 and "// &
9862  & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
9863  CALL flagerror(local_error,err,error,*999)
9864  ENDIF
9865  ENDDO !i
9866  ELSE
9867  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
9868  ENDIF
9870  CALL flagerror("Cannot get values for a single precision PETSc distributed vector.",err,error,*999)
9871  CASE DEFAULT
9872  local_error="The distributed vector library type of "// &
9873  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
9874  CALL flagerror(local_error,err,error,*999)
9875  END SELECT
9876  ELSE
9877  local_error="The distributed data type of "// &
9878  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
9879  & " does not correspond to the single precision data type of the given values."
9880  CALL flagerror(local_error,err,error,*999)
9881  ENDIF
9882  ELSE
9883  local_error="The size of the indices array ("//trim(numbertovstring(SIZE(indices,1),"*",err,error))// &
9884  & ") does not conform to the size of the values array ("//trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
9885  CALL flagerror(local_error,err,error,*999)
9886  ENDIF
9887  ELSE
9888  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
9889  ENDIF
9890  ELSE
9891  CALL flagerror("Distributed vector is not associated.",err,error,*999)
9892  ENDIF
9893 
9894  exits("DISTRIBUTED_VECTOR_VALUES_SET_SP")
9895  RETURN
9896 999 errorsexits("DISTRIBUTED_VECTOR_VALUES_SET_SP",err,error)
9897  RETURN 1
9898  END SUBROUTINE distributed_vector_values_set_sp
9899 
9900  !
9901  !================================================================================================================================
9902  !
9903 
9905  SUBROUTINE distributed_vector_values_set_sp1(DISTRIBUTED_VECTOR,INDEX,VALUE,ERR,ERROR,*)
9907  !Argument variables
9908  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
9909  INTEGER(INTG), INTENT(IN) :: INDEX
9910  REAL(SP), INTENT(IN) :: VALUE
9911  INTEGER(INTG), INTENT(OUT) :: ERR
9912  TYPE(varying_string), INTENT(OUT) :: ERROR
9913  !Local Variables
9914  TYPE(varying_string) :: LOCAL_ERROR
9915 
9916  enters("DISTRIBUTED_VECTOR_VALUES_SET_SP1",err,error,*999)
9917 
9918  IF(ASSOCIATED(distributed_vector)) THEN
9919  IF(distributed_vector%VECTOR_FINISHED) THEN
9920  IF(distributed_vector%DATA_TYPE==matrix_vector_sp_type) THEN
9921  SELECT CASE(distributed_vector%LIBRARY_TYPE)
9922  CASE(distributed_matrix_vector_cmiss_type)
9923  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
9924  !Allow all values set until dof mappings fixed. Ghost values that are set will not be propagated
9925  IF(index>0.AND.index<=distributed_vector%CMISS%DATA_SIZE) THEN
9926  distributed_vector%CMISS%DATA_SP(index)=VALUE
9927  ELSE
9928  local_error="Index "//trim(numbertovstring(index,"*",err,error))// &
9929  & " is invalid. The index must be between 1 and "// &
9930  & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
9931  CALL flagerror(local_error,err,error,*999)
9932  ENDIF
9933  ELSE
9934  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
9935  ENDIF
9937  CALL flagerror("Cannot set values for a single precision PETSc distributed vector.",err,error,*999)
9938  CASE DEFAULT
9939  local_error="The distributed vector library type of "// &
9940  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
9941  CALL flagerror(local_error,err,error,*999)
9942  END SELECT
9943  ELSE
9944  local_error="The distributed data type of "// &
9945  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
9946  & " does not correspond to the single precision data type of the given value."
9947  CALL flagerror(local_error,err,error,*999)
9948  ENDIF
9949  ELSE
9950  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
9951  ENDIF
9952  ELSE
9953  CALL flagerror("Distributed vector is not associated.",err,error,*999)
9954  ENDIF
9955 
9956  exits("DISTRIBUTED_VECTOR_VALUES_SET_SP1")
9957  RETURN
9958 999 errorsexits("DISTRIBUTED_VECTOR_VALUES_SET_SP1",err,error)
9959  RETURN 1
9960  END SUBROUTINE distributed_vector_values_set_sp1
9961 
9962  !
9963  !================================================================================================================================
9964  !
9965 
9967  SUBROUTINE distributed_vector_values_set_dp(DISTRIBUTED_VECTOR,INDICES,VALUES,ERR,ERROR,*)
9969  !Argument variables
9970  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
9971  INTEGER(INTG), INTENT(IN) :: INDICES(:)
9972  REAL(DP), INTENT(IN) :: VALUES(:)
9973  INTEGER(INTG), INTENT(OUT) :: ERR
9974  TYPE(varying_string), INTENT(OUT) :: ERROR
9975  !Local Variables
9976  INTEGER(INTG) :: i
9977  TYPE(varying_string) :: LOCAL_ERROR
9978 
9979  enters("DISTRIBUTED_VECTOR_VALUES_SET_DP",err,error,*999)
9980 
9981  IF(ASSOCIATED(distributed_vector)) THEN
9982  IF(distributed_vector%VECTOR_FINISHED) THEN
9983  IF(SIZE(indices,1)==SIZE(values,1)) THEN
9984  IF(distributed_vector%DATA_TYPE==matrix_vector_dp_type) THEN
9985  SELECT CASE(distributed_vector%LIBRARY_TYPE)
9986  CASE(distributed_matrix_vector_cmiss_type)
9987  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
9988  DO i=1,SIZE(indices,1)
9989  !Allow all values set until dof mappings fixed. Ghost values that are set will not be propagated
9990  IF(indices(i)>0.AND.indices(i)<=distributed_vector%CMISS%DATA_SIZE) THEN
9991  distributed_vector%CMISS%DATA_DP(indices(i))=values(i)
9992  ELSE
9993  local_error="Index "//trim(numbertovstring(indices(i),"*",err,error))// &
9994  & " is invalid. The index must be between 1 and "// &
9995  & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
9996  CALL flagerror(local_error,err,error,*999)
9997  ENDIF
9998  ENDDO !i
9999  ELSE
10000  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
10001  ENDIF
10002  CASE(distributed_matrix_vector_petsc_type)
10003  IF(ASSOCIATED(distributed_vector%PETSC)) THEN
10004  IF(distributed_vector%PETSC%USE_OVERRIDE_VECTOR) THEN
10005  CALL petsc_vecsetvalues(distributed_vector%PETSC%OVERRIDE_VECTOR,SIZE(indices,1),distributed_vector%PETSC% &
10006  & global_numbers(indices),values,petsc_insert_values,err,error,*999)
10007  ELSE
10008  CALL petsc_vecsetvalues(distributed_vector%PETSC%VECTOR,SIZE(indices,1),distributed_vector%PETSC%GLOBAL_NUMBERS( &
10009  & indices),values,petsc_insert_values,err,error,*999)
10010  ENDIF
10011  ELSE
10012  CALL flagerror("Distributed vector PETSc is not associated.",err,error,*999)
10013  ENDIF
10014  CASE DEFAULT
10015  local_error="The distributed vector library type of "// &
10016  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
10017  CALL flagerror(local_error,err,error,*999)
10018  END SELECT
10019  ELSE
10020  local_error="The distributed data type of "// &
10021  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
10022  & " does not correspond to the double precision data type of the given values."
10023  CALL flagerror(local_error,err,error,*999)
10024  ENDIF
10025  ELSE
10026  local_error="The size of the indices array ("//trim(numbertovstring(SIZE(indices,1),"*",err,error))// &
10027  & ") does not conform to the size of the values array ("//trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
10028  CALL flagerror(local_error,err,error,*999)
10029  ENDIF
10030  ELSE
10031  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
10032  ENDIF
10033  ELSE
10034  CALL flagerror("Distributed vector is not associated.",err,error,*999)
10035  ENDIF
10036 
10037  exits("DISTRIBUTED_VECTOR_VALUES_SET_DP")
10038  RETURN
10039 999 errorsexits("DISTRIBUTED_VECTOR_VALUES_SET_DP",err,error)
10040  RETURN 1
10041  END SUBROUTINE distributed_vector_values_set_dp
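
 Because the PETSc branch above buffers entries via petsc_vecsetvalues (a buffer to PETSc's VecSetValues), the new values only become visible once the vector is assembled. A sketch, assuming dpVector is a finished PETSc-backed double precision vector, that the module's public update routines perform the PETSc assembly, and that 999 is the caller's error label:

   CALL distributed_vector_values_set(dpVector,[1,2],[3.0_DP,4.0_DP],err,error,*999)
   CALL distributed_vector_update_start(dpVector,err,error,*999)   !starts the assembly/ghost transfer
   CALL distributed_vector_update_finish(dpVector,err,error,*999)  !waits until the transfer completes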
10042 
10043  !
10044  !================================================================================================================================
10045  !
10046 
10048  SUBROUTINE distributed_vector_values_set_dp1(DISTRIBUTED_VECTOR,INDEX,VALUE,ERR,ERROR,*)
10050  !Argument variables
10051  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
10052  INTEGER(INTG), INTENT(IN) :: INDEX
10053  REAL(DP), INTENT(IN) :: VALUE
10054  INTEGER(INTG), INTENT(OUT) :: ERR
10055  TYPE(varying_string), INTENT(OUT) :: ERROR
10056  !Local Variables
10057  INTEGER(INTG) :: PETSC_INDEX(1)
10058  REAL(DP) :: PETSC_VALUE(1)
10059  TYPE(varying_string) :: LOCAL_ERROR
10060 
10061  enters("DISTRIBUTED_VECTOR_VALUES_SET_DP1",err,error,*999)
10062 
10063  IF(ASSOCIATED(distributed_vector)) THEN
10064  IF(distributed_vector%VECTOR_FINISHED) THEN
10065  IF(distributed_vector%DATA_TYPE==matrix_vector_dp_type) THEN
10066  SELECT CASE(distributed_vector%LIBRARY_TYPE)
10067  CASE(distributed_matrix_vector_cmiss_type)
10068  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
10069  !Allow all values set until dof mappings fixed. Ghost values that are set will not be propagated
10070  IF(index>0.AND.index<=distributed_vector%CMISS%DATA_SIZE) THEN
10071  distributed_vector%CMISS%DATA_DP(index)=VALUE
10072  ELSE
10073  local_error="Index "//trim(numbertovstring(index,"*",err,error))// &
10074  & " is invalid. The index must be between 1 and "// &
10075  & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
10076  CALL flagerror(local_error,err,error,*999)
10077  ENDIF
10078  ELSE
10079  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
10080  ENDIF
10081  CASE(distributed_matrix_vector_petsc_type)
10082  IF(ASSOCIATED(distributed_vector%PETSC)) THEN
10083  petsc_index(1)=distributed_vector%PETSC%GLOBAL_NUMBERS(index)
10084  petsc_value(1)=VALUE
10085  IF(distributed_vector%PETSC%USE_OVERRIDE_VECTOR) THEN
10086  CALL petsc_vecsetvalues(distributed_vector%PETSC%OVERRIDE_VECTOR,1,petsc_index,petsc_value,petsc_insert_values, &
10087  & err,error,*999)
10088  ELSE
10089  CALL petsc_vecsetvalues(distributed_vector%PETSC%VECTOR,1,petsc_index,petsc_value,petsc_insert_values, &
10090  & err,error,*999)
10091  ENDIF
10092  ELSE
10093  CALL flagerror("Distributed vector PETSc is not associated.",err,error,*999)
10094  ENDIF
10095  CASE DEFAULT
10096  local_error="The distributed vector library type of "// &
10097  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
10098  CALL flagerror(local_error,err,error,*999)
10099  END SELECT
10100  ELSE
10101  local_error="The distributed data type of "// &
10102  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
10103  & " does not correspond to the double precision data type of the given value."
10104  CALL flagerror(local_error,err,error,*999)
10105  ENDIF
10106  ELSE
10107  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
10108  ENDIF
10109  ELSE
10110  CALL flagerror("Distributed vector is not associated.",err,error,*999)
10111  ENDIF
10112 
10113  exits("DISTRIBUTED_VECTOR_VALUES_SET_DP1")
10114  RETURN
10115 999 errorsexits("DISTRIBUTED_VECTOR_VALUES_SET_DP1",err,error)
10116  RETURN 1
10117  END SUBROUTINE distributed_vector_values_set_dp1
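
 Note the index translation the PETSc branch performs: callers pass local indices, and the routine maps them to PETSc global numbers before calling petsc_vecsetvalues. A one-line sketch of that mapping, with localIndex a hypothetical local DOF number on this computational node:

   petscIndex=distributed_vector%PETSC%GLOBAL_NUMBERS(localIndex)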
10118 
10119  !
10120  !================================================================================================================================
10121  !
10122 
10124  SUBROUTINE distributed_vector_values_set_l(DISTRIBUTED_VECTOR,INDICES,VALUES,ERR,ERROR,*)
10126  !Argument variables
10127  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
10128  INTEGER(INTG), INTENT(IN) :: INDICES(:)
10129  LOGICAL, INTENT(IN) :: VALUES(:)
10130  INTEGER(INTG), INTENT(OUT) :: ERR
10131  TYPE(varying_string), INTENT(OUT) :: ERROR
10132  !Local Variables
10133  INTEGER(INTG) :: i
10134  TYPE(varying_string) :: LOCAL_ERROR
10135 
10136  enters("DISTRIBUTED_VECTOR_VALUES_SET_L",err,error,*999)
10137 
10138  IF(ASSOCIATED(distributed_vector)) THEN
10139  IF(distributed_vector%VECTOR_FINISHED) THEN
10140  IF(SIZE(indices,1)==SIZE(values,1)) THEN
10141  IF(distributed_vector%DATA_TYPE==matrix_vector_l_type) THEN
10142  SELECT CASE(distributed_vector%LIBRARY_TYPE)
10143  CASE(distributed_matrix_vector_cmiss_type)
10144  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
10145  DO i=1,SIZE(indices,1)
10146  !Allow all values set until dof mappings fixed. Ghost values that are set will not be propagated
10147  IF(indices(i)>0.AND.indices(i)<=distributed_vector%CMISS%DATA_SIZE) THEN
10148  distributed_vector%CMISS%DATA_L(indices(i))=values(i)
10149  ELSE
10150  local_error="Index "//trim(numbertovstring(indices(i),"*",err,error))// &
10151  & " is invalid. The index must be between 1 and "// &
10152  & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
10153  CALL flagerror(local_error,err,error,*999)
10154  ENDIF
10155  ENDDO !i
10156  ELSE
10157  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
10158  ENDIF
10160  CALL flagerror("Cannot set values for a logical PETSc distributed vector.",err,error,*999)
10161  CASE DEFAULT
10162  local_error="The distributed vector library type of "// &
10163  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
10164  CALL flagerror(local_error,err,error,*999)
10165  END SELECT
10166  ELSE
10167  local_error="The distributed data type of "// &
10168  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
10169  & " does not correspond to the logical data type of the given values."
10170  CALL flagerror(local_error,err,error,*999)
10171  ENDIF
10172  ELSE
10173  local_error="The size of the indices array ("//trim(numbertovstring(SIZE(indices,1),"*",err,error))// &
10174  & ") does not conform to the size of the values array ("//trim(numbertovstring(SIZE(values,1),"*",err,error))//")."
10175  CALL flagerror(local_error,err,error,*999)
10176  ENDIF
10177  ELSE
10178  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
10179  ENDIF
10180  ELSE
10181  CALL flagerror("Distributed vector is not associated.",err,error,*999)
10182  ENDIF
10183 
10184  exits("DISTRIBUTED_VECTOR_VALUES_SET_L")
10185  RETURN
10186 999 errorsexits("DISTRIBUTED_VECTOR_VALUES_SET_L",err,error)
10187  RETURN 1
10188  END SUBROUTINE distributed_vector_values_set_l
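
 As with the integer and single precision variants, logical data is only supported by the CMISS library type, since PETSc vectors hold real values. A sketch, assuming logicalVector is a finished CMISS-backed logical vector and 999 is the caller's error label:

   CALL distributed_vector_values_set(logicalVector,[4],[.TRUE.],err,error,*999)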
10189 
10190  !
10191  !================================================================================================================================
10192  !
10193 
10195  SUBROUTINE distributed_vector_values_set_l1(DISTRIBUTED_VECTOR,INDEX,VALUE,ERR,ERROR,*)
10197  !Argument variables
10198  TYPE(distributed_vector_type), POINTER :: DISTRIBUTED_VECTOR
10199  INTEGER(INTG), INTENT(IN) :: INDEX
10200  LOGICAL, INTENT(IN) :: VALUE
10201  INTEGER(INTG), INTENT(OUT) :: ERR
10202  TYPE(varying_string), INTENT(OUT) :: ERROR
10203  !Local Variables
10204  TYPE(varying_string) :: LOCAL_ERROR
10205 
10206  enters("DISTRIBUTED_VECTOR_VALUES_SET_L1",err,error,*999)
10207 
10208  IF(ASSOCIATED(distributed_vector)) THEN
10209  IF(distributed_vector%VECTOR_FINISHED) THEN
10210  IF(distributed_vector%DATA_TYPE==matrix_vector_l_type) THEN
10211  SELECT CASE(distributed_vector%LIBRARY_TYPE)
10212  CASE(distributed_matrix_vector_cmiss_type)
10213  IF(ASSOCIATED(distributed_vector%CMISS)) THEN
10214  !Allow all values set until dof mappings fixed. Ghost values that are set will not be propagated
10215  IF(index>0.AND.index<=distributed_vector%CMISS%DATA_SIZE) THEN
10216  distributed_vector%CMISS%DATA_L(index)=VALUE
10217  ELSE
10218  local_error="Index "//trim(numbertovstring(index,"*",err,error))// &
10219  & " is invalid. The index must be between 1 and "// &
10220  & trim(numbertovstring(distributed_vector%CMISS%DATA_SIZE,"*",err,error))//"."
10221  CALL flagerror(local_error,err,error,*999)
10222  ENDIF
10223  ELSE
10224  CALL flagerror("Distributed vector CMISS is not associated.",err,error,*999)
10225  ENDIF
10227  CALL flagerror("Cannot set values for a logical PETSc distributed vector.",err,error,*999)
10228  CASE DEFAULT
10229  local_error="The distributed vector library type of "// &
10230  & trim(numbertovstring(distributed_vector%LIBRARY_TYPE,"*",err,error))//" is invalid."
10231  CALL flagerror(local_error,err,error,*999)
10232  END SELECT
10233  ELSE
10234  local_error="The distributed data type of "// &
10235  & trim(numbertovstring(distributed_vector%DATA_TYPE,"*",err,error))// &
10236  & " does not correspond to the logical data type of the given value."
10237  CALL flagerror(local_error,err,error,*999)
10238  ENDIF
10239  ELSE
10240  CALL flagerror("The distributed vector has not been finished.",err,error,*999)
10241  ENDIF
10242  ELSE
10243  CALL flagerror("Distributed vector is not associated.",err,error,*999)
10244  ENDIF
10245 
10246  exits("DISTRIBUTED_VECTOR_VALUES_SET_L1")
10247  RETURN
10248 999 errorsexits("DISTRIBUTED_VECTOR_VALUES_SET_L1",err,error)
10249  RETURN 1
10250  END SUBROUTINE distributed_vector_values_set_l1
10251 
10252  !
10253  !================================================================================================================================
10254  !
10255 
10256 END MODULE distributed_matrix_vector