Changeset 2906
- Timestamp: May 18, 2006, 11:45:48 AM
- Location: inundation/parallel
- Files: 15 edited
Legend:
- Unchanged context lines are shown indented; long unchanged runs are elided with "..."
- Added lines are prefixed with "+"
- Removed lines are prefixed with "-"
inundation/parallel/build_submesh.py
r2625 → r2906

      import sys
+     import pypar    # The Python-MPI interface

      from Numeric import zeros, Float, Int, concatenate, \
      ...
      from mesh import Mesh
      ...
      #
      #########################################################
-     def extract_hostmesh(submesh):
+     def extract_hostmesh(submesh, triangles_per_proc):

          submesh_cell = {}
          ...
          submesh_cell["ghost_quan"][k] = submesh["ghost_quan"][k][0]

-         return submesh_cell
+         numprocs = pypar.size()
+         points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
+             build_local_mesh(submesh_cell, 0, triangles_per_proc[0], numprocs)
+         return points, vertices, boundary, quantities, ghost_recv_dict, \
+                full_send_dict
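The practical effect of this change is that extract_hostmesh now returns a ready-to-use local mesh for Processor 0, so callers no longer invoke build_local_mesh themselves. The sketch below (not part of the changeset) shows the revised host/worker split in Python 2, matching the example scripts later in this changeset. The calls extract_hostmesh(submesh, triangles_per_proc), rec_submesh(0) and pmesh_divide_metis(mesh_full, numprocs) are taken from the diffs; the import paths, the build_submesh argument list after quantities, the send_submesh signature, and the pre-existing mesh_full object are assumptions for illustration only.

    import pypar    # The Python-MPI interface

    # Assumed import locations; adjust to match your checkout.
    from pmesh_divide  import pmesh_divide_metis
    from build_submesh import build_submesh, extract_hostmesh
    from build_commun  import send_submesh, rec_submesh

    myid     = pypar.rank()
    numprocs = pypar.size()

    if myid == 0:
        # mesh_full is the global mesh, read in beforehand
        # (e.g. via pmesh_to_domain_instance).

        # Processor 0 partitions the global mesh ...
        nodes, triangles, boundary, triangles_per_proc, quantities = \
            pmesh_divide_metis(mesh_full, numprocs)

        # ... builds one submesh (full triangles plus ghost layer) per processor ...
        submesh = build_submesh(nodes, triangles, boundary, quantities,
                                triangles_per_proc)

        # ... sends submesh p to processor p (assumed signature) ...
        for p in range(1, numprocs):
            send_submesh(submesh, triangles_per_proc, p)

        # ... and keeps submesh 0, which extract_hostmesh now renumbers
        # internally via build_local_mesh before returning it.
        points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
            extract_hostmesh(submesh, triangles_per_proc)
    else:
        # Every other processor receives and renumbers its own submesh.
        points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
            rec_submesh(0)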
inundation/parallel/documentation/code/RunParallelAdvection.py
r2786 → r2906

      #######################
-     # Partition the domain
+     # Partition the mesh
      #######################
inundation/parallel/documentation/code/RunParallelMerimbulaMetis.py
r2786 → r2906

      filename = 'merimbula_10785.tsh'

-     domain_full = pmesh_to_domain_instance(filename, Advection_Domain)
-     domain_full.set_quantity('stage', Set_Stage(756000.0,756500.0,4.0))
+     mesh_full = pmesh_to_domain_instance(filename, Advection_Domain)
+     mesh_full.set_quantity('stage', Set_Stage(756000.0,756500.0,4.0))

      # Define the domain boundaries for visualisation

-     rect = array(domain_full.xy_extent, Float)
+     rect = array(mesh_full.xy_extent, Float)

      # Subdivide the mesh

      nodes, triangles, boundary, triangles_per_proc, quantities =\
-         pmesh_divide_metis(domain_full, numprocs)
+         pmesh_divide_metis(mesh_full, numprocs)

      # Build the mesh that should be assigned to each processor,
      ...
      # Build the local mesh for processor 0

-     hostmesh = extract_hostmesh(submesh)
-     points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
-         build_local_mesh(hostmesh, 0, triangles_per_proc[0], numprocs)
+     points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict =\
+         extract_hostmesh(submesh, triangles_per_proc)

      else:
      ...
      try:
          domain.initialise_visualiser(rect=rect)
-         #domain.visualiser.coloring['stage'] = True
-         #domain.visualiser.scale_z['stage'] = 0.2
      except:
          print 'No visualiser'
      ...
      T = Transmissive_boundary(domain)
-     #R = Reflective_boundary(domain)
      domain.set_boundary( {'outflow': T, 'inflow': T, 'inner':T, \
                            'exterior': T, 'open':T, 'ghost':None} )

      # Set the initial quantities

      domain.set_quantity('stage', quantities['stage'])
inundation/parallel/documentation/code/RunParallelSwMerimbulaMetis.py
r2786 → r2906

      #######################
-     # Partition the domain
+     # Partition the mesh
      #######################
      ...
      filename = 'merimbula_10785_1.tsh'

-     # Build the whole domain
+     # Build the whole mesh

-     domain_full = pmesh_to_domain_instance(filename, Domain)
+     mesh_full = pmesh_to_domain_instance(filename, Domain)

      # Define the domain boundaries for visualisation

-     rect = array(domain_full.xy_extent, Float)
+     rect = array(mesh_full.xy_extent, Float)

      # Initialise the wave

-     domain_full.set_quantity('stage', Set_Stage(756000.0,756500.0,2.0))
+     mesh_full.set_quantity('stage', Set_Stage(756000.0,756500.0,2.0))

      # Subdivide the mesh

      nodes, triangles, boundary, triangles_per_proc, quantities = \
-         pmesh_divide_metis(domain_full, numprocs)
+         pmesh_divide_metis(mesh_full, numprocs)

      # Build the mesh that should be assigned to each processor,
      ...
      # Build the local mesh for processor 0

-     hostmesh = extract_hostmesh(submesh)
-     points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
-         build_local_mesh(hostmesh, 0, triangles_per_proc[0], numprocs)
+     points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
+         extract_hostmesh(submesh, triangles_per_proc)

      else:
inundation/parallel/documentation/figures/domain.fig
r2785 → r2906 (xfig drawing data, not reproduced). Text labels updated to the new terminology: "Subdivide Domain" becomes "Subdivide Mesh" and "Send Subdomain" becomes "Send Submesh"; text bounding-box metrics adjusted accordingly. The "Build Local Mesh", "Build Commun. Layer", "Processor 0" and "Processor p" labels are unchanged.
inundation/parallel/documentation/figures/subdomain.fig
r2785 → r2906 (xfig drawing data, not reproduced). Triangle labels renumbered from 1-7 to 0-6 and the captions "Subdomain 0" / "Subdomain 1" renamed "Submesh 0" / "Submesh 1". Drawing elements reordered within the file; geometry unchanged.
inundation/parallel/documentation/figures/subdomainfinal.fig
r2785 → r2906 (xfig drawing data, not reproduced). Triangle labels renumbered from 1-7 to 0-6 on both sides of the figure; the "Processor 0" / "Processor 1" captions are unchanged. Drawing elements reordered within the file; geometry unchanged.
inundation/parallel/documentation/figures/subdomainghost.fig
r2785 → r2906 (xfig drawing data, not reproduced). Triangle labels renumbered from 1-7 to 0-6, the captions "Subdomain 0" / "Subdomain 1" renamed "Submesh 0" / "Submesh 1", and bracketed labels "(0)" to "(5)" added to each submesh to show the local numbering scheme described in the updated caption in parallel.tex.
inundation/parallel/documentation/parallel.tex
r2849 → r2906

      The first step in parallelising the code is to subdivide the mesh
-     into, roughly, equally sized partitions. On a rectangular domain this may be
+     into, roughly, equally sized partitions. On a rectangular mesh this may be
      done by a simple co-ordinate based dissection, but on a complicated
-     domain such as the Merimbula grid shown in Figure \ref{fig:mergrid}
+     domain such as the Merimbula mesh shown in Figure \ref{fig:mergrid}
      a more sophisticated approach must be used. We use pymetis, a
      python wrapper around the Metis
      ...
      \begin{figure}[hbtp]
      \centerline{ \includegraphics[scale = 0.75]{figures/mermesh.eps}}
-     \caption{The Merimbula grid.}
+     \caption{The Merimbula mesh.}
      \label{fig:mergrid}
      \end{figure}
      ...
      setting up the communication pattern as well as assigning the local numbering scheme for the submeshes.

-     Consider the example subpartitioning given in Figure \ref{fig:subdomain}. During the \code{evolve} calculations Triangle 3 in Submesh 0 will need to access its neighbour Triangle 4 stored in Submesh 1. The standard approach to this problem is to add an extra layer of triangles, which we call ghost triangles. The ghost triangles
+     Consider the example subpartitioning given in Figure \ref{fig:subdomain}. During the \code{evolve} calculations Triangle 2 in Submesh 0 will need to access its neighbour Triangle 3 stored in Submesh 1. The standard approach to this problem is to add an extra layer of triangles, which we call ghost triangles. The ghost triangles
      are read-only, they should not be updated during the calculations, they are only there to hold any extra information that a processor may need to complete its calculations. The ghost triangle values are updated through communication calls. Figure \ref{fig:subdomaing} shows the submeshes with the extra layer of ghost triangles.

      \begin{figure}[hbtp]
      \centerline{ \includegraphics[scale = 0.6]{figures/subdomain.eps}}
-     \caption{An example subpartitioning.}
+     \caption{An example subpartitioning of a mesh.}
      \label{fig:subdomain}
      \end{figure}
      ...
      \begin{figure}[hbtp]
      \centerline{ \includegraphics[scale = 0.6]{figures/subdomainghost.eps}}
-     \caption{An example subpartitioning with ghost triangles.}
+     \caption{An example subpartitioning with ghost triangles. The numbers in brackets show the local numbering scheme that is calculated and stored with the mesh, but not implemented until the local mesh is built. See Section \ref{sec:part4}.}
      \label{fig:subdomaing}
      \end{figure}

-     When partitioning the mesh we introduce new, dummy, boundary edges. For example, Triangle 3 in Submesh 1, from Figure \ref{fig:subdomaing}, originally shared an edge with Triangle 2, but after partitioning that edge becomes a boundary edge. These new boundary edges are tagged as \code{ghost} and should, in general, be assigned a type of \code{None}. The following piece of code taken from {\tt run_parallel_advection.py} shows an example.
+     When partitioning the mesh we introduce new, dummy, boundary edges. For example, Triangle 2 in Submesh 1 from Figure \ref{fig:subdomaing} originally shared an edge with Triangle 1, but after partitioning that edge becomes a boundary edge. These new boundary edges are tagged as \code{ghost} and should, in general, be assigned a type of \code{None}. The following piece of code taken from {\tt run_parallel_advection.py} shows an example.

      {\small \begin{verbatim}
      T = Transmissive_boundary(domain)
      ...
      \end{verbatim}}

-     Looking at Figure \ref{fig:subdomaing} we see that after each \code{evolve} step Processor 0 will have to send the updated values for Triangle 3 and Triangle 5 to Processor 1, and similarly Processor 1 will have to send the updated values for Triangles 4, 7 and 6 (recall that Submesh $p$ will be assigned to Processor $p$). The \code{build_submesh} function builds a dictionary that defines the communication pattern.
+     Looking at Figure \ref{fig:subdomaing} we see that after each \code{evolve} step Processor 0 will have to send the updated values for Triangle 2 and Triangle 4 to Processor 1, and similarly Processor 1 will have to send the updated values for Triangle 3 and Triangle 5 (recall that Submesh $p$ will be assigned to Processor $p$). The \code{build_submesh} function builds a dictionary that defines the communication pattern.

-     Finally, the ANUGA code assumes that the triangles (and nodes etc.) are numbered consecutively starting from 1 (FIXME (Ole): Isn't it 0?). Consequently, if Submesh 1 in Figure \ref{fig:subdomaing} was passed into the \code{evolve} calculations it would crash due to the 'missing' triangles. The \code{build_submesh} function determines a local numbering scheme for each submesh, but it does not actually update the numbering, that is left to the function \code{build_local}.
+     Finally, the ANUGA code assumes that the triangles (and nodes etc.) are numbered consecutively starting from 0. Consequently, if Submesh 1 in Figure \ref{fig:subdomaing} was passed into the \code{evolve} calculations it would crash. The \code{build_submesh} function determines a local numbering scheme for each submesh, but it does not actually update the numbering, that is left to \code{build_local}.

      \subsection {Sending the Submeshes}\label{sec:part3}
      ...
      All of the functions described so far must be run in serial on Processor 0. The next step is to start the parallel computation by spreading the submeshes over the processors. The communication is carried out by
      \code{send_submesh} and \code{rec_submesh} defined in {\tt build_commun.py}.
-     The \code{send_submesh} function should be called on Processor 0 and sends Submesh $p$ to Processor $p$, while \code{rec_submesh} should be called by Processor $p$ to receive Submesh $p$ from Processor 0. Note that the order of communication is very important, if any changes are made to the \code{send_submesh} function the corresponding change must be made to the \code{rec_submesh} function.
+     The \code{send_submesh} function should be called on Processor 0 and sends Submesh $p$ to Processor $p$, while \code{rec_submesh} should be called by Processor $p$ to receive Submesh $p$ from Processor 0.
+
+     As an aside, the order of communication is very important. If someone modifies the \code{send_submesh} routine the corresponding change must be made to the \code{rec_submesh} routine.

      While it is possible to get Processor 0 to communicate its submesh to itself, it is an expensive and unnecessary communication call. The {\tt build_commun.py} file also includes a function called \code{extract_hostmesh} that should be called on Processor 0 to extract Submesh 0.

-     \subsection {Building the Local Mesh}
+     \subsection {Building the Local Mesh}\label{sec:part4}
      After using \code{send_submesh} and \code{rec_submesh}, Processor $p$ should have its own local copy of Submesh $p$, however as stated previously the triangle numbering will be incorrect on all processors except number $0$. The \code{build_local_mesh} function from {\tt build_local.py} primarily focuses on renumbering the information stored with the submesh; including the nodes, vertices and quantities. Figure \ref{fig:subdomainf} shows what the mesh in each processor may look like.
      ...
      \begin{verbatim}
      #######################
-     # Partition the domain
+     # Partition the mesh
      #######################
      ...
      \end{verbatim}

-     This rectangular mesh is artificial, and the approach to subpartitioning the mesh is different to the one described above, however this example may be of interest to those who want to measure the parallel efficiency of the code on their machine. A rectangular mesh should give a good load balance and is therefore an important first test problem.
+     Most simulations will not be done on a rectangular mesh, and the approach to subpartitioning the mesh is different to the one described above, however this example may be of interest to those who want to measure the parallel efficiency of the code on their machine. A rectangular mesh should give a good load balance and is therefore an important first test problem.

-     A more \lq real life\rq\ mesh is the Merimbula mesh used in the code shown in Section \ref{sec:codeRPMM}. This example also solves the advection equation. In this case the techniques described in Section \ref{sec:part} must be used to partition the mesh. Figure \ref{fig:code} shows the part of the code that is responsible for spreading the domain over the processors. We now look at the code in detail.
+     A more \lq real life\rq\ mesh is the Merimbula mesh used in the code shown in Section \ref{sec:codeRPMM}. This example also solves the advection equation. In this case the techniques described in Section \ref{sec:part} must be used to partition the mesh. Figure \ref{fig:code} shows the part of the code that is responsible for spreading the mesh over the processors. We now look at the code in detail.

      \begin{figure}[htbp]
      ...
      filename = 'merimbula_10785.tsh'

-     domain_full = pmesh_to_domain_instance(filename, Advection_Domain)
-     domain_full.set_quantity('stage', Set_Stage(756000.0,756500.0,4.0))
-
-     # Define the domain boundaries for visualisation
-
-     rect = array(domain_full.xy_extent, Float)
+     mesh_full = pmesh_to_domain_instance(filename, Advection_Domain)
+     mesh_full.set_quantity('stage', Set_Stage(756000.0,756500.0,4.0))

      # Subdivide the mesh

      nodes, triangles, boundary, triangles_per_proc, quantities =\
-         pmesh_divide_metis(domain_full, numprocs)
+         pmesh_divide_metis(mesh_full, numprocs)

      # Build the mesh that should be assigned to each processor.
      ...
      # Build the local mesh for processor 0

-     hostmesh = extract_hostmesh(submesh)
-     points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
-         build_local_mesh(hostmesh, 0, triangles_per_proc[0], numprocs)
+     points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict =\
+         extract_hostmesh(submesh, triangles_per_proc)

      else:
      ...
      \begin{itemize}
      \item
-     These first few lines of code read in and define the (global) mesh.
+     These first few lines of code read in and define the (global) mesh. The \code{Set_Stage} function sets the initial conditions. See the code in \ref{sec:codeRPMM} for the definition of \code{Set_Stage}.
      \begin{verbatim}
      filename = 'merimbula_10785.tsh'
-     domain_full = pmesh_to_domain_instance(filename, Advection_Domain)
-     domain_full.set_quantity('stage', Set_Stage(756000.0,756500.0,4.0))
+     mesh_full = pmesh_to_domain_instance(filename, Advection_Domain)
+     mesh_full.set_quantity('stage', Set_Stage(756000.0,756500.0,4.0))
      \end{verbatim}

-     \item
-     The \code{rect} array is used by the visualiser and records the domain size.
      \item \code{pmesh_divide_metis} divides the mesh into a set of non-overlapping subdomains as described in Section \ref{sec:part1}.
      \begin{verbatim}
      nodes, triangles, boundary, triangles_per_proc, quantities =\
-         pmesh_divide_metis(domain_full, numprocs)
+         pmesh_divide_metis(mesh_full, numprocs)
      \end{verbatim}

-     \item The next step is to build a boundary layer of ghost triangles and define the communication pattern. This step is implemented by \code{build_submesh} as discussed in Section \ref{sec:part2}.
+     \item The next step is to build a boundary layer of ghost triangles and define the communication pattern. This step is implemented by \code{build_submesh} as discussed in Section \ref{sec:part2}. The \code{submesh} variable contains a copy of the submesh for each processor.
      \begin{verbatim}
      submesh = build_submesh(nodes, triangles, boundary, quantities, \
      ...
      \end{verbatim}

-     The processors receive a given subpartition by calling \code{rec_submesh}. The \code{rec_submesh} routine also calls \code{build_local_mesh}. The \code{build_local_mesh} routine described in Section \ref{sec:part4} ensures that the information is stored in a way that is compatible with the Domain datastructure. This means, for example, that the triangles and nodes must be numbered consecutively starting from 1 (FIXME (Ole): or is it 0?).
+     The processors receive a given subpartition by calling \code{rec_submesh}. The \code{rec_submesh} routine also calls \code{build_local_mesh}. The \code{build_local_mesh} routine described in Section \ref{sec:part4} ensures that the information is stored in a way that is compatible with the Domain datastructure. This means, for example, that the triangles and nodes must be numbered consecutively starting from 0.
      \begin{verbatim}
-     points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
+     points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict =\
          rec_submesh(0)
      \end{verbatim}

-     Note that the submesh is not received by, or sent to, Processor 0. Rather \code{hostmesh = extract_hostmesh(submesh)} extracts the appropriate information. This saves the cost of an unnecessary communication call. It is described further in Section \ref{sec:part3}.
+     Note that the submesh is not received by, or sent to, Processor 0. Rather \code{extract_hostmesh(submesh, triangles_per_proc)} simply extracts the mesh that has been assigned to Processor 0. Recall that \code{submesh} contains the list of submeshes to be assigned to each processor. This is described further in Section \ref{sec:part3}. The \code{extract_hostmesh} routine also calls \code{build_local_mesh} to renumber the nodes and triangles.
      \begin{verbatim}
-     hostmesh = extract_hostmesh(submesh)
-     points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
-         build_local_mesh(hostmesh, 0, triangles_per_proc[0], numprocs)
+     points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict =\
+         extract_hostmesh(submesh, triangles_per_proc)
      \end{verbatim}
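To make the new 0-based local numbering concrete, here is a small illustrative sketch (not part of the changeset, and not ANUGA code). The bracketed labels added to subdomainghost.fig correspond to a global-to-local map in which each submesh's own (full) triangles are numbered first and the ghost triangles are appended after them. The ordering of the full triangles below is chosen to match the figure; the real ordering is whatever build_submesh records, and the dictionary layout is purely illustrative.

    # Toy illustration of the local numbering scheme (not ANUGA code).
    # In the example figure Submesh 1 owns global triangles 3, 6 and 5 and
    # carries ghost copies of triangles 2 and 4 from Submesh 0.  ANUGA needs
    # consecutive numbering from 0, so full triangles come first and ghosts
    # are appended after them.

    def build_local_numbering(full_triangles, ghost_triangles):
        """Map global triangle ids to local ids 0, 1, 2, ..."""
        local = {}
        for i, g in enumerate(full_triangles + ghost_triangles):
            local[g] = i
        return local

    full_1  = [3, 6, 5]   # triangles owned by Submesh 1 (global ids)
    ghost_1 = [2, 4]      # ghost triangles copied from Submesh 0

    local_1 = build_local_numbering(full_1, ghost_1)
    for g in full_1 + ghost_1:
        print 'global triangle %d -> local triangle %d' % (g, local_1[g])

    # Prints 3->0, 6->1, 5->2, 2->3, 4->4, matching the bracketed labels
    # on Submesh 1 in the updated figure.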
inundation/parallel/print_stats.py
r2653 → r2906

      #
      #
-     # The routine defined here are intended for debugging
+     # The routines defined here are intended for debugging
      # use. They print the norms of the quantities in the
      # domain. As opposed to the definitions given
inundation/parallel/run_parallel_sw_merimbula_test.py
r2769 → r2906

      # Read in the test files

-     #filename = 'test-100.tsh'
-     filename = 'merimbula_10785_1.tsh'
+     filename = 'test-100.tsh'
+     # filename = 'merimbula_10785_1.tsh'

      # Build the whole domain
      ...
      # Initialise the wave

-     #domain_full.set_quantity('stage', Set_Stage(200.0,300.0,1.0))
-     domain_full.set_quantity('stage', Set_Stage(756000.0,756500.0,2.0))
+     domain_full.set_quantity('stage', Set_Stage(200.0,300.0,1.0))
+     # domain_full.set_quantity('stage', Set_Stage(756000.0,756500.0,2.0))
      # domain_full.set_quantity('stage', Set_Stage(756000.0,756500.0,0.0))
      ...
      print 'Processor %d on %s: No of elements %d'%(domain.processor,processor_name,domain.number_of_elements)
      yieldstep = 0.05
-     finaltime = 500.0
+     finaltime = 5.0

      yieldstep = 1
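For context, yieldstep and finaltime are the arguments handed to the evolve loop later in the test script; shortening finaltime from 500.0 to 5.0 simply makes the test run quickly on the small test-100.tsh mesh. The sketch below (not part of the changeset) shows how such parameters are typically consumed; it assumes the usual ANUGA evolve generator interface and the domain, myid and write_time names used elsewhere in the parallel examples.

    yieldstep = 0.05   # simulated time between yields (diagnostic output)
    finaltime = 5.0    # stop time; reduced from 500.0 for a quick test run

    # The evolve generator yields control every `yieldstep` seconds of
    # simulated time until `finaltime` is reached.
    for t in domain.evolve(yieldstep=yieldstep, finaltime=finaltime):
        if myid == 0:
            domain.write_time()   # report current time and timestep (assumed helper)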