Changeset 2906


Timestamp: May 18, 2006, 11:45:48 AM
Author: linda
Message: Made correction to the parallel report
Location: inundation/parallel
Files: 15 edited

  • inundation/parallel/build_submesh.py

    r2625 r2906

      import sys
    + import pypar    # The Python-MPI interface

      from Numeric import zeros, Float, Int, concatenate, \
    ...

      from mesh import Mesh
    +


    ...
      #
      #########################################################
    - def extract_hostmesh(submesh):
    + def extract_hostmesh(submesh, triangles_per_proc):

          submesh_cell = {}
    ...
              submesh_cell["ghost_quan"][k] = submesh["ghost_quan"][k][0]

    -     return submesh_cell
    -
    +     numprocs = pypar.size()
    +     points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
    +             build_local_mesh(submesh_cell, 0, triangles_per_proc[0], numprocs)
    +     return  points, vertices, boundary, quantities, ghost_recv_dict, \
    +            full_send_dict
    +
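
    The change above folds the old two-step host-mesh extraction into a single call. A rough before/after sketch of the calling convention, using the names that appear in the example scripts later in this changeset (submesh, triangles_per_proc, numprocs):

        # Before (r2625): callers extracted Submesh 0 and renumbered it themselves.
        hostmesh = extract_hostmesh(submesh)
        points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
                 build_local_mesh(hostmesh, 0, triangles_per_proc[0], numprocs)

        # After (r2906): extract_hostmesh queries pypar.size() and calls
        # build_local_mesh internally, returning the renumbered local mesh
        # for Processor 0 directly.
        points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
                 extract_hostmesh(submesh, triangles_per_proc)
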
  • inundation/parallel/documentation/code/RunParallelAdvection.py

    r2786 r2906

      #######################
    - # Partition the domain
    + # Partition the mesh
      #######################

  • inundation/parallel/documentation/code/RunParallelMerimbulaMetis.py

    r2786 r2906
          filename = 'merimbula_10785.tsh'

    -     domain_full = pmesh_to_domain_instance(filename, Advection_Domain)
    -     domain_full.set_quantity('stage', Set_Stage(756000.0,756500.0,4.0))
    +     mesh_full = pmesh_to_domain_instance(filename, Advection_Domain)
    +     mesh_full.set_quantity('stage', Set_Stage(756000.0,756500.0,4.0))

          # Define the domain boundaries for visualisation

    -     rect = array(domain_full.xy_extent, Float)
    +     rect = array(mesh_full.xy_extent, Float)

          # Subdivide the mesh

          nodes, triangles, boundary, triangles_per_proc, quantities  =\
    -             pmesh_divide_metis(domain_full, numprocs)
    +             pmesh_divide_metis(mesh_full, numprocs)

          # Build the mesh that should be assigned to each processor,
    ...
          # Build the local mesh for processor 0

    -     hostmesh = extract_hostmesh(submesh)
    -     points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
    -              build_local_mesh(hostmesh, 0, triangles_per_proc[0], numprocs)
    +      points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict =\
    +             extract_hostmesh(submesh, triangles_per_proc)
    +

      else:
    ...
      try:
          domain.initialise_visualiser(rect=rect)
    -     #domain.visualiser.coloring['stage'] = True
    -     #domain.visualiser.scale_z['stage'] = 0.2
      except:
          print 'No visualiser'
    ...

      T = Transmissive_boundary(domain)
    - #R = Reflective_boundary(domain)
      domain.set_boundary( {'outflow': T, 'inflow': T, 'inner':T, \
                            'exterior': T, 'open':T, 'ghost':None} )

      # Set the initial quantities
    -

      domain.set_quantity('stage', quantities['stage'])
  • inundation/parallel/documentation/code/RunParallelSwMerimbulaMetis.py

    r2786 r2906

      #######################
    - # Partition the domain
    + # Partition the mesh
      #######################

    ...
          filename = 'merimbula_10785_1.tsh'

    -     # Build the whole domain
    +     # Build the whole mesh

    -     domain_full = pmesh_to_domain_instance(filename, Domain)
    +     mesh_full = pmesh_to_domain_instance(filename, Domain)

          # Define the domain boundaries for visualisation

    -     rect = array(domain_full.xy_extent, Float)
    +     rect = array(mesh_full.xy_extent, Float)

          # Initialise the wave

    -     domain_full.set_quantity('stage', Set_Stage(756000.0,756500.0,2.0))
    +     mesh_full.set_quantity('stage', Set_Stage(756000.0,756500.0,2.0))

          # Subdivide the mesh

          nodes, triangles, boundary, triangles_per_proc, quantities = \
    -          pmesh_divide_metis(domain_full, numprocs)
    +         pmesh_divide_metis(mesh_full, numprocs)

          # Build the mesh that should be assigned to each processor,
    ...

          # Build the local mesh for processor 0
    -
    -     hostmesh = extract_hostmesh(submesh)
    +
          points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
    -              build_local_mesh(hostmesh, 0, triangles_per_proc[0], numprocs)
    +              extract_hostmesh(submesh, triangles_per_proc)

      else:
  • inundation/parallel/documentation/figures/domain.fig

    r2785 r2906
      2 2 0 2 0 7 50 -1 -1 0.000 0 0 -1 0 0 5
               7110 3870 8955 3870 8955 4500 7110 4500 7110 3870
    - 4 0 0 50 -1 0 16 0.0000 4 180 1290 7380 4140 Build Local\001
    - 4 0 0 50 -1 0 16 0.0000 4 180 600 7380 4410 Mesh\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 1275 7380 4140 Build Local\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 615 7380 4410 Mesh\001
      -6
      6 8010 2745 8145 3420
    - 4 0 0 50 -1 0 24 0.0000 4 45 105 8010 2790 .\001
    - 4 0 0 50 -1 0 24 0.0000 4 45 105 8010 2970 .\001
    - 4 0 0 50 -1 0 24 0.0000 4 45 105 8010 3195 .\001
    - 4 0 0 50 -1 0 24 0.0000 4 45 105 8010 3420 .\001
    + 4 0 0 50 -1 0 24 0.0000 4 30 90 8010 2790 .\001
    + 4 0 0 50 -1 0 24 0.0000 4 30 90 8010 2970 .\001
    + 4 0 0 50 -1 0 24 0.0000 4 30 90 8010 3195 .\001
    + 4 0 0 50 -1 0 24 0.0000 4 30 90 8010 3420 .\001
      -6
      6 7065 1755 9000 2475
      2 2 0 2 0 7 50 -1 -1 0.000 0 0 -1 0 0 5
               7110 1800 8955 1800 8955 2430 7110 2430 7110 1800
    - 4 0 0 50 -1 0 16 0.0000 4 180 1290 7380 2070 Build Local\001
    - 4 0 0 50 -1 0 16 0.0000 4 180 600 7380 2340 Mesh\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 1275 7380 2070 Build Local\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 615 7380 2340 Mesh\001
      -6
      6 9585 2700 9720 3375
    - 4 0 0 50 -1 0 24 0.0000 4 45 105 9585 2745 .\001
    - 4 0 0 50 -1 0 24 0.0000 4 45 105 9585 2925 .\001
    - 4 0 0 50 -1 0 24 0.0000 4 45 105 9585 3150 .\001
    - 4 0 0 50 -1 0 24 0.0000 4 45 105 9585 3375 .\001
    + 4 0 0 50 -1 0 24 0.0000 4 30 90 9585 2745 .\001
    + 4 0 0 50 -1 0 24 0.0000 4 30 90 9585 2925 .\001
    + 4 0 0 50 -1 0 24 0.0000 4 30 90 9585 3150 .\001
    + 4 0 0 50 -1 0 24 0.0000 4 30 90 9585 3375 .\001
      -6
      2 2 0 2 0 7 50 -1 -1 0.000 0 0 -1 0 0 5
    ...
              1 1 3.00 120.00 120.00
               4950 3150 7065 4185
    - 4 0 0 50 -1 0 16 0.0000 4 180 1755 2925 3105 Build Commun.\001
    - 4 0 9 50 -1 0 16 0.0000 4 180 1260 3240 3870 Processor 0\001
    - 4 0 0 50 -1 0 16 0.0000 4 240 630 2925 3330 Layer\001
    - 4 0 9 50 -1 0 16 0.0000 4 180 1260 900 3870 Processor 0\001
    - 4 0 0 50 -1 0 16 0.0000 4 180 870 990 3375 Domain\001
    - 4 0 0 50 -1 0 16 0.0000 4 180 1095 990 3105 Subdivide\001
    - 4 0 9 50 -1 0 16 0.0000 4 180 1260 9135 4230 Processor 0\001
    - 4 0 12 50 -1 0 16 0.0000 4 180 1230 5670 3375 Subdomain\001
    - 4 0 12 50 -1 0 16 0.0000 4 180 540 5670 3060 Send\001
    - 4 0 9 50 -1 0 16 0.0000 4 240 1260 9135 2160 Processor p\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 1770 2925 3105 Build Commun.\001
    + 4 0 9 50 -1 0 16 0.0000 4 195 1275 3240 3870 Processor 0\001
    + 4 0 0 50 -1 0 16 0.0000 4 255 630 2925 3330 Layer\001
    + 4 0 9 50 -1 0 16 0.0000 4 195 1275 900 3870 Processor 0\001
    + 4 0 9 50 -1 0 16 0.0000 4 195 1275 9135 4230 Processor 0\001
    + 4 0 12 50 -1 0 16 0.0000 4 195 555 5670 3060 Send\001
    + 4 0 9 50 -1 0 16 0.0000 4 255 1275 9135 2160 Processor p\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 1095 990 3105 Subdivide\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 615 990 3375 Mesh\001
    + 4 0 12 50 -1 0 16 0.0000 4 195 1020 5670 3375 Submesh\001
  • inundation/parallel/documentation/figures/subdomain.fig

    r2785 r2906
      -2
      1200 2
    - 6 1575 1845 4725 4095
    - 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 4
    -          1980 2340 2880 3060 1575 3060 1980 2340
    - 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 4
    -          1980 2340 3150 2070 2880 3015 2880 3060
    - 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 3
    -          2880 3060 3285 4095 3960 2835
    - 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 3
    -          3150 2070 4725 1845 3960 2880
    - 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 3
    -          3960 2880 4545 4095 4725 1845
    - 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 2
    -          3285 4095 4545 4095
    - 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 3
    -          3150 2115 3960 2880 2880 3060
    - 4 0 0 50 -1 0 16 0.0000 4 180 135 3870 2295 4\001
    - 4 0 0 50 -1 0 16 0.0000 4 195 135 3915 3690 6\001
    - 4 0 0 50 -1 0 16 0.0000 4 180 135 4365 2970 7\001
    - 4 0 0 50 -1 0 16 0.0000 4 180 135 3330 3375 5\001
    - 4 0 0 50 -1 0 16 0.0000 4 180 135 3375 2745 3\001
    - 4 0 0 50 -1 0 16 0.0000 4 180 135 2655 2520 2\001
    - 4 0 0 50 -1 0 16 0.0000 4 180 135 2025 2835 1\001
    - -6
      2 1 0 2 0 7 50 -1 -1 0.000 0 0 -1 1 0 2
              1 1 2.00 90.00 120.00
    ...
      2 1 0 1 4 7 50 -1 -1 0.000 0 0 -1 0 0 2
               4500 7785 5760 7785
    - 4 0 1 50 -1 0 16 0.0000 4 180 135 1080 6345 1\001
    - 4 0 1 50 -1 0 16 0.0000 4 180 135 1710 6030 2\001
    - 4 0 1 50 -1 0 16 0.0000 4 180 135 2430 6255 3\001
    - 4 0 1 50 -1 0 16 0.0000 4 180 135 2385 6885 5\001
    - 4 0 4 50 -1 0 16 0.0000 4 180 135 5085 5985 4\001
    - 4 0 4 50 -1 0 16 0.0000 4 180 135 5580 6660 7\001
    - 4 0 4 50 -1 0 16 0.0000 4 195 135 5130 7380 6\001
    - 4 0 0 50 -1 0 16 0.0000 4 180 1440 1035 8235 Subdomain 0\001
    - 4 0 0 50 -1 0 16 0.0000 4 180 1440 4500 8235 Subdomain 1\001
    + 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 4
    +          1980 2340 2880 3060 1575 3060 1980 2340
    + 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 4
    +          1980 2340 3150 2070 2880 3015 2880 3060
    + 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 3
    +          2880 3060 3285 4095 3960 2835
    + 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 3
    +          3150 2070 4725 1845 3960 2880
    + 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 3
    +          3960 2880 4545 4095 4725 1845
    + 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 2
    +          3285 4095 4545 4095
    + 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 3
    +          3150 2115 3960 2880 2880 3060
    + 4 0 0 50 -1 0 16 0.0000 4 195 135 2025 2835 0\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 135 2655 2520 1\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 135 3375 2745 2\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 135 3870 2295 3\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 135 3330 3375 4\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 135 3915 3690 5\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 135 4365 2970 6\001
    + 4 0 1 50 -1 0 16 0.0000 4 195 135 1080 6345 0\001
    + 4 0 1 50 -1 0 16 0.0000 4 195 135 1710 6030 1\001
    + 4 0 1 50 -1 0 16 0.0000 4 195 135 2430 6255 2\001
    + 4 0 1 50 -1 0 16 0.0000 4 195 135 2385 6885 4\001
    + 4 0 4 50 -1 0 16 0.0000 4 195 135 5085 5985 3\001
    + 4 0 4 50 -1 0 16 0.0000 4 195 135 5580 6660 6\001
    + 4 0 4 50 -1 0 16 0.0000 4 195 135 5130 7380 5\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 1215 1035 8235 Submesh 0\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 1215 4500 8235 Submesh 1\001
  • inundation/parallel/documentation/figures/subdomainfinal.fig

    r2785 r2906
      -2
      1200 2
    - 6 1575 1845 4725 4095
    - 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 4
    -          1980 2340 2880 3060 1575 3060 1980 2340
    - 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 4
    -          1980 2340 3150 2070 2880 3015 2880 3060
    - 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 3
    -          2880 3060 3285 4095 3960 2835
    - 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 3
    -          3150 2070 4725 1845 3960 2880
    - 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 3
    -          3960 2880 4545 4095 4725 1845
    - 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 2
    -          3285 4095 4545 4095
    - 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 3
    -          3150 2115 3960 2880 2880 3060
    - 4 0 0 50 -1 0 16 0.0000 4 180 135 3870 2295 4\001
    - 4 0 0 50 -1 0 16 0.0000 4 195 135 3915 3690 6\001
    - 4 0 0 50 -1 0 16 0.0000 4 180 135 4365 2970 7\001
    - 4 0 0 50 -1 0 16 0.0000 4 180 135 3330 3375 5\001
    - 4 0 0 50 -1 0 16 0.0000 4 180 135 3375 2745 3\001
    - 4 0 0 50 -1 0 16 0.0000 4 180 135 2655 2520 2\001
    - 4 0 0 50 -1 0 16 0.0000 4 180 135 2025 2835 1\001
    - -6
      2 1 0 2 0 7 50 -1 -1 0.000 0 0 -1 1 0 2
              1 1 2.00 90.00 120.00
    ...
      2 1 0 1 4 7 50 -1 -1 0.000 0 0 -1 0 0 3
               1894 5605 3469 5380 2704 6415
    - 2 1 0 1 4 7 50 -1 -1 0.000 0 0 -1 0 0 3
    -          2682 6392 3267 7607 3447 5357
      2 1 0 1 4 7 50 -1 -1 0.000 0 0 -1 0 0 2
               2025 7650 3285 7650
    ...
      2 1 0 1 1 7 50 -1 -1 0.000 0 0 -1 0 0 2
               4455 6705 4905 7740
    - 4 0 0 50 -1 0 16 0.0000 4 180 1260 1035 8235 Processor 0\001
    - 4 0 0 50 -1 0 16 0.0000 4 180 1260 4860 8190 Processor 1\001
    - 4 0 1 50 -1 0 16 0.0000 4 180 135 4860 7020 5\001
    - 4 0 1 50 -1 0 16 0.0000 4 180 135 765 6390 1\001
    - 4 0 1 50 -1 0 16 0.0000 4 180 135 1395 6075 2\001
    - 4 0 1 50 -1 0 16 0.0000 4 180 135 2115 6300 3\001
    - 4 0 4 50 -1 0 16 0.0000 4 180 135 3060 6615 7\001
    - 4 0 4 50 -1 0 16 0.0000 4 195 135 2655 7245 6\001
    - 4 0 1 50 -1 0 16 0.0000 4 180 135 2070 6930 4\001
    - 4 0 4 50 -1 0 16 0.0000 4 180 135 2520 5895 5\001
    - 4 0 4 50 -1 0 16 0.0000 4 180 135 5445 5940 1\001
    - 4 0 4 50 -1 0 16 0.0000 4 180 135 5940 6615 2\001
    - 4 0 4 50 -1 0 16 0.0000 4 180 135 5490 7335 3\001
    - 4 0 1 50 -1 0 16 0.0000 4 180 135 4815 6390 4\001
    + 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 4
    +          1980 2340 2880 3060 1575 3060 1980 2340
    + 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 4
    +          1980 2340 3150 2070 2880 3015 2880 3060
    + 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 3
    +          2880 3060 3285 4095 3960 2835
    + 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 3
    +          3150 2070 4725 1845 3960 2880
    + 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 3
    +          3960 2880 4545 4095 4725 1845
    + 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 2
    +          3285 4095 4545 4095
    + 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 3
    +          3150 2115 3960 2880 2880 3060
    + 2 1 0 1 4 7 50 -1 -1 0.000 0 0 -1 0 0 2
    +          2700 6435 3285 7650
    + 4 0 0 50 -1 0 16 0.0000 4 195 1275 1035 8235 Processor 0\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 1275 4860 8190 Processor 1\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 135 2025 2835 0\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 135 2655 2520 1\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 135 3375 2745 2\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 135 3870 2295 3\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 135 3330 3375 4\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 135 3915 3690 5\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 135 4365 2970 6\001
    + 4 0 1 50 -1 0 16 0.0000 4 195 135 765 6390 0\001
    + 4 0 1 50 -1 0 16 0.0000 4 195 135 1395 6075 1\001
    + 4 0 1 50 -1 0 16 0.0000 4 195 135 2115 6300 2\001
    + 4 0 1 50 -1 0 16 0.0000 4 195 135 2070 6930 3\001
    + 4 0 4 50 -1 0 16 0.0000 4 195 135 2520 5895 4\001
    + 4 0 4 50 -1 0 16 0.0000 4 195 135 2655 7245 5\001
    + 4 0 4 50 -1 0 16 0.0000 4 195 135 5445 5940 0\001
    + 4 0 4 50 -1 0 16 0.0000 4 195 135 5940 6615 1\001
    + 4 0 4 50 -1 0 16 0.0000 4 195 135 5490 7335 2\001
    + 4 0 1 50 -1 0 16 0.0000 4 195 135 4815 6390 3\001
    + 4 0 1 50 -1 0 16 0.0000 4 195 135 4860 7020 4\001
  • inundation/parallel/documentation/figures/subdomainghost.fig

    r2785 r2906
      -2
      1200 2
    - 6 1575 1845 4725 4095
    - 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 4
    -          1980 2340 2880 3060 1575 3060 1980 2340
    - 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 4
    -          1980 2340 3150 2070 2880 3015 2880 3060
    - 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 3
    -          2880 3060 3285 4095 3960 2835
    - 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 3
    -          3150 2070 4725 1845 3960 2880
    - 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 3
    -          3960 2880 4545 4095 4725 1845
    - 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 2
    -          3285 4095 4545 4095
    - 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 3
    -          3150 2115 3960 2880 2880 3060
    - 4 0 0 50 -1 0 16 0.0000 4 180 135 3870 2295 4\001
    - 4 0 0 50 -1 0 16 0.0000 4 195 135 3915 3690 6\001
    - 4 0 0 50 -1 0 16 0.0000 4 180 135 4365 2970 7\001
    - 4 0 0 50 -1 0 16 0.0000 4 180 135 3330 3375 5\001
    - 4 0 0 50 -1 0 16 0.0000 4 180 135 3375 2745 3\001
    - 4 0 0 50 -1 0 16 0.0000 4 180 135 2655 2520 2\001
    - 4 0 0 50 -1 0 16 0.0000 4 180 135 2025 2835 1\001
    - -6
    - 6 315 5355 3510 7650
    - 6 1890 5355 3510 7650
    - 2 1 1 1 4 7 50 -1 -1 4.000 0 0 -1 0 0 3
    -          1894 5605 3469 5380 2704 6415
    - 2 1 1 1 4 7 50 -1 -1 4.000 0 0 -1 0 0 3
    -          2682 6392 3267 7607 3447 5357
    - -6
    - 2 1 0 1 1 7 50 -1 -1 0.000 0 0 -1 0 0 4
    -          720 5895 1620 6615 315 6615 720 5895
    - 2 1 0 1 1 7 50 -1 -1 0.000 0 0 -1 0 0 4
    -          720 5895 1890 5625 1620 6570 1620 6615
    - 2 1 0 1 1 7 50 -1 -1 0.000 0 0 -1 0 0 3
    -          1890 5670 2700 6435 1620 6615
    - 2 1 0 1 1 7 50 -1 -1 0.000 0 0 -1 0 0 3
    -          1620 6615 2025 7650 2700 6390
    - 2 1 1 1 4 7 50 -1 -1 4.000 0 0 -1 0 0 2
    -          2025 7650 3285 7650
    - 4 0 1 50 -1 0 16 0.0000 4 180 135 765 6390 1\001
    - 4 0 1 50 -1 0 16 0.0000 4 180 135 1395 6075 2\001
    - 4 0 1 50 -1 0 16 0.0000 4 180 135 2115 6300 3\001
    - 4 0 1 50 -1 0 16 0.0000 4 180 135 2070 6930 5\001
    - 4 0 4 50 -1 0 16 0.0000 4 180 135 2520 5895 4\001
    - 4 0 4 50 -1 0 16 0.0000 4 180 135 3060 6615 7\001
    - 4 0 4 50 -1 0 16 0.0000 4 195 135 2655 7245 6\001
    - -6
      2 1 0 2 0 7 50 -1 -1 0.000 0 0 -1 1 0 2
              1 1 2.00 90.00 120.00
    ...
      2 1 1 1 1 7 50 -1 -1 4.000 0 0 -1 0 0 2
               4455 6705 4905 7740
    - 4 0 4 50 -1 0 16 0.0000 4 180 135 5445 5940 4\001
    - 4 0 4 50 -1 0 16 0.0000 4 180 135 5940 6615 7\001
    - 4 0 4 50 -1 0 16 0.0000 4 195 135 5490 7335 6\001
    - 4 0 1 50 -1 0 16 0.0000 4 180 135 4815 6390 3\001
    - 4 0 1 50 -1 0 16 0.0000 4 180 135 4860 7020 5\001
    - 4 0 0 50 -1 0 16 0.0000 4 180 1440 1035 8235 Subdomain 0\001
    - 4 0 0 50 -1 0 16 0.0000 4 180 1440 4860 8190 Subdomain 1\001
    + 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 4
    +          1980 2340 2880 3060 1575 3060 1980 2340
    + 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 4
    +          1980 2340 3150 2070 2880 3015 2880 3060
    + 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 3
    +          2880 3060 3285 4095 3960 2835
    + 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 3
    +          3150 2070 4725 1845 3960 2880
    + 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 3
    +          3960 2880 4545 4095 4725 1845
    + 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 2
    +          3285 4095 4545 4095
    + 2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 3
    +          3150 2115 3960 2880 2880 3060
    + 2 1 0 1 1 7 50 -1 -1 0.000 0 0 -1 0 0 4
    +          720 5895 1620 6615 315 6615 720 5895
    + 2 1 0 1 1 7 50 -1 -1 0.000 0 0 -1 0 0 4
    +          720 5895 1890 5625 1620 6570 1620 6615
    + 2 1 0 1 1 7 50 -1 -1 0.000 0 0 -1 0 0 3
    +          1890 5670 2700 6435 1620 6615
    + 2 1 0 1 1 7 50 -1 -1 0.000 0 0 -1 0 0 3
    +          1620 6615 2025 7650 2700 6390
    + 2 1 1 1 4 7 50 -1 -1 4.000 0 0 -1 0 0 2
    +          2025 7650 3285 7650
    + 2 1 1 1 4 7 50 -1 -1 4.000 0 0 -1 0 0 3
    +          1894 5605 3469 5380 2704 6415
    + 2 1 1 1 4 0 50 -1 -1 4.000 0 0 -1 0 0 2
    +          2700 6435 3285 7650
    + 4 0 0 50 -1 0 16 0.0000 4 195 1215 4860 8190 Submesh 1\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 135 2025 2835 0\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 135 2655 2520 1\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 135 3375 2745 2\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 135 3870 2295 3\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 135 3330 3375 4\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 135 3915 3690 5\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 135 4365 2970 6\001
    + 4 0 0 50 -1 0 16 0.0000 4 195 1215 1215 8235 Submesh 0\001
    + 4 0 1 50 -1 0 16 0.0000 4 195 135 1170 6120 1\001
    + 4 0 1 50 -1 0 16 0.0000 4 195 135 1935 6165 2\001
    + 4 0 4 50 -1 0 16 0.0000 4 195 135 2430 5850 3\001
    + 4 0 1 50 -1 0 16 0.0000 4 195 135 1980 6885 4\001
    + 4 0 1 50 -1 0 16 0.0000 4 195 135 4725 6390 2\001
    + 4 0 1 50 -1 0 16 0.0000 4 195 135 675 6435 0\001
    + 4 0 15 50 -1 0 16 0.0000 4 255 345 2655 7425 (5)\001
    + 4 0 24 50 -1 0 16 0.0000 4 255 345 5400 7515 (2)\001
    + 4 0 15 50 -1 0 16 0.0000 4 255 345 855 6525 (0)\001
    + 4 0 15 50 -1 0 16 0.0000 4 255 345 1350 6255 (1)\001
    + 4 0 15 50 -1 0 16 0.0000 4 255 345 2115 6390 (2)\001
    + 4 0 15 50 -1 0 16 0.0000 4 255 345 2565 6075 (4)\001
    + 4 0 15 50 -1 0 16 0.0000 4 255 345 1890 7200 (3)\001
    + 4 0 4 50 -1 0 16 0.0000 4 195 135 2520 7110 5\001
    + 4 0 24 50 -1 0 16 0.0000 4 255 345 4950 6480 (3)\001
    + 4 0 1 50 -1 0 16 0.0000 4 195 135 4815 7290 4\001
    + 4 0 24 50 -1 0 16 0.0000 4 255 345 4815 6930 (4)\001
    + 4 0 4 50 -1 0 16 0.0000 4 195 135 5895 6525 6\001
    + 4 0 24 50 -1 0 16 0.0000 4 255 345 5760 6885 (1)\001
    + 4 0 4 50 -1 0 16 0.0000 4 195 135 5445 7155 5\001
    + 4 0 24 50 -1 0 16 0.0000 4 255 345 5355 6210 (0)\001
    + 4 0 4 50 -1 0 16 0.0000 4 195 135 5490 5895 3\001
  • inundation/parallel/documentation/parallel.tex

    r2849 r2906

      The first step in parallelising the code is to subdivide the mesh
    - into, roughly, equally sized partitions. On a rectangular domain this may be
    + into, roughly, equally sized partitions. On a rectangular mesh this may be
      done by a simple co-ordinate based dissection, but on a complicated
    - domain such as the Merimbula grid shown in Figure \ref{fig:mergrid}
    + domain such as the Merimbula mesh shown in Figure \ref{fig:mergrid}
      a more sophisticated approach must be used.  We use pymetis, a
      python wrapper around the Metis
    ...
      \begin{figure}[hbtp]
        \centerline{ \includegraphics[scale = 0.75]{figures/mermesh.eps}}
    -   \caption{The Merimbula grid.}
    +   \caption{The Merimbula mesh.}
       \label{fig:mergrid}
      \end{figure}
    ...
      setting up the communication pattern as well as assigning the local numbering scheme for the submeshes.

    - Consider the example subpartitioning given in Figure \ref{fig:subdomain}. During the \code{evolve} calculations Triangle 3 in Submesh 0 will need to access its neighbour Triangle 4 stored in Submesh 1. The standard approach to this problem is to add an extra layer of triangles, which we call ghost triangles. The ghost triangles
    + Consider the example subpartitioning given in Figure \ref{fig:subdomain}. During the \code{evolve} calculations Triangle 2 in Submesh 0 will need to access its neighbour Triangle 3 stored in Submesh 1. The standard approach to this problem is to add an extra layer of triangles, which we call ghost triangles. The ghost triangles
      are read-only, they should not be updated during the calculations, they are only there to hold any extra information that a processor may need to complete its calculations. The ghost triangle values are updated through communication calls. Figure \ref{fig:subdomaing} shows the submeshes with the extra layer of ghost triangles.

      \begin{figure}[hbtp]
        \centerline{ \includegraphics[scale = 0.6]{figures/subdomain.eps}}
    -   \caption{An example subpartioning.}
    +   \caption{An example subpartioning of a mesh.}
       \label{fig:subdomain}
      \end{figure}
    ...
      \begin{figure}[hbtp]
        \centerline{ \includegraphics[scale = 0.6]{figures/subdomainghost.eps}}
    -   \caption{An example subpartioning with ghost triangles.}
    +   \caption{An example subpartioning with ghost triangles. The numbers in brackets shows the local numbering scheme that is calculated and stored with the mesh, but not implemented until the local mesh is built. See Section \ref{sec:part4}. }
       \label{fig:subdomaing}
      \end{figure}

    - When partitioning the mesh we introduce new, dummy, boundary edges. For example, Triangle 3 in Submesh 1, from Figure \ref{fig:subdomaing}, originally shared an edge with Triangle 2, but after partitioning that edge becomes a boundary edge. These new boundary edges are are tagged as \code{ghost} and should, in general, be assigned a type of \code{None}. The following piece of code taken from {\tt run_parallel_advection.py} shows an example.
    -
    + When partitioning the mesh we introduce new, dummy, boundary edges. For example, Triangle 2 in Submesh 1 from Figure \ref{fig:subdomaing} originally shared an edge with Triangle 1, but after partitioning that edge becomes a boundary edge. These new boundary edges are are tagged as \code{ghost} and should, in general, be assigned a type of \code{None}. The following piece of code taken from {\tt run_parallel_advection.py} shows an example.
      {\small \begin{verbatim}
      T = Transmissive_boundary(domain)
    ...
      \end{verbatim}}

    -
    - Looking at Figure \ref{fig:subdomaing} we see that after each \code{evolve} step Processor 0  will have to send the updated values for Triangle 3 and Triangle 5 to Processor 1, and similarly Processor 1 will have to send the updated values for triangles 4, 7 and 6 (recall that Submesh $p$ will be assigned to Processor $p$). The \code{build_submesh} function builds a dictionary that defines the communication pattern.
    -
    - Finally, the ANUGA code assumes that the triangles (and nodes etc.) are numbered consecutively starting from 1 (FIXME (Ole): Isn't it 0?). Consequently, if Submesh 1 in Figure \ref{fig:subdomaing} was passed into the \code{evolve} calculations it would crash due to the 'missing' triangles. The \code{build_submesh} function determines a local numbering scheme for each submesh, but it does not actually update the numbering, that is left to the function \code{build_local}.
    + Looking at Figure \ref{fig:subdomaing} we see that after each \code{evolve} step Processor 0  will have to send the updated values for Triangle 2 and Triangle 4 to Processor 1, and similarly Processor 1 will have to send the updated values for Triangle 3 and Triangle 5 (recall that Submesh $p$ will be assigned to Processor $p$). The \code{build_submesh} function builds a dictionary that defines the communication pattern.
    +
    + Finally, the ANUGA code assumes that the triangles (and nodes etc.) are numbered consecutively starting from 0. Consequently, if Submesh 1 in Figure \ref{fig:subdomaing} was passed into the \code{evolve} calculations it would crash. The \code{build_submesh} function determines a local numbering scheme for each submesh, but it does not actually update the numbering, that is left to \code{build_local}.
    +

      \subsection {Sending the Submeshes}\label{sec:part3}
    ...
      All of functions described so far must be run in serial on Processor 0. The next step is to start the parallel computation by spreading the submeshes over the processors. The communication is carried out by
      \code{send_submesh} and \code{rec_submesh} defined in {\tt build_commun.py}.
    - The \code{send_submesh} function should be called on Processor 0 and sends the Submesh $p$ to Processor $p$, while \code{rec_submesh} should be called by Processor $p$ to receive Submesh $p$ from Processor 0. Note that the order of communication is very important, if any changes are made to the \code{send_submesh} function the corresponding change must be made to the \code{rec_submesh} function.
    + The \code{send_submesh} function should be called on Processor 0 and sends the Submesh $p$ to Processor $p$, while \code{rec_submesh} should be called by Processor $p$ to receive Submesh $p$ from Processor 0.
    +
    + As an aside, the order of communication is very important. If someone was to modify the \code{send_submesh} routine the corresponding change must be made to the \code{rec_submesh} routine.

      While it is possible to get Processor 0 to communicate it's submesh to itself, it is an expensive and unnecessary communication call. The {\tt build_commun.py} file also includes a function called \code{extract_hostmesh} that should be called on Processor 0 to extract Submesh 0.


    - \subsection {Building the Local Mesh}
    + \subsection {Building the Local Mesh}\label{sec:part4}
      After using \code{send_submesh} and \code{rec_submesh}, Processor $p$ should have its own local copy of Submesh $p$, however as stated previously the triangle numbering will be incorrect on all processors except number $0$. The \code{build_local_mesh} function from {\tt build_local.py} primarily focuses on renumbering the information stored with the submesh; including the nodes, vertices and quantities. Figure \ref{fig:subdomainf} shows what the mesh in each processor may look like.

    ...
      \begin{verbatim}
      #######################
    - # Partition the domain
    + # Partition the mesh
      #######################

    ...
      \end{verbatim}

    - This rectangular mesh is artificial, and the approach to subpartitioning the mesh is different to the one described above, however this example may be of interest to those who want to measure the parallel efficiency of the code on their machine. A rectangular mesh should give a good load balance and is therefore an important first test problem.
    -
    -
    - A more \lq real life\rq\ mesh is the Merimbula mesh used in the code shown in Section \ref{sec:codeRPMM}. This example also solves the advection equation. In this case the techniques described in Section \ref{sec:part} must be used to partition the mesh. Figure \ref{fig:code} shows the part of the code that is responsible for spreading the domain over the processors. We now look at the code in detail.
    + Most simulations will not be done on a rectangular mesh, and the approach to subpartitioning the mesh is different to the one described above, however this example may be of interest to those who want to measure the parallel efficiency of the code on their machine. A rectangular mesh should give a good load balance and is therefore an important first test problem.
    +
    +
    + A more \lq real life\rq\ mesh is the Merimbula mesh used in the code shown in Section \ref{sec:codeRPMM}. This example also solves the advection equation. In this case the techniques described in Section \ref{sec:part} must be used to partition the mesh. Figure \ref{fig:code} shows the part of the code that is responsible for spreading the mesh over the processors. We now look at the code in detail.

      \begin{figure}[htbp]
    ...
          filename = 'merimbula_10785.tsh'

    -     domain_full = pmesh_to_domain_instance(filename, Advection_Domain)
    -     domain_full.set_quantity('stage', Set_Stage(756000.0,756500.0,4.0))
    -
    -     # Define the domain boundaries for visualisation
    -
    -     rect = array(domain_full.xy_extent, Float)
    +     mesh_full = pmesh_to_domain_instance(filename, Advection_Domain)
    +     mesh_full.set_quantity('stage', Set_Stage(756000.0,756500.0,4.0))

          # Subdivide the mesh

          nodes, triangles, boundary, triangles_per_proc, quantities  =\
    -             pmesh_divide_metis(domain_full, numprocs)
    +             pmesh_divide_metis(mesh_full, numprocs)

          # Build the mesh that should be assigned to each processor.
    ...
          # Build the local mesh for processor 0

    -     hostmesh = extract_hostmesh(submesh)
    -     points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
    -              build_local_mesh(hostmesh, 0, triangles_per_proc[0], numprocs)
    +      points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict =\
    +              extract_hostmesh(submesh, triangles_per_proc)

      else:
    ...
      \begin{itemize}
      \item
    - These first few lines of code read in and define the (global) mesh.
    + These first few lines of code read in and define the (global) mesh. The \code{Set_Stage} function sets the initial conditions. See the code in \ref{sec:codeRPMM} for the definition of \code{Set_Stage}.
      \begin{verbatim}
          filename = 'merimbula_10785.tsh'
    -     domain_full = pmesh_to_domain_instance(filename, Advection_Domain)
    -     domain_full.set_quantity('stage', Set_Stage(756000.0,756500.0,4.0))
    - \end{verbatim}
    -
    - \item
    - The \code{rect} array is used by the visualiser and records the domain size.
    +     mesh_full = pmesh_to_domain_instance(filename, Advection_Domain)
    +     mesh_full.set_quantity('stage', Set_Stage(756000.0,756500.0,4.0))
    + \end{verbatim}
    +
      \item \code{pmesh_divide_metis} divides the mesh into a set of non-overlapping subdomains as described in Section \ref{sec:part1}.
      \begin{verbatim}
          nodes, triangles, boundary, triangles_per_proc, quantities  =\
    -             pmesh_divide_metis(domain_full, numprocs)
    - \end{verbatim}
    -
    - \item The next step is to build a boundary layer of ghost triangles and define the communication pattern. This step is implemented by \code{build_submesh} as discussed in Section \ref{sec:part2}.
    +             pmesh_divide_metis(mesh_full, numprocs)
    + \end{verbatim}
    +
    + \item The next step is to build a boundary layer of ghost triangles and define the communication pattern. This step is implemented by \code{build_submesh} as discussed in Section \ref{sec:part2}. The \code{submesh} variable contains a copy of the submesh for each processor.
      \begin{verbatim}
          submesh = build_submesh(nodes, triangles, boundary, quantities, \
    ...
      \end{verbatim}

    - The processors receive a given subpartition by calling \code{rec_submesh}. The \code{rec_submesh} routine also calls \code{build_local_mesh}. The \code{build_local_mesh} routine described in Section \ref{sec:part4} ensures that the information is stored in a way that is compatible with the Domain datastructure. This means, for example, that the triangles and nodes must be numbered consecutively starting from 1 (FIXME (Ole): or is it 0?).
    - \begin{verbatim}
    -     points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
    +
    + The processors receives a given subpartition by calling \code{rec_submesh}. The \code{rec_submesh} routine also calls \code{build_local_mesh}. The \code{build_local_mesh} routine described in Section \ref{sec:part4} ensures that the information is stored in a way that is compatible with the Domain datastructure. This means, for example, that the triangles and nodes must be numbered consecutively starting from 0.
    +
    + \begin{verbatim}
    +     points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict=\
               rec_submesh(0)
      \end{verbatim}

    - Note that the submesh is not received by, or sent to, Processor 0. Rather     \code{hostmesh = extract_hostmesh(submesh)} extracts the appropriate information. This saves the cost of an unnecessary communication call. It is described further in Section \ref{sec:part3}.
    - \begin{verbatim}
    -     hostmesh = extract_hostmesh(submesh)
    -     points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
    -              build_local_mesh(hostmesh, 0, triangles_per_proc[0], numprocs)
    + Note that the submesh is not received by, or sent to, Processor 0. Rather     \code{hostmesh = extract_hostmesh(submesh)} simply extracts the mesh that has been assigned to Processor 0. Recall \code{submesh} contains the list of submeshes to be assigned to each processor. This is described further in Section \ref{sec:part3}. The \code{build_local_mesh} renumbers the nodes
    + \begin{verbatim}
    +     points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict)=\
    +               extract_hostmesh(submesh, triangles_per_proc)
      \end{verbatim}

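
    Taken together, the documentation above describes a four-step distribution sequence: subdivide the mesh, build the submeshes with their ghost layers, send Submesh p to Processor p, and extract Submesh 0 on Processor 0. The sketch below assembles the calls quoted in this changeset into one place; the import locations and the argument lists that the changeset elides (notably for build_submesh and send_submesh) are assumptions for illustration, not the definitive API.

        # Sketch of the distribution sequence described in parallel.tex (r2906).
        # Import locations and the elided argument lists are assumed, not authoritative.
        import pypar
        from pmesh_divide  import pmesh_divide_metis
        from build_submesh import build_submesh, extract_hostmesh
        from build_commun  import send_submesh, rec_submesh

        numprocs = pypar.size()
        myid     = pypar.rank()

        if myid == 0:
            # mesh_full: the global mesh, read with pmesh_to_domain_instance
            # exactly as in the example scripts quoted above.

            # 1. Subdivide the mesh into one submesh per processor (sec:part1).
            nodes, triangles, boundary, triangles_per_proc, quantities = \
                    pmesh_divide_metis(mesh_full, numprocs)

            # 2. Add the ghost layer and the communication pattern (sec:part2);
            #    the trailing arguments are elided in the changeset and assumed here.
            submesh = build_submesh(nodes, triangles, boundary, quantities,
                                    triangles_per_proc)

            # 3. Send Submesh p to Processor p (sec:part3); argument list assumed.
            for p in range(1, numprocs):
                send_submesh(submesh, triangles_per_proc, p)

            # Processor 0 keeps Submesh 0 without a communication call.
            points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
                    extract_hostmesh(submesh, triangles_per_proc)
        else:
            # 4. Receive Submesh p; rec_submesh also renumbers it via build_local_mesh (sec:part4).
            points, vertices, boundary, quantities, ghost_recv_dict, full_send_dict = \
                    rec_submesh(0)
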
  • inundation/parallel/print_stats.py

    r2653 r2906
      #
      #
    - #  The routine defined here are intended for debugging
    + #  The routines defined here are intended for debugging
      # use. They print the norms of the quantities in the
      # domain. As opposed to the definitions given
  • inundation/parallel/run_parallel_sw_merimbula_test.py

    r2769 r2906
          # Read in the test files

    - #    filename = 'test-100.tsh'
    -     filename = 'merimbula_10785_1.tsh'
    +     filename = 'test-100.tsh'
    + #    filename = 'merimbula_10785_1.tsh'

          # Build the whole domain
    ...
          # Initialise the wave

    - #    domain_full.set_quantity('stage', Set_Stage(200.0,300.0,1.0))
    -     domain_full.set_quantity('stage', Set_Stage(756000.0,756500.0,2.0))
    +     domain_full.set_quantity('stage', Set_Stage(200.0,300.0,1.0))
    + #    domain_full.set_quantity('stage', Set_Stage(756000.0,756500.0,2.0))
      #    domain_full.set_quantity('stage', Set_Stage(756000.0,756500.0,0.0))

    ...
      print 'Processor %d on %s: No of elements %d'%(domain.processor,processor_name,domain.number_of_elements)
      yieldstep = 0.05
    - finaltime = 500.0
    + finaltime = 5.0

      yieldstep = 1
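
    Because build_submesh.py now imports pypar directly (extract_hostmesh calls pypar.size()), a minimal sketch of the pypar calls the parallel scripts rely on is given below; the calls are ordinary pypar usage, but treat the exact argument lists as illustrative rather than authoritative.

        import pypar                          # the Python-MPI interface

        myid      = pypar.rank()              # this processor's id, 0 .. numprocs-1
        numprocs  = pypar.size()              # total number of processors
        node_name = pypar.get_processor_name()

        print 'Processor %d of %d on %s' % (myid, numprocs, node_name)

        if myid == 0:
            for p in range(1, numprocs):
                pypar.send({'destination': p}, p)   # blocking send to processor p
        else:
            msg = pypar.receive(0)                  # blocking receive from processor 0

        pypar.finalize()

    A script of this kind is normally launched under MPI, typically with something like mpirun -np 4 python run_parallel_advection.py.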