Changeset 1160
- Timestamp: Mar 29, 2005, 6:06:54 PM (20 years ago)
- Location: inundation/ga/storm_surge
- Files: 7 added, 2 deleted, 7 edited
inundation/ga/storm_surge/analytical solutions/Analytical solution_oblique_shock.py
r774 → r1160 (edited)

Mostly formatting: the module docstring opener is split so the summary starts on its own line, the copyright block is re-indented, and trailing whitespace is stripped (for example after '# Module imports' and 'domain = Domain(points, elements, boundary)'). One genuine repair: the sys.path manipulation, previously broken across three lines, is rejoined so the pyvolution directory is actually added to the module search path:

-sys.path.append('..'+s
-
-ep+'pyvolution')
+sys.path.append('..'+sep+'pyvolution')
inundation/ga/storm_surge/pyvolution/cg_solve.py
r1156 → r1160 (edited)

The module logger is turned down from DEBUG to WARNING, a run of stale blank and commented-out lines near the top of the file is deleted, and the iteration-limit message in conjugate_gradient now goes through the logger instead of a bare print:

-logger.setLevel(logging.DEBUG)
+logger.setLevel(logging.WARNING)

     #FIXME: Should this raise an exception?
     if i==imax:
-        print 'max number of iterations attained'
+        logger.warning('max number of iterations attained')

     return x
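A minimal usage sketch of the changed behaviour, modelled on the new test_max_iter case in test_cg_solve.py below. The logging.basicConfig() call is an assumed caller-side setup (cg_solve itself only tries to load a logging configuration), and the import paths follow the existing pyvolution tests:

    import logging
    logging.basicConfig()                  # assumed setup: route log records to stderr

    from sparse import Sparse              # as used by the pyvolution tests
    from cg_solve import conjugate_gradient

    A = Sparse([[ 2.0, -1.0,  0.0,  0.0],
                [-1.0,  2.0, -1.0,  0.0],
                [ 0.0, -1.0,  2.0, -1.0],
                [ 0.0,  0.0, -1.0,  2.0]])
    b = A*[0.0, 1.0, 2.0, 3.0]

    # With imax=2 the solver cannot converge on this 4x4 system, so it logs
    # 'max number of iterations attained' at WARNING level (rather than printing
    # to stdout) and returns the current iterate.
    x = conjugate_gradient(A, b, [0.0, 0.0, 0.0, 0.0], imax=2)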
inundation/ga/storm_surge/pyvolution/least_squares.py
r1158 → r1160 (edited)

Whitespace-only clean-up: trailing spaces are stripped throughout the module and no code paths change. The touched hunks run through the module docstring, the fallback gradient helper, fit_to_mesh_file, fit_to_mesh, pts2rectangular, the Interpolation class (build_coefficient_matrix_B, build_interpolation_matrix_A, search_triangles_of_vertices, build_interpolation_matrix_A_brute, build_smoothing_matrix_D, fit, fit_points, interpolate) and the command-line driver at the bottom of the file.
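For reference, the fallback gradient helper touched above (defined when util.gradient cannot be imported) fits a plane through three points and returns its x- and y-slopes. A small worked check, with the body copied from the hunk and the argument order inferred from the calls in build_smoothing_matrix_D:

    def gradient(x0, y0, x1, y1, x2, y2, q0, q1, q2):
        # Plane through (x0,y0,q0), (x1,y1,q1), (x2,y2,q2); returns (dq/dx, dq/dy)
        det = (y2-y0)*(x1-x0) - (y1-y0)*(x2-x0)
        a = ((y2-y0)*(q1-q0) - (y1-y0)*(q2-q0)) / det
        b = ((x1-x0)*(q2-q0) - (x2-x0)*(q1-q0)) / det
        return a, b

    # Sample q(x, y) = 2*x + 3*y at (0,0), (1,0) and (0,1):
    a, b = gradient(0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 2.0, 3.0)
    assert abs(a - 2.0) < 1.0e-12 and abs(b - 3.0) < 1.0e-12   # recovers (2, 3)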
inundation/ga/storm_surge/pyvolution/mesh.py
r1158 → r1160 (edited)

Mostly trailing-whitespace removal, with one behavioural change in check_integrity: a vertex that does not belong to any element no longer aborts the check with an AssertionError ('Some points do not belong to an element. Make sure all points appear as element vertices!'); instead a warning naming the offending vertex is printed and checking continues.

         #Check that all vertices have been registered
-        for v in self.vertexlist:
-            msg = 'Some points do not belong to an element.\n'
-            msg += 'Make sure all points appear as element vertices!'
-            assert v is not None, msg
+        for v_id, v in enumerate(self.vertexlist):
+
+            msg = 'Vertex %s does not belong to an element.'
+            #assert v is not None, msg
+            if v is None:
+                print msg%v_id

The remaining hunks (class docstring, __init__, build_neighbour_structure, build_boundary_dictionary, build_tagged_elements_dictionary, build_boundary_structure, get_boundary_tags, get_boundary_polygon, the rest of check_integrity and get_centroid_coordinates) only strip trailing whitespace.
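An illustrative sketch of what the new behaviour means in practice, built from the two-triangle example in the Mesh docstring; the extra unreferenced point d and the explicit check_integrity() call are added here purely for illustration and are not part of the changeset:

    from mesh import Mesh

    a = [0.0, 0.0]
    b = [0.0, 2.0]
    c = [2.0, 0.0]
    e = [2.0, 2.0]
    d = [9.0, 9.0]                       # hypothetical point used by no triangle

    points = [a, b, c, e, d]
    triangles = [[1, 0, 2], [1, 2, 3]]   # bac, bce (as in the class docstring)

    mesh = Mesh(points, triangles)
    mesh.check_integrity()
    # Before this changeset: AssertionError ('Some points do not belong ...').
    # After: prints 'Vertex 4 does not belong to an element.' and keeps checking.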
inundation/ga/storm_surge/pyvolution/test_cg_solve.py
r1018 → r1160 (edited)

A new test case, test_max_iter, is added after the existing small-matrix test. It sets up a 4x4 symmetric tridiagonal system but caps the solver at imax=2 iterations, exercising the iteration-limit path that cg_solve.py now reports through the logger:

         assert allclose(x,xe)

+    def test_max_iter(self):
+        """Small Sparse Matrix"""
+
+        A = [[2.0, -1.0, 0.0, 0.0 ],
+             [-1.0, 2.0, -1.0, 0.0],
+             [0.0, -1.0, 2.0, -1.0],
+             [0.0,0.0, -1.0, 2.0]]
+
+        A = Sparse(A)
+
+        xe = [0.0, 1.0, 2.0, 3.0]
+        b = A*xe
+        x = [0.0, 0.0, 0.0, 0.0]
+
+        x = conjugate_gradient(A,b,x,iprint=0,imax=2)
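Assuming this module follows the other pyvolution test files and collects its cases in a unittest test-case class, the new case could be run on its own with something like the following; the class name Test_CG_Solve is a guess, not taken from this changeset:

    import unittest
    from test_cg_solve import Test_CG_Solve   # hypothetical class name

    suite = unittest.makeSuite(Test_CG_Solve, 'test_max_iter')
    unittest.TextTestRunner(verbosity=2).run(suite)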
inundation/ga/storm_surge/zeus/pyvolution.zpi
r1156 → r1160 (edited)

Zeus project-file update. The build settings now point at a makefile and wire the debug targets to it:

-<MakeFile> </MakeFile>
+<MakeFile>makefile</MakeFile>
-<DebugMake> </DebugMake>
-<DebugClean> </DebugClean>
+<DebugMake>make $PF</DebugMake>
+<DebugClean>make clean</DebugClean>
-<DebugRebuild> </DebugRebuild>
+<DebugRebuild>make $PF</DebugRebuild>

The project's <file> list is rewritten from bare file names (cg_solve.py, compile.py, config.py, ...) to paths relative to the zeus directory (..\pyvolution\cg_solve.py, ...), and is extended to cover the rest of the pyvolution directory. Newly listed entries: advection.py, bed_w_eden_boundary.py, bed_w_file_boundary.py, check_sww_tsh.py, chris-dump.py, combine_pts.py, cornell_room.py, flatbed.py, flatbed_compare.py, generic_boundary_conditions.py, island.py, log.ini, most2nc.py, netherlands-chris.py, run_profile.py, run_tsh_weir.py, show_balanced_limiters.py, test_combine_pts.py, test_most2nc.py, tsh2sww.py, twolevels.py, util_ext.h and wind_example_variable.py. The "Header Files" and "Resource Files" folders are unchanged.