Changeset 274

Timestamp: Sep 6, 2004, 1:57:41 PM
Location:  inundation/ga/storm_surge/pyvolution
Files:     6 edited
Legend: unchanged context lines are shown with a leading space, added lines with '+', removed lines with '-'.
inundation/ga/storm_surge/pyvolution/domain.py
r272 r274

+    def get_name(self):
+        return self.filename

...

     def evolve_to_end(self, finaltime = 1.0):
-        """Iterate evolve generator all the way to the end
+        """Iterate evolve all the way to the end
         """

...

     def compute_forcing_terms(self):
         """If there are any forcing functions driving the system
-        they should be defined in Domain subclass
+        they should be defined in Domain subclass and appended to
+        the list self.forcing_terms
         """

...

         #Clean up
+        #Note that Q.explicit_update is reset by compute_fluxes
         Q.semi_implicit_update[:] = 0.0
-        Q.explicit_update[:] = 0.0  #Unnecessary as fluxes will set it

...

 ##############################################
 #Initialise module
-
-#C-extensions
-#import compile
-#if compile.can_use_C_extension('domain_ext.c'):
-#    compute_fluxes = compute_fluxes_c
-#    distribute_to_vertices_and_edges = distribute_to_vertices_and_edges_c
-#    update_conserved_quantities = update_conserved_quantities_c
-#else:
-#    from shallow_water import compute_fluxes
-#    from python_versions import distribute_to_vertices_and_edges
-#    from python_versions import update_conserved_quantities

 #Optimisation with psyco
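The expanded compute_forcing_terms docstring points at the intended usage: subclasses append callables to self.forcing_terms and the framework invokes each one per timestep. A minimal sketch of such a forcing term, assuming each entry is called as f(domain) and contributes to the explicit_update arrays; the name linear_bottom_friction and the damping coefficient are illustrative, not part of this changeset:

    def linear_bottom_friction(domain):
        #Hypothetical forcing term: damp both momentum components in
        #proportion to their current centroid values (tau is assumed)
        tau = 0.01
        for name in ['xmomentum', 'ymomentum']:
            Q = domain.quantities[name]
            Q.explicit_update[:] = Q.explicit_update - tau*Q.centroid_values

    #Registered as suggested by the new docstring:
    #domain.forcing_terms.append(linear_bottom_friction)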
inundation/ga/storm_surge/pyvolution/mesh.py
r262 r274

         #Build dictionary mapping from segments (2-tuple of points)
         #to left hand side edge (facing neighbouring triangle)
-        #Also build list of vertices containing indices of triangles

         N = self.number_of_elements
         neighbourdict = {}
-        vertexlist = [None]*len(self.coordinates)
         for i in range(N):

...

             neighbourdict[c,a] = (i, 1)  #(id, edge)

-            #Register the vertices as keys mapping to
-            #(triangle, edge) tuples associated with them
-            #This is used for smoothing
-            for vertex_id, v in enumerate([a,b,c]):
-                if vertexlist[v] is None:
-                    vertexlist[v] = []
-
-                vertexlist[v].append( (i, vertex_id) )

         #Step 2:

...

-        self.vertexlist = vertexlist

     def build_vertexlist(self):
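For reference, the structure that build_vertexlist now owns maps each point index to the (triangle_id, vertex_id) pairs that share it; the smoothing code in quantity.py iterates over exactly this. A standalone sketch of the idea in plain Python (not the Mesh method itself):

    def build_vertexlist(triangles, number_of_points):
        """Map each point index to the (triangle_id, vertex_id) pairs using it.

        triangles: sequence of 3-tuples of point indices, e.g. [(0,1,2), (1,3,2)]
        """
        vertexlist = [None]*number_of_points
        for triangle_id, corners in enumerate(triangles):
            for vertex_id, point in enumerate(corners):
                if vertexlist[point] is None:
                    vertexlist[point] = []
                vertexlist[point].append( (triangle_id, vertex_id) )
        return vertexlist

    #Two triangles sharing points 1 and 2:
    #build_vertexlist([(0,1,2), (1,3,2)], 4)
    #-> [[(0, 0)], [(0, 1), (1, 0)], [(0, 2), (1, 2)], [(1, 1)]]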
inundation/ga/storm_surge/pyvolution/quantity.py
r272 r274

+    def smooth_vertex_values(self, value_array='field_values',
+                             precision = None):
+        """ Smooths field_values or conserved_quantities data.
+        TODO: be able to smooth individual fields
+        NOTE: This function does not have a test.
+        """
+
+        from Numeric import concatenate, zeros, Float, Int, array, reshape
+
+        A,V = self.get_vertex_values(xy=False,
+                                     value_array=value_array,
+                                     smooth = True,
+                                     precision = precision)
+
+        #Set some field values
+        for volume in self:
+            for i,v in enumerate(volume.vertices):
+                if value_array == 'field_values':
+                    volume.set_field_values('vertex', i, A[v,:])
+                elif value_array == 'conserved_quantities':
+                    volume.set_conserved_quantities('vertex', i, A[v,:])
+
+        if value_array == 'field_values':
+            self.precompute()
+        elif value_array == 'conserved_quantities':
+            Volume.interpolate_conserved_quantities()
+
+
+    #Method for outputting model results
+    def get_vertex_values(self,
+                          xy=True,
+                          smooth = None,
+                          precision = None,
+                          reduction = None):
+        """Return vertex values like an OBJ format
+
+        The vertex values are returned as one sequence in the 1D float array A.
+        If requested the coordinates will be returned in 1D arrays X and Y.
+
+        The connectivity is represented as an integer array, V, of dimension
+        M x 3, where M is the number of volumes. Each row has three indices
+        into the X, Y, A arrays defining the triangle.
+
+        If smooth is True, vertex values corresponding to one common
+        coordinate set will be smoothed according to the given
+        reduction operator. In this case vertex coordinates will be
+        de-duplicated.
+
+        If no smoothing is required, vertex coordinates and values will
+        be aggregated as a concatenation of values at
+        vertices 0, vertices 1 and vertices 2.
+
+        Calling convention
+        if xy is True:
+            X,Y,A,V = get_vertex_values
+        else:
+            A,V = get_vertex_values
+        """
+
+        from Numeric import concatenate, zeros, Float, Int, array, reshape
+
+        if smooth is None:
+            smooth = self.domain.smooth
+
+        if precision is None:
+            precision = Float
+
+        if reduction is None:
+            reduction = self.domain.reduction
+
+        if smooth == True:
+            M = len(self.domain.vertexlist)  #Number of unique vertices
+
+            A = zeros((M,N), precision)
+            V = zeros((len(self),3), Int)
+
+            #Rebuild triangle structure
+            for k, v in enumerate(self.vertexdict.keys()):
+                for volume_id, vertex_id in self.vertexdict[v]:
+                    V[volume_id, vertex_id] = k
+
+            #Smoothing loop
+            for j, i in enumerate(indices):
+                #For each quantity
+                #
+                #j is an index into returned quantities (0 <= j < N)
+                #i is the given index into internal quantities
+                #(e.g. 0: bed elevation or 1: friction)
+
+                for k, v in enumerate(self.vertexdict.keys()):
+                    #Go through all contributions to vertex v
+                    #k is an index into returned vertices (0 <= k < M)
+                    #v is an index into internal vertices
+
+                    L = len(self.vertexdict[v])
+
+                    ulist = []
+                    for volume_id, vertex_id in self.vertexdict[v]:
+                        #For all contributing (triangle, vertex) pairs
+
+                        cmd = 'u = Volume.%s_vertex%d[%d,%d]'\
+                              %(value_array, vertex_id, volume_id, i)
+                        exec(cmd)
+                        ulist.append(u)
+
+                    A[k,j] = reduction(ulist)
+
+            if xy is True:
+                X = zeros(M, precision)
+                Y = zeros(M, precision)
+
+                for k, v in enumerate(self.vertexdict.keys()):
+                    X[k], Y[k] = Point.coordinates[v].astype(precision)
+
+                return X, Y, A, V
+            else:
+                return A, V
+        else:
+            #Don't smooth
+
+            m = len(self)  #Number of volumes
+            M = 3*m        #Total number of unique vertices
+
+            A = self.vertex_values[:]
+
+            #Create connectivity
+            tmp = reshape(array(range(m)).astype(Int), (m,1))
+            V = concatenate( (tmp, tmp+m, tmp+2*m), axis = 1 )
+
+            #Do vertex coordinates
+            if xy is True:
+                V = self.domain.get_vertex_coordinates()
+                X = concatenate((V[:,0], V[:,2], V[:,4]), axis=0).\
+                    astype(precision)
+                Y = concatenate((V[:,1], V[:,3], V[:,5]), axis=0).\
+                    astype(precision)
+
+                return X, Y, A, V
+            else:
+                return A, V
+
+
 class Conserved_quantity(Quantity):
     """Class conserved quantity adds to Quantity:
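The smoothing branch of get_vertex_values reduces, for every unique mesh vertex, the values contributed by all triangle corners meeting there, using the domain's reduction operator (typically a mean). A self-contained sketch of that reduction step, reusing the vertexlist layout from mesh.py; the function name and plain-list inputs are illustrative only:

    def smooth_at_vertices(vertex_values, vertexlist, reduction=None):
        """Collapse per-triangle corner values to one value per unique vertex.

        vertex_values: vertex_values[triangle_id][vertex_id] -> float
        vertexlist:    vertexlist[point] -> list of (triangle_id, vertex_id)
        """
        if reduction is None:
            reduction = lambda values: sum(values)/len(values)   #mean

        smoothed = []
        for contributions in vertexlist:
            values = [vertex_values[t][v] for t, v in contributions]
            smoothed.append(reduction(values))
        return smoothed

    #Two triangles sharing points 1 and 2; the shared corners get averaged:
    #smooth_at_vertices([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]],
    #                   [[(0,0)], [(0,1), (1,0)], [(0,2), (1,2)], [(1,1)]])
    #-> [1.0, 3.0, 4.5, 5.0]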
inundation/ga/storm_surge/pyvolution/shallow_water.py
r273 r274

 def distribute_to_vertices_and_edges(domain):
-
-    protect_against_infintesimal_and_negative_heights(domain)
-    if domain.order == 1:
-        extrapolate_first_order(domain)
-    elif domain.order == 2:
-        extrapolate_second_order(domain)
-    else:
-        raise 'Unknown order'
-
-    #Compute edge values
+    """Distribution from centroids to vertices specific to the
+    shallow water wave equation.
+
+    It will ensure that h (w-z) is always non-negative even in the
+    presence of steep bed-slopes by taking a weighted average between
+    shallow and deep cases.
+
+    In addition, all conserved quantities get distributed as per either a
+    constant (order==1) or a piecewise linear function (order==2).
+
+    FIXME: more explanation about removal of artificial variability etc
+
+    Precondition:
+      All quantities defined at centroids and bed elevation defined at
+      vertices.
+
+    Postcondition:
+      Conserved quantities defined at vertices
+    """
+
+    #Remove very thin layers of water
+    protect_against_infintesimal_and_negative_heights(domain)
+
+    #Extrapolate all conserved quantities
+    for name in domain.conserved_quantities:
+        Q = domain.quantities[name]
+        if domain.order == 1:
+            Q.extrapolate_first_order()
+        elif domain.order == 2:
+            Q.extrapolate_second_order()
+            Q.limit()
+        else:
+            raise 'Unknown order'
+
+    #Take bed elevation into account when heights are small
+    balance_deep_and_shallow(domain)
+
+    #Compute edge values by interpolation
     for name in domain.conserved_quantities:
         Q = domain.quantities[name]
         Q.interpolate_from_vertices_to_edges()
-
-    #domain.w.interpolate_from_vertices_to_edges()
-    #domain.uh.interpolate_from_vertices_to_edges()
-    #domain.vh.interpolate_from_vertices_to_edges()

...

     protect(domain.minimum_allowed_height, wc, zc, xmomc, ymomc)

-
-def extrapolate_first_order(domain):
-    """First order extrapolator function, specific
-    to the shallow water wave equation.
-
-    It will ensure that h (w-z) is always non-negative even in the
-    presence of steep bed-slopes (see comment in code)
-    In addition, momentums get distributed as constant values.
-
-    Precondition:
-      All quantities defined at centroids and bed elevation defined at
-      vertices.
-
-    Postcondition:
-      Conserved quantities defined at vertices
-    """
-
-    #Shortcuts
-    wc = domain.quantities['level'].centroid_values
-    zc = domain.quantities['elevation'].centroid_values
-    xmomc = domain.quantities['xmomentum'].centroid_values
-    ymomc = domain.quantities['ymomentum'].centroid_values
-    hc = wc - zc   #Water depths at centroids
-
-    #Update conserved quantities using straight first order
-    for name in domain.conserved_quantities:
-        Q = domain.quantities[name]
-        Q.extrapolate_first_order()
-
-    balance_deep_and_shallow(domain)
-
-
-def extrapolate_second_order(domain):
-    """Second order limiter function, specific to the shallow water wave
-    equation.
-
-    It will ensure that h (w-z) is always non-negative even in the
-    presence of steep bed-slopes (see comment in code)
-
-    A weighted average between shallow
-    and deep cases is as in the first order case.
-
-    In addition, all conserved quantities get distributed as per a
-    piecewise linear function.
-    FIXME: more explanation about removal of artificial variability etc
-
-    Precondition:
-      All quantities defined at centroids and bed elevation defined at
-      vertices.
-
-    Postcondition:
-      Conserved quantities defined at vertices
-    """
-
-    #Update conserved quantities using straight second order with limiter
-    for name in domain.conserved_quantities:
-        Q = domain.quantities[name]
-        Q.extrapolate_second_order()
-        Q.limit()
-
-    balance_deep_and_shallow(domain)

 def balance_deep_and_shallow(domain):
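The reworked distribute_to_vertices_and_edges delegates the order-dependent part to the Quantity methods: first order copies each centroid value to its three vertices, while second order extrapolates a planar gradient and then limits the result so vertex values stay within the range spanned by the neighbouring centroids. A rough illustration of that limiting idea for a single triangle (a conceptual sketch, not the module's extrapolate_second_order or Q.limit):

    def limit_vertex_values(centroid, vertex_values, neighbour_centroids):
        """Scale extrapolated vertex values back towards the centroid so
        they stay within [min, max] of the centroid and its neighbours."""
        qmin = min([centroid] + neighbour_centroids)
        qmax = max([centroid] + neighbour_centroids)

        #Largest factor phi in [0, 1] such that centroid + phi*(v - centroid)
        #lies inside [qmin, qmax] for every vertex value v
        phi = 1.0
        for v in vertex_values:
            dq = v - centroid
            if dq > 0:
                phi = min(phi, (qmax - centroid)/dq)
            elif dq < 0:
                phi = min(phi, (qmin - centroid)/dq)
        return [centroid + phi*(v - centroid) for v in vertex_values]

    #Extrapolated values [0.2, 1.1, 1.7] around centroid 1.0 with neighbour
    #centroids [0.8, 1.2, 1.4] are pulled back into [0.8, 1.4]:
    #limit_vertex_values(1.0, [0.2, 1.1, 1.7], [0.8, 1.2, 1.4])
    #-> [0.8, 1.025, 1.175]   (phi = 0.25)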
inundation/ga/storm_surge/pyvolution/test_quantity.py
r265 r274

-    # def test_limiter(self):
-
-    #     initialise_consecutive_datastructure(points=6+4, elements=4)
-
-    #     a = Point (0.0, 0.0)
-    #     b = Point (0.0, 2.0)
-    #     c = Point (2.0, 0.0)
-    #     d = Point (0.0, 4.0)
-    #     e = Point (2.0, 2.0)
-    #     f = Point (4.0, 0.0)
-
-    #     #Set up for a gradient of (3,1), f(x) = 3x+y
-    #     v1 = Volume(b,a,c,array([0.0,0,0]))
-    #     v2 = Volume(b,c,e,array([1.0,0,0]))
-    #     v3 = Volume(e,c,f,array([10.0,0,0]))
-    #     v4 = Volume(d,b,e,array([0.0,0,0]))
-
-    #     #Setup neighbour structure
-    #     domain = Domain([v1,v2,v3,v4])
-    #     domain.precompute()
-
-    #     #Lets's check first order first, hey
-    #     domain.order = 1
-    #     domain.limiter = None
-    #     distribute_to_vertices_and_edges(domain)
-    #     assert allclose(v2.conserved_quantities_vertex0,
-    #                     v2.conserved_quantities_centroid)
-    #     assert allclose(v2.conserved_quantities_vertex1,
-    #                     v2.conserved_quantities_centroid)
-    #     assert allclose(v2.conserved_quantities_vertex2,
-    #                     v2.conserved_quantities_centroid)
-
-    #     #Gradient of fitted pwl surface
-    #     a, b = compute_gradient(v2.id)
-
-    #     assert abs(a[0] - 5.0) < epsilon
-    #     assert abs(b[0]) < epsilon
-    #     #assert qminr[0] == 0.0
-    #     #assert qmaxr[0] == 10.0
-
-    #     #And now for the second order stuff
-    #     # - the full second order extrapolation
-    #     domain.order = 2
-    #     distribute_to_vertices_and_edges(domain)
-
-    #     qmin = qmax = v2.conserved_quantities_centroid
-
-    #     qmin = minimum(qmin, v1.conserved_quantities_centroid)
-    #     qmax = maximum(qmax, v1.conserved_quantities_centroid)
-
-    #     qmin = minimum(qmin, v3.conserved_quantities_centroid)
-    #     qmax = maximum(qmax, v3.conserved_quantities_centroid)
-
-    #     qmin = minimum(qmin, v4.conserved_quantities_centroid)
-    #     qmax = maximum(qmax, v4.conserved_quantities_centroid)
-    #     #assert qminr == qmin
-    #     #assert qmaxr == qmax
-
-    #     assert v2.conserved_quantities_vertex0 <= qmax
-    #     assert v2.conserved_quantities_vertex0 >= qmin
-    #     assert v2.conserved_quantities_vertex1 <= qmax
-    #     assert v2.conserved_quantities_vertex1 >= qmin
-    #     assert v2.conserved_quantities_vertex2 <= qmax
-    #     assert v2.conserved_quantities_vertex2 >= qmin
-
-    #     #Check that volume has been preserved
-
-    #     q = v2.conserved_quantities_centroid[0]
-    #     w = (v2.conserved_quantities_vertex0[0] +
-    #          v2.conserved_quantities_vertex1[0] +
-    #          v2.conserved_quantities_vertex2[0])/3
-
-    #     assert allclose(q, w)
-
-
     def test_first_order_extrapolator(self):
         quantity = Conserved_quantity(self.mesh4)
inundation/ga/storm_surge/pyvolution/util.py
r258 r274

+def mean(x):
+    from Numeric import sum
+    return sum(x)/len(x)

 ####################################################################
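A short usage note on the new helper: with float input it behaves as expected, but because it relies on plain division, integer input would truncate under Python 2 (the import path below assumes util.py is importable as a top-level module):

    from util import mean

    print mean([1.0, 2.0, 6.0])    #3.0
    print mean([3, 4])             #3 under Python 2 integer division, so
                                   #pass floats as the pyvolution code does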