Changeset 7769
- Timestamp:
- Jun 2, 2010, 3:31:36 PM (15 years ago)
- Location:
- trunk/anuga_core/source/anuga/shallow_water
- Files:
-
- 1 deleted
- 4 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/anuga_core/source/anuga/shallow_water/data_manager.py
r7765 r7769 455 455 456 456 457 458 ##459 # @brief Get the extents of a NetCDF data file.460 # @param file_name The path to the SWW file.461 # @return A list of x, y, z and stage limits (min, max).462 def extent_sww(file_name):463 """Read in an sww file.464 465 Input:466 file_name - the sww file467 468 Output:469 A list: [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]470 """471 472 from Scientific.IO.NetCDF import NetCDFFile473 474 #Get NetCDF475 fid = NetCDFFile(file_name, netcdf_mode_r)476 477 # Get the variables478 x = fid.variables['x'][:]479 y = fid.variables['y'][:]480 stage = fid.variables['stage'][:]481 482 fid.close()483 484 return [min(x), max(x), min(y), max(y), num.min(stage), num.max(stage)]485 486 487 ##488 # @brief489 # @param filename490 # @param boundary491 # @param t492 # @param fail_if_NaN493 # @param NaN_filler494 # @param verbose495 # @param very_verbose496 # @return497 def load_sww_as_domain(filename, boundary=None, t=None,498 fail_if_NaN=True, NaN_filler=0,499 verbose=False, very_verbose=False):500 """501 Usage: domain = sww2domain('file.sww',t=time (default = last time in file))502 503 Boundary is not recommended if domain.smooth is not selected, as it504 uses unique coordinates, but not unique boundaries. 
This means that505 the boundary file will not be compatable with the coordinates, and will506 give a different final boundary, or crash.507 """508 509 from Scientific.IO.NetCDF import NetCDFFile510 from shallow_water import Domain511 512 # initialise NaN.513 NaN = 9.969209968386869e+036514 515 if verbose: log.critical('Reading from %s' % filename)516 517 fid = NetCDFFile(filename, netcdf_mode_r) # Open existing file for read518 time = fid.variables['time'] # Timesteps519 if t is None:520 t = time[-1]521 time_interp = get_time_interp(time,t)522 523 # Get the variables as numeric arrays524 x = fid.variables['x'][:] # x-coordinates of vertices525 y = fid.variables['y'][:] # y-coordinates of vertices526 elevation = fid.variables['elevation'] # Elevation527 stage = fid.variables['stage'] # Water level528 xmomentum = fid.variables['xmomentum'] # Momentum in the x-direction529 ymomentum = fid.variables['ymomentum'] # Momentum in the y-direction530 531 starttime = fid.starttime[0]532 volumes = fid.variables['volumes'][:] # Connectivity533 coordinates = num.transpose(num.asarray([x.tolist(), y.tolist()]))534 # FIXME (Ole): Something like this might be better:535 # concatenate((x, y), axis=1)536 # or concatenate((x[:,num.newaxis], x[:,num.newaxis]), axis=1)537 538 conserved_quantities = []539 interpolated_quantities = {}540 other_quantities = []541 542 # get geo_reference543 try: # sww files don't have to have a geo_ref544 geo_reference = Geo_reference(NetCDFObject=fid)545 except: # AttributeError, e:546 geo_reference = None547 548 if verbose: log.critical(' getting quantities')549 550 for quantity in fid.variables.keys():551 dimensions = fid.variables[quantity].dimensions552 if 'number_of_timesteps' in dimensions:553 conserved_quantities.append(quantity)554 interpolated_quantities[quantity] = \555 interpolated_quantity(fid.variables[quantity][:], time_interp)556 else:557 other_quantities.append(quantity)558 559 other_quantities.remove('x')560 other_quantities.remove('y')561 
#other_quantities.remove('z')562 other_quantities.remove('volumes')563 try:564 other_quantities.remove('stage_range')565 other_quantities.remove('xmomentum_range')566 other_quantities.remove('ymomentum_range')567 other_quantities.remove('elevation_range')568 except:569 pass570 571 conserved_quantities.remove('time')572 573 if verbose: log.critical(' building domain')574 575 # From domain.Domain:576 # domain = Domain(coordinates, volumes,\577 # conserved_quantities = conserved_quantities,\578 # other_quantities = other_quantities,zone=zone,\579 # xllcorner=xllcorner, yllcorner=yllcorner)580 581 # From shallow_water.Domain:582 coordinates = coordinates.tolist()583 volumes = volumes.tolist()584 # FIXME:should this be in mesh? (peter row)585 if fid.smoothing == 'Yes':586 unique = False587 else:588 unique = True589 if unique:590 coordinates, volumes, boundary = weed(coordinates, volumes,boundary)591 592 593 594 try:595 domain = Domain(coordinates, volumes, boundary)596 except AssertionError, e:597 fid.close()598 msg = 'Domain could not be created: %s. 
' \599 'Perhaps use "fail_if_NaN=False and NaN_filler = ..."' % e600 raise DataDomainError, msg601 602 if not boundary is None:603 domain.boundary = boundary604 605 domain.geo_reference = geo_reference606 607 domain.starttime = float(starttime) + float(t)608 domain.time = 0.0609 610 for quantity in other_quantities:611 try:612 NaN = fid.variables[quantity].missing_value613 except:614 pass # quantity has no missing_value number615 X = fid.variables[quantity][:]616 if very_verbose:617 log.critical(' %s' % str(quantity))618 log.critical(' NaN = %s' % str(NaN))619 log.critical(' max(X)')620 log.critical(' %s' % str(max(X)))621 log.critical(' max(X)==NaN')622 log.critical(' %s' % str(max(X)==NaN))623 log.critical('')624 if max(X) == NaN or min(X) == NaN:625 if fail_if_NaN:626 msg = 'quantity "%s" contains no_data entry' % quantity627 raise DataMissingValuesError, msg628 else:629 data = (X != NaN)630 X = (X*data) + (data==0)*NaN_filler631 if unique:632 X = num.resize(X, (len(X)/3, 3))633 domain.set_quantity(quantity, X)634 #635 for quantity in conserved_quantities:636 try:637 NaN = fid.variables[quantity].missing_value638 except:639 pass # quantity has no missing_value number640 X = interpolated_quantities[quantity]641 if very_verbose:642 log.critical(' %s' % str(quantity))643 log.critical(' NaN = %s' % str(NaN))644 log.critical(' max(X)')645 log.critical(' %s' % str(max(X)))646 log.critical(' max(X)==NaN')647 log.critical(' %s' % str(max(X)==NaN))648 log.critical('')649 if max(X) == NaN or min(X) == NaN:650 if fail_if_NaN:651 msg = 'quantity "%s" contains no_data entry' % quantity652 raise DataMissingValuesError, msg653 else:654 data = (X != NaN)655 X = (X*data) + (data==0)*NaN_filler656 if unique:657 X = num.resize(X, (X.shape[0]/3, 3))658 domain.set_quantity(quantity, X)659 660 fid.close()661 662 return domain663 664 665 ##666 # @brief Interpolate a quantity wrt time.667 # @param saved_quantity The quantity to interpolate.668 # @param time_interp (index, ratio)669 # 
@return A vector of interpolated values.670 def interpolated_quantity(saved_quantity, time_interp):671 '''Given an index and ratio, interpolate quantity with respect to time.'''672 673 index, ratio = time_interp674 675 Q = saved_quantity676 677 if ratio > 0:678 q = (1-ratio)*Q[index] + ratio*Q[index+1]679 else:680 q = Q[index]681 682 #Return vector of interpolated values683 return q684 685 686 ##687 # @brief688 # @parm time689 # @param t690 # @return An (index, ration) tuple.691 def get_time_interp(time, t=None):692 #Finds the ratio and index for time interpolation.693 #It is borrowed from previous abstract_2d_finite_volumes code.694 if t is None:695 t=time[-1]696 index = -1697 ratio = 0.698 else:699 T = time700 tau = t701 index=0702 msg = 'Time interval derived from file %s [%s:%s]' \703 % ('FIXMEfilename', T[0], T[-1])704 msg += ' does not match model time: %s' % tau705 if tau < time[0]: raise DataTimeError, msg706 if tau > time[-1]: raise DataTimeError, msg707 while tau > time[index]: index += 1708 while tau < time[index]: index -= 1709 if tau == time[index]:710 #Protect against case where tau == time[-1] (last time)711 # - also works in general when tau == time[i]712 ratio = 0713 else:714 #t is now between index and index+1715 ratio = (tau - time[index])/(time[index+1] - time[index])716 717 return (index, ratio)718 719 720 ##721 # @brief722 # @param coordinates723 # @param volumes724 # @param boundary725 def weed(coordinates, volumes, boundary=None):726 if isinstance(coordinates, num.ndarray):727 coordinates = coordinates.tolist()728 if isinstance(volumes, num.ndarray):729 volumes = volumes.tolist()730 731 unique = False732 point_dict = {}733 same_point = {}734 for i in range(len(coordinates)):735 point = tuple(coordinates[i])736 if point_dict.has_key(point):737 unique = True738 same_point[i] = point739 #to change all point i references to point j740 else:741 point_dict[point] = i742 same_point[i] = point743 744 coordinates = []745 i = 0746 for point in 
point_dict.keys():747 point = tuple(point)748 coordinates.append(list(point))749 point_dict[point] = i750 i += 1751 752 for volume in volumes:753 for i in range(len(volume)):754 index = volume[i]755 if index > -1:756 volume[i] = point_dict[same_point[index]]757 758 new_boundary = {}759 if not boundary is None:760 for segment in boundary.keys():761 point0 = point_dict[same_point[segment[0]]]762 point1 = point_dict[same_point[segment[1]]]763 label = boundary[segment]764 #FIXME should the bounday attributes be concaterated765 #('exterior, pond') or replaced ('pond')(peter row)766 767 if new_boundary.has_key((point0, point1)):768 new_boundary[(point0,point1)] = new_boundary[(point0, point1)]769 770 elif new_boundary.has_key((point1, point0)):771 new_boundary[(point1,point0)] = new_boundary[(point1, point0)]772 else: new_boundary[(point0, point1)] = label773 774 boundary = new_boundary775 776 return coordinates, volumes, boundary777 778 779 457 ## 780 458 # @brief Read DEM file, decimate data, write new DEM file. 
… … 1164 842 # END URS 2 SWW 1165 843 ################################################################################ 1166 1167 ################################################################################1168 # URS UNGRIDDED 2 SWW1169 ################################################################################1170 1171 ### PRODUCING THE POINTS NEEDED FILE ###1172 1173 # Ones used for FESA 2007 results1174 #LL_LAT = -50.01175 #LL_LONG = 80.01176 #GRID_SPACING = 1.0/60.01177 #LAT_AMOUNT = 48001178 #LONG_AMOUNT = 36001179 1180 1181 ##1182 # @brief1183 # @param file_name1184 # @param boundary_polygon1185 # @param zone1186 # @param ll_lat1187 # @param ll_long1188 # @param grid_spacing1189 # @param lat_amount1190 # @param long_amount1191 # @param isSouthernHemisphere1192 # @param export_csv1193 # @param use_cache1194 # @param verbose True if this function is to be verbose.1195 # @return1196 def URS_points_needed_to_file(file_name, boundary_polygon, zone,1197 ll_lat, ll_long,1198 grid_spacing,1199 lat_amount, long_amount,1200 isSouthernHemisphere=True,1201 export_csv=False, use_cache=False,1202 verbose=False):1203 """1204 Given the info to replicate the URS grid and a polygon output1205 a file that specifies the cloud of boundary points for URS.1206 1207 This creates a .urs file. 
This is in the format used by URS;1208 1st line is the number of points,1209 each line after represents a point,in lats and longs.1210 1211 Note: The polygon cannot cross zones or hemispheres.1212 1213 A work-a-round for different zones or hemispheres is to run this twice,1214 once for each zone, and then combine the output.1215 1216 file_name - name of the urs file produced for David.1217 boundary_polygon - a list of points that describes a polygon.1218 The last point is assumed ot join the first point.1219 This is in UTM (lat long would be better though)1220 1221 This is info about the URS model that needs to be inputted.1222 1223 ll_lat - lower left latitude, in decimal degrees1224 ll-long - lower left longitude, in decimal degrees1225 grid_spacing - in deciamal degrees1226 lat_amount - number of latitudes1227 long_amount- number of longs1228 1229 Don't add the file extension. It will be added.1230 """1231 1232 geo = URS_points_needed(boundary_polygon, zone, ll_lat, ll_long,1233 grid_spacing,1234 lat_amount, long_amount, isSouthernHemisphere,1235 use_cache, verbose)1236 1237 if not file_name[-4:] == ".urs":1238 file_name += ".urs"1239 1240 geo.export_points_file(file_name, isSouthHemisphere=isSouthernHemisphere)1241 1242 if export_csv:1243 if file_name[-4:] == ".urs":1244 file_name = file_name[:-4] + ".csv"1245 geo.export_points_file(file_name)1246 1247 return geo1248 1249 1250 ##1251 # @brief1252 # @param boundary_polygon1253 # @param zone1254 # @param ll_lat1255 # @param ll_long1256 # @param grid_spacing1257 # @param lat_amount1258 # @param long_amount1259 # @param isSouthHemisphere1260 # @param use_cache1261 # @param verbose1262 def URS_points_needed(boundary_polygon, zone, ll_lat,1263 ll_long, grid_spacing,1264 lat_amount, long_amount, isSouthHemisphere=True,1265 use_cache=False, verbose=False):1266 args = (boundary_polygon,1267 zone, ll_lat,1268 ll_long, grid_spacing,1269 lat_amount, long_amount, isSouthHemisphere)1270 kwargs = {}1271 1272 if use_cache is 
True:1273 try:1274 from anuga.caching import cache1275 except:1276 msg = 'Caching was requested, but caching module' \1277 'could not be imported'1278 raise msg1279 1280 geo = cache(_URS_points_needed,1281 args, kwargs,1282 verbose=verbose,1283 compression=False)1284 else:1285 geo = apply(_URS_points_needed, args, kwargs)1286 1287 return geo1288 1289 1290 ##1291 # @brief1292 # @param boundary_polygon An iterable of points that describe a polygon.1293 # @param zone1294 # @param ll_lat Lower left latitude, in decimal degrees1295 # @param ll_long Lower left longitude, in decimal degrees1296 # @param grid_spacing Grid spacing in decimal degrees.1297 # @param lat_amount1298 # @param long_amount1299 # @param isSouthHemisphere1300 def _URS_points_needed(boundary_polygon,1301 zone, ll_lat,1302 ll_long, grid_spacing,1303 lat_amount, long_amount,1304 isSouthHemisphere):1305 """1306 boundary_polygon - a list of points that describes a polygon.1307 The last point is assumed ot join the first point.1308 This is in UTM (lat long would b better though)1309 1310 ll_lat - lower left latitude, in decimal degrees1311 ll-long - lower left longitude, in decimal degrees1312 grid_spacing - in decimal degrees1313 1314 """1315 1316 msg = "grid_spacing can not be zero"1317 assert not grid_spacing == 0, msg1318 1319 a = boundary_polygon1320 1321 # List of segments. Each segment is two points.1322 segs = [i and [a[i-1], a[i]] or [a[len(a)-1], a[0]] for i in range(len(a))]1323 1324 # convert the segs to Lat's and longs.1325 # Don't assume the zone of the segments is the same as the lower left1326 # corner of the lat long data!! 
They can easily be in different zones1327 lat_long_set = frozenset()1328 for seg in segs:1329 points_lat_long = points_needed(seg, ll_lat, ll_long, grid_spacing,1330 lat_amount, long_amount, zone,1331 isSouthHemisphere)1332 lat_long_set |= frozenset(points_lat_long)1333 1334 if lat_long_set == frozenset([]):1335 msg = "URS region specified and polygon does not overlap."1336 raise ValueError, msg1337 1338 # Warning there is no info in geospatial saying the hemisphere of1339 # these points. There should be.1340 geo = Geospatial_data(data_points=list(lat_long_set),1341 points_are_lats_longs=True)1342 1343 return geo1344 1345 1346 ##1347 # @brief Get the points that are needed to interpolate any point a a segment.1348 # @param seg Two points in the UTM.1349 # @param ll_lat Lower left latitude, in decimal degrees1350 # @param ll_long Lower left longitude, in decimal degrees1351 # @param grid_spacing1352 # @param lat_amount1353 # @param long_amount1354 # @param zone1355 # @param isSouthHemisphere1356 # @return A list of points.1357 def points_needed(seg, ll_lat, ll_long, grid_spacing,1358 lat_amount, long_amount, zone,1359 isSouthHemisphere):1360 """1361 seg is two points, in UTM1362 return a list of the points, in lats and longs that are needed to1363 interpolate any point on the segment.1364 """1365 1366 from math import sqrt1367 1368 geo_reference = Geo_reference(zone=zone)1369 geo = Geospatial_data(seg, geo_reference=geo_reference)1370 seg_lat_long = geo.get_data_points(as_lat_long=True,1371 isSouthHemisphere=isSouthHemisphere)1372 1373 # 1.415 = 2^0.5, rounded up....1374 sqrt_2_rounded_up = 1.4151375 buffer = sqrt_2_rounded_up * grid_spacing1376 1377 max_lat = max(seg_lat_long[0][0], seg_lat_long[1][0]) + buffer1378 max_long = max(seg_lat_long[0][1], seg_lat_long[1][1]) + buffer1379 min_lat = min(seg_lat_long[0][0], seg_lat_long[1][0]) - buffer1380 min_long = min(seg_lat_long[0][1], seg_lat_long[1][1]) - buffer1381 1382 first_row = (min_long - ll_long) / 
grid_spacing1383 1384 # To round up1385 first_row_long = int(round(first_row + 0.5))1386 1387 last_row = (max_long - ll_long) / grid_spacing # round down1388 last_row_long = int(round(last_row))1389 1390 first_row = (min_lat - ll_lat) / grid_spacing1391 # To round up1392 first_row_lat = int(round(first_row + 0.5))1393 1394 last_row = (max_lat - ll_lat) / grid_spacing # round down1395 last_row_lat = int(round(last_row))1396 1397 max_distance = 157147.4112 * grid_spacing1398 points_lat_long = []1399 1400 # Create a list of the lat long points to include.1401 for index_lat in range(first_row_lat, last_row_lat + 1):1402 for index_long in range(first_row_long, last_row_long + 1):1403 lat = ll_lat + index_lat*grid_spacing1404 long = ll_long + index_long*grid_spacing1405 1406 #filter here to keep good points1407 if keep_point(lat, long, seg, max_distance):1408 points_lat_long.append((lat, long)) #must be hashable1409 1410 # Now that we have these points, lets throw ones out that are too far away1411 return points_lat_long1412 1413 1414 ##1415 # @brief1416 # @param lat1417 # @param long1418 # @param seg Two points in UTM.1419 # @param max_distance1420 def keep_point(lat, long, seg, max_distance):1421 """1422 seg is two points, UTM1423 """1424 1425 from math import sqrt1426 1427 _ , x0, y0 = redfearn(lat, long)1428 x1 = seg[0][0]1429 y1 = seg[0][1]1430 x2 = seg[1][0]1431 y2 = seg[1][1]1432 x2_1 = x2-x11433 y2_1 = y2-y11434 try:1435 d = abs((x2_1)*(y1-y0)-(x1-x0)*(y2_1))/sqrt( \1436 (x2_1)*(x2_1)+(y2_1)*(y2_1))1437 except ZeroDivisionError:1438 if sqrt((x2_1)*(x2_1)+(y2_1)*(y2_1)) == 0 \1439 and abs((x2_1)*(y1-y0)-(x1-x0)*(y2_1)) == 0:1440 return True1441 else:1442 return False1443 1444 return d <= max_distance1445 1446 844 1447 845 … … 1555 953 ################################################################################ 1556 954 1557 ##1558 # @brief Get mesh and quantity data from an SWW file.1559 # @param filename Path to data file to read.1560 # @param quantities 
UNUSED!1561 # @param verbose True if this function is to be verbose.1562 # @return (mesh, quantities, time) where mesh is the mesh data, quantities is1563 # a dictionary of {name: value}, and time is the time vector.1564 # @note Quantities extracted: 'elevation', 'stage', 'xmomentum' and 'ymomentum'1565 def get_mesh_and_quantities_from_file(filename,1566 quantities=None,1567 verbose=False):1568 """Get and rebuild mesh structure and associated quantities from sww file1569 1570 Input:1571 filename - Name os sww file1572 quantities - Names of quantities to load1573 1574 Output:1575 mesh - instance of class Interpolate1576 (including mesh and interpolation functionality)1577 quantities - arrays with quantity values at each mesh node1578 time - vector of stored timesteps1579 1580 This function is used by e.g.:1581 get_interpolated_quantities_at_polyline_midpoints1582 """1583 1584 # FIXME (Ole): Maybe refactor filefunction using this more fundamental code.1585 1586 import types1587 from Scientific.IO.NetCDF import NetCDFFile1588 from shallow_water import Domain1589 from anuga.abstract_2d_finite_volumes.neighbour_mesh import Mesh1590 1591 if verbose: log.critical('Reading from %s' % filename)1592 1593 fid = NetCDFFile(filename, netcdf_mode_r) # Open existing file for read1594 time = fid.variables['time'][:] # Time vector1595 time += fid.starttime[0]1596 1597 # Get the variables as numeric arrays1598 x = fid.variables['x'][:] # x-coordinates of nodes1599 y = fid.variables['y'][:] # y-coordinates of nodes1600 elevation = fid.variables['elevation'][:] # Elevation1601 stage = fid.variables['stage'][:] # Water level1602 xmomentum = fid.variables['xmomentum'][:] # Momentum in the x-direction1603 ymomentum = fid.variables['ymomentum'][:] # Momentum in the y-direction1604 1605 # Mesh (nodes (Mx2), triangles (Nx3))1606 nodes = num.concatenate((x[:,num.newaxis], y[:,num.newaxis]), axis=1)1607 triangles = fid.variables['volumes'][:]1608 1609 # Get geo_reference1610 try:1611 
geo_reference = Geo_reference(NetCDFObject=fid)1612 except: #AttributeError, e:1613 # Sww files don't have to have a geo_ref1614 geo_reference = None1615 1616 if verbose: log.critical(' building mesh from sww file %s' % filename)1617 1618 boundary = None1619 1620 #FIXME (Peter Row): Should this be in mesh?1621 if fid.smoothing != 'Yes':1622 nodes = nodes.tolist()1623 triangles = triangles.tolist()1624 nodes, triangles, boundary = weed(nodes, triangles, boundary)1625 1626 try:1627 mesh = Mesh(nodes, triangles, boundary, geo_reference=geo_reference)1628 except AssertionError, e:1629 fid.close()1630 msg = 'Domain could not be created: %s. "' % e1631 raise DataDomainError, msg1632 1633 quantities = {}1634 quantities['elevation'] = elevation1635 quantities['stage'] = stage1636 quantities['xmomentum'] = xmomentum1637 quantities['ymomentum'] = ymomentum1638 1639 fid.close()1640 1641 return mesh, quantities, time1642 955 1643 956 -
trunk/anuga_core/source/anuga/shallow_water/eqf_v2.py
r7765 r7769 224 224 U[i]=U[i]+SIGN*DU[i] 225 225 226 U1 =U[0]227 U2 =U[1]228 U3 =U[2]229 U11 =U[3]230 U12 =U[4]231 U21 =U[5]232 U22 =U[6]233 U31 =U[7]234 U32 =U[8]226 U1 = U[0] 227 U2 = U[1] 228 U3 = U[2] 229 U11 = U[3] 230 U12 = U[4] 231 U21 = U[5] 232 U22 = U[6] 233 U31 = U[7] 234 U32 = U[8] 235 235 236 236 self.U3 = U3 … … 298 298 299 299 if CD == 0: 300 #C============================== 301 #C===== INCLINED FAULT ===== 300 #C============================== 301 #C===== INCLINED FAULT ===== 302 302 #C============================== 303 303 RD2=RD*RD … … 311 311 C3= ALP*SD/RD*(XI2*RRD - F1) 312 312 else: 313 #C============================== 314 #C===== VERTICAL FAULT ===== 313 #C============================== 314 #C===== VERTICAL FAULT ===== 315 315 #C============================== 316 316 TD=SD/CD … … 345 345 346 346 if DISL1 != F0: 347 #C====================================== 348 #C===== STRIKE-SLIP CONTRIBUTION ===== 347 #C====================================== 348 #C===== STRIKE-SLIP CONTRIBUTION ===== 349 349 #C====================================== 350 350 UN=DISL1/PI2 … … 362 362 363 363 if DISL2 != F0: 364 #C=================================== 365 #C===== DIP-SLIP CONTRIBUTION ===== 364 #C=================================== 365 #C===== DIP-SLIP CONTRIBUTION ===== 366 366 #C=================================== 367 367 UN=DISL2/PI2 … … 379 379 if DISL3 != F0: 380 380 #C======================================== 381 #C===== TENSILE-FAULT CONTRIBUTION ===== 381 #C===== TENSILE-FAULT CONTRIBUTION ===== 382 382 #C======================================== 383 383 UN=DISL3/PI2 … … 443 443 PI2 = 6.283185307179586 444 444 445 D = DEP446 P = Y*CD + D*SD447 Q = Y*SD - D*CD448 S = P*SD + Q*CD449 X2 =X*X450 Y2 =Y*Y451 XY =X*Y452 D2 =D*D453 R2= X2 + Y2 + D2454 R = sqrt(R2)455 R3 =R *R2456 R5 =R3*R2457 QR =F3*Q/R5458 XR = F5*X2/R2459 YR = F5*Y2/R2460 XYR =F5*XY/R2461 DR =F5*D /R2462 RD = R + D463 R12 =F1/(R*RD*RD)464 R32 =R12*(F2*R + D)/ R2465 R33 =R12*(F3*R + D)/(R2*RD)466 R53 
=R12*(F8*R2 + F9*R*D + F3*D2)/(R2*R2*RD)467 R54 =R12*(F5*R2 + F4*R*D + D2)/R3*R12468 469 A1 =ALP*Y*(R12-X2*R33)470 A2 =ALP*X*(R12-Y2*R33)471 A3 =ALP*X/R3 - A2472 A4 =-ALP*XY*R32473 A5 =ALP*( F1/(R*RD) - X2*R32 )474 B1 =ALP*(-F3*XY*R33 + F3*X2*XY*R54)475 B2 =ALP*( F1/R3 - F3*R12 + F3*X2*Y2*R54)476 B3 =ALP*( F1/R3 - F3*X2/R5) - B2477 B4 =-ALP*F3*XY/R5 - B1478 C1 =-ALP*Y*(R32 - X2*R53)479 C2 =-ALP*X*(R32 - Y2*R53)480 C3 =-ALP*F3*X*D/R5 - C2481 482 U1 = F0483 U2 = F0484 U3 = F0485 U11= F0486 U12= F0487 U21= F0488 U22= F0489 U31= F0490 U32= F0445 D = DEP 446 P = Y*CD + D*SD 447 Q = Y*SD - D*CD 448 S = P*SD + Q*CD 449 X2 = X*X 450 Y2 = Y*Y 451 XY = X*Y 452 D2 = D*D 453 R2= X2 + Y2 + D2 454 R = sqrt(R2) 455 R3 = R *R2 456 R5 = R3*R2 457 QR = F3*Q/R5 458 XR = F5*X2/R2 459 YR = F5*Y2/R2 460 XYR = F5*XY/R2 461 DR = F5*D /R2 462 RD = R + D 463 R12 = F1/(R*RD*RD) 464 R32 = R12*(F2*R + D)/ R2 465 R33 = R12*(F3*R + D)/(R2*RD) 466 R53 = R12*(F8*R2 + F9*R*D + F3*D2)/(R2*R2*RD) 467 R54 = R12*(F5*R2 + F4*R*D + D2)/R3*R12 468 469 A1 = ALP*Y*(R12-X2*R33) 470 A2 = ALP*X*(R12-Y2*R33) 471 A3 = ALP*X/R3 - A2 472 A4 = -ALP*XY*R32 473 A5 = ALP*( F1/(R*RD) - X2*R32 ) 474 B1 = ALP*(-F3*XY*R33 + F3*X2*XY*R54) 475 B2 = ALP*( F1/R3 - F3*R12 + F3*X2*Y2*R54) 476 B3 = ALP*( F1/R3 - F3*X2/R5) - B2 477 B4 = -ALP*F3*XY/R5 - B1 478 C1 = -ALP*Y*(R32 - X2*R53) 479 C2 = -ALP*X*(R32 - Y2*R53) 480 C3 = -ALP*F3*X*D/R5 - C2 481 482 U1 = F0 483 U2 = F0 484 U3 = F0 485 U11= F0 486 U12= F0 487 U21= F0 488 U22= F0 489 U31= F0 490 U32= F0 491 491 492 492 #====================================== … … 495 495 496 496 if POT1 != F0: 497 UN =POT1/PI2498 QRX =QR*X499 FX =F3*X/R5*SD500 U1 = U1 - UN*( QRX*X + A1*SD )501 U2 = U2 - UN*( QRX*Y + A2*SD )502 U3 = U3 - UN*( QRX*D + A4*SD )497 UN = POT1/PI2 498 QRX = QR*X 499 FX = F3*X/R5*SD 500 U1 = U1 - UN*( QRX*X + A1*SD ) 501 U2 = U2 - UN*( QRX*Y + A2*SD ) 502 U3 = U3 - UN*( QRX*D + A4*SD ) 503 503 504 U11 =U11- UN*( QRX* (F2-XR) + B1*SD )505 U12 =U12- UN*(-QRX*XYR + FX*X + 
B2*SD )506 U21 =U21- UN*( QR*Y*(F1-XR) + B2*SD )507 U22 =U22- UN*( QRX *(F1-YR) + FX*Y + B4*SD )508 U31 =U31- UN*( QR*D*(F1-XR) + C1*SD )509 U32 =U32- UN*(-QRX*DR*Y + FX*D + C2*SD )510 511 #====================================== 512 #===== DIP-SLIP CONTRIBUTION ===== 513 #====================================== 504 U11 = U11 - UN * ( QRX* (F2-XR) + B1*SD ) 505 U12 = U12 - UN * (-QRX*XYR + FX*X + B2*SD ) 506 U21 = U21 - UN * ( QR*Y*(F1-XR) + B2*SD ) 507 U22 = U22 - UN * ( QRX *(F1-YR) + FX*Y + B4*SD ) 508 U31 = U31 - UN * ( QR*D*(F1-XR) + C1*SD ) 509 U32 = U32 - UN * (-QRX*DR*Y + FX*D + C2*SD ) 510 511 #====================================== 512 #===== DIP-SLIP CONTRIBUTION ===== 513 #====================================== 514 514 515 515 if POT2 != F0: 516 UN =POT2/PI2517 SDCD =SD*CD518 QRP =QR*P519 FS =F3*S/R5520 U1 =U1 - UN*( QRP*X - A3*SDCD )521 U2 =U2 - UN*( QRP*Y - A1*SDCD )522 U3 =U3 - UN*( QRP*D - A5*SDCD )523 U11 =U11- UN*( QRP*(F1-XR) - B3*SDCD )524 U12 =U12- UN*(-QRP*XYR + FS*X - B1*SDCD )525 U21 =U21- UN*(-QRP*XYR - B1*SDCD )526 U22 =U22- UN*( QRP*(F1-YR) + FS*Y - B2*SDCD )527 U31 =U31- UN*(-QRP*DR*X - C3*SDCD )528 U32 =U32- UN*(-QRP*DR*Y + FS*D - C1*SDCD )529 530 #======================================== 531 #===== TENSILE-FAULT CONTRIBUTION ===== 532 #======================================== 516 UN = POT2/PI2 517 SDCD = SD*CD 518 QRP = QR*P 519 FS = F3*S/R5 520 U1 = U1 - UN*( QRP*X - A3*SDCD ) 521 U2 = U2 - UN*( QRP*Y - A1*SDCD ) 522 U3 = U3 - UN*( QRP*D - A5*SDCD ) 523 U11 = U11- UN*( QRP*(F1-XR) - B3*SDCD ) 524 U12 = U12- UN*(-QRP*XYR + FS*X - B1*SDCD ) 525 U21 = U21- UN*(-QRP*XYR - B1*SDCD ) 526 U22 = U22- UN*( QRP*(F1-YR) + FS*Y - B2*SDCD ) 527 U31 = U31- UN*(-QRP*DR*X - C3*SDCD ) 528 U32 = U32- UN*(-QRP*DR*Y + FS*D - C1*SDCD ) 529 530 #======================================== 531 #===== TENSILE-FAULT CONTRIBUTION ===== 532 #======================================== 533 533 534 534 if POT3 != F0: 535 UN =POT3/PI2536 SDSD =SD*SD537 QRQ =QR*Q538 FQ 
=F2*QR*SD539 U1 =U1 + UN*( QRQ*X - A3*SDSD )540 U2 =U2 + UN*( QRQ*Y - A1*SDSD )541 U3 =U3 + UN*( QRQ*D - A5*SDSD )542 U11 =U11+ UN*( QRQ*(F1-XR) - B3*SDSD )543 U12 =U12+ UN*(-QRQ*XYR + FQ*X - B1*SDSD )544 U21 =U21+ UN*(-QRQ*XYR - B1*SDSD )545 U22 =U22+ UN*( QRQ*(F1-YR) + FQ*Y - B2*SDSD )546 U31 =U31+ UN*(-QRQ*DR*X - C3*SDSD )547 U32 =U32+ UN*(-QRQ*DR*Y + FQ*D - C1*SDSD )548 535 UN = POT3/PI2 536 SDSD = SD*SD 537 QRQ = QR*Q 538 FQ = F2*QR*SD 539 U1 = U1 + UN*( QRQ*X - A3*SDSD ) 540 U2 = U2 + UN*( QRQ*Y - A1*SDSD ) 541 U3 = U3 + UN*( QRQ*D - A5*SDSD ) 542 U11 = U11+ UN*( QRQ*(F1-XR) - B3*SDSD ) 543 U12 = U12+ UN*(-QRQ*XYR + FQ*X - B1*SDSD ) 544 U21 = U21+ UN*(-QRQ*XYR - B1*SDSD ) 545 U22 = U22+ UN*( QRQ*(F1-YR) + FQ*Y - B2*SDSD ) 546 U31 = U31+ UN*(-QRQ*DR*X - C3*SDSD ) 547 U32 = U32+ UN*(-QRQ*DR*Y + FQ*D - C1*SDSD ) 548 -
trunk/anuga_core/source/anuga/shallow_water/test_data_manager.py
r7765 r7769 29 29 from anuga.config import netcdf_float, epsilon, g 30 30 31 31 32 # import all the boundaries - some are generic, some are shallow water 32 33 from boundaries import Reflective_boundary, \ … … 44 45 from anuga.geospatial_data.geospatial_data import Geospatial_data 45 46 46 47 class Test_Data_Manager(unittest.TestCase): 47 # use helper methods from other unit test 48 from anuga.file.test_mux import Test_Mux 49 50 51 class Test_Data_Manager(Test_Mux): 48 52 # Class variable 49 53 verbose = False … … 1232 1236 1233 1237 1234 def test_sww2domain1(self):1235 ################################################1236 #Create a test domain, and evolve and save it.1237 ################################################1238 from mesh_factory import rectangular1239 1240 #Create basic mesh1241 1242 yiel=0.011243 points, vertices, boundary = rectangular(10,10)1244 1245 #Create shallow water domain1246 domain = Domain(points, vertices, boundary)1247 domain.geo_reference = Geo_reference(56,11,11)1248 domain.smooth = False1249 domain.store = True1250 domain.set_name('bedslope')1251 domain.default_order=21252 #Bed-slope and friction1253 domain.set_quantity('elevation', lambda x,y: -x/3)1254 domain.set_quantity('friction', 0.1)1255 # Boundary conditions1256 from math import sin, pi1257 Br = Reflective_boundary(domain)1258 Bt = Transmissive_boundary(domain)1259 Bd = Dirichlet_boundary([0.2,0.,0.])1260 Bw = Time_boundary(domain=domain,f=lambda t: [(0.1*sin(t*2*pi)), 0.0, 0.0])1261 1262 #domain.set_boundary({'left': Bd, 'right': Br, 'top': Br, 'bottom': Br})1263 domain.set_boundary({'left': Bd, 'right': Bd, 'top': Bd, 'bottom': Bd})1264 1265 domain.quantities_to_be_stored['xmomentum'] = 21266 domain.quantities_to_be_stored['ymomentum'] = 21267 #Initial condition1268 h = 0.051269 elevation = domain.quantities['elevation'].vertex_values1270 domain.set_quantity('stage', elevation + h)1271 1272 domain.check_integrity()1273 #Evolution1274 #domain.tight_slope_limiters = 11275 for t 
in domain.evolve(yieldstep = yiel, finaltime = 0.05):1276 #domain.write_time()1277 pass1278 1279 1280 ##########################################1281 #Import the example's file as a new domain1282 ##########################################1283 from data_manager import sww2domain1284 import os1285 1286 filename = domain.datadir + os.sep + domain.get_name() + '.sww'1287 domain2 = sww2domain(filename,None,fail_if_NaN=False,verbose=self.verbose)1288 #points, vertices, boundary = rectangular(15,15)1289 #domain2.boundary = boundary1290 ###################1291 ##NOW TEST IT!!!1292 ###################1293 1294 os.remove(filename)1295 1296 bits = ['vertex_coordinates']1297 for quantity in domain.quantities_to_be_stored:1298 bits.append('get_quantity("%s").get_integral()' % quantity)1299 bits.append('get_quantity("%s").get_values()' % quantity)1300 1301 for bit in bits:1302 #print 'testing that domain.'+bit+' has been restored'1303 #print bit1304 #print 'done'1305 assert num.allclose(eval('domain.'+bit),eval('domain2.'+bit))1306 1307 ######################################1308 #Now evolve them both, just to be sure1309 ######################################x1310 domain.time = 0.1311 from time import sleep1312 1313 final = .11314 domain.set_quantity('friction', 0.1)1315 domain.store = False1316 domain.set_boundary({'left': Bd, 'right': Bd, 'top': Bd, 'bottom': Bd})1317 1318 1319 for t in domain.evolve(yieldstep = yiel, finaltime = final):1320 #domain.write_time()1321 pass1322 1323 final = final - (domain2.starttime-domain.starttime)1324 #BUT since domain1 gets time hacked back to 0:1325 final = final + (domain2.starttime-domain.starttime)1326 1327 domain2.smooth = False1328 domain2.store = False1329 domain2.default_order=21330 domain2.set_quantity('friction', 0.1)1331 #Bed-slope and friction1332 # Boundary conditions1333 Bd2=Dirichlet_boundary([0.2,0.,0.])1334 domain2.boundary = domain.boundary1335 #print 'domain2.boundary'1336 #print domain2.boundary1337 
domain2.set_boundary({'left': Bd, 'right': Bd, 'top': Bd, 'bottom': Bd})1338 #domain2.set_boundary({'exterior': Bd})1339 1340 domain2.check_integrity()1341 1342 for t in domain2.evolve(yieldstep = yiel, finaltime = final):1343 #domain2.write_time()1344 pass1345 1346 ###################1347 ##NOW TEST IT!!!1348 ##################1349 1350 bits = ['vertex_coordinates']1351 1352 for quantity in ['elevation','stage', 'ymomentum','xmomentum']:1353 bits.append('get_quantity("%s").get_integral()' %quantity)1354 bits.append('get_quantity("%s").get_values()' %quantity)1355 1356 #print bits1357 for bit in bits:1358 #print bit1359 #print eval('domain.'+bit)1360 #print eval('domain2.'+bit)1361 1362 #print eval('domain.'+bit+'-domain2.'+bit)1363 msg = 'Values in the two domains are different for ' + bit1364 assert num.allclose(eval('domain.'+bit),eval('domain2.'+bit),1365 rtol=1.e-5, atol=3.e-8), msg1366 1367 1368 1238 def DISABLEDtest_sww2domain2(self): 1369 1239 ################################################################## … … 1413 1283 #Import the file as a new domain 1414 1284 ################################## 1415 from data_manager import sww2domain1285 from data_manager import load_sww_as_domain 1416 1286 import os 1417 1287 … … 1424 1294 # verbose=self.verbose) 1425 1295 try: 1426 domain2 = sww2domain(filename,1296 domain2 = load_sww_as_domain(filename, 1427 1297 boundary, 1428 1298 fail_if_NaN=True, … … 1431 1301 # Now import it, filling NaNs to be -9999 1432 1302 filler = -9999 1433 domain2 = sww2domain(filename,1303 domain2 = load_sww_as_domain(filename, 1434 1304 None, 1435 1305 fail_if_NaN=False, … … 1442 1312 # Now import it, filling NaNs to be 0 1443 1313 filler = -9999 1444 domain2 = sww2domain(filename,1314 domain2 = load_sww_as_domain(filename, 1445 1315 None, 1446 1316 fail_if_NaN=False, … … 1551 1421 1552 1422 1553 ##########################################1554 #Import the example's file as a new domain1555 ##########################################1556 
from data_manager import sww2domain1557 import os1558 1559 1423 filename = domain.datadir + os.sep + domain.get_name() + '.sww' 1560 domain2 = sww2domain(filename,None,fail_if_NaN=False,verbose=self.verbose)1424 domain2 = load_sww_as_domain(filename,None,fail_if_NaN=False,verbose=self.verbose) 1561 1425 #points, vertices, boundary = rectangular(15,15) 1562 1426 #domain2.boundary = boundary … … 1812 1676 os.remove(root + '.dem') 1813 1677 os.remove(root + '_100.dem') 1814 1815 def test_urs2sts0(self):1816 """1817 Test single source1818 """1819 tide=01820 time_step_count = 31821 time_step = 21822 lat_long_points =[(-21.5,114.5),(-21,114.5),(-21.5,115), (-21.,115.)]1823 n=len(lat_long_points)1824 first_tstep=num.ones(n,num.int)1825 first_tstep[0]+=11826 first_tstep[2]+=11827 last_tstep=(time_step_count)*num.ones(n,num.int)1828 last_tstep[0]-=11829 1830 gauge_depth=20*num.ones(n,num.float)1831 ha=2*num.ones((n,time_step_count),num.float)1832 ha[0]=num.arange(0,time_step_count)1833 ha[1]=num.arange(time_step_count,2*time_step_count)1834 ha[2]=num.arange(2*time_step_count,3*time_step_count)1835 ha[3]=num.arange(3*time_step_count,4*time_step_count)1836 ua=5*num.ones((n,time_step_count),num.float)1837 va=-10*num.ones((n,time_step_count),num.float)1838 1839 base_name, files = self.write_mux2(lat_long_points,1840 time_step_count, time_step,1841 first_tstep, last_tstep,1842 depth=gauge_depth,1843 ha=ha,1844 ua=ua,1845 va=va)1846 1847 urs2sts(base_name,1848 basename_out=base_name,1849 mean_stage=tide,verbose=False)1850 1851 # now I want to check the sts file ...1852 sts_file = base_name + '.sts'1853 1854 #Let's interigate the sww file1855 # Note, the sww info is not gridded. 
It is point data.1856 fid = NetCDFFile(sts_file)1857 1858 # Make x and y absolute1859 x = fid.variables['x'][:]1860 y = fid.variables['y'][:]1861 1862 geo_reference = Geo_reference(NetCDFObject=fid)1863 points = geo_reference.get_absolute(map(None, x, y))1864 points = ensure_numeric(points)1865 1866 x = points[:,0]1867 y = points[:,1]1868 1869 #Check that first coordinate is correctly represented1870 #Work out the UTM coordinates for first point1871 for i in range(4):1872 zone, e, n = redfearn(lat_long_points[i][0], lat_long_points[i][1])1873 assert num.allclose([x[i],y[i]], [e,n])1874 1875 #Check the time vector1876 times = fid.variables['time'][:]1877 1878 times_actual = []1879 for i in range(time_step_count):1880 times_actual.append(time_step * i)1881 1882 assert num.allclose(ensure_numeric(times),1883 ensure_numeric(times_actual))1884 1885 #Check first value1886 stage = fid.variables['stage'][:]1887 xmomentum = fid.variables['xmomentum'][:]1888 ymomentum = fid.variables['ymomentum'][:]1889 elevation = fid.variables['elevation'][:]1890 1891 # Set original data used to write mux file to be zero when gauges are1892 #not recdoring1893 ha[0][0]=0.01894 ha[0][time_step_count-1]=0.0;1895 ha[2][0]=0.0;1896 ua[0][0]=0.01897 ua[0][time_step_count-1]=0.0;1898 ua[2][0]=0.0;1899 va[0][0]=0.01900 va[0][time_step_count-1]=0.0;1901 va[2][0]=0.0;1902 1903 assert num.allclose(num.transpose(ha),stage) #Meters1904 1905 #Check the momentums - ua1906 #momentum = velocity*(stage-elevation)1907 # elevation = - depth1908 #momentum = velocity_ua *(stage+depth)1909 1910 depth=num.zeros((len(lat_long_points),time_step_count),num.float)1911 for i in range(len(lat_long_points)):1912 depth[i]=gauge_depth[i]+tide+ha[i]1913 assert num.allclose(num.transpose(ua*depth),xmomentum)1914 1915 #Check the momentums - va1916 #momentum = velocity*(stage-elevation)1917 # elevation = - depth1918 #momentum = velocity_va *(stage+depth)1919 1920 assert num.allclose(num.transpose(va*depth),ymomentum)1921 1922 
# check the elevation values.1923 # -ve since urs measures depth, sww meshers height,1924 assert num.allclose(-elevation, gauge_depth) #Meters1925 1926 fid.close()1927 self.delete_mux(files)1928 os.remove(sts_file)1929 1930 def test_urs2sts_nonstandard_meridian(self):1931 """1932 Test single source using the meridian from zone 50 as a nonstandard meridian1933 """1934 tide=01935 time_step_count = 31936 time_step = 21937 lat_long_points =[(-21.,114.5),(-21.,113.5),(-21.,114.), (-21.,115.)]1938 n=len(lat_long_points)1939 first_tstep=num.ones(n,num.int)1940 first_tstep[0]+=11941 first_tstep[2]+=11942 last_tstep=(time_step_count)*num.ones(n,num.int)1943 last_tstep[0]-=11944 1945 gauge_depth=20*num.ones(n,num.float)1946 ha=2*num.ones((n,time_step_count),num.float)1947 ha[0]=num.arange(0,time_step_count)1948 ha[1]=num.arange(time_step_count,2*time_step_count)1949 ha[2]=num.arange(2*time_step_count,3*time_step_count)1950 ha[3]=num.arange(3*time_step_count,4*time_step_count)1951 ua=5*num.ones((n,time_step_count),num.float)1952 va=-10*num.ones((n,time_step_count),num.float)1953 1954 base_name, files = self.write_mux2(lat_long_points,1955 time_step_count, time_step,1956 first_tstep, last_tstep,1957 depth=gauge_depth,1958 ha=ha,1959 ua=ua,1960 va=va)1961 1962 urs2sts(base_name,1963 basename_out=base_name,1964 central_meridian=123,1965 mean_stage=tide,1966 verbose=False)1967 1968 # now I want to check the sts file ...1969 sts_file = base_name + '.sts'1970 1971 #Let's interigate the sww file1972 # Note, the sww info is not gridded. 
It is point data.1973 fid = NetCDFFile(sts_file)1974 1975 # Make x and y absolute1976 x = fid.variables['x'][:]1977 y = fid.variables['y'][:]1978 1979 geo_reference = Geo_reference(NetCDFObject=fid)1980 points = geo_reference.get_absolute(map(None, x, y))1981 points = ensure_numeric(points)1982 1983 x = points[:,0]1984 y = points[:,1]1985 1986 # Check that all coordinate are correctly represented1987 # Using the non standard projection (50)1988 for i in range(4):1989 zone, e, n = redfearn(lat_long_points[i][0],1990 lat_long_points[i][1],1991 central_meridian=123)1992 assert num.allclose([x[i],y[i]], [e,n])1993 assert zone==-11994 self.delete_mux(files)1995 1996 1997 1998 def test_urs2sts_individual_sources(self):1999 """Test that individual sources compare to actual urs output2000 Test that the first recording time is the smallest2001 over waveheight, easting and northing velocity2002 """2003 2004 # Get path where this test is run2005 path = get_pathname_from_package('anuga.shallow_water')2006 2007 testdir = os.path.join(path, 'urs_test_data')2008 ordering_filename=os.path.join(testdir, 'thinned_bound_order_test.txt')2009 2010 sources = ['1-z.grd','2-z.grd','3-z.grd']2011 2012 # Start times by source and station taken manually from urs header files2013 time_start_z = num.array([[10.0,11.5,13,14.5,17.7],2014 [9.8,11.2,12.7,14.2,17.4],2015 [9.5,10.9,12.4,13.9,17.1]])2016 2017 time_start_e = time_start_n = time_start_z2018 2019 # time step in urs output2020 delta_t = 0.12021 2022 # Make sts file for each source2023 for k, source_filename in enumerate(sources):2024 source_number = k + 1 # Source numbering starts at 12025 2026 urs_filenames = os.path.join(testdir, source_filename)2027 weights = [1.]2028 sts_name_out = 'test'2029 2030 urs2sts(urs_filenames,2031 basename_out=sts_name_out,2032 ordering_filename=ordering_filename,2033 weights=weights,2034 mean_stage=0.0,2035 verbose=False)2036 2037 # Read in sts file for this source file2038 fid = 
NetCDFFile(sts_name_out+'.sts', netcdf_mode_r) # Open existing file for read2039 x = fid.variables['x'][:]+fid.xllcorner # x-coordinates of vertices2040 y = fid.variables['y'][:]+fid.yllcorner # y-coordinates of vertices2041 elevation = fid.variables['elevation'][:]2042 time=fid.variables['time'][:]+fid.starttime2043 2044 # Get quantity data from sts file2045 quantity_names=['stage','xmomentum','ymomentum']2046 quantities = {}2047 for i, name in enumerate(quantity_names):2048 quantities[name] = fid.variables[name][:]2049 2050 # Make sure start time from sts file is the minimum starttime2051 # across all stations (for this source)2052 #print k, time_start_z[k,:]2053 starttime = min(time_start_z[k, :])2054 sts_starttime = fid.starttime[0]2055 msg = 'sts starttime for source %d was %f. Should have been %f'\2056 %(source_number, sts_starttime, starttime)2057 assert num.allclose(sts_starttime, starttime), msg2058 2059 # For each station, compare urs2sts output to known urs output2060 for j in range(len(x)):2061 index_start_urs = 02062 index_end_urs = 02063 index_start = 02064 index_end = 02065 count = 02066 2067 # read in urs test data for stage, e and n velocity2068 urs_file_name_z = 'z_'+str(source_number)+'_'+str(j)+'.csv'2069 dict = load_csv_as_array(os.path.join(testdir, urs_file_name_z))2070 urs_stage = dict['urs_stage']2071 urs_file_name_e = 'e_'+str(source_number)+'_'+str(j)+'.csv'2072 dict = load_csv_as_array(os.path.join(testdir, urs_file_name_e))2073 urs_e = dict['urs_e']2074 urs_file_name_n = 'n_'+str(source_number)+'_'+str(j)+'.csv'2075 dict = load_csv_as_array(os.path.join(testdir, urs_file_name_n))2076 urs_n = dict['urs_n']2077 2078 # find start and end time for stage2079 for i in range(len(urs_stage)):2080 if urs_stage[i] == 0.0:2081 index_start_urs_z = i+12082 if int(urs_stage[i]) == 99 and count <> 1:2083 count +=12084 index_end_urs_z = i2085 2086 if count == 0: index_end_urs_z = len(urs_stage)2087 2088 start_times_z = time_start_z[source_number-1]2089 
2090 # start times for easting velocities should be the same as stage2091 start_times_e = time_start_e[source_number-1]2092 index_start_urs_e = index_start_urs_z2093 index_end_urs_e = index_end_urs_z2094 2095 # start times for northing velocities should be the same as stage2096 start_times_n = time_start_n[source_number-1]2097 index_start_urs_n = index_start_urs_z2098 index_end_urs_n = index_end_urs_z2099 2100 # Check that actual start time matches header information for stage2101 msg = 'stage start time from urs file is not the same as the '2102 msg += 'header file for source %i and station %i' %(source_number,j)2103 assert num.allclose(index_start_urs_z,start_times_z[j]/delta_t), msg2104 2105 msg = 'e velocity start time from urs file is not the same as the '2106 msg += 'header file for source %i and station %i' %(source_number,j)2107 assert num.allclose(index_start_urs_e,start_times_e[j]/delta_t), msg2108 2109 msg = 'n velocity start time from urs file is not the same as the '2110 msg += 'header file for source %i and station %i' %(source_number,j)2111 assert num.allclose(index_start_urs_n,start_times_n[j]/delta_t), msg2112 2113 # get index for start and end time for sts quantities2114 index_start_stage = 02115 index_end_stage = 02116 count = 02117 sts_stage = quantities['stage'][:,j]2118 for i in range(len(sts_stage)):2119 if sts_stage[i] <> 0.0 and count <> 1:2120 count += 12121 index_start_stage = i2122 if int(sts_stage[i]) == 99 and count <> 1:2123 count += 12124 index_end_stage = i2125 2126 index_end_stage = index_start_stage + len(urs_stage[index_start_urs_z:index_end_urs_z])2127 2128 sts_xmom = quantities['xmomentum'][:,j]2129 index_start_x = index_start_stage2130 index_end_x = index_start_x + len(urs_e[index_start_urs_e:index_end_urs_e])2131 2132 sts_ymom = quantities['ymomentum'][:,j]2133 index_start_y = index_start_stage2134 index_end_y = index_start_y + len(urs_n[index_start_urs_n:index_end_urs_n])2135 2136 # check that urs stage and sts stage are the 
same2137 msg = 'urs stage is not equal to sts stage for for source %i and station %i' %(source_number,j)2138 assert num.allclose(urs_stage[index_start_urs_z:index_end_urs_z],2139 sts_stage[index_start_stage:index_end_stage],2140 rtol=1.0e-6, atol=1.0e-5 ), msg2141 2142 # check that urs e velocity and sts xmomentum are the same2143 msg = 'urs e velocity is not equivalent to sts x momentum for for source %i and station %i' %(source_number,j)2144 assert num.allclose(urs_e[index_start_urs_e:index_end_urs_e]*(urs_stage[index_start_urs_e:index_end_urs_e]-elevation[j]),2145 sts_xmom[index_start_x:index_end_x],2146 rtol=1.0e-5, atol=1.0e-4 ), msg2147 2148 # check that urs n velocity and sts ymomentum are the same2149 #print 'urs n velocity', urs_n[index_start_urs_n:index_end_urs_n]*(urs_stage[index_start_urs_n:index_end_urs_n]-elevation[j])2150 #print 'sts momentum', sts_ymom[index_start_y:index_end_y]2151 msg = 'urs n velocity is not equivalent to sts y momentum for source %i and station %i' %(source_number,j)2152 assert num.allclose(urs_n[index_start_urs_n:index_end_urs_n]*(urs_stage[index_start_urs_n:index_end_urs_n]-elevation[j]),2153 -sts_ymom[index_start_y:index_end_y],2154 rtol=1.0e-5, atol=1.0e-4 ), msg2155 2156 2157 fid.close()2158 2159 os.remove(sts_name_out+'.sts')2160 2161 def test_urs2sts_combined_sources(self):2162 """Test that combined sources compare to actual urs output2163 Test that the first recording time is the smallest2164 over waveheight, easting and northing velocity2165 """2166 2167 # combined2168 time_start_z = num.array([9.5,10.9,12.4,13.9,17.1])2169 time_start_e = time_start_n = time_start_z2170 2171 # make sts file for combined sources2172 weights = [1., 2., 3.]2173 2174 path = get_pathname_from_package('anuga.shallow_water')2175 2176 testdir = os.path.join(path, 'urs_test_data')2177 ordering_filename=os.path.join(testdir, 'thinned_bound_order_test.txt')2178 2179 urs_filenames = [os.path.join(testdir,'1-z.grd'),2180 
os.path.join(testdir,'2-z.grd'),2181 os.path.join(testdir,'3-z.grd')]2182 sts_name_out = 'test'2183 2184 urs2sts(urs_filenames,2185 basename_out=sts_name_out,2186 ordering_filename=ordering_filename,2187 weights=weights,2188 mean_stage=0.0,2189 verbose=False)2190 2191 # read in sts file for combined source2192 fid = NetCDFFile(sts_name_out+'.sts', netcdf_mode_r) # Open existing file for read2193 x = fid.variables['x'][:]+fid.xllcorner # x-coordinates of vertices2194 y = fid.variables['y'][:]+fid.yllcorner # y-coordinates of vertices2195 elevation = fid.variables['elevation'][:]2196 time=fid.variables['time'][:]+fid.starttime2197 2198 2199 # Check that stored permutation is as per default2200 permutation = range(len(x))2201 stored_permutation = fid.variables['permutation'][:]2202 msg = 'Permutation was not stored correctly. I got '2203 msg += str(stored_permutation)2204 assert num.allclose(stored_permutation, permutation), msg2205 2206 # Get quantity data from sts file2207 quantity_names=['stage','xmomentum','ymomentum']2208 quantities = {}2209 for i, name in enumerate(quantity_names):2210 quantities[name] = fid.variables[name][:]2211 2212 # For each station, compare urs2sts output to known urs output2213 delta_t = 0.12214 2215 # Make sure start time from sts file is the minimum starttime2216 # across all stations (for this source)2217 starttime = min(time_start_z[:])2218 sts_starttime = fid.starttime[0]2219 msg = 'sts starttime was %f. 
Should have been %f'\2220 %(sts_starttime, starttime)2221 assert num.allclose(sts_starttime, starttime), msg2222 2223 #stations = [1,2,3]2224 #for j in stations:2225 for j in range(len(x)):2226 index_start_urs_z = 02227 index_end_urs_z = 02228 index_start_urs_e = 02229 index_end_urs_e = 02230 index_start_urs_n = 02231 index_end_urs_n = 02232 count = 02233 2234 # read in urs test data for stage, e and n velocity2235 urs_file_name_z = 'z_combined_'+str(j)+'.csv'2236 dict = load_csv_as_array(os.path.join(testdir, urs_file_name_z))2237 urs_stage = dict['urs_stage']2238 urs_file_name_e = 'e_combined_'+str(j)+'.csv'2239 dict = load_csv_as_array(os.path.join(testdir, urs_file_name_e))2240 urs_e = dict['urs_e']2241 urs_file_name_n = 'n_combined_'+str(j)+'.csv'2242 dict = load_csv_as_array(os.path.join(testdir, urs_file_name_n))2243 urs_n = dict['urs_n']2244 2245 # find start and end time for stage2246 for i in range(len(urs_stage)):2247 if urs_stage[i] == 0.0:2248 index_start_urs_z = i+12249 if int(urs_stage[i]) == 99 and count <> 1:2250 count +=12251 index_end_urs_z = i2252 2253 if count == 0: index_end_urs_z = len(urs_stage)2254 2255 start_times_z = time_start_z[j]2256 2257 start_times_e = time_start_e[j]2258 index_start_urs_e = index_start_urs_z2259 2260 start_times_n = time_start_n[j]2261 index_start_urs_n = index_start_urs_z2262 2263 # Check that actual start time matches header information for stage2264 msg = 'stage start time from urs file is not the same as the '2265 msg += 'header file at station %i' %(j)2266 assert num.allclose(index_start_urs_z,start_times_z/delta_t), msg2267 2268 msg = 'e velocity start time from urs file is not the same as the '2269 msg += 'header file at station %i' %(j)2270 assert num.allclose(index_start_urs_e,start_times_e/delta_t), msg2271 2272 msg = 'n velocity start time from urs file is not the same as the '2273 msg += 'header file at station %i' %(j)2274 assert num.allclose(index_start_urs_n,start_times_n/delta_t), msg2275 2276 # get 
index for start and end time for sts quantities2277 index_start_stage = 02278 index_end_stage = 02279 index_start_x = 02280 index_end_x = 02281 index_start_y = 02282 index_end_y = 02283 count = 02284 count1 = 02285 sts_stage = quantities['stage'][:,j]2286 for i in range(len(sts_stage)):2287 if sts_stage[i] <> 0.0 and count <> 1:2288 count += 12289 index_start_stage = i2290 if int(urs_stage[i]) == 99 and count <> 1:2291 count +=12292 index_end_stage = i2293 2294 index_end_stage = index_start_stage + len(urs_stage[index_start_urs_z:index_end_urs_z])2295 2296 index_start_x = index_start_stage2297 index_end_x = index_start_x + len(urs_stage[index_start_urs_e:index_end_urs_e])2298 sts_xmom = quantities['ymomentum'][:,j]2299 2300 index_start_y = index_start_stage2301 index_end_y = index_start_y + len(urs_stage[index_start_urs_n:index_end_urs_n])2302 sts_ymom = quantities['ymomentum'][:,j]2303 2304 # check that urs stage and sts stage are the same2305 msg = 'urs stage is not equal to sts stage for station %i' %j2306 #print 'urs stage', urs_stage[index_start_urs_z:index_end_urs_z]2307 #print 'sts stage', sts_stage[index_start_stage:index_end_stage]2308 #print 'diff', max(urs_stage[index_start_urs_z:index_end_urs_z]-sts_stage[index_start_stage:index_end_stage])2309 #print 'index', index_start_stage, index_end_stage, len(sts_stage)2310 assert num.allclose(urs_stage[index_start_urs_z:index_end_urs_z],2311 sts_stage[index_start_stage:index_end_stage],2312 rtol=1.0e-5, atol=1.0e-4 ), msg2313 2314 # check that urs e velocity and sts xmomentum are the same2315 msg = 'urs e velocity is not equivalent to sts xmomentum for station %i' %j2316 assert num.allclose(urs_e[index_start_urs_e:index_end_urs_e]*(urs_stage[index_start_urs_e:index_end_urs_e]-elevation[j]),2317 sts_xmom[index_start_x:index_end_x],2318 rtol=1.0e-5, atol=1.0e-4 ), msg2319 2320 # check that urs n velocity and sts ymomentum are the same2321 msg = 'urs n velocity is not equivalent to sts ymomentum for station %i' 
%j2322 assert num.allclose(urs_n[index_start_urs_n:index_end_urs_n]*(urs_stage[index_start_urs_n:index_end_urs_n]-elevation[j]),2323 sts_ymom[index_start_y:index_end_y],2324 rtol=1.0e-5, atol=1.0e-4 ), msg2325 2326 fid.close()2327 2328 os.remove(sts_name_out+'.sts')2329 2330 2331 2332 def test_urs2sts_ordering(self):2333 """Test multiple sources with ordering file2334 """2335 2336 tide = 0.352337 time_step_count = 6 # I made this a little longer (Ole)2338 time_step = 22339 lat_long_points =[(-21.5,114.5),(-21,114.5),(-21.5,115), (-21.,115.)]2340 n=len(lat_long_points)2341 first_tstep=num.ones(n,num.int)2342 first_tstep[0]+=12343 first_tstep[2]+=12344 last_tstep=(time_step_count)*num.ones(n,num.int)2345 last_tstep[0]-=12346 2347 gauge_depth=20*num.ones(n,num.float)2348 ha=2*num.ones((n,time_step_count),num.float)2349 ha[0]=num.arange(0,time_step_count)2350 ha[1]=num.arange(time_step_count,2*time_step_count)2351 ha[2]=num.arange(2*time_step_count,3*time_step_count)2352 ha[3]=num.arange(3*time_step_count,4*time_step_count)2353 ua=5*num.ones((n,time_step_count),num.float)2354 va=-10*num.ones((n,time_step_count),num.float)2355 2356 # Create two identical mux files to be combined by urs2sts2357 base_nameI, filesI = self.write_mux2(lat_long_points,2358 time_step_count, time_step,2359 first_tstep, last_tstep,2360 depth=gauge_depth,2361 ha=ha,2362 ua=ua,2363 va=va)2364 2365 base_nameII, filesII = self.write_mux2(lat_long_points,2366 time_step_count, time_step,2367 first_tstep, last_tstep,2368 depth=gauge_depth,2369 ha=ha,2370 ua=ua,2371 va=va)2372 2373 2374 # Create ordering file2375 permutation = [3,0,2]2376 2377 _, ordering_filename = tempfile.mkstemp('')2378 order_fid = open(ordering_filename, 'w')2379 order_fid.write('index, longitude, latitude\n')2380 for index in permutation:2381 order_fid.write('%d, %f, %f\n' %(index,2382 lat_long_points[index][1],2383 lat_long_points[index][0]))2384 order_fid.close()2385 2386 2387 2388 2389 # Call urs2sts with multiple mux files2390 
urs2sts([base_nameI, base_nameII],2391 basename_out=base_nameI,2392 ordering_filename=ordering_filename,2393 weights=[1.0, 1.0],2394 mean_stage=tide,2395 verbose=False)2396 2397 # now I want to check the sts file ...2398 sts_file = base_nameI + '.sts'2399 2400 #Let's interrogate the sts file2401 # Note, the sts info is not gridded. It is point data.2402 fid = NetCDFFile(sts_file)2403 2404 # Check that original indices have been stored2405 stored_permutation = fid.variables['permutation'][:]2406 msg = 'Permutation was not stored correctly. I got '2407 msg += str(stored_permutation)2408 assert num.allclose(stored_permutation, permutation), msg2409 2410 2411 # Make x and y absolute2412 x = fid.variables['x'][:]2413 y = fid.variables['y'][:]2414 2415 geo_reference = Geo_reference(NetCDFObject=fid)2416 points = geo_reference.get_absolute(map(None, x, y))2417 points = ensure_numeric(points)2418 2419 x = points[:,0]2420 y = points[:,1]2421 2422 #print2423 #print x2424 #print y2425 for i, index in enumerate(permutation):2426 # Check that STS points are stored in the correct order2427 2428 # Work out the UTM coordinates sts point i2429 zone, e, n = redfearn(lat_long_points[index][0],2430 lat_long_points[index][1])2431 2432 #print i, [x[i],y[i]], [e,n]2433 assert num.allclose([x[i],y[i]], [e,n])2434 2435 2436 # Check the time vector2437 times = fid.variables['time'][:]2438 2439 times_actual = []2440 for i in range(time_step_count):2441 times_actual.append(time_step * i)2442 2443 assert num.allclose(ensure_numeric(times),2444 ensure_numeric(times_actual))2445 2446 2447 # Check sts values2448 stage = fid.variables['stage'][:]2449 xmomentum = fid.variables['xmomentum'][:]2450 ymomentum = fid.variables['ymomentum'][:]2451 elevation = fid.variables['elevation'][:]2452 2453 # Set original data used to write mux file to be zero when gauges are2454 # not recdoring2455 2456 ha[0][0]=0.02457 ha[0][time_step_count-1]=0.02458 ha[2][0]=0.02459 ua[0][0]=0.02460 
ua[0][time_step_count-1]=0.02461 ua[2][0]=0.02462 va[0][0]=0.02463 va[0][time_step_count-1]=0.02464 va[2][0]=0.0;2465 2466 2467 # The stage stored in the .sts file should be the sum of the stage2468 # in the two mux2 files because both have weights = 1. In this case2469 # the mux2 files are the same so stage == 2.0 * ha2470 #print 2.0*num.transpose(ha) - stage2471 2472 ha_permutation = num.take(ha, permutation, axis=0)2473 ua_permutation = num.take(ua, permutation, axis=0)2474 va_permutation = num.take(va, permutation, axis=0)2475 gauge_depth_permutation = num.take(gauge_depth, permutation, axis=0)2476 2477 2478 assert num.allclose(2.0*num.transpose(ha_permutation)+tide, stage) # Meters2479 2480 #Check the momentums - ua2481 #momentum = velocity*(stage-elevation)2482 # elevation = - depth2483 #momentum = velocity_ua *(stage+depth)2484 2485 depth=num.zeros((len(lat_long_points),time_step_count),num.float)2486 for i in range(len(lat_long_points)):2487 depth[i]=gauge_depth[i]+tide+2.0*ha[i]2488 #2.0*ha necessary because using two files with weights=1 are used2489 2490 depth_permutation = num.take(depth, permutation, axis=0)2491 2492 2493 # The xmomentum stored in the .sts file should be the sum of the ua2494 # in the two mux2 files multiplied by the depth.2495 assert num.allclose(2.0*num.transpose(ua_permutation*depth_permutation), xmomentum)2496 2497 #Check the momentums - va2498 #momentum = velocity*(stage-elevation)2499 # elevation = - depth2500 #momentum = velocity_va *(stage+depth)2501 2502 # The ymomentum stored in the .sts file should be the sum of the va2503 # in the two mux2 files multiplied by the depth.2504 assert num.allclose(2.0*num.transpose(va_permutation*depth_permutation), ymomentum)2505 2506 # check the elevation values.2507 # -ve since urs measures depth, sww meshers height,2508 assert num.allclose(-gauge_depth_permutation, elevation) #Meters2509 2510 fid.close()2511 self.delete_mux(filesI)2512 self.delete_mux(filesII)2513 os.remove(sts_file)2514 
2515 2516 2517 2518 2519 def Xtest_urs2sts_ordering_exception(self):2520 """Test that inconsistent lats and lons in ordering file are caught.2521 """2522 2523 tide=02524 time_step_count = 32525 time_step = 22526 lat_long_points =[(-21.5,114.5),(-21,114.5),(-21.5,115), (-21.,115.)]2527 n=len(lat_long_points)2528 first_tstep=num.ones(n,num.int)2529 first_tstep[0]+=12530 first_tstep[2]+=12531 last_tstep=(time_step_count)*num.ones(n,num.int)2532 last_tstep[0]-=12533 2534 gauge_depth=20*num.ones(n,num.float)2535 ha=2*num.ones((n,time_step_count),num.float)2536 ha[0]=num.arange(0,time_step_count)2537 ha[1]=num.arange(time_step_count,2*time_step_count)2538 ha[2]=num.arange(2*time_step_count,3*time_step_count)2539 ha[3]=num.arange(3*time_step_count,4*time_step_count)2540 ua=5*num.ones((n,time_step_count),num.float)2541 va=-10*num.ones((n,time_step_count),num.float)2542 2543 # Create two identical mux files to be combined by urs2sts2544 base_nameI, filesI = self.write_mux2(lat_long_points,2545 time_step_count, time_step,2546 first_tstep, last_tstep,2547 depth=gauge_depth,2548 ha=ha,2549 ua=ua,2550 va=va)2551 2552 base_nameII, filesII = self.write_mux2(lat_long_points,2553 time_step_count, time_step,2554 first_tstep, last_tstep,2555 depth=gauge_depth,2556 ha=ha,2557 ua=ua,2558 va=va)2559 2560 2561 # Create ordering file2562 permutation = [3,0,2]2563 2564 # Do it wrongly and check that exception is being raised2565 _, ordering_filename = tempfile.mkstemp('')2566 order_fid = open(ordering_filename, 'w')2567 order_fid.write('index, longitude, latitude\n')2568 for index in permutation:2569 order_fid.write('%d, %f, %f\n' %(index,2570 lat_long_points[index][0],2571 lat_long_points[index][1]))2572 order_fid.close()2573 2574 try:2575 urs2sts([base_nameI, base_nameII],2576 basename_out=base_nameI,2577 ordering_filename=ordering_filename,2578 weights=[1.0, 1.0],2579 mean_stage=tide,2580 verbose=False)2581 os.remove(ordering_filename)2582 except:2583 pass2584 else:2585 msg = 'Should 
have caught wrong lat longs'2586 raise Exception, msg2587 2588 2589 self.delete_mux(filesI)2590 self.delete_mux(filesII)2591 2592 2593 2594 2595 def test_urs2sts_ordering_different_sources(self):2596 """Test multiple sources with ordering file, different source files and weights.2597 This test also has more variable values than the previous ones2598 """2599 2600 tide = 1.52601 time_step_count = 102602 time_step = 0.22603 2604 times_ref = num.arange(0, time_step_count*time_step, time_step)2605 #print 'time vector', times_ref2606 2607 lat_long_points = [(-21.5,114.5), (-21,114.5), (-21.5,115), (-21.,115.), (-22., 117.)]2608 n = len(lat_long_points)2609 2610 # Create non-trivial weights2611 #weights = [0.8, 1.5] # OK2612 #weights = [0.8, 10.5] # Fail (up to allclose tolerance)2613 #weights = [10.5, 10.5] # OK2614 #weights = [0.0, 10.5] # OK2615 #weights = [0.8, 0.] # OK2616 #weights = [8, 0.1] # OK2617 #weights = [0.8, 10.0] # OK2618 #weights = [0.8, 10.6] # OK2619 weights = [3.8, 7.6] # OK2620 #weights = [0.5, 0.5] # OK2621 #weights = [2., 2.] 
# OK2622 #weights = [0.0, 0.5] # OK2623 #weights = [1.0, 1.0] # OK2624 2625 2626 # Create different timeseries starting and ending at different times2627 first_tstep=num.ones(n,num.int)2628 first_tstep[0]+=2 # Point 0 starts at 22629 first_tstep[1]+=4 # Point 1 starts at 42630 first_tstep[2]+=3 # Point 2 starts at 32631 2632 last_tstep=(time_step_count)*num.ones(n,num.int)2633 last_tstep[0]-=1 # Point 0 ends 1 step early2634 last_tstep[1]-=2 # Point 1 ends 2 steps early2635 last_tstep[4]-=3 # Point 4 ends 3 steps early2636 2637 #print2638 #print 'time_step_count', time_step_count2639 #print 'time_step', time_step2640 #print 'first_tstep', first_tstep2641 #print 'last_tstep', last_tstep2642 2643 2644 # Create varying elevation data (positive values for seafloor)2645 gauge_depth=20*num.ones(n,num.float)2646 for i in range(n):2647 gauge_depth[i] += i**22648 2649 #print 'gauge_depth', gauge_depth2650 2651 # Create data to be written to first mux file2652 ha0=2*num.ones((n,time_step_count),num.float)2653 ha0[0]=num.arange(0,time_step_count)2654 ha0[1]=num.arange(time_step_count,2*time_step_count)2655 ha0[2]=num.arange(2*time_step_count,3*time_step_count)2656 ha0[3]=num.arange(3*time_step_count,4*time_step_count)2657 ua0=5*num.ones((n,time_step_count),num.float)2658 va0=-10*num.ones((n,time_step_count),num.float)2659 2660 # Ensure data used to write mux file to be zero when gauges are2661 # not recording2662 for i in range(n):2663 # For each point2664 2665 for j in range(0, first_tstep[i]-1) + range(last_tstep[i], time_step_count):2666 # For timesteps before and after recording range2667 ha0[i][j] = ua0[i][j] = va0[i][j] = 0.02668 2669 2670 2671 #print2672 #print 'using varying start and end time'2673 #print 'ha0', ha0[4]2674 #print 'ua0', ua02675 #print 'va0', va02676 2677 # Write first mux file to be combined by urs2sts2678 base_nameI, filesI = self.write_mux2(lat_long_points,2679 time_step_count, time_step,2680 first_tstep, last_tstep,2681 depth=gauge_depth,2682 
ha=ha0,2683 ua=ua0,2684 va=va0)2685 2686 2687 2688 # Create data to be written to second mux file2689 ha1=num.ones((n,time_step_count),num.float)2690 ha1[0]=num.sin(times_ref)2691 ha1[1]=2*num.sin(times_ref - 3)2692 ha1[2]=5*num.sin(4*times_ref)2693 ha1[3]=num.sin(times_ref)2694 ha1[4]=num.sin(2*times_ref-0.7)2695 2696 ua1=num.zeros((n,time_step_count),num.float)2697 ua1[0]=3*num.cos(times_ref)2698 ua1[1]=2*num.sin(times_ref-0.7)2699 ua1[2]=num.arange(3*time_step_count,4*time_step_count)2700 ua1[4]=2*num.ones(time_step_count)2701 2702 va1=num.zeros((n,time_step_count),num.float)2703 va1[0]=2*num.cos(times_ref-0.87)2704 va1[1]=3*num.ones(time_step_count, num.int) #array default#2705 va1[3]=2*num.sin(times_ref-0.71)2706 2707 2708 # Ensure data used to write mux file to be zero when gauges are2709 # not recording2710 for i in range(n):2711 # For each point2712 2713 for j in range(0, first_tstep[i]-1) + range(last_tstep[i], time_step_count):2714 # For timesteps before and after recording range2715 ha1[i][j] = ua1[i][j] = va1[i][j] = 0.02716 2717 2718 #print2719 #print 'using varying start and end time'2720 #print 'ha1', ha1[4]2721 #print 'ua1', ua12722 #print 'va1', va12723 2724 2725 # Write second mux file to be combined by urs2sts2726 base_nameII, filesII = self.write_mux2(lat_long_points,2727 time_step_count, time_step,2728 first_tstep, last_tstep,2729 depth=gauge_depth,2730 ha=ha1,2731 ua=ua1,2732 va=va1)2733 2734 2735 # Create ordering file2736 permutation = [4,0,2]2737 2738 _, ordering_filename = tempfile.mkstemp('')2739 order_fid = open(ordering_filename, 'w')2740 order_fid.write('index, longitude, latitude\n')2741 for index in permutation:2742 order_fid.write('%d, %f, %f\n' %(index,2743 lat_long_points[index][1],2744 lat_long_points[index][0]))2745 order_fid.close()2746 2747 2748 2749 #------------------------------------------------------------2750 # Now read the mux files one by one without weights and test2751 2752 # Call urs2sts with mux file #02753 
urs2sts([base_nameI],2754 basename_out=base_nameI,2755 ordering_filename=ordering_filename,2756 mean_stage=tide,2757 verbose=False)2758 2759 # Now read the sts file and check that values have been stored correctly.2760 sts_file = base_nameI + '.sts'2761 fid = NetCDFFile(sts_file)2762 2763 2764 # Check that original indices have been stored2765 stored_permutation = fid.variables['permutation'][:]2766 msg = 'Permutation was not stored correctly. I got '2767 msg += str(stored_permutation)2768 assert num.allclose(stored_permutation, permutation), msg2769 2770 2771 2772 2773 # Make x and y absolute2774 x = fid.variables['x'][:]2775 y = fid.variables['y'][:]2776 2777 geo_reference = Geo_reference(NetCDFObject=fid)2778 points = geo_reference.get_absolute(map(None, x, y))2779 points = ensure_numeric(points)2780 2781 x = points[:,0]2782 y = points[:,1]2783 2784 for i, index in enumerate(permutation):2785 # Check that STS points are stored in the correct order2786 2787 # Work out the UTM coordinates sts point i2788 zone, e, n = redfearn(lat_long_points[index][0],2789 lat_long_points[index][1])2790 2791 #print i, [x[i],y[i]], [e,n]2792 assert num.allclose([x[i],y[i]], [e,n])2793 2794 2795 # Check the time vector2796 times = fid.variables['time'][:]2797 assert num.allclose(ensure_numeric(times),2798 ensure_numeric(times_ref))2799 2800 2801 # Check sts values for mux #02802 stage0 = fid.variables['stage'][:].copy()2803 xmomentum0 = fid.variables['xmomentum'][:].copy()2804 ymomentum0 = fid.variables['ymomentum'][:].copy()2805 elevation0 = fid.variables['elevation'][:].copy()2806 2807 2808 #print 'stage', stage02809 #print 'xmomentum', xmomentum02810 #print 'ymomentum', ymomentum02811 #print 'elevation', elevation02812 2813 # The quantities stored in the .sts file should be the weighted sum of the2814 # quantities written to the mux2 files subject to the permutation vector.2815 2816 ha_ref = num.take(ha0, permutation, axis=0)2817 ua_ref = num.take(ua0, permutation, axis=0)2818 
va_ref = num.take(va0, permutation, axis=0)2819 2820 gauge_depth_ref = num.take(gauge_depth, permutation, axis=0)2821 2822 assert num.allclose(num.transpose(ha_ref)+tide, stage0) # Meters2823 2824 2825 2826 #Check the momentums - ua2827 #momentum = velocity*(stage-elevation)2828 # elevation = - depth2829 #momentum = velocity_ua *(stage+depth)2830 2831 depth_ref = num.zeros((len(permutation), time_step_count), num.float)2832 for i in range(len(permutation)):2833 depth_ref[i]=gauge_depth_ref[i]+tide+ha_ref[i]2834 2835 2836 # The xmomentum stored in the .sts file should be the sum of the ua2837 # in the two mux2 files multiplied by the depth.2838 assert num.allclose(num.transpose(ua_ref*depth_ref), xmomentum0)2839 2840 #Check the momentums - va2841 #momentum = velocity*(stage-elevation)2842 # elevation = - depth2843 #momentum = velocity_va *(stage+depth)2844 2845 # The ymomentum stored in the .sts file should be the sum of the va2846 # in the two mux2 files multiplied by the depth.2847 2848 2849 #print transpose(va_ref*depth_ref)2850 #print ymomentum2851 assert num.allclose(num.transpose(va_ref*depth_ref), ymomentum0)2852 2853 # check the elevation values.2854 # -ve since urs measures depth, sww meshers height,2855 assert num.allclose(-gauge_depth_ref, elevation0)2856 2857 fid.close()2858 os.remove(sts_file)2859 2860 2861 2862 2863 # Call urs2sts with mux file #12864 urs2sts([base_nameII],2865 basename_out=base_nameI,2866 ordering_filename=ordering_filename,2867 mean_stage=tide,2868 verbose=False)2869 2870 # Now read the sts file and check that values have been stored correctly.2871 sts_file = base_nameI + '.sts'2872 fid = NetCDFFile(sts_file)2873 2874 2875 # Check that original indices have been stored2876 stored_permutation = fid.variables['permutation'][:]2877 msg = 'Permutation was not stored correctly. 
I got '2878 msg += str(stored_permutation)2879 assert num.allclose(stored_permutation, permutation), msg2880 2881 # Make x and y absolute2882 x = fid.variables['x'][:]2883 y = fid.variables['y'][:]2884 2885 geo_reference = Geo_reference(NetCDFObject=fid)2886 points = geo_reference.get_absolute(map(None, x, y))2887 points = ensure_numeric(points)2888 2889 x = points[:,0]2890 y = points[:,1]2891 2892 for i, index in enumerate(permutation):2893 # Check that STS points are stored in the correct order2894 2895 # Work out the UTM coordinates sts point i2896 zone, e, n = redfearn(lat_long_points[index][0],2897 lat_long_points[index][1])2898 2899 #print i, [x[i],y[i]], [e,n]2900 assert num.allclose([x[i],y[i]], [e,n])2901 2902 2903 # Check the time vector2904 times = fid.variables['time'][:]2905 assert num.allclose(ensure_numeric(times),2906 ensure_numeric(times_ref))2907 2908 2909 # Check sts values for mux #12910 stage1 = fid.variables['stage'][:].copy()2911 xmomentum1 = fid.variables['xmomentum'][:].copy()2912 ymomentum1 = fid.variables['ymomentum'][:].copy()2913 elevation1 = fid.variables['elevation'][:].copy()2914 2915 2916 #print 'stage', stage12917 #print 'xmomentum', xmomentum12918 #print 'ymomentum', ymomentum12919 #print 'elevation', elevation12920 2921 # The quantities stored in the .sts file should be the weighted sum of the2922 # quantities written to the mux2 files subject to the permutation vector.2923 2924 ha_ref = num.take(ha1, permutation, axis=0)2925 ua_ref = num.take(ua1, permutation, axis=0)2926 va_ref = num.take(va1, permutation, axis=0)2927 2928 gauge_depth_ref = num.take(gauge_depth, permutation, axis=0)2929 2930 2931 #print2932 #print stage12933 #print transpose(ha_ref)+tide - stage12934 2935 2936 assert num.allclose(num.transpose(ha_ref)+tide, stage1) # Meters2937 #import sys; sys.exit()2938 2939 #Check the momentums - ua2940 #momentum = velocity*(stage-elevation)2941 # elevation = - depth2942 #momentum = velocity_ua *(stage+depth)2943 2944 
depth_ref = num.zeros((len(permutation), time_step_count), num.float)2945 for i in range(len(permutation)):2946 depth_ref[i]=gauge_depth_ref[i]+tide+ha_ref[i]2947 2948 2949 # The xmomentum stored in the .sts file should be the sum of the ua2950 # in the two mux2 files multiplied by the depth.2951 assert num.allclose(num.transpose(ua_ref*depth_ref), xmomentum1)2952 2953 #Check the momentums - va2954 #momentum = velocity*(stage-elevation)2955 # elevation = - depth2956 #momentum = velocity_va *(stage+depth)2957 2958 # The ymomentum stored in the .sts file should be the sum of the va2959 # in the two mux2 files multiplied by the depth.2960 2961 2962 #print transpose(va_ref*depth_ref)2963 #print ymomentum2964 assert num.allclose(num.transpose(va_ref*depth_ref), ymomentum1)2965 2966 # check the elevation values.2967 # -ve since urs measures depth, sww meshers height,2968 assert num.allclose(-gauge_depth_ref, elevation1)2969 2970 fid.close()2971 os.remove(sts_file)2972 2973 #----------------------2974 # Then read the mux files together and test2975 2976 2977 # Call urs2sts with multiple mux files2978 urs2sts([base_nameI, base_nameII],2979 basename_out=base_nameI,2980 ordering_filename=ordering_filename,2981 weights=weights,2982 mean_stage=tide,2983 verbose=False)2984 2985 # Now read the sts file and check that values have been stored correctly.2986 sts_file = base_nameI + '.sts'2987 fid = NetCDFFile(sts_file)2988 2989 # Make x and y absolute2990 x = fid.variables['x'][:]2991 y = fid.variables['y'][:]2992 2993 geo_reference = Geo_reference(NetCDFObject=fid)2994 points = geo_reference.get_absolute(map(None, x, y))2995 points = ensure_numeric(points)2996 2997 x = points[:,0]2998 y = points[:,1]2999 3000 for i, index in enumerate(permutation):3001 # Check that STS points are stored in the correct order3002 3003 # Work out the UTM coordinates sts point i3004 zone, e, n = redfearn(lat_long_points[index][0],3005 lat_long_points[index][1])3006 3007 #print i, [x[i],y[i]], 
[e,n]3008 assert num.allclose([x[i],y[i]], [e,n])3009 3010 3011 # Check the time vector3012 times = fid.variables['time'][:]3013 assert num.allclose(ensure_numeric(times),3014 ensure_numeric(times_ref))3015 3016 3017 # Check sts values3018 stage = fid.variables['stage'][:].copy()3019 xmomentum = fid.variables['xmomentum'][:].copy()3020 ymomentum = fid.variables['ymomentum'][:].copy()3021 elevation = fid.variables['elevation'][:].copy()3022 3023 3024 #print 'stage', stage3025 #print 'elevation', elevation3026 3027 # The quantities stored in the .sts file should be the weighted sum of the3028 # quantities written to the mux2 files subject to the permutation vector.3029 3030 ha_ref = (weights[0]*num.take(ha0, permutation, axis=0)3031 + weights[1]*num.take(ha1, permutation, axis=0))3032 ua_ref = (weights[0]*num.take(ua0, permutation, axis=0)3033 + weights[1]*num.take(ua1, permutation, axis=0))3034 va_ref = (weights[0]*num.take(va0, permutation, axis=0)3035 + weights[1]*num.take(va1, permutation, axis=0))3036 3037 gauge_depth_ref = num.take(gauge_depth, permutation, axis=0)3038 3039 3040 #print3041 #print stage3042 #print transpose(ha_ref)+tide - stage3043 3044 assert num.allclose(num.transpose(ha_ref)+tide, stage) # Meters3045 3046 #Check the momentums - ua3047 #momentum = velocity*(stage-elevation)3048 # elevation = - depth3049 #momentum = velocity_ua *(stage+depth)3050 3051 depth_ref = num.zeros((len(permutation), time_step_count), num.float)3052 for i in range(len(permutation)):3053 depth_ref[i]=gauge_depth_ref[i]+tide+ha_ref[i]3054 3055 3056 3057 3058 # The xmomentum stored in the .sts file should be the sum of the ua3059 # in the two mux2 files multiplied by the depth.3060 assert num.allclose(num.transpose(ua_ref*depth_ref), xmomentum)3061 3062 #Check the momentums - va3063 #momentum = velocity*(stage-elevation)3064 # elevation = - depth3065 #momentum = velocity_va *(stage+depth)3066 3067 # The ymomentum stored in the .sts file should be the sum of the va3068 # in 
        # in the two mux2 files multiplied by the depth.


        #print transpose(va_ref*depth_ref)
        #print ymomentum

        assert num.allclose(num.transpose(va_ref*depth_ref), ymomentum)

        # check the elevation values.
        # -ve since urs measures depth, sww meshers height,
        assert num.allclose(-gauge_depth_ref, elevation)  #Meters

        fid.close()
        self.delete_mux(filesI)
        self.delete_mux(filesII)
        os.remove(sts_file)

        #---------------
        # "Manually" add the timeseries up with weights and test
        # Tide is discounted from individual results and added back in
        #

        stage_man = weights[0]*(stage0-tide) + weights[1]*(stage1-tide) + tide
        assert num.allclose(stage_man, stage)


    def test_file_boundary_stsI(self):
        """Check that an STS file used through File_boundary gives the same
        evolution as the equivalent constant Dirichlet boundary.

        A mux2 file with constant stage w, velocities (u, v) and depth h is
        converted to .sts; File_boundary driven from that file must agree
        with Dirichlet_boundary([w+tide, u*(w+h+tide), v*(w+h+tide)]).
        """

        # FIXME (Ole): These tests should really move to
        # test_generic_boundaries.py

        from anuga.shallow_water import Domain
        from anuga.shallow_water import Reflective_boundary
        from anuga.shallow_water import Dirichlet_boundary
        from anuga.shallow_water import File_boundary
        from anuga.pmesh.mesh_interface import create_mesh_from_regions

        bounding_polygon=[[6.0,97.0],[6.01,97.0],[6.02,97.0],[6.02,97.02],[6.00,97.02]]
        tide = 0.37
        time_step_count = 5
        time_step = 2
        lat_long_points =bounding_polygon[0:3]
        n=len(lat_long_points)
        first_tstep=num.ones(n,num.int)
        last_tstep=(time_step_count)*num.ones(n,num.int)

        # Constant gauge signal: depth h, stage w, velocities u and v
        h = 20
        w = 2
        u = 10
        v = -10
        gauge_depth=h*num.ones(n,num.float)
        ha=w*num.ones((n,time_step_count),num.float)
        ua=u*num.ones((n,time_step_count),num.float)
        va=v*num.ones((n,time_step_count),num.float)
        base_name, files = self.write_mux2(lat_long_points,
                                           time_step_count, time_step,
                                           first_tstep, last_tstep,
                                           depth=gauge_depth,
                                           ha=ha,
                                           ua=ua,
                                           va=va)

        sts_file=base_name
        urs2sts(base_name,
                sts_file,
                mean_stage=tide,
                verbose=False)
        self.delete_mux(files)

        #print 'start create mesh from regions'
        # Convert boundary polygon from lat/long to UTM (in place)
        for i in range(len(bounding_polygon)):
            zone,bounding_polygon[i][0],bounding_polygon[i][1]=redfearn(bounding_polygon[i][0],bounding_polygon[i][1])
        extent_res=1000000
        meshname = 'urs_test_mesh' + '.tsh'
        interior_regions=None
        boundary_tags={'ocean': [0,1], 'otherocean': [2,3,4]}
        create_mesh_from_regions(bounding_polygon,
                                 boundary_tags=boundary_tags,
                                 maximum_triangle_area=extent_res,
                                 filename=meshname,
                                 interior_regions=interior_regions,
                                 verbose=False)

        domain_fbound = Domain(meshname)
        domain_fbound.set_quantity('stage', tide)
        Bf = File_boundary(sts_file+'.sts',
                           domain_fbound,
                           boundary_polygon=bounding_polygon)
        Br = Reflective_boundary(domain_fbound)
        # Equivalent analytical boundary: [stage, xmomentum, ymomentum]
        Bd = Dirichlet_boundary([w+tide, u*(w+h+tide), v*(w+h+tide)])

        domain_fbound.set_boundary({'ocean': Bf,'otherocean': Br})

        # Check boundary object evaluates as it should
        for i, ((vol_id, edge_id), B) in enumerate(domain_fbound.boundary_objects):
            if B is Bf:

                qf = B.evaluate(vol_id, edge_id)  # File boundary
                qd = Bd.evaluate(vol_id, edge_id) # Dirichlet boundary

                assert num.allclose(qf, qd)


        # Evolve
        finaltime=time_step*(time_step_count-1)
        yieldstep=time_step
        temp_fbound=num.zeros(int(finaltime/yieldstep)+1,num.float)

        for i, t in enumerate(domain_fbound.evolve(yieldstep=yieldstep,
                                                   finaltime=finaltime,
                                                   skip_initial_step=False)):

            D = domain_fbound
            temp_fbound[i]=D.quantities['stage'].centroid_values[2]

            # Check that file boundary object has populated
            # boundary array correctly
            # FIXME (Ole): Do this for the other tests too!
            for j, val in enumerate(D.get_quantity('stage').boundary_values):

                (vol_id, edge_id), B = D.boundary_objects[j]
                if isinstance(B, File_boundary):
                    #print j, val
                    assert num.allclose(val, w + tide)


        # Reference run with the constant Dirichlet boundary
        domain_drchlt = Domain(meshname)
        domain_drchlt.set_quantity('stage', tide)
        Br = Reflective_boundary(domain_drchlt)

        domain_drchlt.set_boundary({'ocean': Bd,'otherocean': Br})
        temp_drchlt=num.zeros(int(finaltime/yieldstep)+1,num.float)

        for i, t in enumerate(domain_drchlt.evolve(yieldstep=yieldstep,
                                                   finaltime=finaltime,
                                                   skip_initial_step=False)):
            temp_drchlt[i]=domain_drchlt.quantities['stage'].centroid_values[2]

        #print domain_fbound.quantities['stage'].vertex_values
        #print domain_drchlt.quantities['stage'].vertex_values

        assert num.allclose(temp_fbound,temp_drchlt)

        assert num.allclose(domain_fbound.quantities['stage'].vertex_values,
                            domain_drchlt.quantities['stage'].vertex_values)

        assert num.allclose(domain_fbound.quantities['xmomentum'].vertex_values,
                            domain_drchlt.quantities['xmomentum'].vertex_values)

        assert num.allclose(domain_fbound.quantities['ymomentum'].vertex_values,
                            domain_drchlt.quantities['ymomentum'].vertex_values)


        os.remove(sts_file+'.sts')
        os.remove(meshname)


    def test_file_boundary_stsI_beyond_model_time(self):
        """test_file_boundary_stsI(self):

        Test that file_boundary can work when model time
        exceeds available data.
        This is optional and requires the user to specify a default
        boundary object.
        """

        # Don't do warnings in unit test
        import warnings
        warnings.simplefilter('ignore')

        from anuga.shallow_water import Domain
        from anuga.shallow_water import Reflective_boundary
        from anuga.shallow_water import Dirichlet_boundary
        from anuga.shallow_water import File_boundary
        from anuga.pmesh.mesh_interface import create_mesh_from_regions

        bounding_polygon=[[6.0,97.0],[6.01,97.0],[6.02,97.0],
                          [6.02,97.02],[6.00,97.02]]
        tide = 0.37
        time_step_count = 5
        time_step = 2
        lat_long_points = bounding_polygon[0:3]
        n=len(lat_long_points)
        first_tstep=num.ones(n,num.int)
        last_tstep=(time_step_count)*num.ones(n,num.int)

        # Constant gauge signal: depth h, stage w, velocities u and v
        h = 20
        w = 2
        u = 10
        v = -10
        gauge_depth=h*num.ones(n,num.float)
        ha=w*num.ones((n,time_step_count),num.float)
        ua=u*num.ones((n,time_step_count),num.float)
        va=v*num.ones((n,time_step_count),num.float)
        base_name, files = self.write_mux2(lat_long_points,
                                           time_step_count, time_step,
                                           first_tstep, last_tstep,
                                           depth=gauge_depth,
                                           ha=ha,
                                           ua=ua,
                                           va=va)

        sts_file=base_name
        urs2sts(base_name,
                sts_file,
                mean_stage=tide,
                verbose=False)
        self.delete_mux(files)

        #print 'start create mesh from regions'
        # Convert boundary polygon from lat/long to UTM (in place)
        for i in range(len(bounding_polygon)):
            zone,\
            bounding_polygon[i][0],\
            bounding_polygon[i][1]=redfearn(bounding_polygon[i][0],
                                            bounding_polygon[i][1])

        extent_res=1000000
        meshname = 'urs_test_mesh' + '.tsh'
        interior_regions=None
        boundary_tags={'ocean': [0,1], 'otherocean': [2,3,4]}
        create_mesh_from_regions(bounding_polygon,
                                 boundary_tags=boundary_tags,
                                 maximum_triangle_area=extent_res,
                                 filename=meshname,
                                 interior_regions=interior_regions,
                                 verbose=False)

        domain_fbound = Domain(meshname)
        domain_fbound.set_quantity('stage', tide)

        Br = Reflective_boundary(domain_fbound)
        Bd = Dirichlet_boundary([w+tide, u*(w+h+tide), v*(w+h+tide)])
        Bf = File_boundary(sts_file+'.sts',
                           domain_fbound,
                           boundary_polygon=bounding_polygon,
                           default_boundary=Bd) # Condition to be used
                                                # if model time exceeds
                                                # available data

        domain_fbound.set_boundary({'ocean': Bf,'otherocean': Br})

        # Check boundary object evaluates as it should
        for i, ((vol_id, edge_id), B) in enumerate(domain_fbound.boundary_objects):
            if B is Bf:

                qf = B.evaluate(vol_id, edge_id)  # File boundary
                qd = Bd.evaluate(vol_id, edge_id) # Dirichlet boundary

                assert num.allclose(qf, qd)


        # Evolve
        data_finaltime = time_step*(time_step_count-1)
        finaltime = data_finaltime + 10 # Let model time exceed available data
        yieldstep = time_step
        temp_fbound=num.zeros(int(finaltime/yieldstep)+1, num.float)

        for i, t in enumerate(domain_fbound.evolve(yieldstep=yieldstep,
                                                   finaltime=finaltime,
                                                   skip_initial_step=False)):

            D = domain_fbound
            temp_fbound[i]=D.quantities['stage'].centroid_values[2]

            # Check that file boundary object has populated
            # boundary array correctly
            # FIXME (Ole): Do this for the other tests too!
            for j, val in enumerate(D.get_quantity('stage').boundary_values):

                (vol_id, edge_id), B = D.boundary_objects[j]
                if isinstance(B, File_boundary):
                    #print j, val
                    assert num.allclose(val, w + tide)


        # Reference run with the constant Dirichlet boundary
        domain_drchlt = Domain(meshname)
        domain_drchlt.set_quantity('stage', tide)
        Br = Reflective_boundary(domain_drchlt)

        domain_drchlt.set_boundary({'ocean': Bd,'otherocean': Br})
        temp_drchlt=num.zeros(int(finaltime/yieldstep)+1,num.float)

        for i, t in enumerate(domain_drchlt.evolve(yieldstep=yieldstep,
                                                   finaltime=finaltime,
                                                   skip_initial_step=False)):
            temp_drchlt[i]=domain_drchlt.quantities['stage'].centroid_values[2]

        #print domain_fbound.quantities['stage'].vertex_values
        #print domain_drchlt.quantities['stage'].vertex_values

        assert num.allclose(temp_fbound,temp_drchlt)

        assert num.allclose(domain_fbound.quantities['stage'].vertex_values,
                            domain_drchlt.quantities['stage'].vertex_values)

        assert num.allclose(domain_fbound.quantities['xmomentum'].vertex_values,
                            domain_drchlt.quantities['xmomentum'].vertex_values)

        assert num.allclose(domain_fbound.quantities['ymomentum'].vertex_values,
                            domain_drchlt.quantities['ymomentum'].vertex_values)

        os.remove(sts_file+'.sts')
        os.remove(meshname)


    def test_field_boundary_stsI_beyond_model_time(self):
        """test_field_boundary(self):

        Test that field_boundary can work when model time
        exceeds available data whilst adjusting mean_stage.

        """

        # Don't do warnings in unit test
        import warnings
        warnings.simplefilter('ignore')

        from anuga.shallow_water import Domain
        from anuga.shallow_water import Reflective_boundary
        from anuga.shallow_water import Dirichlet_boundary
        from anuga.shallow_water import File_boundary
        from anuga.pmesh.mesh_interface import create_mesh_from_regions

        bounding_polygon=[[6.0,97.0],[6.01,97.0],[6.02,97.0],
                          [6.02,97.02],[6.00,97.02]]
        tide = 0.37
        time_step_count = 5
        time_step = 2
        lat_long_points = bounding_polygon[0:3]
        n=len(lat_long_points)
        first_tstep=num.ones(n,num.int)
        last_tstep=(time_step_count)*num.ones(n,num.int)

        # Constant gauge signal: depth h, stage w, velocities u and v
        h = 20
        w = 2
        u = 10
        v = -10
        gauge_depth=h*num.ones(n,num.float)
        ha=w*num.ones((n,time_step_count),num.float)
        ua=u*num.ones((n,time_step_count),num.float)
        va=v*num.ones((n,time_step_count),num.float)
        base_name, files = self.write_mux2(lat_long_points,
                                           time_step_count, time_step,
                                           first_tstep, last_tstep,
                                           depth=gauge_depth,
                                           ha=ha,
                                           ua=ua,
                                           va=va)

        sts_file=base_name
        urs2sts(base_name,
                sts_file,
                mean_stage=0.0, # Deliberately let Field_boundary do the adjustment
                verbose=False)
        self.delete_mux(files)

        #print 'start create mesh from regions'
        # Convert boundary polygon from lat/long to UTM (in place)
        for i in range(len(bounding_polygon)):
            zone,\
            bounding_polygon[i][0],\
            bounding_polygon[i][1]=redfearn(bounding_polygon[i][0],
                                            bounding_polygon[i][1])

        extent_res=1000000
        meshname = 'urs_test_mesh' + '.tsh'
        interior_regions=None
        boundary_tags={'ocean': [0,1], 'otherocean': [2,3,4]}
        create_mesh_from_regions(bounding_polygon,
                                 boundary_tags=boundary_tags,
                                 maximum_triangle_area=extent_res,
                                 filename=meshname,
                                 interior_regions=interior_regions,
                                 verbose=False)

        domain_fbound = Domain(meshname)
        domain_fbound.set_quantity('stage', tide)

        Br = Reflective_boundary(domain_fbound)
        Bd = Dirichlet_boundary([w+tide, u*(w+h), v*(w+h)])
        Bdefault = Dirichlet_boundary([w, u*(w+h), v*(w+h)])

        Bf = Field_boundary(sts_file+'.sts',
                            domain_fbound,
                            mean_stage=tide, # Field boundary to adjust for tide
                            boundary_polygon=bounding_polygon,
                            default_boundary=Bdefault) # Condition to be used
                                                       # if model time exceeds
                                                       # available data

        domain_fbound.set_boundary({'ocean': Bf,'otherocean': Br})

        # Check boundary object evaluates as it should
        for i, ((vol_id, edge_id), B) in enumerate(domain_fbound.boundary_objects):
            if B is Bf:

                qf = B.evaluate(vol_id, edge_id)  # Field boundary
                qd = Bd.evaluate(vol_id, edge_id) # Dirichlet boundary

                msg = 'Got %s, should have been %s' %(qf, qd)
                assert num.allclose(qf, qd), msg

        # Evolve
        data_finaltime = time_step*(time_step_count-1)
        finaltime = data_finaltime + 10 # Let model time exceed available data
        yieldstep = time_step
        temp_fbound=num.zeros(int(finaltime/yieldstep)+1, num.float)

        for i, t in enumerate(domain_fbound.evolve(yieldstep=yieldstep,
                                                   finaltime=finaltime,
                                                   skip_initial_step=False)):

            D = domain_fbound
            temp_fbound[i]=D.quantities['stage'].centroid_values[2]

            # Check that file boundary object has populated
            # boundary array correctly
            # FIXME (Ole): Do this for the other tests too!
            for j, val in enumerate(D.get_quantity('stage').boundary_values):

                (vol_id, edge_id), B = D.boundary_objects[j]
                if isinstance(B, Field_boundary):
                    msg = 'Got %f should have been %f' %(val, w+tide)
                    assert num.allclose(val, w + tide), msg


    def test_file_boundary_stsII(self):
        """test_file_boundary_stsII(self):

        mux2 file has points not included in boundary
        mux2 gauges are not stored with the same
        order as they are
        found in bounding_polygon. This does not matter as long as bounding
        polygon passed to file_function contains the mux2 points needed (in
        the correct order).
        """

        from anuga.shallow_water import Domain
        from anuga.shallow_water import Reflective_boundary
        from anuga.shallow_water import Dirichlet_boundary
        from anuga.shallow_water import File_boundary
        from anuga.pmesh.mesh_interface import create_mesh_from_regions

        bounding_polygon=[[6.01,97.0],[6.02,97.0],[6.02,97.02],[6.00,97.02],[6.0,97.0]]
        tide = -2.20
        time_step_count = 20
        time_step = 2
        # Gauge set deliberately differs from the boundary polygon order and
        # includes a point ([6.0,97.01]) that is not on the boundary at all
        lat_long_points=bounding_polygon[0:2]
        lat_long_points.insert(0,bounding_polygon[len(bounding_polygon)-1])
        lat_long_points.insert(0,[6.0,97.01])
        n=len(lat_long_points)
        first_tstep=num.ones(n,num.int)
        last_tstep=(time_step_count)*num.ones(n,num.int)
        gauge_depth=20*num.ones(n,num.float)
        ha=2*num.ones((n,time_step_count),num.float)
        ua=10*num.ones((n,time_step_count),num.float)
        va=-10*num.ones((n,time_step_count),num.float)
        base_name, files = self.write_mux2(lat_long_points,
                                           time_step_count,
                                           time_step,
                                           first_tstep,
                                           last_tstep,
                                           depth=gauge_depth,
                                           ha=ha,
                                           ua=ua,
                                           va=va)

        sts_file=base_name
        urs2sts(base_name,sts_file,mean_stage=tide,verbose=False)
        self.delete_mux(files)

        #print 'start create mesh from regions'
        # Convert boundary polygon from lat/long to UTM (in place)
        for i in range(len(bounding_polygon)):
            zone,\
            bounding_polygon[i][0],\
            bounding_polygon[i][1]=redfearn(bounding_polygon[i][0],
                                            bounding_polygon[i][1])

        extent_res=1000000
        meshname = 'urs_test_mesh' + '.tsh'
        interior_regions=None
        boundary_tags={'ocean': [0,1], 'otherocean': [2,3,4]}
        # have to change boundary tags from last example because now bounding
        # polygon starts in different place.
        create_mesh_from_regions(bounding_polygon,boundary_tags=boundary_tags,
                                 maximum_triangle_area=extent_res,filename=meshname,
                                 interior_regions=interior_regions,verbose=False)

        domain_fbound = Domain(meshname)
        domain_fbound.set_quantity('stage', tide)
        Bf = File_boundary(sts_file+'.sts',
                           domain_fbound,
                           boundary_polygon=bounding_polygon)
        Br = Reflective_boundary(domain_fbound)

        domain_fbound.set_boundary({'ocean': Bf,'otherocean': Br})
        finaltime=time_step*(time_step_count-1)
        yieldstep=time_step
        temp_fbound=num.zeros(int(finaltime/yieldstep)+1,num.float)

        for i, t in enumerate(domain_fbound.evolve(yieldstep=yieldstep,
                                                   finaltime=finaltime,
                                                   skip_initial_step = False)):
            temp_fbound[i]=domain_fbound.quantities['stage'].centroid_values[2]

        # Reference run with the equivalent constant Dirichlet boundary
        domain_drchlt = Domain(meshname)
        domain_drchlt.set_quantity('stage', tide)
        Br = Reflective_boundary(domain_drchlt)
        Bd = Dirichlet_boundary([2.0+tide,220+10*tide,-220-10*tide])
        domain_drchlt.set_boundary({'ocean': Bd,'otherocean': Br})
        temp_drchlt=num.zeros(int(finaltime/yieldstep)+1,num.float)

        for i, t in enumerate(domain_drchlt.evolve(yieldstep=yieldstep,
                                                   finaltime=finaltime,
                                                   skip_initial_step = False)):
            temp_drchlt[i]=domain_drchlt.quantities['stage'].centroid_values[2]


        assert num.allclose(temp_fbound,temp_drchlt)

        #print domain_fbound.quantities['stage'].vertex_values
        #print domain_drchlt.quantities['stage'].vertex_values


        assert num.allclose(domain_fbound.quantities['stage'].vertex_values,
                            domain_drchlt.quantities['stage'].vertex_values)

        assert num.allclose(domain_fbound.quantities['xmomentum'].vertex_values,
                            domain_drchlt.quantities['xmomentum'].vertex_values)

        assert num.allclose(domain_fbound.quantities['ymomentum'].vertex_values,
                            domain_drchlt.quantities['ymomentum'].vertex_values)


        os.remove(sts_file+'.sts')
        os.remove(meshname)


    def test_file_boundary_stsIII_ordering(self):
        """test_file_boundary_stsIII_ordering(self):
        Read correct points from ordering file and apply sts to boundary
        """
        from anuga.shallow_water import Domain
        from anuga.shallow_water import Reflective_boundary
        from anuga.shallow_water import Dirichlet_boundary
        from anuga.shallow_water import File_boundary
        from anuga.pmesh.mesh_interface import create_mesh_from_regions

        lat_long_points=[[6.01,97.0],[6.02,97.0],[6.05,96.9],[6.0,97.0]]
        bounding_polygon=[[6.0,97.0],[6.01,97.0],[6.02,97.0],
                          [6.02,97.02],[6.00,97.02]]
        tide = 3.0
        time_step_count = 50
        time_step = 2
        n=len(lat_long_points)
        first_tstep=num.ones(n,num.int)
        last_tstep=(time_step_count)*num.ones(n,num.int)
        gauge_depth=20*num.ones(n,num.float)
        ha=2*num.ones((n,time_step_count),num.float)
        ua=10*num.ones((n,time_step_count),num.float)
        va=-10*num.ones((n,time_step_count),num.float)
        base_name, files = self.write_mux2(lat_long_points,
                                           time_step_count,
                                           time_step,
                                           first_tstep,
                                           last_tstep,
                                           depth=gauge_depth,
                                           ha=ha,
                                           ua=ua,
                                           va=va)

        # Write order file selecting (and ordering) a subset of the gauges
        file_handle, order_base_name = tempfile.mkstemp("")
        os.close(file_handle)
        os.remove(order_base_name)
        d=","
        order_file=order_base_name+'order.txt'
        fid=open(order_file,'w')

        # Write Header
        header='index, longitude, latitude\n'
        fid.write(header)
        indices=[3,0,1]
        for i in indices:
            line=str(i)+d+str(lat_long_points[i][1])+d+\
                str(lat_long_points[i][0])+"\n"
            fid.write(line)
        fid.close()

        sts_file=base_name
        urs2sts(base_name,
                basename_out=sts_file,
                ordering_filename=order_file,
                mean_stage=tide,
                verbose=False)
        self.delete_mux(files)

        boundary_polygon = create_sts_boundary(base_name)

        os.remove(order_file)

        # Append the remaining part of the boundary polygon to be defined by
        # the user
        bounding_polygon_utm=[]
        for point in bounding_polygon:
            zone,easting,northing=redfearn(point[0],point[1])
            bounding_polygon_utm.append([easting,northing])

        boundary_polygon.append(bounding_polygon_utm[3])
        boundary_polygon.append(bounding_polygon_utm[4])

        plot=False
        if plot:
            from pylab import plot,show,axis
            boundary_polygon=ensure_numeric(boundary_polygon)
            bounding_polygon_utm=ensure_numeric(bounding_polygon_utm)
            #lat_long_points=ensure_numeric(lat_long_points)
            #plot(lat_long_points[:,0],lat_long_points[:,1],'o')
            plot(boundary_polygon[:,0], boundary_polygon[:,1],'d')
            plot(bounding_polygon_utm[:,0],bounding_polygon_utm[:,1],'o')
            show()

        assert num.allclose(bounding_polygon_utm,boundary_polygon)


        extent_res=1000000
        meshname = 'urs_test_mesh' + '.tsh'
        interior_regions=None
        boundary_tags={'ocean': [0,1], 'otherocean': [2,3,4]}

        # have to change boundary tags from last example because now bounding
        # polygon starts in different place.
        create_mesh_from_regions(boundary_polygon,boundary_tags=boundary_tags,
                                 maximum_triangle_area=extent_res,filename=meshname,
                                 interior_regions=interior_regions,verbose=False)

        domain_fbound = Domain(meshname)
        domain_fbound.set_quantity('stage', tide)
        Bf = File_boundary(sts_file+'.sts',
                           domain_fbound,
                           boundary_polygon=boundary_polygon)
        Br = Reflective_boundary(domain_fbound)

        domain_fbound.set_boundary({'ocean': Bf,'otherocean': Br})
        finaltime=time_step*(time_step_count-1)
        yieldstep=time_step
        temp_fbound=num.zeros(int(finaltime/yieldstep)+1,num.float)

        for i, t in enumerate(domain_fbound.evolve(yieldstep=yieldstep,
                                                   finaltime=finaltime,
                                                   skip_initial_step = False)):
            temp_fbound[i]=domain_fbound.quantities['stage'].centroid_values[2]


        # Reference run with the equivalent constant Dirichlet boundary
        domain_drchlt = Domain(meshname)
        domain_drchlt.set_quantity('stage', tide)
        Br = Reflective_boundary(domain_drchlt)
        Bd = Dirichlet_boundary([2.0+tide,220+10*tide,-220-10*tide])
        domain_drchlt.set_boundary({'ocean': Bd,'otherocean': Br})
        temp_drchlt=num.zeros(int(finaltime/yieldstep)+1,num.float)

        for i, t in enumerate(domain_drchlt.evolve(yieldstep=yieldstep,
                                                   finaltime=finaltime,
                                                   skip_initial_step = False)):
            temp_drchlt[i]=domain_drchlt.quantities['stage'].centroid_values[2]


        #print domain_fbound.quantities['stage'].vertex_values
        #print domain_drchlt.quantities['stage'].vertex_values

        assert num.allclose(temp_fbound,temp_drchlt)


        assert num.allclose(domain_fbound.quantities['stage'].vertex_values,
                            domain_drchlt.quantities['stage'].vertex_values)

        assert num.allclose(domain_fbound.quantities['xmomentum'].vertex_values,
                            domain_drchlt.quantities['xmomentum'].vertex_values)

        assert num.allclose(domain_fbound.quantities['ymomentum'].vertex_values,
                            domain_drchlt.quantities['ymomentum'].vertex_values)

        # Use known Dirichlet condition (if sufficient timesteps have been taken)

        #FIXME: What do these assertions test?
Also do they assume tide =03756 #print domain_fbound.quantities['stage'].vertex_values3757 #assert allclose(domain_drchlt.quantities['stage'].vertex_values[6], 2)3758 #assert allclose(domain_fbound.quantities['stage'].vertex_values[6], 2)3759 3760 3761 3762 try:3763 os.remove(sts_file+'.sts')3764 except:3765 # Windoze can't remove this file for some reason3766 pass3767 3768 os.remove(meshname)3769 3770 1678 3771 1679 … … 4181 2089 #### END TESTS FOR URS 2 SWW ### 4182 2090 4183 #### TESTS URS UNGRIDDED 2 SWW ###4184 def test_URS_points_needed(self):4185 4186 ll_lat = -21.54187 ll_long = 114.54188 grid_spacing = 1./60.4189 lat_amount = 304190 long_amount = 304191 zone = 504192 4193 boundary_polygon = [[250000,7660000],[280000,7660000],4194 [280000,7630000],[250000,7630000]]4195 geo=URS_points_needed(boundary_polygon, zone,4196 ll_lat, ll_long, grid_spacing,4197 lat_amount, long_amount,4198 verbose=self.verbose)4199 # to test this geo, can info from it be transfered onto the boundary4200 # poly?4201 #Maybe see if I can fit the data to the polygon - have to represent4202 # the poly as points though.4203 #geo.export_points_file("results.txt", as_lat_long=True)4204 results = frozenset(geo.get_data_points(as_lat_long=True))4205 #print 'results',results4206 4207 # These are a set of points that have to be in results4208 points = []4209 for i in range(18):4210 lat = -21.0 - 8./60 - grid_spacing * i4211 points.append((lat,degminsec2decimal_degrees(114,35,0)))4212 points.append((lat,degminsec2decimal_degrees(114,36,0)))4213 points.append((lat,degminsec2decimal_degrees(114,52,0)))4214 points.append((lat,degminsec2decimal_degrees(114,53,0)))4215 geo_answer = Geospatial_data(data_points=points,4216 points_are_lats_longs=True)4217 #geo_answer.export_points_file("answer.txt", as_lat_long=True)4218 answer = frozenset(points)4219 4220 outs = answer.difference(results)4221 #print "outs", outs4222 # This doesn't work. 
Though visualising the results shows that4223 # it is correct.4224 #assert answer.issubset(results)4225 # this is why;4226 #point (-21.133333333333333, 114.58333333333333)4227 #result (-21.133333332232368, 114.58333333300342)4228 4229 for point in points:4230 found = False4231 for result in results:4232 if num.allclose(point, result):4233 found = True4234 break4235 if not found:4236 assert False4237 4238 4239 def dave_test_URS_points_needed(self):4240 ll_lat = -21.516674241 ll_long = 114.516674242 grid_spacing = 2./60.4243 lat_amount = 154244 long_amount = 154245 4246 4247 boundary_polygon = [[250000,7660000],[280000,7660000],4248 [280000,7630000],[250000,7630000]]4249 URS_points_needed_to_file('a_test_example',boundary_polygon,4250 ll_lat, ll_long, grid_spacing,4251 lat_amount, long_amount,4252 verbose=self.verbose)4253 4254 def X_test_URS_points_neededII(self):4255 ll_lat = -21.54256 ll_long = 114.54257 grid_spacing = 1./60.4258 lat_amount = 304259 long_amount = 304260 4261 # change this so lats and longs are inputed, then converted4262 4263 #boundary_polygon = [[7660000,250000],[7660000,280000],4264 # [7630000,280000],[7630000,250000]]4265 URS_points_needed(boundary_polygon, ll_lat, ll_long, grid_spacing,4266 lat_amount, long_amount,4267 verbose=self.verbose)4268 4269 def test_URS_points_northern_hemisphere(self):4270 4271 LL_LAT = 8.04272 LL_LONG = 97.04273 GRID_SPACING = 2.0/60.04274 LAT_AMOUNT = 24275 LONG_AMOUNT = 24276 ZONE = 474277 4278 #4279 points = []4280 for i in range(2):4281 for j in range(2):4282 points.append((degminsec2decimal_degrees(8,1+i*2,0),4283 degminsec2decimal_degrees(97,1+i*2,0)))4284 #print "points", points4285 geo_poly = Geospatial_data(data_points=points,4286 points_are_lats_longs=True)4287 poly_lat_long = geo_poly.get_data_points(as_lat_long=False,4288 isSouthHemisphere=False)4289 #print "seg_lat_long", poly_lat_long4290 4291 # geo=URS_points_needed_to_file('test_example_poly3', poly_lat_long,4292 # ZONE,4293 # LL_LAT, LL_LONG,4294 # 
GRID_SPACING,4295 # LAT_AMOUNT, LONG_AMOUNT,4296 # isSouthernHemisphere=False,4297 # export_csv=True,4298 # verbose=self.verbose)4299 4300 geo=URS_points_needed(poly_lat_long,4301 ZONE,4302 LL_LAT, LL_LONG,4303 GRID_SPACING,4304 LAT_AMOUNT, LONG_AMOUNT,4305 isSouthHemisphere=False,4306 verbose=self.verbose)4307 4308 results = frozenset(geo.get_data_points(as_lat_long=True,4309 isSouthHemisphere=False))4310 #print 'results',results4311 4312 # These are a set of points that have to be in results4313 points = []4314 for i in range(2):4315 for j in range(2):4316 points.append((degminsec2decimal_degrees(8,i*2,0),4317 degminsec2decimal_degrees(97,i*2,0)))4318 #print "answer points", points4319 answer = frozenset(points)4320 4321 for point in points:4322 found = False4323 for result in results:4324 if num.allclose(point, result):4325 found = True4326 break4327 if not found:4328 assert False4329 4330 4331 def covered_in_other_tests_test_URS_points_needed_poly1(self):4332 # Values used for FESA 2007 results4333 # domain in southern hemisphere zone 514334 LL_LAT = -50.04335 LL_LONG = 80.04336 GRID_SPACING = 2.0/60.04337 LAT_AMOUNT = 48004338 LONG_AMOUNT = 36004339 ZONE = 514340 4341 poly1 = [[296361.89, 8091928.62],4342 [429495.07,8028278.82],4343 [447230.56,8000674.05],4344 [429661.2,7982177.6],4345 [236945.9,7897453.16],4346 [183493.44,7942782.27],4347 [226583.04,8008058.96]]4348 4349 URS_points_needed_to_file('test_example_poly2', poly1,4350 ZONE,4351 LL_LAT, LL_LONG,4352 GRID_SPACING,4353 LAT_AMOUNT, LONG_AMOUNT,4354 verbose=self.verbose)4355 4356 4357 4358 def covered_in_other_tests_test_URS_points_needed_poly2(self):4359 # Values used for 2004 validation work4360 # domain in northern hemisphere zone 474361 LL_LAT = 0.04362 LL_LONG = 90.04363 GRID_SPACING = 2.0/60.04364 LAT_AMOUNT = (15-LL_LAT)/GRID_SPACING4365 LONG_AMOUNT = (100-LL_LONG)/GRID_SPACING4366 ZONE = 474367 4368 poly2 = [[419336.424,810100.845],4369 [342405.0281,711455.8026],4370 
[274649.9152,723352.9603],4371 [272089.092,972393.0131],4372 [347633.3754,968551.7784],4373 [427979.2022,885965.2313],4374 [427659.0993,875721.9386],4375 [429259.6138,861317.3083],4376 [436301.8775,840830.723]]4377 4378 URS_points_needed_to_file('test_example_poly2', poly2,4379 ZONE,4380 LL_LAT, LL_LONG,4381 GRID_SPACING,4382 LAT_AMOUNT, LONG_AMOUNT,4383 isSouthernHemisphere=False,4384 verbose=self.verbose)4385 4386 #### END TESTS URS UNGRIDDED 2 SWW ###4387 def test_Urs_points(self):4388 time_step_count = 34389 time_step = 24390 lat_long_points =[(-21.5,114.5),(-21.5,115),(-21.,115)]4391 base_name, files = self.write_mux(lat_long_points,4392 time_step_count, time_step)4393 for file in files:4394 urs = Urs_points(file)4395 assert time_step_count == urs.time_step_count4396 assert time_step == urs.time_step4397 4398 for lat_lon, dep in map(None, lat_long_points, urs.lonlatdep):4399 _ , e, n = redfearn(lat_lon[0], lat_lon[1])4400 assert num.allclose(n, dep[2])4401 4402 count = 04403 for slice in urs:4404 count += 14405 #print slice4406 for lat_lon, quantity in map(None, lat_long_points, slice):4407 _ , e, n = redfearn(lat_lon[0], lat_lon[1])4408 #print "quantity", quantity4409 #print "e", e4410 #print "n", n4411 if file[-5:] == WAVEHEIGHT_MUX_LABEL[-5:] or \4412 file[-5:] == NORTH_VELOCITY_LABEL[-5:] :4413 assert num.allclose(e, quantity)4414 if file[-5:] == EAST_VELOCITY_LABEL[-5:]:4415 assert num.allclose(n, quantity)4416 assert count == time_step_count4417 4418 self.delete_mux(files)4419 4420 def test_urs_ungridded2sww (self):4421 4422 #Zone: 504423 #Easting: 240992.578 Northing: 7620442.4724424 #Latitude: -21 30 ' 0.00000 '' Longitude: 114 30 ' 0.00000 ''4425 lat_long = [[-21.5,114.5],[-21,114.5],[-21,115]]4426 time_step_count = 24427 time_step = 4004428 tide = 90000004429 base_name, files = self.write_mux(lat_long,4430 time_step_count, time_step)4431 urs_ungridded2sww(base_name, mean_stage=tide,4432 verbose=self.verbose)4433 4434 # now I want to check the sww 
file ...4435 sww_file = base_name + '.sww'4436 4437 #Let's interigate the sww file4438 # Note, the sww info is not gridded. It is point data.4439 fid = NetCDFFile(sww_file)4440 4441 # Make x and y absolute4442 x = fid.variables['x'][:]4443 y = fid.variables['y'][:]4444 geo_reference = Geo_reference(NetCDFObject=fid)4445 points = geo_reference.get_absolute(map(None, x, y))4446 points = ensure_numeric(points)4447 x = points[:,0]4448 y = points[:,1]4449 4450 #Check that first coordinate is correctly represented4451 #Work out the UTM coordinates for first point4452 zone, e, n = redfearn(lat_long[0][0], lat_long[0][1])4453 assert num.allclose([x[0],y[0]], [e,n])4454 4455 #Check the time vector4456 times = fid.variables['time'][:]4457 4458 times_actual = []4459 for i in range(time_step_count):4460 times_actual.append(time_step * i)4461 4462 assert num.allclose(ensure_numeric(times),4463 ensure_numeric(times_actual))4464 4465 #Check first value4466 stage = fid.variables['stage'][:]4467 xmomentum = fid.variables['xmomentum'][:]4468 ymomentum = fid.variables['ymomentum'][:]4469 elevation = fid.variables['elevation'][:]4470 assert num.allclose(stage[0,0], e +tide) #Meters4471 4472 4473 #Check the momentums - ua4474 #momentum = velocity*(stage-elevation)4475 # elevation = - depth4476 #momentum = velocity_ua *(stage+depth)4477 # = n*(e+tide+n) based on how I'm writing these files4478 #4479 answer_x = n*(e+tide+n)4480 actual_x = xmomentum[0,0]4481 #print "answer_x",answer_x4482 #print "actual_x",actual_x4483 assert num.allclose(answer_x, actual_x) #Meters4484 4485 #Check the momentums - va4486 #momentum = velocity*(stage-elevation)4487 # elevation = - depth4488 #momentum = velocity_va *(stage+depth)4489 # = e*(e+tide+n) based on how I'm writing these files4490 #4491 answer_y = -1*e*(e+tide+n)4492 actual_y = ymomentum[0,0]4493 #print "answer_y",answer_y4494 #print "actual_y",actual_y4495 assert num.allclose(answer_y, actual_y) #Meters4496 4497 # check the stage values, first 
time step.4498 # These arrays are equal since the Easting values were used as4499 # the stage4500 assert num.allclose(stage[0], x +tide) #Meters4501 # check the elevation values.4502 # -ve since urs measures depth, sww meshers height,4503 # these arrays are equal since the northing values were used as4504 # the elevation4505 assert num.allclose(-elevation, y) #Meters4506 4507 fid.close()4508 self.delete_mux(files)4509 os.remove(sww_file)4510 4511 def test_urs_ungridded2swwII (self):4512 4513 #Zone: 504514 #Easting: 240992.578 Northing: 7620442.4724515 #Latitude: -21 30 ' 0.00000 '' Longitude: 114 30 ' 0.00000 ''4516 lat_long = [[-21.5,114.5],[-21,114.5],[-21,115]]4517 time_step_count = 24518 time_step = 4004519 tide = 90000004520 geo_reference = Geo_reference(50, 3434543,34534543)4521 base_name, files = self.write_mux(lat_long,4522 time_step_count, time_step)4523 urs_ungridded2sww(base_name, mean_stage=tide, origin = geo_reference,4524 verbose=self.verbose)4525 4526 # now I want to check the sww file ...4527 sww_file = base_name + '.sww'4528 4529 #Let's interigate the sww file4530 # Note, the sww info is not gridded. 
It is point data.4531 fid = NetCDFFile(sww_file)4532 4533 # Make x and y absolute4534 x = fid.variables['x'][:]4535 y = fid.variables['y'][:]4536 geo_reference = Geo_reference(NetCDFObject=fid)4537 points = geo_reference.get_absolute(map(None, x, y))4538 points = ensure_numeric(points)4539 x = points[:,0]4540 y = points[:,1]4541 4542 #Check that first coordinate is correctly represented4543 #Work out the UTM coordinates for first point4544 zone, e, n = redfearn(lat_long[0][0], lat_long[0][1])4545 assert num.allclose([x[0],y[0]], [e,n])4546 4547 #Check the time vector4548 times = fid.variables['time'][:]4549 4550 times_actual = []4551 for i in range(time_step_count):4552 times_actual.append(time_step * i)4553 4554 assert num.allclose(ensure_numeric(times),4555 ensure_numeric(times_actual))4556 4557 #Check first value4558 stage = fid.variables['stage'][:]4559 xmomentum = fid.variables['xmomentum'][:]4560 ymomentum = fid.variables['ymomentum'][:]4561 elevation = fid.variables['elevation'][:]4562 assert num.allclose(stage[0,0], e +tide) #Meters4563 4564 #Check the momentums - ua4565 #momentum = velocity*(stage-elevation)4566 # elevation = - depth4567 #momentum = velocity_ua *(stage+depth)4568 # = n*(e+tide+n) based on how I'm writing these files4569 #4570 answer_x = n*(e+tide+n)4571 actual_x = xmomentum[0,0]4572 #print "answer_x",answer_x4573 #print "actual_x",actual_x4574 assert num.allclose(answer_x, actual_x) #Meters4575 4576 #Check the momentums - va4577 #momentum = velocity*(stage-elevation)4578 # elevation = - depth4579 #momentum = velocity_va *(stage+depth)4580 # = e*(e+tide+n) based on how I'm writing these files4581 #4582 answer_y = -1*e*(e+tide+n)4583 actual_y = ymomentum[0,0]4584 #print "answer_y",answer_y4585 #print "actual_y",actual_y4586 assert num.allclose(answer_y, actual_y) #Meters4587 4588 # check the stage values, first time step.4589 # These arrays are equal since the Easting values were used as4590 # the stage4591 assert num.allclose(stage[0], x 
+tide) #Meters4592 # check the elevation values.4593 # -ve since urs measures depth, sww meshers height,4594 # these arrays are equal since the northing values were used as4595 # the elevation4596 assert num.allclose(-elevation, y) #Meters4597 4598 fid.close()4599 self.delete_mux(files)4600 os.remove(sww_file)4601 4602 def test_urs_ungridded2swwIII (self):4603 4604 #Zone: 504605 #Easting: 240992.578 Northing: 7620442.4724606 #Latitude: -21 30 ' 0.00000 '' Longitude: 114 30 ' 0.00000 ''4607 lat_long = [[-21.5,114.5],[-21,114.5],[-21,115]]4608 time_step_count = 24609 time_step = 4004610 tide = 90000004611 base_name, files = self.write_mux(lat_long,4612 time_step_count, time_step)4613 urs_ungridded2sww(base_name, mean_stage=tide, origin =(50,23432,4343),4614 verbose=self.verbose)4615 4616 # now I want to check the sww file ...4617 sww_file = base_name + '.sww'4618 4619 #Let's interigate the sww file4620 # Note, the sww info is not gridded. It is point data.4621 fid = NetCDFFile(sww_file)4622 4623 # Make x and y absolute4624 x = fid.variables['x'][:]4625 y = fid.variables['y'][:]4626 geo_reference = Geo_reference(NetCDFObject=fid)4627 points = geo_reference.get_absolute(map(None, x, y))4628 points = ensure_numeric(points)4629 x = points[:,0]4630 y = points[:,1]4631 4632 #Check that first coordinate is correctly represented4633 #Work out the UTM coordinates for first point4634 zone, e, n = redfearn(lat_long[0][0], lat_long[0][1])4635 assert num.allclose([x[0],y[0]], [e,n])4636 4637 #Check the time vector4638 times = fid.variables['time'][:]4639 4640 times_actual = []4641 for i in range(time_step_count):4642 times_actual.append(time_step * i)4643 4644 assert num.allclose(ensure_numeric(times),4645 ensure_numeric(times_actual))4646 4647 #Check first value4648 stage = fid.variables['stage'][:]4649 xmomentum = fid.variables['xmomentum'][:]4650 ymomentum = fid.variables['ymomentum'][:]4651 elevation = fid.variables['elevation'][:]4652 assert num.allclose(stage[0,0], e +tide) 
#Meters4653 4654 #Check the momentums - ua4655 #momentum = velocity*(stage-elevation)4656 # elevation = - depth4657 #momentum = velocity_ua *(stage+depth)4658 # = n*(e+tide+n) based on how I'm writing these files4659 #4660 answer_x = n*(e+tide+n)4661 actual_x = xmomentum[0,0]4662 #print "answer_x",answer_x4663 #print "actual_x",actual_x4664 assert num.allclose(answer_x, actual_x) #Meters4665 4666 #Check the momentums - va4667 #momentum = velocity*(stage-elevation)4668 # elevation = - depth4669 #momentum = velocity_va *(stage+depth)4670 # = e*(e+tide+n) based on how I'm writing these files4671 #4672 answer_y = -1*e*(e+tide+n)4673 actual_y = ymomentum[0,0]4674 #print "answer_y",answer_y4675 #print "actual_y",actual_y4676 assert num.allclose(answer_y, actual_y) #Meters4677 4678 # check the stage values, first time step.4679 # These arrays are equal since the Easting values were used as4680 # the stage4681 assert num.allclose(stage[0], x +tide) #Meters4682 # check the elevation values.4683 # -ve since urs measures depth, sww meshers height,4684 # these arrays are equal since the northing values were used as4685 # the elevation4686 assert num.allclose(-elevation, y) #Meters4687 4688 fid.close()4689 self.delete_mux(files)4690 os.remove(sww_file)4691 4692 4693 def test_urs_ungridded_hole (self):4694 4695 #Zone: 504696 #Easting: 240992.578 Northing: 7620442.4724697 #Latitude: -21 30 ' 0.00000 '' Longitude: 114 30 ' 0.00000 ''4698 lat_long = [[-20.5, 114.5],4699 [-20.6, 114.6],4700 [-20.5, 115.],4701 [-20.6, 115.],4702 [-20.5, 115.5],4703 [-20.6, 115.4],4704 4705 [-21., 114.5],4706 [-21., 114.6],4707 [-21., 115.5],4708 [-21., 115.4],4709 4710 [-21.5, 114.5],4711 [-21.4, 114.6],4712 [-21.5, 115.],4713 [-21.4, 115.],4714 [-21.5, 115.5],4715 [-21.4, 115.4]4716 ]4717 time_step_count = 64718 time_step = 1004719 tide = 90000004720 base_name, files = self.write_mux(lat_long,4721 time_step_count, time_step)4722 #Easting: 292110.784 Northing: 7676551.7104723 #Latitude: -21 0 ' 
0.00000 '' Longitude: 115 0 ' 0.00000 ''4724 4725 urs_ungridded2sww(base_name, mean_stage=-240992.0,4726 hole_points_UTM=[ 292110.784, 7676551.710 ],4727 verbose=self.verbose)4728 4729 # now I want to check the sww file ...4730 sww_file = base_name + '.sww'4731 4732 #Let's interigate the sww file4733 # Note, the sww info is not gridded. It is point data.4734 fid = NetCDFFile(sww_file)4735 4736 number_of_volumes = fid.variables['volumes']4737 #print "number_of_volumes",len(number_of_volumes)4738 assert num.allclose(16, len(number_of_volumes))4739 4740 fid.close()4741 self.delete_mux(files)4742 #print "sww_file", sww_file4743 os.remove(sww_file)4744 4745 def test_urs_ungridded_holeII(self):4746 4747 # Check that if using a hole that returns no triangles,4748 # urs_ungridded2sww removes the hole label.4749 4750 lat_long = [[-20.5, 114.5],4751 [-20.6, 114.6],4752 [-20.5, 115.5],4753 [-20.6, 115.4],4754 4755 4756 [-21.5, 114.5],4757 [-21.4, 114.6],4758 [-21.5, 115.5],4759 [-21.4, 115.4]4760 ]4761 time_step_count = 64762 time_step = 1004763 tide = 90000004764 base_name, files = self.write_mux(lat_long,4765 time_step_count, time_step)4766 #Easting: 292110.784 Northing: 7676551.7104767 #Latitude: -21 0 ' 0.00000 '' Longitude: 115 0 ' 0.00000 ''4768 4769 urs_ungridded2sww(base_name, mean_stage=-240992.0,4770 hole_points_UTM=[ 292110.784, 7676551.710 ],4771 verbose=self.verbose)4772 4773 # now I want to check the sww file ...4774 sww_file = base_name + '.sww'4775 fid = NetCDFFile(sww_file)4776 4777 volumes = fid.variables['volumes']4778 #print "number_of_volumes",len(volumes)4779 4780 fid.close()4781 os.remove(sww_file)4782 4783 urs_ungridded2sww(base_name, mean_stage=-240992.0)4784 4785 # now I want to check the sww file ...4786 sww_file = base_name + '.sww'4787 fid = NetCDFFile(sww_file)4788 4789 volumes_again = fid.variables['volumes']4790 #print "number_of_volumes",len(volumes_again)4791 assert num.allclose(len(volumes_again),4792 len(volumes))4793 fid.close()4794 
os.remove(sww_file)4795 self.delete_mux(files)4796 4797 def test_urs_ungridded2sww_mint_maxt (self):4798 4799 #Zone: 504800 #Easting: 240992.578 Northing: 7620442.4724801 #Latitude: -21 30 ' 0.00000 '' Longitude: 114 30 ' 0.00000 ''4802 lat_long = [[-21.5,114.5],[-21,114.5],[-21,115]]4803 time_step_count = 64804 time_step = 1004805 tide = 90000004806 base_name, files = self.write_mux(lat_long,4807 time_step_count, time_step)4808 urs_ungridded2sww(base_name, mean_stage=tide, origin =(50,23432,4343),4809 mint=101, maxt=500,4810 verbose=self.verbose)4811 4812 # now I want to check the sww file ...4813 sww_file = base_name + '.sww'4814 4815 #Let's interigate the sww file4816 # Note, the sww info is not gridded. It is point data.4817 fid = NetCDFFile(sww_file)4818 4819 # Make x and y absolute4820 x = fid.variables['x'][:]4821 y = fid.variables['y'][:]4822 geo_reference = Geo_reference(NetCDFObject=fid)4823 points = geo_reference.get_absolute(map(None, x, y))4824 points = ensure_numeric(points)4825 x = points[:,0]4826 y = points[:,1]4827 4828 #Check that first coordinate is correctly represented4829 #Work out the UTM coordinates for first point4830 zone, e, n = redfearn(lat_long[0][0], lat_long[0][1])4831 assert num.allclose([x[0],y[0]], [e,n])4832 4833 #Check the time vector4834 times = fid.variables['time'][:]4835 4836 times_actual = [0,100,200,300]4837 4838 assert num.allclose(ensure_numeric(times),4839 ensure_numeric(times_actual))4840 4841 #Check first value4842 stage = fid.variables['stage'][:]4843 xmomentum = fid.variables['xmomentum'][:]4844 ymomentum = fid.variables['ymomentum'][:]4845 elevation = fid.variables['elevation'][:]4846 assert num.allclose(stage[0,0], e +tide) #Meters4847 4848 #Check the momentums - ua4849 #momentum = velocity*(stage-elevation)4850 # elevation = - depth4851 #momentum = velocity_ua *(stage+depth)4852 # = n*(e+tide+n) based on how I'm writing these files4853 #4854 answer_x = n*(e+tide+n)4855 actual_x = xmomentum[0,0]4856 #print 
"answer_x",answer_x4857 #print "actual_x",actual_x4858 assert num.allclose(answer_x, actual_x) #Meters4859 4860 #Check the momentums - va4861 #momentum = velocity*(stage-elevation)4862 # elevation = - depth4863 #momentum = velocity_va *(stage+depth)4864 # = e*(e+tide+n) based on how I'm writing these files4865 #4866 answer_y = -1*e*(e+tide+n)4867 actual_y = ymomentum[0,0]4868 #print "answer_y",answer_y4869 #print "actual_y",actual_y4870 assert num.allclose(answer_y, actual_y) #Meters4871 4872 # check the stage values, first time step.4873 # These arrays are equal since the Easting values were used as4874 # the stage4875 assert num.allclose(stage[0], x +tide) #Meters4876 # check the elevation values.4877 # -ve since urs measures depth, sww meshers height,4878 # these arrays are equal since the northing values were used as4879 # the elevation4880 assert num.allclose(-elevation, y) #Meters4881 4882 fid.close()4883 self.delete_mux(files)4884 os.remove(sww_file)4885 4886 def test_urs_ungridded2sww_mint_maxtII (self):4887 4888 #Zone: 504889 #Easting: 240992.578 Northing: 7620442.4724890 #Latitude: -21 30 ' 0.00000 '' Longitude: 114 30 ' 0.00000 ''4891 lat_long = [[-21.5,114.5],[-21,114.5],[-21,115]]4892 time_step_count = 64893 time_step = 1004894 tide = 90000004895 base_name, files = self.write_mux(lat_long,4896 time_step_count, time_step)4897 urs_ungridded2sww(base_name, mean_stage=tide, origin =(50,23432,4343),4898 mint=0, maxt=100000,4899 verbose=self.verbose)4900 4901 # now I want to check the sww file ...4902 sww_file = base_name + '.sww'4903 4904 #Let's interigate the sww file4905 # Note, the sww info is not gridded. 
It is point data.4906 fid = NetCDFFile(sww_file)4907 4908 # Make x and y absolute4909 geo_reference = Geo_reference(NetCDFObject=fid)4910 points = geo_reference.get_absolute(map(None, fid.variables['x'][:],4911 fid.variables['y'][:]))4912 points = ensure_numeric(points)4913 x = points[:,0]4914 4915 #Check the time vector4916 times = fid.variables['time'][:]4917 4918 times_actual = [0,100,200,300,400,500]4919 assert num.allclose(ensure_numeric(times),4920 ensure_numeric(times_actual))4921 4922 #Check first value4923 stage = fid.variables['stage'][:]4924 assert num.allclose(stage[0], x +tide)4925 4926 fid.close()4927 self.delete_mux(files)4928 os.remove(sww_file)4929 4930 def test_urs_ungridded2sww_mint_maxtIII (self):4931 4932 #Zone: 504933 #Easting: 240992.578 Northing: 7620442.4724934 #Latitude: -21 30 ' 0.00000 '' Longitude: 114 30 ' 0.00000 ''4935 lat_long = [[-21.5,114.5],[-21,114.5],[-21,115]]4936 time_step_count = 64937 time_step = 1004938 tide = 90000004939 base_name, files = self.write_mux(lat_long,4940 time_step_count, time_step)4941 try:4942 urs_ungridded2sww(base_name, mean_stage=tide,4943 origin =(50,23432,4343),4944 mint=301, maxt=399,4945 verbose=self.verbose)4946 except:4947 pass4948 else:4949 self.failUnless(0 ==1, 'Bad input did not throw exception error!')4950 4951 self.delete_mux(files)4952 4953 def test_urs_ungridded2sww_mint_maxt_bad (self):4954 #Zone: 504955 #Easting: 240992.578 Northing: 7620442.4724956 #Latitude: -21 30 ' 0.00000 '' Longitude: 114 30 ' 0.00000 ''4957 lat_long = [[-21.5,114.5],[-21,114.5],[-21,115]]4958 time_step_count = 64959 time_step = 1004960 tide = 90000004961 base_name, files = self.write_mux(lat_long,4962 time_step_count, time_step)4963 try:4964 urs_ungridded2sww(base_name, mean_stage=tide,4965 origin =(50,23432,4343),4966 mint=301, maxt=301,4967 verbose=self.verbose)4968 except:4969 pass4970 else:4971 self.failUnless(0 ==1, 'Bad input did not throw exception error!')4972 4973 self.delete_mux(files)4974 4975 4976 def 
test_URS_points_needed_and_urs_ungridded2sww(self):4977 # This doesn't actually check anything4978 #4979 ll_lat = -21.54980 ll_long = 114.54981 grid_spacing = 1./60.4982 lat_amount = 304983 long_amount = 304984 time_step_count = 24985 time_step = 4004986 tide = -2000004987 zone = 504988 4989 boundary_polygon = [[250000,7660000],[280000,7660000],4990 [280000,7630000],[250000,7630000]]4991 geo=URS_points_needed(boundary_polygon, zone,4992 ll_lat, ll_long, grid_spacing,4993 lat_amount, long_amount,4994 verbose=self.verbose)4995 lat_long = geo.get_data_points(as_lat_long=True)4996 base_name, files = self.write_mux(lat_long,4997 time_step_count, time_step)4998 urs_ungridded2sww(base_name, mean_stage=tide,4999 verbose=self.verbose)5000 self.delete_mux(files)5001 os.remove( base_name + '.sww')5002 5003 def cache_test_URS_points_needed_and_urs_ungridded2sww(self):5004 5005 ll_lat = -21.55006 ll_long = 114.55007 grid_spacing = 1./60.5008 lat_amount = 305009 long_amount = 305010 time_step_count = 25011 time_step = 4005012 tide = -2000005013 zone = 505014 5015 boundary_polygon = [[250000,7660000],[270000,7650000],5016 [280000,7630000],[250000,7630000]]5017 geo=URS_points_needed(boundary_polygon, zone,5018 ll_lat, ll_long, grid_spacing,5019 lat_amount, long_amount, use_cache=True,5020 verbose=True)5021 5022 def visual_test_URS_points_needed_and_urs_ungridded2sww(self):5023 5024 ll_lat = -21.55025 ll_long = 114.55026 grid_spacing = 1./60.5027 lat_amount = 305028 long_amount = 305029 time_step_count = 25030 time_step = 4005031 tide = -2000005032 zone = 505033 5034 boundary_polygon = [[250000,7660000],[270000,7650000],5035 [280000,7630000],[250000,7630000]]5036 geo=URS_points_needed(boundary_polygon, zone,5037 ll_lat, ll_long, grid_spacing,5038 lat_amount, long_amount)5039 lat_long = geo.get_data_points(as_lat_long=True)5040 base_name, files = self.write_mux(lat_long,5041 time_step_count, time_step)5042 urs_ungridded2sww(base_name, mean_stage=tide)5043 self.delete_mux(files)5044 
os.remove( base_name + '.sww')5045 # extend this so it interpolates onto the boundary.5046 # have it fail if there is NaN5047 2091 5048 2092 def test_triangulation(self): -
trunk/anuga_core/source/anuga/shallow_water/test_shallow_water_domain.py
r7736 r7769 1 1 #!/usr/bin/env python 2 2 3 import unittest, os 3 import unittest, os, time 4 4 import os.path 5 5 from math import pi, sqrt … … 7 7 8 8 from anuga.shallow_water import Domain 9 10 from Scientific.IO.NetCDF import NetCDFFile 11 from anuga.file.sww import extent_sww 9 12 10 13 from anuga.config import g, epsilon … … 14 17 from anuga.coordinate_transforms.geo_reference import Geo_reference 15 18 from anuga.geospatial_data.geospatial_data import Geospatial_data 16 from anuga.abstract_2d_finite_volumes.mesh_factory import rectangular_cross 19 from anuga.abstract_2d_finite_volumes.mesh_factory import rectangular_cross, \ 20 rectangular 17 21 from anuga.abstract_2d_finite_volumes.quantity import Quantity 18 22 from anuga.shallow_water.forcing import Inflow, Cross_section … … 1465 1469 This run tries it with georeferencing and with elevation = -1 1466 1470 """ 1467 1468 import time, os1469 from Scientific.IO.NetCDF import NetCDFFile1470 from mesh_factory import rectangular1471 1471 1472 1472 # Create basic mesh (20m x 3m) … … 5262 5262 was in March-April 2007 5263 5263 """ 5264 import time, os5265 from Scientific.IO.NetCDF import NetCDFFile5266 from data_manager import extent_sww5267 from mesh_factory import rectangular5268 5264 5269 5265 # Create basic mesh
Note: See TracChangeset
for help on using the changeset viewer.