Changeset 1360
- Timestamp: May 9, 2005, 10:03:30 PM
- Location: inundation/ga/storm_surge
- Files: 7 edited
inundation/ga/storm_surge/pyvolution/data_manager.py (r1355 -> r1360)

    Nearly every hunk in this file is whitespace cleanup: trailing spaces are
    stripped from lines in the sww writer methods (store_connectivity and
    store_timestep), sww2asc, convert_dem_from_ascii2netcdf, ferret2sww,
    sww2domain, weed and the obsolete reading routines, with no change in
    behaviour.

    The largest hunk reindents the guarded open of the existing NetCDF file
    in store_timestep; in r1360 the block reads:

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying again'
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise msg
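    The retry loop is worth a standalone illustration.  Below is a minimal
    sketch of the same pattern under the module's own assumptions (NetCDFFile
    from Scientific.IO.NetCDF, sleep from time); the helper name
    open_for_append is hypothetical, and raising IOError instead of a bare
    string is a small liberty taken for the example.

        from time import sleep

        def open_for_append(filename, max_retries=10, delay=1.0):
            """Open a NetCDF file for appending, retrying while a reader holds it."""
            from Scientific.IO.NetCDF import NetCDFFile

            for attempt in range(max_retries):
                try:
                    return NetCDFFile(filename, 'a')   # append to existing file
                except IOError:
                    # Another process may be reading the file - wait and retry
                    sleep(delay)
            raise IOError('File %s could not be opened for append' % filename)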
inundation/ga/storm_surge/pyvolution/domain.py (r1290 -> r1360)

    The generic Domain constructor gains a use_inscribed_circle keyword
    (default False) and forwards it to Mesh.__init__:

        def __init__(self, coordinates, vertices, boundary = None,
                     conserved_quantities = None, other_quantities = None,
                     tagged_elements = None, geo_reference = None, use_inscribed_circle=False):

            Mesh.__init__(self, coordinates, vertices, boundary,
                          tagged_elements, geo_reference, use_inscribed_circle)

    Previously the keyword was absent and Mesh.__init__ was called without it.
inundation/ga/storm_surge/pyvolution/mesh.py (r1358 -> r1360)

    The per-triangle radius computed from the inscribed circle is corrected.
    With a, b and c the side lengths computed just above (for example
    c = sqrt((x2-x0)**2+(y2-y0)**2)):

        Before (r1358):  self.radii[i] = self.areas[i]/(2*(a+b+c))
        After  (r1360):  self.radii[i] = 2.0*self.areas[i]/(a+b+c)
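    Read as the inradius of triangle i, the corrected expression matches the
    standard identity r = A/s with s = (a+b+c)/2, i.e. r = 2A/(a+b+c); the
    old expression returned a quarter of that value.  A quick check in plain
    Python, using the 3-4-5 right triangle whose inradius is exactly 1:

        # Sanity check of the corrected radius formula on a 3-4-5 triangle.
        a, b, c = 3.0, 4.0, 5.0
        area = 0.5*3.0*4.0                  # = 6.0

        r_new = 2.0*area/(a + b + c)        # formula after this changeset -> 1.0
        r_old = area/(2*(a + b + c))        # previous expression          -> 0.25

        assert abs(r_new - 1.0) < 1e-12
        assert abs(r_old - 0.25) < 1e-12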
inundation/ga/storm_surge/pyvolution/netherlands.py (r1295 -> r1360)

    The circular obstacle in the Weir bed function is removed; these lines
    are deleted:

        if (x[i] - 0.72)**2 + (y[i] - 0.4)**2 < 0.05**2:# or\
           #x[i] > 0.75 and x[i] < 0.9 and y[i] > 0.35 and y[i] < 0.45:
            z[i] = -x[i]/20+0.4

    The remaining hunks are small parameter changes:

    - the final mesh size assignment N = 40 becomes N = 60 (the earlier
      N = 130 and N = 600 lines are unchanged)
    - domain = Domain(points, vertices, boundary) becomes
      domain = Domain(points, vertices, boundary, use_inscribed_circle=True)
    - inflow_stage = 0.4 becomes inflow_stage = 0.1
    - domain.visualise_color_stage = True becomes False
inundation/ga/storm_surge/pyvolution/shallow_water.py (r1295 -> r1360)

    As in domain.py, the shallow water Domain constructor gains the
    use_inscribed_circle keyword and passes it on to the generic domain:

        def __init__(self, coordinates, vertices, boundary = None,
                     tagged_elements = None, geo_reference = None,
                     use_inscribed_circle=False):

            conserved_quantities = ['stage', 'xmomentum', 'ymomentum']
            ...
            Generic_domain.__init__(self, coordinates, vertices, boundary,
                                    conserved_quantities, other_quantities,
                                    tagged_elements, geo_reference, use_inscribed_circle)
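    Taken together with the mesh.py and domain.py hunks above, the new
    keyword lets a caller ask for per-triangle radii based on the inscribed
    circle when building a shallow water domain, which is what netherlands.py
    now does.  A hedged usage sketch follows; the rectangular helper is
    assumed to come from the project's mesh_factory module (it appears in the
    tests below but is not part of this changeset):

        # Hypothetical usage of the new keyword on a small rectangular mesh.
        from mesh_factory import rectangular
        from shallow_water import Domain

        points, vertices, boundary = rectangular(10, 10)

        # Per-triangle radii stored on the mesh are now taken from the
        # inscribed circle of each triangle.
        domain = Domain(points, vertices, boundary, use_inscribed_circle=True)
        domain.check_integrity()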
inundation/ga/storm_surge/pyvolution/test_data_manager.py (r1201 -> r1360)

    Whitespace-only cleanup throughout the test suite: trailing spaces are
    stripped and a few blank lines are added or removed in the sww storage,
    sww2asc export, ferret2sww and sww2domain tests.  No change to the test
    logic is visible in these hunks.
Note: See TracChangeset for help on using the changeset viewer.