Changeset 7744
- Timestamp: May 25, 2010, 12:47:58 PM
- Location: anuga_core/source/anuga
- Files: 2 added, 5 edited
anuga_core/source/anuga/file_conversion/test_file_conversion.py
r7743 → r7744

Two test methods are moved in from test_data_manager.py. The first, test_read_asc, is added verbatim:

    def test_read_asc(self):
        """Test conversion from dem in ascii format to native NetCDF format
        """

        import time, os

        from file_conversion import _read_asc

        # Write test asc file
        filename = tempfile.mktemp(".000")
        fid = open(filename, 'w')
        fid.write("""ncols 7
nrows 4
xllcorner 2000.5
yllcorner 3000.5
cellsize 25
NODATA_value -9999
97.921 99.285 125.588 180.830 258.645 342.872 415.836
473.157 514.391 553.893 607.120 678.125 777.283 883.038
984.494 1040.349 1008.161 900.738 730.882 581.430 514.980
502.645 516.230 504.739 450.604 388.500 338.097 514.980
""")
        fid.close()

        bath_metadata, grid = _read_asc(filename, verbose=self.verbose)
        self.failUnless(bath_metadata['xllcorner'] == 2000.5, 'Failed')
        self.failUnless(bath_metadata['yllcorner'] == 3000.5, 'Failed')
        self.failUnless(bath_metadata['cellsize'] == 25, 'Failed')
        self.failUnless(bath_metadata['NODATA_value'] == -9999, 'Failed')
        self.failUnless(grid[0][0] == 97.921, 'Failed')
        self.failUnless(grid[3][6] == 514.980, 'Failed')

        os.remove(filename)

The second addition, under the banner "#### TESTS FOR URS 2 SWW ###", is the create_mux(self, points_num=None) helper. It writes one URS mux binary file per quantity (HA, UA, VA), using the suffixes WAVEHEIGHT_MUX_LABEL, EAST_VELOCITY_LABEL and NORTH_VELOCITY_LABEL appended to a temporary base name. The points form a 4x4 grid of longitudes [150.66667, 150.83334, 151., 151.16667] and latitudes [-34.5, -34.33333, -34.16667, -34] (latitude varying fastest); each point is converted with redfearn(), the northing supplying the depth column and the easting/northing values seeding the HA, UA and VA series. Each file begins with the packed point count, time-step count (3) and time step (0.5 s), followed by the lon/lat/depth triples and then, for each time step, one float per point. The helper returns the base name and the list of files written.
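Because create_mux is only summarised above, here is a minimal, self-contained sketch of the same binary layout. The name write_mux, the file name and the sample data are illustrative only; the real helper uses the WAVEHEIGHT_MUX_LABEL / EAST_VELOCITY_LABEL / NORTH_VELOCITY_LABEL suffixes and redfearn-derived values:

    import struct
    import tempfile

    def write_mux(filename, lonlatdeps, q_series, time_step=0.5):
        """Write one URS-style mux file: a small header, the lon/lat/depth
        triples, then one float per point per time step."""
        points_num = len(lonlatdeps)
        time_step_count = len(q_series)
        with open(filename, 'wb') as f:
            f.write(struct.pack('i', points_num))
            f.write(struct.pack('i', time_step_count))
            f.write(struct.pack('f', time_step))
            for lon, lat, dep in lonlatdeps:       # location block
                f.write(struct.pack('3f', lon, lat, dep))
            for values in q_series:                # one record per time step
                for v in values:
                    f.write(struct.pack('f', v))

    # Example: two points, three time steps of a single quantity
    points = [(150.7, -34.5, 10.0), (150.8, -34.3, 12.0)]
    series = [[1.0, 2.0]] * 3
    write_mux(tempfile.mktemp('-z-mux'), points, series)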
anuga_core/source/anuga/shallow_water/data_manager.py
r7743 → r7744

Added import (sww2dem now lives in the file_conversion package):

    from anuga.load_mesh.loadASCII import export_mesh_file
    from anuga.geometry.polygon import intersection
  + from anuga.file_conversion.sww2dem import sww2dem

Removed along with it:

  - The module-level defaults and formula mappings used by sww2dem():

        DEFAULT_BLOCK_SIZE = 10000   # default block size for sww2dem()

        quantity_formula = {
            'momentum': '(xmomentum**2 + ymomentum**2)**0.5',
            'depth':    'stage-elevation',
            'speed':    '(xmomentum**2 + ymomentum**2)**0.5/(stage-elevation+1.e-6/(stage-elevation))'}

  - load_csv_as_array(file_name): converts CSV files of the form

        time, discharge, velocity
        0.0, 1.2, 0.0
        0.1, 3.2, 1.1
        ...

    to a dictionary of numeric arrays (a thin wrapper around load_csv_as_dict); a short sketch of its behaviour follows.
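load_csv_as_array has moved rather than disappeared: test_data_manager.py (below) now imports it from anuga.utilities.file_utils. A minimal sketch of what it does, written directly against the csv module instead of ANUGA's load_csv_as_dict (so an assumption about shape, not the relocated code):

    import csv
    import numpy as np

    def load_csv_as_array(file_name):
        """Read a CSV file with a header row (e.g. time, discharge, velocity)
        and return a dict mapping each column name to a numpy float array."""
        with open(file_name, 'r') as f:
            reader = csv.DictReader(f)
            columns = {name.strip(): [] for name in reader.fieldnames}
            for row in reader:
                for name, value in row.items():
                    columns[name.strip()].append(float(value))
        return {name: np.array(values) for name, values in columns.items()}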
Also removed: sww2dem() itself, the heart of this changeset:

  - sww2dem(basename_in, basename_out=None, quantity=None, reduction=None,
            cellsize=10, number_of_decimal_places=None, NODATA_value=-9999,
            easting_min=None, easting_max=None, northing_min=None,
            northing_max=None, verbose=False, origin=None, datum='WGS84',
            format='ers', block_size=None)

    Reads an SWW file and converts it to a Digital Elevation Model (.asc or .ers). Example ASC output:

        ncols         3121
        nrows         1800
        xllcorner     722000
        yllcorner     5893000
        cellsize      25
        NODATA_value  -9999
        138.3698 137.4194 136.5062 135.5558 ...

    An accompanying file with the same basename and extension .prj fixes the UTM zone, datum, false northings and eastings. The number of decimal places can be limited to save disk space. 'quantity' must be the name of an existing quantity or an expression involving existing quantities (default 'elevation'); it is not a list. If 'reduction' is an index, the quantity is output at that timestep; if it is a built-in function (default max), it is used to reduce the quantity over all timesteps. 'format' is either 'asc' or 'ers', and 'block_size' sets the number of slices along the non-time axis processed per block (default DEFAULT_BLOCK_SIZE).

    The removed body validates the format, applies the defaults above (basename_out defaults to basename_in + '_<quantity>', number_of_decimal_places to 3), expands 'quantity' through quantity_formula, then opens <basename_in>.sww with Scientific.IO.NetCDF and reads x, y, volumes and time. The georeference (xllcorner, yllcorner, zone) comes from the file or from the 'origin' argument. It optionally logs SWW statistics (extent, times and the ranges of stage, xmomentum, ymomentum and elevation), collects the variables referenced by the quantity expression (get_vars_in_expression) and checks that they exist in the file; note that the missing-variable error message has three placeholders but only two arguments (quantity, swwfile), so building the message would itself fail. The result array is then filled block by block: each block slices the required variables (with or without a time axis), evaluates the expression with apply_expression_to_dictionary, and reduces over time either by index or with the reduction function. Finally the grid extent is derived from the easting/northing limits (the ymax assertion message wrongly says "greater than or equal to xmin" instead of ymin), ncols and nrows are computed from cellsize, the reference corner is shifted, and grid_points are laid out row by row, with the y ordering inverted for the 'ers' format.
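The block-by-block evaluation is the part most worth illustrating. A minimal sketch of the same idea, with plain NumPy arrays standing in for the NetCDF variables (names and shapes here are assumptions for illustration, not the SWW schema):

    import numpy as np

    def blocked_reduce(variables, expression_eval, reduction, n_points, block_size=10000):
        """Evaluate an expression over point blocks and reduce over time.

        variables      : dict name -> array, shape (n_times, n_points) or (n_points,)
        expression_eval: callable(dict of sliced arrays) -> array
        reduction      : a timestep index or a callable such as max
        """
        result = np.zeros(n_points, float)
        for start in range(0, n_points, block_size):
            end = min(start + block_size, n_points)
            # Slice every variable; only time-dependent ones keep the time axis.
            block = {name: (a[:, start:end] if a.ndim == 2 else a[start:end])
                     for name, a in variables.items()}
            res = expression_eval(block)
            if res.ndim == 2:                       # still has a time axis
                if callable(reduction):
                    res = np.array([reduction(res[:, k]) for k in range(res.shape[1])])
                else:
                    res = res[reduction, :]         # pick one timestep
            result[start:end] = res
        return result

    # Depth = stage - elevation, reduced with max over 3 timesteps and 5 points
    vars_ = {'stage': np.arange(15.0).reshape(3, 5), 'elevation': np.ones(5)}
    depth = blocked_reduce(vars_, lambda d: d['stage'] - d['elevation'], max, 5, block_size=2)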
    The gridding itself: vertex_points are assembled from x and y, lone vertices are removed (remove_lone_verts), an Interpolate object is built from the mesh, and the reduced result is interpolated onto grid_points. Every grid point outside the mesh boundary polygon (interp.mesh.get_boundary_polygon / outside_polygon) is set to NODATA_value. For format 'ers' the values are reshaped to (nrows, ncols) and written with ermapper_grids.write_ermapper_grid together with a header holding the datum, the projection ("UTM-<zone>", hard-wired for now, with FIXMEs about making it optional and detecting the coordinate type), coordinate type 'EN', the eastings/northings of the new lower-left corner, the null cell value, the x/y dimensions (cellsize) and the quantity name. For format 'asc' a .prj file is written first (Projection UTM, Zone, Datum, Zunits NO, Units METERS, Spheroid, Xshift 500000, Yshift 10000000, Parameters), followed by the .asc header: ncols, nrows, xllcorner, yllcorner, cellsize and NODATA_value.
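For reference, the .asc/.prj output the removed code produces can be reproduced with a small standalone writer; this is a sketch under the same conventions (row 0 of the grid is the southernmost row, written last), not the relocated anuga.file_conversion.sww2dem code:

    import numpy as np

    def write_asc_and_prj(basename, grid, xllcorner, yllcorner, cellsize,
                          zone, datum='WGS84', nodata=-9999, decimals=3):
        """Write <basename>.asc and <basename>.prj for a (nrows, ncols) grid."""
        nrows, ncols = grid.shape
        with open(basename + '.prj', 'w') as prj:
            prj.write('Projection UTM\nZone %d\nDatum %s\n' % (zone, datum))
            prj.write('Zunits NO\nUnits METERS\nSpheroid %s\n' % datum)
            prj.write('Xshift 500000\nYshift 10000000\nParameters\n')
        with open(basename + '.asc', 'w') as asc:
            asc.write('ncols %d\nnrows %d\n' % (ncols, nrows))
            asc.write('xllcorner %f\nyllcorner %f\n' % (xllcorner, yllcorner))
            asc.write('cellsize %f\nNODATA_value %d\n' % (cellsize, nodata))
            fmt = '%%.%de' % decimals
            for i in range(nrows):                  # rows written north to south
                row = grid[nrows - i - 1]
                asc.write(' '.join(fmt % v for v in row) + '\n')

    write_asc_and_prj('demo_elevation', np.zeros((5, 5)), 308500, 6189000, 0.25, 56)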
    Back in the removed code, the grid rows are then written north to south with num.savetxt, formatted to number_of_decimal_places, both files are closed and basename_out is returned.

Also removed from the "obsolete functions" section (both are deprecated wrappers that log a warning and call sww2dem with format 'asc' or 'ers'; note that as removed they pass number_of_decimal_places=number_of_decimal_places even though neither accepts that parameter, so calling them would raise a NameError):

  - sww2asc(basename_in, basename_out=None, quantity=None, reduction=None,
            cellsize=10, verbose=False, origin=None)
  - sww2ers(basename_in, basename_out=None, quantity=None, reduction=None,
            cellsize=10, verbose=False, origin=None, datum='WGS84')

Removed from the utility section further down:

  - start_screen_catcher(dir_name=None, myid='', numprocs='', extra_info='', verbose=True): redirects sys.stdout and sys.stderr to per-process screen_output*.txt / screen_error*.txt files in dir_name (created if necessary), naming them with myid, numprocs and extra_info. Its docstring notes it would be good to suppress screen output entirely, but the capture technique hooks sys.stdout after the text has already been printed.
  - class Screen_Catcher: the file-backed stdout/stderr replacement used by start_screen_catcher; it deletes any existing file of the same name on construction and appends each write() to the file.
  - copy_code_files(dir_name, filename1, filename2=None, verbose=False): copies one file or a sequence of files (absolute paths) to dir_name, creating the directory if needed, optionally followed by filename2.
anuga_core/source/anuga/shallow_water/test_data_manager.py
r7743 → r7744

Import change (load_csv_as_array is now picked up from its new home):

    from anuga.utilities.system_tools import get_pathname_from_package
  - from anuga.utilities.file_utils import del_dir, load_csv_as_dict
  + from anuga.utilities.file_utils import del_dir, load_csv_as_dict, \
  +                                        load_csv_as_array
    from anuga.anuga_exceptions import ANUGAError

The sww2dem test suite is removed from this file, following sww2dem into the file_conversion package. All of the removed tests share the same .prj/.asc header-checking pattern; an illustrative helper is sketched after the first entry, then the list continues.

  - test_sww2dem_asc_elevation_depth: builds the 'datatest' domain with elevation z = -x-y, stage 1.0 and Geo_reference(56, 308500, 6189000), stores two timesteps to datatest.sww, checks the stored zone and xll/yll corners, then exports 'elevation' and 'depth' to .asc/.prj at cellsize 0.25 with sww2dem(format='asc'). It verifies every .prj line (Projection utm, Zone 56, Datum wgs84, Zunits no, Units meters, Spheroid wgs84, Xshift 500000, Yshift 10000000, Parameters), the .asc header (ncols 5, nrows 5, xllcorner 308500, yllcorner 6189000, cellsize, NODATA_value -9999) and the 5x5 grid values (-i*cellsize - y for elevation, 1 - (-i*cellsize - y) for depth), then removes the files it created. (test_export_grid itself is kept in this file.)
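A small helper of the kind these tests could share, purely illustrative and not part of the changeset:

    def read_asc_header(lines, n=6):
        """Return the first n 'key value' pairs of an ESRI ASCII grid as a dict."""
        header = {}
        for line in lines[:n]:
            key, value = line.strip().split()
            header[key] = float(value)
        return header

    sample = ['ncols 5', 'nrows 5', 'xllcorner 308500', 'yllcorner 6189000',
              'cellsize 0.25', 'NODATA_value -9999']
    h = read_asc_header(sample)
    assert h['ncols'] == 5 and h['NODATA_value'] == -9999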
Continuing the removed tests:

  - test_sww2dem_larger: a 100 m x 100 m rectangular mesh (2 x 2), elevation -x-y, exported at cellsize 10 with block_size=2. The expected output is the 11 x 11 grid spelled out in the docstring (rows running from -100 ... -200 at the top to 0 ... -100 at the bottom); the test checks the .prj and .asc headers as above (ncols 11, nrows 11) and asserts each value against -(10-i+j)*cellsize with num.allclose.
  - test_sww2dem_larger_zero: a 2000 m x 1 m rectangular_cross mesh with elevation and stage both zero, exported at cellsize 1 with block_size=2; the resulting grid has ncols 2001, nrows 2 and every value allclose to 0, exercising rows with long runs of zeros.
  - test_sww2dem_boundingbox: the same dataset as test_sww2dem_larger, restricted with easting_min/max = 308530/308570 and northing_min/max = 6189050/6189100 (a 40 m x 50 m window of the original 100 m x 100 m extent, not the "50m x 50m square" the removed docstring claimed). The new lower-left reference becomes (308530, 6189050), the grid is 5 columns by 6 rows, and each value must equal -(10-i+j+3)*cellsize.
  - test_sww2dem_asc_stage_reduction: exports 'stage' with reduction=min and, at the grid points that coincide with mesh vertices, checks the gridded value against min(stage[0, index], stage[1, index]) from the two stored timesteps.

(The bounding-box arithmetic exercised above is sketched next; the remaining removed tests follow after it.)
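The bounding-box behaviour being tested is a little arithmetic on the requested extent; a sketch of that calculation, mirroring the removed sww2dem code with simplified names:

    def grid_shape(easting_min, easting_max, northing_min, northing_max,
                   xllcorner, yllcorner, cellsize):
        """Return (ncols, nrows, new_xllcorner, new_yllcorner) for a cropped export."""
        xmin = easting_min - xllcorner        # extents relative to the SWW origin
        xmax = easting_max - xllcorner
        ymin = northing_min - yllcorner
        ymax = northing_max - yllcorner
        ncols = int((xmax - xmin) / cellsize) + 1
        nrows = int((ymax - ymin) / cellsize) + 1
        return ncols, nrows, xmin + xllcorner, ymin + yllcorner

    # The cropped case from test_sww2dem_boundingbox: 5 columns x 6 rows,
    # new lower-left corner (308530, 6189050).
    assert grid_shape(308530, 308570, 6189050, 6189100, 308500, 6189000, 10) == \
           (5, 6, 308530, 6189050)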
The remaining removed tests:

  - test_sww2dem_asc_stage_time: the same setup, but with reduction=1, i.e. a timestep index rather than a reduction function (the removed docstring said "using min", copied from the previous test); the gridded values are checked against stage[1, index].
  - test_sww2dem_asc_derived_quantity: exports the expression 'stage - elevation' with basename_out='datatest_depth' and reduction=min, checking each coinciding grid point against min(stage[0, index] - z[index], stage[1, index] - z[index]).
  - test_sww2dem_asc_missing_points: uses a small triangular mesh that does not cover the full rectangle, so some grid points fall outside the mesh; those must come back as the NODATA value -9999, while covered points (i + j >= 4) must equal -i*cellsize - y.
  - test_sww2ers_simple: exports elevation to the 'ers' format with NODATA_value 1758323 and checks the header through ermapper_grids.read_ermapper_header (projection "UTM-56", datum "WGS84", units meters, value "elevation", x/y dimension 0.25, eastings 308500, northings 6189000, 5 lines of 5 cells, nullcellvalue) and the gridded data through read_ermapper_data against a 25-value reference grid (the removed code carries a FIXME asking why that reference is the desired grid for z = -x-y, and a note that the earlier clean-up failure was fixed once sww2ers closed its sww file).
  - xxxtestz_sww2ers_real: a disabled timing experiment that converts the 'karratha_100m' dataset (quantity 'depth') at various cell sizes, annotated with observed run times and hangs.
  - test_read_asc and the create_mux helper: moved verbatim to test_file_conversion.py (see the first file above).
3411 time_step_count = 3 3412 time_step = 0.5 3413 3414 longitudes = [150.66667, 150.83334, 151., 151.16667] 3415 latitudes = [-34.5, -34.33333, -34.16667, -34] 3416 3417 if points_num == None: 3418 points_num = len(longitudes) * len(latitudes) 3419 3420 lonlatdeps = [] 3421 quantities = ['HA','UA','VA'] 3422 mux_names = [WAVEHEIGHT_MUX_LABEL, 3423 EAST_VELOCITY_LABEL, 3424 NORTH_VELOCITY_LABEL] 3425 quantities_init = [[],[],[]] 3426 # urs binary is latitude fastest 3427 for i,lon in enumerate(longitudes): 3428 for j,lat in enumerate(latitudes): 3429 _ , e, n = redfearn(lat, lon) 3430 lonlatdeps.append([lon, lat, n]) 3431 quantities_init[0].append(e) # HA 3432 quantities_init[1].append(n ) # UA 3433 quantities_init[2].append(e) # VA 3434 #print "lonlatdeps",lonlatdeps 3435 3436 file_handle, base_name = tempfile.mkstemp("") 3437 os.close(file_handle) 3438 os.remove(base_name) 3439 3440 files = [] 3441 for i,q in enumerate(quantities): 3442 quantities_init[i] = ensure_numeric(quantities_init[i]) 3443 #print "HA_init", HA_init 3444 q_time = num.zeros((time_step_count, points_num), num.float) 3445 for time in range(time_step_count): 3446 q_time[time,:] = quantities_init[i] #* time * 4 3447 3448 #Write C files 3449 columns = 3 # long, lat , depth 3450 file = base_name + mux_names[i] 3451 #print "base_name file",file 3452 f = open(file, 'wb') 3453 files.append(file) 3454 f.write(pack('i',points_num)) 3455 f.write(pack('i',time_step_count)) 3456 f.write(pack('f',time_step)) 3457 3458 #write lat/long info 3459 for lonlatdep in lonlatdeps: 3460 for float in lonlatdep: 3461 f.write(pack('f',float)) 3462 3463 # Write quantity info 3464 for time in range(time_step_count): 3465 for point_i in range(points_num): 3466 f.write(pack('f',q_time[time,point_i])) 3467 #print " mux_names[i]", mux_names[i] 3468 #print "f.write(pack('f',q_time[time,i]))", q_time[time,point_i] 3469 f.close() 3470 return base_name, files 3471 2031 3472 2032 def write_mux(self, lat_long_points, time_step_count, time_step, 3473 2033 depth=None, ha=None, ua=None, va=None): -
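The mux files produced by create_mux above pack two ints (number of points and number of timesteps) and a float timestep, followed by lon/lat/depth triples and the per-timestep quantity values, all written with struct.pack in native byte order. The snippet below is a minimal sketch, not part of this changeset, of how that header could be read back for a sanity check; the function name read_mux_header and the assumption of native byte order are illustrative only.

    from struct import unpack, calcsize

    def read_mux_header(filename):
        # Mirror the writer: int points_num, int time_step_count, float time_step,
        # assuming the same native byte order used by pack() in create_mux.
        f = open(filename, 'rb')
        points_num, time_step_count = unpack('ii', f.read(calcsize('ii')))
        time_step = unpack('f', f.read(calcsize('f')))[0]
        f.close()
        return points_num, time_step_count, time_step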
- anuga_core/source/anuga/shallow_water/test_forcing.py
r7735 r7744

   6    6   from boundaries import Reflective_boundary
   7    7   from anuga.coordinate_transforms.geo_reference import Geo_reference
        8   from anuga.file_conversion.file_conversion import timefile2netcdf
   8    9   from forcing import *

  …    …

 247  248       fid.close()

 249            # Convert ASCII file to NetCDF (Which is what we really like!)
 250            from file_conversion import timefile2netcdf

 252  250       timefile2netcdf(filename)

  …    …

 339  337       fid.close()

 341            # Convert ASCII file to NetCDF (Which is what we really like!)
 342            from file_conversion import timefile2netcdf

 344  339       timefile2netcdf(filename, time_as_seconds=True)
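With this change the forcing tests use a single module-level import of timefile2netcdf instead of importing file_conversion locally inside each test. A minimal usage sketch of the relocated function is shown below; the file name is a placeholder, and the two calls simply mirror the call sites in the diff above.

    from anuga.file_conversion.file_conversion import timefile2netcdf

    # 'gauge.txt' is a hypothetical ASCII time-series file of the kind the
    # tests above write out before converting it to NetCDF.
    timefile2netcdf('gauge.txt')
    timefile2netcdf('gauge.txt', time_as_seconds=True)   # time column already in seconds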
- anuga_core/source/anuga/utilities/file_utils.py
r7735 r7744

   6    6   import os, sys
   7    7   import csv
        8   import numpy as num

   9   10   ##

  …    …

 363  364       return attribute_dic, title_index_dic



      368   ##
      369   # @brief Convert CSV file to a dictionary of arrays.
      370   # @param file_name The path to the file to read.
      371   def load_csv_as_array(file_name):
      372       """
      373       Convert CSV files of the form:
      374
      375       time, discharge, velocity
      376       0.0, 1.2, 0.0
      377       0.1, 3.2, 1.1
      378       ...
      379
      380       to a dictionary of numeric arrays.
      381
      382
      383       See underlying function load_csv_as_dict for more details.
      384       """
      385
      386       X, _ = load_csv_as_dict(file_name)
      387
      388       Y = {}
      389       for key in X.keys():
      390           Y[key] = num.array([float(x) for x in X[key]])
      391
      392       return Y
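The new helper simply wraps load_csv_as_dict and coerces each column into a numpy array. A small usage sketch follows; the file name is hypothetical and assumes a CSV laid out like the docstring example, and the exact dictionary keys come from whatever load_csv_as_dict reads off the header row.

    from anuga.utilities.file_utils import load_csv_as_array

    # hydrograph.csv is assumed to contain a header row such as
    #   time, discharge, velocity
    # followed by comma-separated numeric rows.
    data = load_csv_as_array('hydrograph.csv')
    for name in data:
        print name, data[name]    # each value is a numpy array of floats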