"""
Module to convert SWW to DEM files.
"""

# external modules
import os
import numpy as num

# ANUGA modules
from anuga.abstract_2d_finite_volumes.util import remove_lone_verts
from anuga.coordinate_transforms.geo_reference import Geo_reference
from anuga.utilities.system_tools import get_vars_in_expression
import anuga.utilities.log as log
from anuga.utilities.file_utils import get_all_swwfiles


######
# formula mappings
######

quantity_formula = {'momentum':'(xmomentum**2 + ymomentum**2)**0.5',
                    'depth':'stage-elevation',
                    'speed': \
 '(xmomentum**2 + ymomentum**2)**0.5/(stage-elevation+1.e-6/(stage-elevation))'}
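
# A small illustration of how these mappings are used (the array names below are
# hypothetical placeholders): asking sww2dem for quantity='depth' expands to the
# expression 'stage-elevation', which is later evaluated against arrays read from
# the SWW file via apply_expression_to_dictionary, roughly:
#
#     q_dict = {'stage': stage_array, 'elevation': elevation_array}
#     depth = apply_expression_to_dictionary('stage-elevation', q_dict)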



# Default block size for sww2dem()
DEFAULT_BLOCK_SIZE = 10000

def sww2dem(name_in, name_out,
            quantity=None,      # defaults to elevation
            reduction=None,
            cellsize=10,
            number_of_decimal_places=None,
            NODATA_value=-9999,
            easting_min=None,
            easting_max=None,
            northing_min=None,
            northing_max=None,
            verbose=False,
            origin=None,
            datum='WGS84',
            block_size=None):
    """Read SWW file and convert to Digital Elevation Model format
    (.asc or .ers)

    Example (ASC):
    ncols 3121
    nrows 1800
    xllcorner 722000
    yllcorner 5893000
    cellsize 25
    NODATA_value -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    The number of decimal places can be specified by the user to save
    on disk space requirements by specifying in the call to sww2dem.

    For .asc output, also write an accompanying file with the same basename
    as name_out but extension .prj, used to fix the UTM zone, datum,
    false northings and eastings.

    The prj format is assumed to be:

    Projection UTM
    Zone 56
    Datum WGS84
    Zunits NO
    Units METERS
    Spheroid WGS84
    Xshift 0.0000000000
    Yshift 10000000.0000000000
    Parameters


    The parameter quantity must be the name of an existing quantity or
    an expression involving existing quantities. The default is
    'elevation'. Quantity is not a list of quantities.

    If reduction is given and is an index, output the quantity at that
    timestep. If reduction is a built-in function (e.g. max), use it to
    reduce the quantity over all timesteps.

    datum - the coordinate datum to record in the output (default 'WGS84').

    The output format (.asc or .ers) is determined by the extension of
    name_out.

    block_size - sets the number of slices along the non-time axis to
                 process in one block.
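
    A minimal usage sketch (the file names here are purely illustrative):

        sww2dem('runup.sww', 'runup_depth.asc',
                quantity='depth',
                reduction=max,
                cellsize=10,
                verbose=True)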
    """

    import sys
    import types

    from anuga.geometry.polygon import inside_polygon, outside_polygon
    from anuga.abstract_2d_finite_volumes.util import \
         apply_expression_to_dictionary

    basename_in, in_ext = os.path.splitext(name_in)
    basename_out, out_ext = os.path.splitext(name_out)
    out_ext = out_ext.lower()

    if in_ext != '.sww':
        raise IOError('Input format for %s must be .sww' % name_in)

    if out_ext not in ['.asc', '.ers']:
        raise IOError('Format for %s must be either asc or ers.' % name_out)

    false_easting = 500000
    false_northing = 10000000

    if quantity is None:
        quantity = 'elevation'

    if reduction is None:
        reduction = max

    if quantity_formula.has_key(quantity):
        quantity = quantity_formula[quantity]

    if number_of_decimal_places is None:
        number_of_decimal_places = 3

    if block_size is None:
        block_size = DEFAULT_BLOCK_SIZE

    # Read sww file
    if verbose:
        log.critical('Reading from %s' % name_in)
        log.critical('Output file is %s' % name_out)

    from Scientific.IO.NetCDF import NetCDFFile
    fid = NetCDFFile(name_in)

    # Get extent and reference
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    volumes = fid.variables['volumes'][:]
    if type(reduction) is not types.BuiltinFunctionType:
        times = fid.variables['time'][reduction]
    else:
        times = fid.variables['time'][:]

    number_of_timesteps = fid.dimensions['number_of_timesteps']
    number_of_points = fid.dimensions['number_of_points']

    if origin is None:
        # Get geo_reference
        # sww files don't have to have a geo_ref
        try:
            geo_reference = Geo_reference(NetCDFObject=fid)
        except AttributeError, e:
            geo_reference = Geo_reference()    # Default georef object

        xllcorner = geo_reference.get_xllcorner()
        yllcorner = geo_reference.get_yllcorner()
        zone = geo_reference.get_zone()
    else:
        zone = origin[0]
        xllcorner = origin[1]
        yllcorner = origin[2]

    # FIXME: Refactor using code from Interpolation_function.statistics
    # (in interpolate.py)
    # Something like print swwstats(swwname)
    if verbose:
        log.critical('------------------------------------------------')
        log.critical('Statistics of SWW file:')
        log.critical('  Name: %s' % name_in)
        log.critical('  Reference:')
        log.critical('    Lower left corner: [%f, %f]'
                     % (xllcorner, yllcorner))
        if type(reduction) is not types.BuiltinFunctionType:
            log.critical('    Time: %f' % times)
        else:
            log.critical('    Start time: %f' % fid.starttime[0])
        log.critical('  Extent:')
        log.critical('    x [m] in [%f, %f], len(x) == %d'
                     % (num.min(x), num.max(x), len(x.flat)))
        log.critical('    y [m] in [%f, %f], len(y) == %d'
                     % (num.min(y), num.max(y), len(y.flat)))
        if type(reduction) is not types.BuiltinFunctionType:
            log.critical('    t [s] = %f, len(t) == %d' % (times, 1))
        else:
            log.critical('    t [s] in [%f, %f], len(t) == %d'
                         % (min(times), max(times), len(times)))
        log.critical('  Quantities [SI units]:')

        # Comment out for reduced memory consumption
        for name in ['stage', 'xmomentum', 'ymomentum']:
            q = fid.variables[name][:].flatten()
            if type(reduction) is not types.BuiltinFunctionType:
                q = q[reduction*len(x):(reduction+1)*len(x)]
            if verbose: log.critical('    %s in [%f, %f]'
                                     % (name, min(q), max(q)))
        for name in ['elevation']:
            q = fid.variables[name][:].flatten()
            if verbose: log.critical('    %s in [%f, %f]'
                                     % (name, min(q), max(q)))

    # Get the variables in the supplied expression.
    # This may throw a SyntaxError exception.
    var_list = get_vars_in_expression(quantity)

    # Check that we have the required variables in the SWW file.
    missing_vars = []
    for name in var_list:
        try:
            _ = fid.variables[name]
        except:
            missing_vars.append(name)
    if missing_vars:
        msg = ("In expression '%s', variables %s are not in the SWW file '%s'"
               % (quantity, missing_vars, name_in))
        raise Exception, msg

    # Create result array and start filling, block by block.
    result = num.zeros(number_of_points, num.float)

    for start_slice in xrange(0, number_of_points, block_size):
        # Limit slice size to array end if at last block
        end_slice = min(start_slice + block_size, number_of_points)

        # Get slices of all required variables
        q_dict = {}
        for name in var_list:
            # check if variable has time axis
            if len(fid.variables[name].shape) == 2:
                q_dict[name] = fid.variables[name][:,start_slice:end_slice]
            else:       # no time axis
                q_dict[name] = fid.variables[name][start_slice:end_slice]

        # Evaluate expression with quantities found in SWW file
        res = apply_expression_to_dictionary(quantity, q_dict)

        if len(res.shape) == 2:
            new_res = num.zeros(res.shape[1], num.float)
            for k in xrange(res.shape[1]):
                if type(reduction) is not types.BuiltinFunctionType:
                    new_res[k] = res[reduction,k]
                else:
                    new_res[k] = reduction(res[:,k])
            res = new_res

        result[start_slice:end_slice] = res

    # Post condition: Now result has dimension: number_of_points
    assert len(result.shape) == 1
    assert result.shape[0] == number_of_points

    if verbose:
        log.critical('Processed values for %s are in [%f, %f]'
                     % (quantity, min(result), max(result)))

    # Create grid and update xll/yll corner and x,y
    # Relative extent
    if easting_min is None:
        xmin = min(x)
    else:
        xmin = easting_min - xllcorner

    if easting_max is None:
        xmax = max(x)
    else:
        xmax = easting_max - xllcorner

    if northing_min is None:
        ymin = min(y)
    else:
        ymin = northing_min - yllcorner

    if northing_max is None:
        ymax = max(y)
    else:
        ymax = northing_max - yllcorner

    msg = 'xmax must be greater than or equal to xmin.\n'
    msg += 'I got xmin = %f, xmax = %f' % (xmin, xmax)
    assert xmax >= xmin, msg

    msg = 'ymax must be greater than or equal to ymin.\n'
    msg += 'I got ymin = %f, ymax = %f' % (ymin, ymax)
    assert ymax >= ymin, msg

    if verbose: log.critical('Creating grid')
    ncols = int((xmax-xmin)/cellsize) + 1
    nrows = int((ymax-ymin)/cellsize) + 1

    # New absolute reference and coordinates
    newxllcorner = xmin + xllcorner
    newyllcorner = ymin + yllcorner

    x = x + xllcorner - newxllcorner
    y = y + yllcorner - newyllcorner

    vertex_points = num.concatenate((x[:,num.newaxis], y[:,num.newaxis]), axis=1)
    assert len(vertex_points.shape) == 2

    grid_points = num.zeros((ncols*nrows, 2), num.float)

    for i in xrange(nrows):
        if out_ext == '.asc':
            yg = i * cellsize
        else:
            # this will flip the order of the y values for ers
            yg = (nrows-i) * cellsize

        for j in xrange(ncols):
            xg = j * cellsize
            k = i*ncols + j

            grid_points[k, 0] = xg
            grid_points[k, 1] = yg

    # Interpolate
    from anuga.fit_interpolate.interpolate import Interpolate

    # Remove loners from vertex_points, volumes here
    vertex_points, volumes = remove_lone_verts(vertex_points, volumes)
    # export_mesh_file('monkey.tsh', {'vertices': vertex_points, 'triangles': volumes})
    interp = Interpolate(vertex_points, volumes, verbose=verbose)

    # Interpolate using quantity values
    if verbose: log.critical('Interpolating')
    grid_values = interp.interpolate(result, grid_points).flatten()

    if verbose:
        log.critical('Interpolated values are in [%f, %f]'
                     % (num.min(grid_values), num.max(grid_values)))

    # Assign NODATA_value to all points outside bounding polygon
    # (from interpolation mesh)
    P = interp.mesh.get_boundary_polygon()
    outside_indices = outside_polygon(grid_points, P, closed=True)

    for i in outside_indices:
        grid_values[i] = NODATA_value

    if out_ext == '.ers':
        # setup ERS header information
        grid_values = num.reshape(grid_values, (nrows, ncols))
        header = {}
        header['datum'] = '"' + datum + '"'
        # FIXME The use of hardwired UTM and zone number needs to be made optional
        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
        header['projection'] = '"UTM-' + str(zone) + '"'
        header['coordinatetype'] = 'EN'
        if header['coordinatetype'] == 'LL':
            header['longitude'] = str(newxllcorner)
            header['latitude'] = str(newyllcorner)
        elif header['coordinatetype'] == 'EN':
            header['eastings'] = str(newxllcorner)
            header['northings'] = str(newyllcorner)
        header['nullcellvalue'] = str(NODATA_value)
        header['xdimension'] = str(cellsize)
        header['ydimension'] = str(cellsize)
        header['value'] = '"' + quantity + '"'
        #header['celltype'] = 'IEEE8ByteReal'    #FIXME: Breaks unit test

        # Write
        if verbose: log.critical('Writing %s' % name_out)

        import ermapper_grids

        ermapper_grids.write_ermapper_grid(name_out, grid_values, header)

        fid.close()
    else:
        # Write to ASCII format
        # Write prj file
        prjfile = basename_out + '.prj'

        if verbose: log.critical('Writing %s' % prjfile)
        prjid = open(prjfile, 'w')
        prjid.write('Projection %s\n' % 'UTM')
        prjid.write('Zone %d\n' % zone)
        prjid.write('Datum %s\n' % datum)
        prjid.write('Zunits NO\n')
        prjid.write('Units METERS\n')
        prjid.write('Spheroid %s\n' % datum)
        prjid.write('Xshift %d\n' % false_easting)
        prjid.write('Yshift %d\n' % false_northing)
        prjid.write('Parameters\n')
        prjid.close()

        if verbose: log.critical('Writing %s' % name_out)

        ascid = open(name_out, 'w')

        ascid.write('ncols %d\n' % ncols)
        ascid.write('nrows %d\n' % nrows)
        ascid.write('xllcorner %d\n' % newxllcorner)
        ascid.write('yllcorner %d\n' % newyllcorner)
        ascid.write('cellsize %f\n' % cellsize)
        ascid.write('NODATA_value %d\n' % NODATA_value)

        # Get bounding polygon from mesh
        #P = interp.mesh.get_boundary_polygon()
        #inside_indices = inside_polygon(grid_points, P)

        # change printoptions so that a long string of zeros is not
        # summarized as [0.0, 0.0, 0.0, ... 0.0, 0.0, 0.0]
        #printoptions = num.get_printoptions()
        #num.set_printoptions(threshold=sys.maxint)

        format = '%.' + '%g' % number_of_decimal_places + 'e'
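        # e.g. number_of_decimal_places=3 yields the format string '%.3e'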
        for i in range(nrows):
            if verbose and i % ((nrows+10)/10) == 0:
                log.critical('Doing row %d of %d' % (i, nrows))

            base_index = (nrows-i-1)*ncols

            slice = grid_values[base_index:base_index+ncols]

            num.savetxt(ascid, slice.reshape(1,ncols), format, ' ')

        # Close
        ascid.close()
        fid.close()

    return basename_out


def sww2dem_batch(basename_in, extra_name_out=None,
                  quantities=None,      # defaults to elevation
                  reduction=None,
                  cellsize=10,
                  number_of_decimal_places=None,
                  NODATA_value=-9999,
                  easting_min=None,
                  easting_max=None,
                  northing_min=None,
                  northing_max=None,
                  verbose=False,
                  origin=None,
                  datum='WGS84',
                  format='ers'):
    """Wrapper for sww2dem.
    See sww2dem to find out what most of the parameters do.

    basename_in is a path to sww file/s, without the .sww extension.

    quantities is a list of quantities. Each quantity will be
    calculated for each sww file.

    This function returns the basenames of the files produced; each is
    made up of the directory and the file name, without the extension.

    It produces one output file per input sww file and per quantity.
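
    A minimal usage sketch (the basename 'runup' is purely illustrative); this
    would convert each matching sww file in the current directory, writing one
    .ers grid per file and per quantity:

        sww2dem_batch('runup',
                      quantities=['depth', 'speed'],
                      cellsize=25,
                      reduction=max,
                      verbose=True)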
    """

    if quantities is None:
        quantities = ['elevation']

    if type(quantities) is str:
        quantities = [quantities]

    # How many sww files are there?
    dir, base = os.path.split(basename_in)

    iterate_over = get_all_swwfiles(dir, base, verbose)

    if dir == "":
        dir = "."    # Unix compatibility

    files_out = []
    for sww_file in iterate_over:
        for quantity in quantities:
            if extra_name_out is None:
                basename_out = sww_file + '_' + quantity
            else:
                basename_out = sww_file + '_' + quantity + '_' + extra_name_out

            # Give the output name the requested extension so sww2dem can
            # infer the output format, and pass only the arguments sww2dem
            # accepts (it no longer takes a separate format argument).
            file_out = sww2dem(dir+os.sep+sww_file+'.sww',
                               dir+os.sep+basename_out+'.'+format,
                               quantity,
                               reduction,
                               cellsize,
                               number_of_decimal_places,
                               NODATA_value,
                               easting_min,
                               easting_max,
                               northing_min,
                               northing_max,
                               verbose,
                               origin,
                               datum)
            files_out.append(file_out)
    return files_out
---|