Changeset 7758
- Timestamp:
- May 31, 2010, 4:45:39 PM (14 years ago)
- Location:
- trunk/anuga_core/source/anuga/file_conversion
- Files:
- 8 added
- 3 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/anuga_core/source/anuga/file_conversion/__init__.py
r7751 r7758 7 7 8 8 9 #from shallow_water_balanced_domain import Swb_domain 10 11 12 9 # commonly used imports 10 from anuga.file_conversion.file_conversion import sww2obj, dat2obj, \ 11 timefile2netcdf, tsh2sww, urs2sww, urs2nc 12 13 from anuga.file_conversion.dem2pts import dem2pts 14 from anuga.file_conversion.esri2sww import esri2sww 15 from anuga.file_conversion.sww2dem import sww2dem 16 from anuga.file_conversion.asc2dem import asc2dem 17 from anuga.file_conversion.ferret2sww import ferret2sww -
trunk/anuga_core/source/anuga/file_conversion/file_conversion.py
r7743 r7758 316 316 sww.store_connectivity() 317 317 sww.store_timestep() 318 319 320 ##321 # @brief Convert CSIRO ESRI file to an SWW boundary file.322 # @param bath_dir323 # @param elevation_dir324 # @param ucur_dir325 # @param vcur_dir326 # @param sww_file327 # @param minlat328 # @param maxlat329 # @param minlon330 # @param maxlon331 # @param zscale332 # @param mean_stage333 # @param fail_on_NaN334 # @param elevation_NaN_filler335 # @param bath_prefix336 # @param elevation_prefix337 # @param verbose338 # @note Also convert latitude and longitude to UTM. All coordinates are339 # assumed to be given in the GDA94 datum.340 def asc_csiro2sww(bath_dir,341 elevation_dir,342 ucur_dir,343 vcur_dir,344 sww_file,345 minlat=None, maxlat=None,346 minlon=None, maxlon=None,347 zscale=1,348 mean_stage=0,349 fail_on_NaN=True,350 elevation_NaN_filler=0,351 bath_prefix='ba',352 elevation_prefix='el',353 verbose=False):354 """355 Produce an sww boundary file, from esri ascii data from CSIRO.356 357 Also convert latitude and longitude to UTM. 
All coordinates are358 assumed to be given in the GDA94 datum.359 360 assume:361 All files are in esri ascii format362 363 4 types of information364 bathymetry365 elevation366 u velocity367 v velocity368 369 Assumptions370 The metadata of all the files is the same371 Each type is in a seperate directory372 One bath file with extention .000373 The time period is less than 24hrs and uniform.374 """375 376 from Scientific.IO.NetCDF import NetCDFFile377 378 from anuga.coordinate_transforms.redfearn import redfearn379 380 # So if we want to change the precision it's done here381 precision = netcdf_float382 383 # go in to the bath dir and load the only file,384 bath_files = os.listdir(bath_dir)385 bath_file = bath_files[0]386 bath_dir_file = bath_dir + os.sep + bath_file387 bath_metadata, bath_grid = _read_asc(bath_dir_file)388 389 #Use the date.time of the bath file as a basis for390 #the start time for other files391 base_start = bath_file[-12:]392 393 #go into the elevation dir and load the 000 file394 elevation_dir_file = elevation_dir + os.sep + elevation_prefix \395 + base_start396 397 elevation_files = os.listdir(elevation_dir)398 ucur_files = os.listdir(ucur_dir)399 vcur_files = os.listdir(vcur_dir)400 elevation_files.sort()401 402 # the first elevation file should be the403 # file with the same base name as the bath data404 assert elevation_files[0] == 'el' + base_start405 406 number_of_latitudes = bath_grid.shape[0]407 number_of_longitudes = bath_grid.shape[1]408 number_of_volumes = (number_of_latitudes-1) * (number_of_longitudes-1) * 2409 410 longitudes = [bath_metadata['xllcorner'] + x*bath_metadata['cellsize'] \411 for x in range(number_of_longitudes)]412 latitudes = [bath_metadata['yllcorner'] + y*bath_metadata['cellsize'] \413 for y in range(number_of_latitudes)]414 415 # reverse order of lat, so the first lat represents the first grid row416 latitudes.reverse()417 418 kmin, kmax, lmin, lmax = get_min_max_indices(latitudes[:],longitudes[:],419 
minlat=minlat, maxlat=maxlat,420 minlon=minlon, maxlon=maxlon)421 422 bath_grid = bath_grid[kmin:kmax,lmin:lmax]423 latitudes = latitudes[kmin:kmax]424 longitudes = longitudes[lmin:lmax]425 number_of_latitudes = len(latitudes)426 number_of_longitudes = len(longitudes)427 number_of_times = len(os.listdir(elevation_dir))428 number_of_points = number_of_latitudes * number_of_longitudes429 number_of_volumes = (number_of_latitudes-1) * (number_of_longitudes-1) * 2430 431 #Work out the times432 if len(elevation_files) > 1:433 # Assume: The time period is less than 24hrs.434 time_period = (int(elevation_files[1][-3:]) \435 - int(elevation_files[0][-3:])) * 60*60436 times = [x*time_period for x in range(len(elevation_files))]437 else:438 times = [0.0]439 440 if verbose:441 log.critical('------------------------------------------------')442 log.critical('Statistics:')443 log.critical(' Extent (lat/lon):')444 log.critical(' lat in [%f, %f], len(lat) == %d'445 % (min(latitudes), max(latitudes), len(latitudes)))446 log.critical(' lon in [%f, %f], len(lon) == %d'447 % (min(longitudes), max(longitudes), len(longitudes)))448 log.critical(' t in [%f, %f], len(t) == %d'449 % (min(times), max(times), len(times)))450 451 ######### WRITE THE SWW FILE #############452 453 # NetCDF file definition454 outfile = NetCDFFile(sww_file, netcdf_mode_w)455 456 #Create new file457 outfile.institution = 'Geoscience Australia'458 outfile.description = 'Converted from XXX'459 460 #For sww compatibility461 outfile.smoothing = 'Yes'462 outfile.order = 1463 464 #Start time in seconds since the epoch (midnight 1/1/1970)465 outfile.starttime = starttime = times[0]466 467 # dimension definitions468 outfile.createDimension('number_of_volumes', number_of_volumes)469 outfile.createDimension('number_of_vertices', 3)470 outfile.createDimension('number_of_points', number_of_points)471 outfile.createDimension('number_of_timesteps', number_of_times)472 473 # variable definitions474 outfile.createVariable('x', 
precision, ('number_of_points',))475 outfile.createVariable('y', precision, ('number_of_points',))476 outfile.createVariable('elevation', precision, ('number_of_points',))477 478 #FIXME: Backwards compatibility479 #outfile.createVariable('z', precision, ('number_of_points',))480 #################################481 482 outfile.createVariable('volumes', netcdf_int, ('number_of_volumes',483 'number_of_vertices'))484 485 outfile.createVariable('time', precision, ('number_of_timesteps',))486 487 outfile.createVariable('stage', precision, ('number_of_timesteps',488 'number_of_points'))489 490 outfile.createVariable('xmomentum', precision, ('number_of_timesteps',491 'number_of_points'))492 493 outfile.createVariable('ymomentum', precision, ('number_of_timesteps',494 'number_of_points'))495 496 #Store497 from anuga.coordinate_transforms.redfearn import redfearn498 499 x = num.zeros(number_of_points, num.float) #Easting500 y = num.zeros(number_of_points, num.float) #Northing501 502 if verbose: log.critical('Making triangular grid')503 504 #Get zone of 1st point.505 refzone, _, _ = redfearn(latitudes[0], longitudes[0])506 507 vertices = {}508 i = 0509 for k, lat in enumerate(latitudes):510 for l, lon in enumerate(longitudes):511 vertices[l,k] = i512 513 zone, easting, northing = redfearn(lat, lon)514 515 #msg = 'Zone boundary crossed at longitude =', lon516 #assert zone == refzone, msg517 #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)518 x[i] = easting519 y[i] = northing520 i += 1521 522 #Construct 2 triangles per 'rectangular' element523 volumes = []524 for l in range(number_of_longitudes-1): #X direction525 for k in range(number_of_latitudes-1): #Y direction526 v1 = vertices[l,k+1]527 v2 = vertices[l,k]528 v3 = vertices[l+1,k+1]529 v4 = vertices[l+1,k]530 531 #Note, this is different to the ferrit2sww code532 #since the order of the lats is reversed.533 volumes.append([v1,v3,v2]) #Upper element534 volumes.append([v4,v2,v3]) #Lower element535 536 volumes = 
num.array(volumes, num.int) #array default#537 538 geo_ref = Geo_reference(refzone, min(x), min(y))539 geo_ref.write_NetCDF(outfile)540 541 # This will put the geo ref in the middle542 #geo_ref = Geo_reference(refzone, (max(x)+min(x))/2., (max(x)+min(y))/2.)543 544 if verbose:545 log.critical('------------------------------------------------')546 log.critical('More Statistics:')547 log.critical(' Extent (/lon):')548 log.critical(' x in [%f, %f], len(lat) == %d'549 % (min(x), max(x), len(x)))550 log.critical(' y in [%f, %f], len(lon) == %d'551 % (min(y), max(y), len(y)))552 log.critical('geo_ref: ', geo_ref)553 554 z = num.resize(bath_grid,outfile.variables['elevation'][:].shape)555 outfile.variables['x'][:] = x - geo_ref.get_xllcorner()556 outfile.variables['y'][:] = y - geo_ref.get_yllcorner()557 # FIXME (Ole): Remove once viewer has been recompiled and changed558 # to use elevation instead of z559 #outfile.variables['z'][:] = z560 outfile.variables['elevation'][:] = z561 outfile.variables['volumes'][:] = volumes.astype(num.int32) # On Opteron 64562 563 stage = outfile.variables['stage']564 xmomentum = outfile.variables['xmomentum']565 ymomentum = outfile.variables['ymomentum']566 567 outfile.variables['time'][:] = times #Store time relative568 569 if verbose: log.critical('Converting quantities')570 571 n = number_of_times572 for j in range(number_of_times):573 # load in files574 elevation_meta, elevation_grid = \575 _read_asc(elevation_dir + os.sep + elevation_files[j])576 577 _, u_momentum_grid = _read_asc(ucur_dir + os.sep + ucur_files[j])578 _, v_momentum_grid = _read_asc(vcur_dir + os.sep + vcur_files[j])579 580 #cut matrix to desired size581 elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]582 u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]583 v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]584 585 # handle missing values586 missing = (elevation_grid == elevation_meta['NODATA_value'])587 if num.sometrue (missing):588 if fail_on_NaN:589 msg = 
'File %s contains missing values' \590 % (elevation_files[j])591 raise DataMissingValuesError, msg592 else:593 elevation_grid = elevation_grid*(missing==0) \594 + missing*elevation_NaN_filler595 596 if verbose and j % ((n+10)/10) == 0: log.critical(' Doing %d of %d'597 % (j, n))598 599 i = 0600 for k in range(number_of_latitudes): #Y direction601 for l in range(number_of_longitudes): #X direction602 w = zscale*elevation_grid[k,l] + mean_stage603 stage[j,i] = w604 h = w - z[i]605 xmomentum[j,i] = u_momentum_grid[k,l]*h606 ymomentum[j,i] = v_momentum_grid[k,l]*h607 i += 1608 609 outfile.close()610 611 612 613 def _read_asc(filename, verbose=False):614 """Read esri file from the following ASCII format (.asc)615 616 Example:617 618 ncols 3121619 nrows 1800620 xllcorner 722000621 yllcorner 5893000622 cellsize 25623 NODATA_value -9999624 138.3698 137.4194 136.5062 135.5558 ..........625 626 filename Path to the file to read.627 verbose True if this function is to be verbose.628 """629 630 datafile = open(filename)631 632 if verbose: log.critical('Reading DEM from %s' % filename)633 634 lines = datafile.readlines()635 datafile.close()636 637 if verbose: log.critical('Got %d lines' % len(lines))638 639 ncols = int(lines.pop(0).split()[1].strip())640 nrows = int(lines.pop(0).split()[1].strip())641 xllcorner = float(lines.pop(0).split()[1].strip())642 yllcorner = float(lines.pop(0).split()[1].strip())643 cellsize = float(lines.pop(0).split()[1].strip())644 NODATA_value = float(lines.pop(0).split()[1].strip())645 646 assert len(lines) == nrows647 648 #Store data649 grid = []650 651 n = len(lines)652 for i, line in enumerate(lines):653 cells = line.split()654 assert len(cells) == ncols655 grid.append(num.array([float(x) for x in cells]))656 grid = num.array(grid)657 658 return {'xllcorner':xllcorner,659 'yllcorner':yllcorner,660 'cellsize':cellsize,661 'NODATA_value':NODATA_value}, grid662 318 663 319 -
trunk/anuga_core/source/anuga/file_conversion/test_file_conversion.py
r7744 r7758 11 11 from anuga.config import netcdf_float, epsilon, g 12 12 from Scientific.IO.NetCDF import NetCDFFile 13 from anuga.file_conversion.file_conversion import tsh2sww, \ 14 pmesh_to_domain_instance 15 13 16 import sys 14 17 import unittest
Note: See TracChangeset
for help on using the changeset viewer.