Changeset 7735
- Timestamp: May 20, 2010, 12:36:35 PM
- Location: anuga_core/source/anuga
- Files: 2 added, 5 deleted, 15 edited
Legend:
- Unmodified (no prefix)
- Added (+)
- Removed (-)
anuga_core/source/anuga/abstract_2d_finite_volumes/test_gauge.py (r7693 → r7735)

      from anuga.abstract_2d_finite_volumes.gauge import *
      from anuga.config import epsilon
    - from anuga.shallow_water.data_manager import timefile2netcdf,del_dir
      from anuga.utilities.numerical_tools import NAN,mean
      ...
      from anuga.pmesh.mesh import Mesh
      from anuga.shallow_water import Domain, Transmissive_boundary
    - from anuga.shallow_water.data_manager import SWW_file
    + from anuga.shallow_water.sww_file import SWW_file
    + from anuga.shallow_water.file_conversion import timefile2netcdf
    + from anuga.utilities.file_utils import del_dir
      from csv import reader,writer
      import time
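Note: these three relocations (SWW_file, timefile2netcdf, del_dir out of data_manager) recur throughout the changeset. Downstream scripts that must run against both the old and new module layout can mirror the edit above with a compatibility import; the following is a hedged sketch, not part of the changeset itself:

    # Prefer the post-r7735 layout; fall back to the old data_manager
    # location when running against an older ANUGA tree.
    try:
        from anuga.shallow_water.sww_file import SWW_file
        from anuga.shallow_water.file_conversion import timefile2netcdf
        from anuga.utilities.file_utils import del_dir
    except ImportError:
        from anuga.shallow_water.data_manager import SWW_file, \
             timefile2netcdf, del_dir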
anuga_core/source/anuga/abstract_2d_finite_volumes/test_generic_boundary_conditions.py (r7573 → r7735)

      #Convert ASCII file to NetCDF (Which is what we really like!)
    - from anuga.shallow_water.data_manager import timefile2netcdf
    + from anuga.shallow_water.file_conversion import timefile2netcdf

      timefile2netcdf(filename, quantity_names = ['stage', 'xmomentum'])
anuga_core/source/anuga/abstract_2d_finite_volumes/test_util.py (r7695 → r7735)

      from anuga.abstract_2d_finite_volumes.util import *
      from anuga.config import epsilon
    - from anuga.shallow_water.data_manager import timefile2netcdf,del_dir
    + from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a
    + from anuga.shallow_water.file_conversion import timefile2netcdf
    + from anuga.utilities.file_utils import del_dir
      from anuga.utilities.numerical_tools import NAN
      ...
      from anuga.pmesh.mesh import Mesh
      from anuga.shallow_water import Domain, Transmissive_boundary
    - from anuga.shallow_water.data_manager import SWW_file
    + from anuga.shallow_water.sww_file import SWW_file
      from csv import reader,writer
      import time
anuga_core/source/anuga/abstract_2d_finite_volumes/util.py (r7711 → r7735)

      from os import sep
    - from anuga.shallow_water.data_manager import\
    -      get_all_files_with_extension, csv2dict
    + from anuga.utilities.file_utils import load_csv_as_dict, \
    +      get_all_files_with_extension

      seconds_in_hour = 3600
      ...
    - from anuga.shallow_water.data_manager import get_all_directories_with_name,\
    -      get_maximum_inundation_data,\
    -      csv2dict
    + from anuga.shallow_water.data_manager import \
    +      get_maximum_inundation_data, csv2dict

      file = open(runup_filename, "w")
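Note: csv2dict is renamed load_csv_as_dict and now lives in anuga.utilities.file_utils. Per the old docstring, column values still come back as strings. A hedged usage sketch (the file name and the 'time' column are hypothetical):

    from anuga.utilities.file_utils import load_csv_as_dict

    # Returns ({title: column values}, {title: column index});
    # values are strings, so convert explicitly, as the old
    # csv2dict docstring recommended.
    columns, title_index = load_csv_as_dict('gauges.csv')
    time = [float(x) for x in columns['time']]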
anuga_core/source/anuga/culvert_flows/culvert_class.py (r7711 → r7735)

      import sys
    - from anuga.shallow_water.shallow_water_domain import Inflow, General_forcing
    + from anuga.shallow_water.forcing import Inflow, General_forcing
      from anuga.culvert_flows.culvert_polygons import create_culvert_polygons
      from anuga.utilities.system_tools import log_to_file
anuga_core/source/anuga/culvert_flows/new_culvert_class.py (r7711 → r7735)

      import sys
    - from anuga.shallow_water.shallow_water_domain import Inflow, General_forcing
    + from anuga.shallow_water.forcing import Inflow, General_forcing
      from anuga.culvert_flows.culvert_polygons import create_culvert_polygons
      from anuga.utilities.system_tools import log_to_file
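Note: both culvert modules pick up the same move of the forcing terms out of shallow_water_domain. A hedged compatibility sketch for external culvert scripts:

    try:
        from anuga.shallow_water.forcing import Inflow, General_forcing
    except ImportError:
        # pre-r7735 layout
        from anuga.shallow_water.shallow_water_domain import Inflow, \
             General_forcing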
anuga_core/source/anuga/damage_modelling/test_inundation_damage.py (r7342 → r7735)

      from anuga.shallow_water import Domain, Transmissive_boundary
      from anuga.utilities.numerical_tools import mean
    - from anuga.shallow_water.data_manager import SWW_file
    + from anuga.shallow_water.sww_file import SWW_file

      import numpy as num
      ...
      fd = open(csv_file,'wb')
      writer = csv.writer(fd)
    - writer.writerow(['x','y',STR_VALUE_LABEL,CONT_VALUE_LABEL,'ROOF_TYPE',WALL_TYPE_LABEL, SHORE_DIST_LABEL])
    + writer.writerow(['x', 'y', STR_VALUE_LABEL, CONT_VALUE_LABEL, \
    +                  'ROOF_TYPE', WALL_TYPE_LABEL, SHORE_DIST_LABEL])
      writer.writerow([5.5,0.5,'10','130000','Metal','Timber',20])
      writer.writerow([4.5,1.0,'150','76000','Metal','Double Brick',20])
anuga_core/source/anuga/fit_interpolate/test_interpolate.py (r7722 → r7735)

      from anuga.shallow_water import Domain, Transmissive_boundary
      from anuga.utilities.numerical_tools import mean, NAN
    - from anuga.shallow_water.data_manager import SWW_file
    + from anuga.shallow_water.sww_file import SWW_file
      from anuga.geospatial_data.geospatial_data import Geospatial_data
      from anuga.pmesh.mesh import Mesh
anuga_core/source/anuga/shallow_water/boundaries.py (r7732 → r7735)

      def __init__(self, domain=None, function=None):
    -     """ Instantiate a Transmissive_n_momentum_zero_t_momentum_set_stage_boundary.
    +     """ Instantiate a
    +     Transmissive_n_momentum_zero_t_momentum_set_stage_boundary.
          domain is the domain containing the boundary
          function is the function to apply
      ...
      def __repr__(self):
          """ Return a representation of this instance. """
    -     return 'Transmissive_n_momentum_zero_t_momentum_set_stage_boundary(%s)' %self.domain
    +     msg='Transmissive_n_momentum_zero_t_momentum_set_stage_boundary'
    +     msg+='(%s)' %self.domain
    +     return msg
      ...
      # Compute depth based on Manning's formula v = 1/n h^{2/3} sqrt(S)
    - # Where v is velocity, n is manning's coefficient, h is depth and S is the slope into the domain.
    + # Where v is velocity, n is manning's coefficient, h is depth
    + # and S is the slope into the domain.
      # Let mu be the momentum (vh), then this equation becomes: mu = 1/n h^{5/3} sqrt(S)
      # from which we can isolate depth to get
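Note: the comment's derivation is cut off in this view. Isolating h from mu = 1/n h^{5/3} sqrt(S) gives h = (n mu / sqrt(S))^{3/5}; a minimal sketch of that algebra (the function name is hypothetical, not from the changeset):

    import math

    def manning_depth(mu, n, slope):
        """Depth from momentum via Manning's formula:
        mu = (1/n) * h**(5/3) * sqrt(S)  =>  h = (n*mu/sqrt(S))**(3/5).
        mu is momentum (v*h), n Manning's coefficient, S the slope."""
        return (n * mu / math.sqrt(slope)) ** 0.6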
anuga_core/source/anuga/shallow_water/data_manager.py
r7711 r7735 94 94 import anuga.utilities.log as log 95 95 96 from anuga.utilities.file_utils import create_filename,\ 97 get_all_swwfiles, load_csv_as_dict 98 from sww_file import Read_sww, Write_sww 99 96 100 97 101 # Default block size for sww2dem() … … 118 122 'speed': \ 119 123 '(xmomentum**2 + ymomentum**2)**0.5/(stage-elevation+1.e-6/(stage-elevation))'} 120 121 122 ##123 # @brief Convert a possible filename into a standard form.124 # @param s Filename to process.125 # @return The new filename string.126 def make_filename(s):127 """Transform argument string into a Sexsuitable filename128 """129 130 s = s.strip()131 s = s.replace(' ', '_')132 s = s.replace('(', '')133 s = s.replace(')', '')134 s = s.replace('__', '_')135 136 return s137 138 139 ##140 # @brief Check that a specified filesystem directory path exists.141 # @param path The dirstory path to check.142 # @param verbose True if this function is to be verbose.143 # @note If directory path doesn't exist, it will be created.144 def check_dir(path, verbose=None):145 """Check that specified path exists.146 If path does not exist it will be created if possible147 148 USAGE:149 checkdir(path, verbose):150 151 ARGUMENTS:152 path -- Directory153 verbose -- Flag verbose output (default: None)154 155 RETURN VALUE:156 Verified path including trailing separator157 """158 159 import os.path160 161 if sys.platform in ['nt', 'dos', 'win32', 'what else?']:162 unix = 0163 else:164 unix = 1165 166 # add terminal separator, if it's not already there167 if path[-1] != os.sep:168 path = path + os.sep169 170 # expand ~ or ~username in path171 path = os.path.expanduser(path)172 173 # create directory if required174 if not (os.access(path, os.R_OK and os.W_OK) or path == ''):175 try:176 exitcode = os.mkdir(path)177 178 # Change access rights if possible179 if unix:180 exitcode = os.system('chmod 775 ' + path)181 else:182 pass # FIXME: What about access rights under Windows?183 184 if verbose: log.critical('MESSAGE: Directory %s created.' % path)185 except:186 log.critical('WARNING: Directory %s could not be created.' 
% path)187 if unix:188 path = '/tmp/'189 else:190 path = 'C:' + os.sep191 192 log.critical("Using directory '%s' instead" % path)193 194 return path195 196 197 ##198 # @brief Delete directory and all sub-directories.199 # @param path Path to the directory to delete.200 def del_dir(path):201 """Recursively delete directory path and all its contents202 """203 204 if os.path.isdir(path):205 for file in os.listdir(path):206 X = os.path.join(path, file)207 208 if os.path.isdir(X) and not os.path.islink(X):209 del_dir(X)210 else:211 try:212 os.remove(X)213 except:214 log.critical("Could not remove file %s" % X)215 216 os.rmdir(path)217 218 219 ##220 # @brief ??221 # @param path222 # @param __func__223 # @param verbose True if this function is to be verbose.224 # @note ANOTHER OPTION, IF NEED IN THE FUTURE, Nick B 7/2007225 def rmgeneric(path, func, verbose=False):226 ERROR_STR= """Error removing %(path)s, %(error)s """227 228 try:229 func(path)230 if verbose: log.critical('Removed %s' % path)231 except OSError, (errno, strerror):232 log.critical(ERROR_STR % {'path' : path, 'error': strerror })233 234 235 ##236 # @brief Remove directory and all sub-directories.237 # @param path Filesystem path to directory to remove.238 # @param verbose True if this function is to be verbose.239 def removeall(path, verbose=False):240 if not os.path.isdir(path):241 return242 243 for x in os.listdir(path):244 fullpath = os.path.join(path, x)245 if os.path.isfile(fullpath):246 f = os.remove247 rmgeneric(fullpath, f)248 elif os.path.isdir(fullpath):249 removeall(fullpath)250 f = os.rmdir251 rmgeneric(fullpath, f, verbose)252 253 254 ##255 # @brief Create a standard filename.256 # @param datadir Directory where file is to be created.257 # @param filename Filename 'stem'.258 # @param format Format of the file, becomes filename extension.259 # @param size Size of file, becomes part of filename.260 # @param time Time (float), becomes part of filename.261 # @return The complete filename path, including directory.262 # @note The containing directory is created, if necessary.263 def create_filename(datadir, filename, format, size=None, time=None):264 FN = check_dir(datadir) + filename265 266 if size is not None:267 FN += '_size%d' % size268 269 if time is not None:270 FN += '_time%.2f' % time271 272 FN += '.' + format273 274 return FN275 276 277 ##278 # @brief Get all files with a standard name and a given set of attributes.279 # @param datadir Directory files must be in.280 # @param filename Filename stem.281 # @param format Filename extension.282 # @param size Filename size.283 # @return A list of fielnames (including directory) that match the attributes.284 def get_files(datadir, filename, format, size):285 """Get all file (names) with given name, size and format286 """287 288 import glob289 290 dir = check_dir(datadir)291 pattern = dir + os.sep + filename + '_size=%d*.%s' % (size, format)292 293 return glob.glob(pattern)294 295 296 ##297 # @brief Generic class for storing output to e.g. 
visualisation or checkpointing298 class Data_format:299 """Generic interface to data formats300 """301 302 ##303 # @brief Instantiate this instance.304 # @param domain305 # @param extension306 # @param mode The mode of the underlying file.307 def __init__(self, domain, extension, mode=netcdf_mode_w):308 assert mode[0] in ['r', 'w', 'a'], \309 "Mode %s must be either:\n" % mode + \310 " 'w' (write)\n" + \311 " 'r' (read)\n" + \312 " 'a' (append)"313 314 #Create filename315 self.filename = create_filename(domain.get_datadir(),316 domain.get_name(), extension)317 318 self.timestep = 0319 self.domain = domain320 321 # Exclude ghosts in case this is a parallel domain322 self.number_of_nodes = domain.number_of_full_nodes323 self.number_of_volumes = domain.number_of_full_triangles324 #self.number_of_volumes = len(domain)325 326 #FIXME: Should we have a general set_precision function?327 328 329 ##330 # @brief Class for storing output to e.g. visualisation331 class SWW_file(Data_format):332 """Interface to native NetCDF format (.sww) for storing model output333 334 There are two kinds of data335 336 1: Constant data: Vertex coordinates and field values. Stored once337 2: Variable data: Conserved quantities. Stored once per timestep.338 339 All data is assumed to reside at vertex locations.340 """341 342 ##343 # @brief Instantiate this instance.344 # @param domain ??345 # @param mode Mode of the underlying data file.346 # @param max_size ??347 # @param recursion ??348 # @note Prepare the underlying data file if mode starts with 'w'.349 def __init__(self, domain,350 mode=netcdf_mode_w, max_size=2000000000, recursion=False):351 from Scientific.IO.NetCDF import NetCDFFile352 353 self.precision = netcdf_float32 # Use single precision for quantities354 self.recursion = recursion355 self.mode = mode356 if hasattr(domain, 'max_size'):357 self.max_size = domain.max_size # File size max is 2Gig358 else:359 self.max_size = max_size360 if hasattr(domain, 'minimum_storable_height'):361 self.minimum_storable_height = domain.minimum_storable_height362 else:363 self.minimum_storable_height = default_minimum_storable_height364 365 # Call parent constructor366 Data_format.__init__(self, domain, 'sww', mode)367 368 # Get static and dynamic quantities from domain369 static_quantities = []370 dynamic_quantities = []371 372 for q in domain.quantities_to_be_stored:373 flag = domain.quantities_to_be_stored[q]374 375 msg = 'Quantity %s is requested to be stored ' % q376 msg += 'but it does not exist in domain.quantities'377 assert q in domain.quantities, msg378 379 assert flag in [1,2]380 if flag == 1: static_quantities.append(q)381 if flag == 2: dynamic_quantities.append(q)382 383 384 # NetCDF file definition385 fid = NetCDFFile(self.filename, mode)386 if mode[0] == 'w':387 description = 'Output from anuga.abstract_2d_finite_volumes ' \388 'suitable for plotting'389 390 self.writer = Write_sww(static_quantities, dynamic_quantities)391 self.writer.store_header(fid,392 domain.starttime,393 self.number_of_volumes,394 self.domain.number_of_full_nodes,395 description=description,396 smoothing=domain.smooth,397 order=domain.default_order,398 sww_precision=self.precision)399 400 # Extra optional information401 if hasattr(domain, 'texture'):402 fid.texture = domain.texture403 404 if domain.quantities_to_be_monitored is not None:405 fid.createDimension('singleton', 1)406 fid.createDimension('two', 2)407 408 poly = domain.monitor_polygon409 if poly is not None:410 N = len(poly)411 fid.createDimension('polygon_length', N)412 
fid.createVariable('extrema.polygon',413 self.precision,414 ('polygon_length', 'two'))415 fid.variables['extrema.polygon'][:] = poly416 417 interval = domain.monitor_time_interval418 if interval is not None:419 fid.createVariable('extrema.time_interval',420 self.precision,421 ('two',))422 fid.variables['extrema.time_interval'][:] = interval423 424 for q in domain.quantities_to_be_monitored:425 fid.createVariable(q + '.extrema', self.precision,426 ('numbers_in_range',))427 fid.createVariable(q + '.min_location', self.precision,428 ('numbers_in_range',))429 fid.createVariable(q + '.max_location', self.precision,430 ('numbers_in_range',))431 fid.createVariable(q + '.min_time', self.precision,432 ('singleton',))433 fid.createVariable(q + '.max_time', self.precision,434 ('singleton',))435 436 fid.close()437 438 ##439 # @brief Store connectivity data into the underlying data file.440 def store_connectivity(self):441 """Store information about nodes, triangles and static quantities442 443 Writes x,y coordinates of triangles and their connectivity.444 445 Store also any quantity that has been identified as static.446 """447 448 # FIXME: Change name to reflect the fact thta this function449 # stores both connectivity (triangulation) and static quantities450 451 from Scientific.IO.NetCDF import NetCDFFile452 453 domain = self.domain454 455 # append to the NetCDF file456 fid = NetCDFFile(self.filename, netcdf_mode_a)457 458 # Get X, Y from one (any) of the quantities459 Q = domain.quantities.values()[0]460 X,Y,_,V = Q.get_vertex_values(xy=True, precision=self.precision)461 462 # store the connectivity data463 points = num.concatenate((X[:,num.newaxis],Y[:,num.newaxis]), axis=1)464 self.writer.store_triangulation(fid,465 points,466 V.astype(num.float32),467 points_georeference=\468 domain.geo_reference)469 470 471 # Get names of static quantities472 static_quantities = {}473 for name in self.writer.static_quantities:474 Q = domain.quantities[name]475 A, _ = Q.get_vertex_values(xy=False,476 precision=self.precision)477 static_quantities[name] = A478 479 # Store static quantities480 self.writer.store_static_quantities(fid, **static_quantities)481 482 fid.close()483 484 ##485 # @brief Store time and time dependent quantities486 # to the underlying data file.487 def store_timestep(self):488 """Store time and time dependent quantities489 """490 491 from Scientific.IO.NetCDF import NetCDFFile492 import types493 from time import sleep494 from os import stat495 496 # Get NetCDF497 retries = 0498 file_open = False499 while not file_open and retries < 10:500 try:501 # Open existing file502 fid = NetCDFFile(self.filename, netcdf_mode_a)503 except IOError:504 # This could happen if someone was reading the file.505 # In that case, wait a while and try again506 msg = 'Warning (store_timestep): File %s could not be opened' \507 % self.filename508 msg += ' - trying step %s again' % self.domain.time509 log.critical(msg)510 retries += 1511 sleep(1)512 else:513 file_open = True514 515 if not file_open:516 msg = 'File %s could not be opened for append' % self.filename517 raise DataFileNotOpenError, msg518 519 # Check to see if the file is already too big:520 time = fid.variables['time']521 i = len(time) + 1522 file_size = stat(self.filename)[6]523 file_size_increase = file_size / i524 if file_size + file_size_increase > self.max_size * 2**self.recursion:525 # In order to get the file name and start time correct,526 # I change the domain.filename and domain.starttime.527 # This is the only way to do this without 
changing528 # other modules (I think).529 530 # Write a filename addon that won't break the anuga viewers531 # (10.sww is bad)532 filename_ext = '_time_%s' % self.domain.time533 filename_ext = filename_ext.replace('.', '_')534 535 # Remember the old filename, then give domain a536 # name with the extension537 old_domain_filename = self.domain.get_name()538 if not self.recursion:539 self.domain.set_name(old_domain_filename + filename_ext)540 541 # Temporarily change the domain starttime to the current time542 old_domain_starttime = self.domain.starttime543 self.domain.starttime = self.domain.get_time()544 545 # Build a new data_structure.546 next_data_structure = SWW_file(self.domain, mode=self.mode,547 max_size=self.max_size,548 recursion=self.recursion+1)549 if not self.recursion:550 log.critical(' file_size = %s' % file_size)551 log.critical(' saving file to %s'552 % next_data_structure.filename)553 554 # Set up the new data_structure555 self.domain.writer = next_data_structure556 557 # Store connectivity and first timestep558 next_data_structure.store_connectivity()559 next_data_structure.store_timestep()560 fid.sync()561 fid.close()562 563 # Restore the old starttime and filename564 self.domain.starttime = old_domain_starttime565 self.domain.set_name(old_domain_filename)566 else:567 self.recursion = False568 domain = self.domain569 570 # Get the variables571 time = fid.variables['time']572 i = len(time)573 574 if 'stage' in self.writer.dynamic_quantities:575 # Select only those values for stage,576 # xmomentum and ymomentum (if stored) where577 # depth exceeds minimum_storable_height578 #579 # In this branch it is assumed that elevation580 # is also available as a quantity581 582 Q = domain.quantities['stage']583 w, _ = Q.get_vertex_values(xy=False)584 585 Q = domain.quantities['elevation']586 z, _ = Q.get_vertex_values(xy=False)587 588 storable_indices = (w-z >= self.minimum_storable_height)589 else:590 # Very unlikely branch591 storable_indices = None # This means take all592 593 594 # Now store dynamic quantities595 dynamic_quantities = {}596 for name in self.writer.dynamic_quantities:597 netcdf_array = fid.variables[name]598 599 Q = domain.quantities[name]600 A, _ = Q.get_vertex_values(xy=False,601 precision=self.precision)602 603 if storable_indices is not None:604 if name == 'stage':605 A = num.choose(storable_indices, (z, A))606 607 if name in ['xmomentum', 'ymomentum']:608 # Get xmomentum where depth exceeds609 # minimum_storable_height610 611 # Define a zero vector of same size and type as A612 # for use with momenta613 null = num.zeros(num.size(A), A.dtype.char)614 A = num.choose(storable_indices, (null, A))615 616 dynamic_quantities[name] = A617 618 619 # Store dynamic quantities620 self.writer.store_quantities(fid,621 time=self.domain.time,622 sww_precision=self.precision,623 **dynamic_quantities)624 625 626 # Update extrema if requested627 domain = self.domain628 if domain.quantities_to_be_monitored is not None:629 for q, info in domain.quantities_to_be_monitored.items():630 if info['min'] is not None:631 fid.variables[q + '.extrema'][0] = info['min']632 fid.variables[q + '.min_location'][:] = \633 info['min_location']634 fid.variables[q + '.min_time'][0] = info['min_time']635 636 if info['max'] is not None:637 fid.variables[q + '.extrema'][1] = info['max']638 fid.variables[q + '.max_location'][:] = \639 info['max_location']640 fid.variables[q + '.max_time'][0] = info['max_time']641 642 # Flush and close643 fid.sync()644 fid.close()645 646 647 ##648 # @brief Class to open 
an sww file so that domain can be populated with quantity values649 class Read_sww:650 651 def __init__(self, source):652 """The source parameter is assumed to be a NetCDF sww file.653 """654 655 self.source = source656 657 self.frame_number = 0658 659 fin = NetCDFFile(self.source, 'r')660 661 self.time = num.array(fin.variables['time'], num.float)662 self.last_frame_number = self.time.shape[0] - 1663 664 self.frames = num.arange(self.last_frame_number+1)665 666 fin.close()667 668 self.read_mesh()669 670 self.quantities = {}671 672 self.read_quantities()673 674 675 def read_mesh(self):676 fin = NetCDFFile(self.source, 'r')677 678 self.vertices = num.array(fin.variables['volumes'], num.int)679 680 self.x = x = num.array(fin.variables['x'], num.float)681 self.y = y = num.array(fin.variables['y'], num.float)682 683 assert len(self.x) == len(self.y)684 685 self.xmin = num.min(x)686 self.xmax = num.max(x)687 self.ymin = num.min(y)688 self.ymax = num.max(y)689 690 691 692 fin.close()693 694 def read_quantities(self, frame_number=0):695 696 assert frame_number >= 0 and frame_number <= self.last_frame_number697 698 self.frame_number = frame_number699 700 M = len(self.x)/3701 702 fin = NetCDFFile(self.source, 'r')703 704 for q in filter(lambda n:n != 'x' and n != 'y' and n != 'time' and n != 'volumes' and \705 '_range' not in n, \706 fin.variables.keys()):707 if len(fin.variables[q].shape) == 1: # Not a time-varying quantity708 self.quantities[q] = num.ravel(num.array(fin.variables[q], num.float)).reshape(M,3)709 else: # Time-varying, get the current timestep data710 self.quantities[q] = num.array(fin.variables[q][self.frame_number], num.float).reshape(M,3)711 fin.close()712 return self.quantities713 714 def get_bounds(self):715 return [self.xmin, self.xmax, self.ymin, self.ymax]716 717 def get_last_frame_number(self):718 return self.last_frame_number719 720 def get_time(self):721 return self.time[self.frame_number]722 723 724 ##725 # @brief Class for handling checkpoints data726 # @note This is not operational at the moment727 class CPT_file(Data_format):728 """Interface to native NetCDF format (.cpt) to be729 used for checkpointing (one day)730 """731 732 ##733 # @brief Initialize this instantiation.734 # @param domain ??735 # @param mode Mode of underlying data file (default WRITE).736 def __init__(self, domain, mode=netcdf_mode_w):737 from Scientific.IO.NetCDF import NetCDFFile738 739 self.precision = netcdf_float #Use full precision740 741 Data_format.__init__(self, domain, 'sww', mode)742 743 # NetCDF file definition744 fid = NetCDFFile(self.filename, mode)745 if mode[0] == 'w':746 # Create new file747 fid.institution = 'Geoscience Australia'748 fid.description = 'Checkpoint data'749 #fid.smooth = domain.smooth750 fid.order = domain.default_order751 752 # Dimension definitions753 fid.createDimension('number_of_volumes', self.number_of_volumes)754 fid.createDimension('number_of_vertices', 3)755 756 # Store info at all vertices (no smoothing)757 fid.createDimension('number_of_points', 3*self.number_of_volumes)758 fid.createDimension('number_of_timesteps', None) #extensible759 760 # Variable definitions761 762 # Mesh763 fid.createVariable('x', self.precision, ('number_of_points',))764 fid.createVariable('y', self.precision, ('number_of_points',))765 766 767 fid.createVariable('volumes', netcdf_int, ('number_of_volumes',768 'number_of_vertices'))769 770 fid.createVariable('time', self.precision, ('number_of_timesteps',))771 772 #Allocate space for all quantities773 for name in 
domain.quantities.keys():774 fid.createVariable(name, self.precision,775 ('number_of_timesteps',776 'number_of_points'))777 778 fid.close()779 780 ##781 # @brief Store connectivity data to underlying data file.782 def store_checkpoint(self):783 """Write x,y coordinates of triangles.784 Write connectivity (785 constituting786 the bed elevation.787 """788 789 from Scientific.IO.NetCDF import NetCDFFile790 791 domain = self.domain792 793 #Get NetCDF794 fid = NetCDFFile(self.filename, netcdf_mode_a)795 796 # Get the variables797 x = fid.variables['x']798 y = fid.variables['y']799 800 volumes = fid.variables['volumes']801 802 # Get X, Y and bed elevation Z803 Q = domain.quantities['elevation']804 X,Y,Z,V = Q.get_vertex_values(xy=True, precision=self.precision)805 806 x[:] = X.astype(self.precision)807 y[:] = Y.astype(self.precision)808 z[:] = Z.astype(self.precision)809 810 volumes[:] = V811 812 fid.close()813 814 ##815 # @brief Store time and named quantities to underlying data file.816 # @param name817 def store_timestep(self, name):818 """Store time and named quantity to file819 """820 821 from Scientific.IO.NetCDF import NetCDFFile822 from time import sleep823 824 #Get NetCDF825 retries = 0826 file_open = False827 while not file_open and retries < 10:828 try:829 fid = NetCDFFile(self.filename, netcdf_mode_a)830 except IOError:831 #This could happen if someone was reading the file.832 #In that case, wait a while and try again833 msg = 'Warning (store_timestep): File %s could not be opened' \834 ' - trying again' % self.filename835 log.critical(msg)836 retries += 1837 sleep(1)838 else:839 file_open = True840 841 if not file_open:842 msg = 'File %s could not be opened for append' % self.filename843 raise DataFileNotOpenError, msg844 845 domain = self.domain846 847 # Get the variables848 time = fid.variables['time']849 stage = fid.variables['stage']850 i = len(time)851 852 #Store stage853 time[i] = self.domain.time854 855 # Get quantity856 Q = domain.quantities[name]857 A,V = Q.get_vertex_values(xy=False, precision=self.precision)858 859 stage[i,:] = A.astype(self.precision)860 861 #Flush and close862 fid.sync()863 fid.close()864 124 865 125 … … 924 184 #The values are the index positions of file columns. 925 185 self._attribute_dic, self._title_index_dic = \ 926 csv2dict(self._file_name, title_check_list=title_check_list) 186 load_csv_as_dict(self._file_name, \ 187 title_check_list=title_check_list) 927 188 try: 928 189 #Have code here that handles caps or lower … … 1186 447 will be excluded 1187 448 1188 See underlying function csv2dict for more details.1189 """ 1190 1191 X, _ = csv2dict(file_name)449 See underlying function load_csv_as_dict for more details. 450 """ 451 452 X, _ = load_csv_as_dict(file_name) 1192 453 1193 454 msg = 'Polygon csv file must have 3 or 4 columns' … … 1268 529 # @brief Convert CSV file to a dictionary of arrays. 1269 530 # @param file_name The path to the file to read. 1270 def csv2array(file_name):531 def load_csv_as_array(file_name): 1271 532 """ 1272 533 Convert CSV files of the form: … … 1280 541 1281 542 1282 See underlying function csv2dict for more details.1283 """ 1284 1285 X, _ = csv2dict(file_name)543 See underlying function load_csv_as_dict for more details. 
544 """ 545 546 X, _ = load_csv_as_dict(file_name) 1286 547 1287 548 Y = {} … … 1291 552 return Y 1292 553 1293 1294 ##1295 # @brief Read a CSV file and convert to a dictionary of {key: column}.1296 # @param file_name The path to the file to read.1297 # @param title_check_list List of titles that *must* be columns in the file.1298 # @return Two dicts: ({key:column}, {title:index}).1299 # @note WARNING: Values are returned as strings.1300 def csv2dict(file_name, title_check_list=None):1301 """1302 Load in the csv as a dictionary, title as key and column info as value.1303 Also, create a dictionary, title as key and column index as value,1304 to keep track of the column order.1305 1306 Two dictionaries are returned.1307 1308 WARNING: Values are returned as strings.1309 Do this to change a list of strings to a list of floats1310 time = [float(x) for x in time]1311 """1312 1313 # FIXME(Ole): Consider dealing with files without headers1314 # FIXME(Ole): Consider a wrapper automatically converting text fields1315 # to the right type by trying for: int, float, string1316 1317 attribute_dic = {}1318 title_index_dic = {}1319 titles_stripped = [] # List of titles1320 1321 reader = csv.reader(file(file_name))1322 1323 # Read in and manipulate the title info1324 titles = reader.next()1325 for i, title in enumerate(titles):1326 header = title.strip()1327 titles_stripped.append(header)1328 title_index_dic[header] = i1329 title_count = len(titles_stripped)1330 1331 # Check required columns1332 if title_check_list is not None:1333 for title_check in title_check_list:1334 if not title_index_dic.has_key(title_check):1335 msg = 'Reading error. This row is not present %s' % title_check1336 raise IOError, msg1337 1338 # Create a dictionary of column values, indexed by column title1339 for line in reader:1340 n = len(line) # Number of entries1341 if n != title_count:1342 msg = 'Entry in file %s had %d columns ' % (file_name, n)1343 msg += 'although there were %d headers' % title_count1344 raise IOError, msg1345 for i, value in enumerate(line):1346 attribute_dic.setdefault(titles_stripped[i], []).append(value)1347 1348 return attribute_dic, title_index_dic1349 554 1350 555 … … 1390 595 1391 596 outfile.close() 1392 1393 1394 #########################################################1395 #Conversion routines1396 ########################################################1397 1398 ##1399 # @brief Convert SWW data to OBJ data.1400 # @param basefilename Stem of filename, needs size and extension added.1401 # @param size The number of lines to write.1402 def sww2obj(basefilename, size):1403 """Convert netcdf based data output to obj1404 """1405 1406 from Scientific.IO.NetCDF import NetCDFFile1407 1408 # Get NetCDF1409 FN = create_filename('.', basefilename, 'sww', size)1410 log.critical('Reading from %s' % FN)1411 fid = NetCDFFile(FN, netcdf_mode_r) #Open existing file for read1412 1413 # Get the variables1414 x = fid.variables['x']1415 y = fid.variables['y']1416 z = fid.variables['elevation']1417 time = fid.variables['time']1418 stage = fid.variables['stage']1419 1420 M = size #Number of lines1421 xx = num.zeros((M,3), num.float)1422 yy = num.zeros((M,3), num.float)1423 zz = num.zeros((M,3), num.float)1424 1425 for i in range(M):1426 for j in range(3):1427 xx[i,j] = x[i+j*M]1428 yy[i,j] = y[i+j*M]1429 zz[i,j] = z[i+j*M]1430 1431 # Write obj for bathymetry1432 FN = create_filename('.', basefilename, 'obj', size)1433 write_obj(FN,xx,yy,zz)1434 1435 # Now read all the data with variable information, combine with1436 # x,y 
info and store as obj1437 for k in range(len(time)):1438 t = time[k]1439 log.critical('Processing timestep %f' % t)1440 1441 for i in range(M):1442 for j in range(3):1443 zz[i,j] = stage[k,i+j*M]1444 1445 #Write obj for variable data1446 #FN = create_filename(basefilename, 'obj', size, time=t)1447 FN = create_filename('.', basefilename[:5], 'obj', size, time=t)1448 write_obj(FN, xx, yy, zz)1449 1450 1451 ##1452 # @brief1453 # @param basefilename Stem of filename, needs size and extension added.1454 def dat2obj(basefilename):1455 """Convert line based data output to obj1456 FIXME: Obsolete?1457 """1458 1459 import glob, os1460 from anuga.config import data_dir1461 1462 # Get bathymetry and x,y's1463 lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()1464 1465 M = len(lines) #Number of lines1466 x = num.zeros((M,3), num.float)1467 y = num.zeros((M,3), num.float)1468 z = num.zeros((M,3), num.float)1469 1470 for i, line in enumerate(lines):1471 tokens = line.split()1472 values = map(float, tokens)1473 1474 for j in range(3):1475 x[i,j] = values[j*3]1476 y[i,j] = values[j*3+1]1477 z[i,j] = values[j*3+2]1478 1479 # Write obj for bathymetry1480 write_obj(data_dir + os.sep + basefilename + '_geometry', x, y, z)1481 1482 # Now read all the data files with variable information, combine with1483 # x,y info and store as obj.1484 1485 files = glob.glob(data_dir + os.sep + basefilename + '*.dat')1486 for filename in files:1487 log.critical('Processing %s' % filename)1488 1489 lines = open(data_dir + os.sep + filename, 'r').readlines()1490 assert len(lines) == M1491 root, ext = os.path.splitext(filename)1492 1493 # Get time from filename1494 i0 = filename.find('_time=')1495 if i0 == -1:1496 #Skip bathymetry file1497 continue1498 1499 i0 += 6 #Position where time starts1500 i1 = filename.find('.dat')1501 1502 if i1 > i0:1503 t = float(filename[i0:i1])1504 else:1505 raise DataTimeError, 'Hmmmm'1506 1507 for i, line in enumerate(lines):1508 tokens = line.split()1509 values = map(float,tokens)1510 1511 for j in range(3):1512 z[i,j] = values[j]1513 1514 # Write obj for variable data1515 write_obj(data_dir + os.sep + basefilename + '_time=%.4f' % t, x, y, z)1516 1517 597 1518 598 ## … … 1573 653 outfile.close() 1574 654 1575 1576 ##1577 # @brief Convert DEM data to PTS data.1578 # @param basename_in Stem of input filename.1579 # @param basename_out Stem of output filename.1580 # @param easting_min1581 # @param easting_max1582 # @param northing_min1583 # @param northing_max1584 # @param use_cache1585 # @param verbose1586 # @return1587 def dem2pts(basename_in, basename_out=None,1588 easting_min=None, easting_max=None,1589 northing_min=None, northing_max=None,1590 use_cache=False, verbose=False,):1591 """Read Digitial Elevation model from the following NetCDF format (.dem)1592 1593 Example:1594 1595 ncols 31211596 nrows 18001597 xllcorner 7220001598 yllcorner 58930001599 cellsize 251600 NODATA_value -99991601 138.3698 137.4194 136.5062 135.5558 ..........1602 1603 Convert to NetCDF pts format which is1604 1605 points: (Nx2) float array1606 elevation: N float array1607 """1608 1609 kwargs = {'basename_out': basename_out,1610 'easting_min': easting_min,1611 'easting_max': easting_max,1612 'northing_min': northing_min,1613 'northing_max': northing_max,1614 'verbose': verbose}1615 1616 if use_cache is True:1617 from caching import cache1618 result = cache(_dem2pts, basename_in, kwargs,1619 dependencies = [basename_in + '.dem'],1620 verbose = verbose)1621 1622 else:1623 result = apply(_dem2pts, 
[basename_in], kwargs)1624 1625 return result1626 1627 1628 ##1629 # @brief1630 # @param basename_in1631 # @param basename_out1632 # @param verbose1633 # @param easting_min1634 # @param easting_max1635 # @param northing_min1636 # @param northing_max1637 def _dem2pts(basename_in, basename_out=None, verbose=False,1638 easting_min=None, easting_max=None,1639 northing_min=None, northing_max=None):1640 """Read Digitial Elevation model from the following NetCDF format (.dem)1641 1642 Internal function. See public function dem2pts for details.1643 """1644 1645 # FIXME: Can this be written feasibly using write_pts?1646 1647 import os1648 from Scientific.IO.NetCDF import NetCDFFile1649 1650 root = basename_in1651 1652 # Get NetCDF1653 infile = NetCDFFile(root + '.dem', netcdf_mode_r)1654 1655 if verbose: log.critical('Reading DEM from %s' % (root + '.dem'))1656 1657 ncols = infile.ncols[0]1658 nrows = infile.nrows[0]1659 xllcorner = infile.xllcorner[0] # Easting of lower left corner1660 yllcorner = infile.yllcorner[0] # Northing of lower left corner1661 cellsize = infile.cellsize[0]1662 NODATA_value = infile.NODATA_value[0]1663 dem_elevation = infile.variables['elevation']1664 1665 zone = infile.zone[0]1666 false_easting = infile.false_easting[0]1667 false_northing = infile.false_northing[0]1668 1669 # Text strings1670 projection = infile.projection1671 datum = infile.datum1672 units = infile.units1673 1674 # Get output file1675 if basename_out == None:1676 ptsname = root + '.pts'1677 else:1678 ptsname = basename_out + '.pts'1679 1680 if verbose: log.critical('Store to NetCDF file %s' % ptsname)1681 1682 # NetCDF file definition1683 outfile = NetCDFFile(ptsname, netcdf_mode_w)1684 1685 # Create new file1686 outfile.institution = 'Geoscience Australia'1687 outfile.description = 'NetCDF pts format for compact and portable ' \1688 'storage of spatial point data'1689 1690 # Assign default values1691 if easting_min is None: easting_min = xllcorner1692 if easting_max is None: easting_max = xllcorner + ncols*cellsize1693 if northing_min is None: northing_min = yllcorner1694 if northing_max is None: northing_max = yllcorner + nrows*cellsize1695 1696 # Compute offsets to update georeferencing1697 easting_offset = xllcorner - easting_min1698 northing_offset = yllcorner - northing_min1699 1700 # Georeferencing1701 outfile.zone = zone1702 outfile.xllcorner = easting_min # Easting of lower left corner1703 outfile.yllcorner = northing_min # Northing of lower left corner1704 outfile.false_easting = false_easting1705 outfile.false_northing = false_northing1706 1707 outfile.projection = projection1708 outfile.datum = datum1709 outfile.units = units1710 1711 # Grid info (FIXME: probably not going to be used, but heck)1712 outfile.ncols = ncols1713 outfile.nrows = nrows1714 1715 dem_elevation_r = num.reshape(dem_elevation, (nrows, ncols))1716 totalnopoints = nrows*ncols1717 1718 # Calculating number of NODATA_values for each row in clipped region1719 # FIXME: use array operations to do faster1720 nn = 01721 k = 01722 i1_0 = 01723 j1_0 = 01724 thisj = 01725 thisi = 01726 for i in range(nrows):1727 y = (nrows-i-1)*cellsize + yllcorner1728 for j in range(ncols):1729 x = j*cellsize + xllcorner1730 if easting_min <= x <= easting_max \1731 and northing_min <= y <= northing_max:1732 thisj = j1733 thisi = i1734 if dem_elevation_r[i,j] == NODATA_value:1735 nn += 11736 1737 if k == 0:1738 i1_0 = i1739 j1_0 = j1740 1741 k += 11742 1743 index1 = j1_01744 index2 = thisj1745 1746 # Dimension definitions1747 nrows_in_bounding_box = 
int(round((northing_max-northing_min)/cellsize))1748 ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))1749 1750 clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)1751 nopoints = clippednopoints-nn1752 1753 clipped_dem_elev = dem_elevation_r[i1_0:thisi+1,j1_0:thisj+1]1754 1755 if verbose:1756 log.critical('There are %d values in the elevation' % totalnopoints)1757 log.critical('There are %d values in the clipped elevation'1758 % clippednopoints)1759 log.critical('There are %d NODATA_values in the clipped elevation' % nn)1760 1761 outfile.createDimension('number_of_points', nopoints)1762 outfile.createDimension('number_of_dimensions', 2) #This is 2d data1763 1764 # Variable definitions1765 outfile.createVariable('points', netcdf_float, ('number_of_points',1766 'number_of_dimensions'))1767 outfile.createVariable('elevation', netcdf_float, ('number_of_points',))1768 1769 # Get handles to the variables1770 points = outfile.variables['points']1771 elevation = outfile.variables['elevation']1772 1773 lenv = index2-index1+11774 1775 # Store data1776 global_index = 01777 # for i in range(nrows):1778 for i in range(i1_0, thisi+1, 1):1779 if verbose and i % ((nrows+10)/10) == 0:1780 log.critical('Processing row %d of %d' % (i, nrows))1781 1782 lower_index = global_index1783 1784 v = dem_elevation_r[i,index1:index2+1]1785 no_NODATA = num.sum(v == NODATA_value)1786 if no_NODATA > 0:1787 newcols = lenv - no_NODATA # ncols_in_bounding_box - no_NODATA1788 else:1789 newcols = lenv # ncols_in_bounding_box1790 1791 telev = num.zeros(newcols, num.float)1792 tpoints = num.zeros((newcols, 2), num.float)1793 1794 local_index = 01795 1796 y = (nrows-i-1)*cellsize + yllcorner1797 #for j in range(ncols):1798 for j in range(j1_0,index2+1,1):1799 x = j*cellsize + xllcorner1800 if easting_min <= x <= easting_max \1801 and northing_min <= y <= northing_max \1802 and dem_elevation_r[i,j] <> NODATA_value:1803 tpoints[local_index, :] = [x-easting_min, y-northing_min]1804 telev[local_index] = dem_elevation_r[i, j]1805 global_index += 11806 local_index += 11807 1808 upper_index = global_index1809 1810 if upper_index == lower_index + newcols:1811 points[lower_index:upper_index, :] = tpoints1812 elevation[lower_index:upper_index] = telev1813 1814 assert global_index == nopoints, 'index not equal to number of points'1815 1816 infile.close()1817 outfile.close()1818 655 1819 656 … … 3456 2293 3457 2294 outfile.close() 3458 3459 3460 ##3461 # @brief Convert time-series text file to TMS file.3462 # @param filename3463 # @param quantity_names3464 # @param time_as_seconds3465 def timefile2netcdf(filename, quantity_names=None, time_as_seconds=False):3466 """Template for converting typical text files with time series to3467 NetCDF tms file.3468 3469 The file format is assumed to be either two fields separated by a comma:3470 3471 time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...3472 3473 E.g3474 3475 31/08/04 00:00:00, 1.328223 0 03476 31/08/04 00:15:00, 1.292912 0 03477 3478 or time (seconds), value0 value1 value2 ...3479 3480 0.0, 1.328223 0 03481 0.1, 1.292912 0 03482 3483 will provide a time dependent function f(t) with three attributes3484 3485 filename is assumed to be the rootname with extenisons .txt and .sww3486 """3487 3488 import time, calendar3489 from anuga.config import time_format3490 from anuga.utilities.numerical_tools import ensure_numeric3491 3492 file_text = filename + '.txt'3493 fid = open(file_text)3494 line = fid.readline()3495 fid.close()3496 3497 fields = line.split(',')3498 msg = "File %s 
must have the format 'datetime, value0 value1 value2 ...'" \3499 % file_text3500 assert len(fields) == 2, msg3501 3502 if not time_as_seconds:3503 try:3504 starttime = calendar.timegm(time.strptime(fields[0], time_format))3505 except ValueError:3506 msg = 'First field in file %s must be' % file_text3507 msg += ' date-time with format %s.\n' % time_format3508 msg += 'I got %s instead.' % fields[0]3509 raise DataTimeError, msg3510 else:3511 try:3512 starttime = float(fields[0])3513 except Error:3514 msg = "Bad time format"3515 raise DataTimeError, msg3516 3517 # Split values3518 values = []3519 for value in fields[1].split():3520 values.append(float(value))3521 3522 q = ensure_numeric(values)3523 3524 msg = 'ERROR: File must contain at least one independent value'3525 assert len(q.shape) == 1, msg3526 3527 # Read times proper3528 from anuga.config import time_format3529 import time, calendar3530 3531 fid = open(file_text)3532 lines = fid.readlines()3533 fid.close()3534 3535 N = len(lines)3536 d = len(q)3537 3538 T = num.zeros(N, num.float) # Time3539 Q = num.zeros((N, d), num.float) # Values3540 3541 for i, line in enumerate(lines):3542 fields = line.split(',')3543 if not time_as_seconds:3544 realtime = calendar.timegm(time.strptime(fields[0], time_format))3545 else:3546 realtime = float(fields[0])3547 T[i] = realtime - starttime3548 3549 for j, value in enumerate(fields[1].split()):3550 Q[i, j] = float(value)3551 3552 msg = 'File %s must list time as a monotonuosly ' % filename3553 msg += 'increasing sequence'3554 assert num.alltrue(T[1:] - T[:-1] > 0), msg3555 3556 #Create NetCDF file3557 from Scientific.IO.NetCDF import NetCDFFile3558 3559 fid = NetCDFFile(filename + '.tms', netcdf_mode_w)3560 3561 fid.institution = 'Geoscience Australia'3562 fid.description = 'Time series'3563 3564 #Reference point3565 #Start time in seconds since the epoch (midnight 1/1/1970)3566 #FIXME: Use Georef3567 fid.starttime = starttime3568 3569 # dimension definitions3570 #fid.createDimension('number_of_volumes', self.number_of_volumes)3571 #fid.createDimension('number_of_vertices', 3)3572 3573 fid.createDimension('number_of_timesteps', len(T))3574 3575 fid.createVariable('time', netcdf_float, ('number_of_timesteps',))3576 3577 fid.variables['time'][:] = T3578 3579 for i in range(Q.shape[1]):3580 try:3581 name = quantity_names[i]3582 except:3583 name = 'Attribute%d' % i3584 3585 fid.createVariable(name, netcdf_float, ('number_of_timesteps',))3586 fid.variables[name][:] = Q[:,i]3587 3588 fid.close()3589 2295 3590 2296 … … 4054 2760 4055 2761 ## 4056 # @brief4057 # @param filename4058 # @param verbose4059 def tsh2sww(filename, verbose=False):4060 """4061 to check if a tsh/msh file 'looks' good.4062 """4063 4064 if verbose == True: log.critical('Creating domain from %s' % filename)4065 4066 domain = pmesh_to_domain_instance(filename, Domain)4067 4068 if verbose == True: log.critical("Number of triangles = %s" % len(domain))4069 4070 domain.smooth = True4071 domain.format = 'sww' #Native netcdf visualisation format4072 file_path, filename = path.split(filename)4073 filename, ext = path.splitext(filename)4074 domain.set_name(filename)4075 domain.reduction = mean4076 4077 if verbose == True: log.critical("file_path = %s" % file_path)4078 4079 if file_path == "":4080 file_path = "."4081 domain.set_datadir(file_path)4082 4083 if verbose == True:4084 log.critical("Output written to %s%s%s.%s"4085 % (domain.get_datadir(), sep, domain.get_name(),4086 domain.format))4087 4088 sww = SWW_file(domain)4089 
sww.store_connectivity()4090 sww.store_timestep()4091 4092 4093 ##4094 # @brief Convert CSIRO ESRI file to an SWW boundary file.4095 # @param bath_dir4096 # @param elevation_dir4097 # @param ucur_dir4098 # @param vcur_dir4099 # @param sww_file4100 # @param minlat4101 # @param maxlat4102 # @param minlon4103 # @param maxlon4104 # @param zscale4105 # @param mean_stage4106 # @param fail_on_NaN4107 # @param elevation_NaN_filler4108 # @param bath_prefix4109 # @param elevation_prefix4110 # @param verbose4111 # @note Also convert latitude and longitude to UTM. All coordinates are4112 # assumed to be given in the GDA94 datum.4113 def asc_csiro2sww(bath_dir,4114 elevation_dir,4115 ucur_dir,4116 vcur_dir,4117 sww_file,4118 minlat=None, maxlat=None,4119 minlon=None, maxlon=None,4120 zscale=1,4121 mean_stage=0,4122 fail_on_NaN=True,4123 elevation_NaN_filler=0,4124 bath_prefix='ba',4125 elevation_prefix='el',4126 verbose=False):4127 """4128 Produce an sww boundary file, from esri ascii data from CSIRO.4129 4130 Also convert latitude and longitude to UTM. All coordinates are4131 assumed to be given in the GDA94 datum.4132 4133 assume:4134 All files are in esri ascii format4135 4136 4 types of information4137 bathymetry4138 elevation4139 u velocity4140 v velocity4141 4142 Assumptions4143 The metadata of all the files is the same4144 Each type is in a seperate directory4145 One bath file with extention .0004146 The time period is less than 24hrs and uniform.4147 """4148 4149 from Scientific.IO.NetCDF import NetCDFFile4150 4151 from anuga.coordinate_transforms.redfearn import redfearn4152 4153 precision = netcdf_float # So if we want to change the precision its done here4154 4155 # go in to the bath dir and load the only file,4156 bath_files = os.listdir(bath_dir)4157 bath_file = bath_files[0]4158 bath_dir_file = bath_dir + os.sep + bath_file4159 bath_metadata, bath_grid = _read_asc(bath_dir_file)4160 4161 #Use the date.time of the bath file as a basis for4162 #the start time for other files4163 base_start = bath_file[-12:]4164 4165 #go into the elevation dir and load the 000 file4166 elevation_dir_file = elevation_dir + os.sep + elevation_prefix \4167 + base_start4168 4169 elevation_files = os.listdir(elevation_dir)4170 ucur_files = os.listdir(ucur_dir)4171 vcur_files = os.listdir(vcur_dir)4172 elevation_files.sort()4173 4174 # the first elevation file should be the4175 # file with the same base name as the bath data4176 assert elevation_files[0] == 'el' + base_start4177 4178 number_of_latitudes = bath_grid.shape[0]4179 number_of_longitudes = bath_grid.shape[1]4180 number_of_volumes = (number_of_latitudes-1) * (number_of_longitudes-1) * 24181 4182 longitudes = [bath_metadata['xllcorner'] + x*bath_metadata['cellsize'] \4183 for x in range(number_of_longitudes)]4184 latitudes = [bath_metadata['yllcorner'] + y*bath_metadata['cellsize'] \4185 for y in range(number_of_latitudes)]4186 4187 # reverse order of lat, so the first lat represents the first grid row4188 latitudes.reverse()4189 4190 kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],longitudes[:],4191 minlat=minlat, maxlat=maxlat,4192 minlon=minlon, maxlon=maxlon)4193 4194 bath_grid = bath_grid[kmin:kmax,lmin:lmax]4195 latitudes = latitudes[kmin:kmax]4196 longitudes = longitudes[lmin:lmax]4197 number_of_latitudes = len(latitudes)4198 number_of_longitudes = len(longitudes)4199 number_of_times = len(os.listdir(elevation_dir))4200 number_of_points = number_of_latitudes * number_of_longitudes4201 number_of_volumes = (number_of_latitudes-1) * 
(number_of_longitudes-1) * 24202 4203 #Work out the times4204 if len(elevation_files) > 1:4205 # Assume: The time period is less than 24hrs.4206 time_period = (int(elevation_files[1][-3:]) \4207 - int(elevation_files[0][-3:])) * 60*604208 times = [x*time_period for x in range(len(elevation_files))]4209 else:4210 times = [0.0]4211 4212 if verbose:4213 log.critical('------------------------------------------------')4214 log.critical('Statistics:')4215 log.critical(' Extent (lat/lon):')4216 log.critical(' lat in [%f, %f], len(lat) == %d'4217 % (min(latitudes), max(latitudes), len(latitudes)))4218 log.critical(' lon in [%f, %f], len(lon) == %d'4219 % (min(longitudes), max(longitudes), len(longitudes)))4220 log.critical(' t in [%f, %f], len(t) == %d'4221 % (min(times), max(times), len(times)))4222 4223 ######### WRITE THE SWW FILE #############4224 4225 # NetCDF file definition4226 outfile = NetCDFFile(sww_file, netcdf_mode_w)4227 4228 #Create new file4229 outfile.institution = 'Geoscience Australia'4230 outfile.description = 'Converted from XXX'4231 4232 #For sww compatibility4233 outfile.smoothing = 'Yes'4234 outfile.order = 14235 4236 #Start time in seconds since the epoch (midnight 1/1/1970)4237 outfile.starttime = starttime = times[0]4238 4239 # dimension definitions4240 outfile.createDimension('number_of_volumes', number_of_volumes)4241 outfile.createDimension('number_of_vertices', 3)4242 outfile.createDimension('number_of_points', number_of_points)4243 outfile.createDimension('number_of_timesteps', number_of_times)4244 4245 # variable definitions4246 outfile.createVariable('x', precision, ('number_of_points',))4247 outfile.createVariable('y', precision, ('number_of_points',))4248 outfile.createVariable('elevation', precision, ('number_of_points',))4249 4250 #FIXME: Backwards compatibility4251 #outfile.createVariable('z', precision, ('number_of_points',))4252 #################################4253 4254 outfile.createVariable('volumes', netcdf_int, ('number_of_volumes',4255 'number_of_vertices'))4256 4257 outfile.createVariable('time', precision, ('number_of_timesteps',))4258 4259 outfile.createVariable('stage', precision, ('number_of_timesteps',4260 'number_of_points'))4261 4262 outfile.createVariable('xmomentum', precision, ('number_of_timesteps',4263 'number_of_points'))4264 4265 outfile.createVariable('ymomentum', precision, ('number_of_timesteps',4266 'number_of_points'))4267 4268 #Store4269 from anuga.coordinate_transforms.redfearn import redfearn4270 4271 x = num.zeros(number_of_points, num.float) #Easting4272 y = num.zeros(number_of_points, num.float) #Northing4273 4274 if verbose: log.critical('Making triangular grid')4275 4276 #Get zone of 1st point.4277 refzone, _, _ = redfearn(latitudes[0], longitudes[0])4278 4279 vertices = {}4280 i = 04281 for k, lat in enumerate(latitudes):4282 for l, lon in enumerate(longitudes):4283 vertices[l,k] = i4284 4285 zone, easting, northing = redfearn(lat, lon)4286 4287 #msg = 'Zone boundary crossed at longitude =', lon4288 #assert zone == refzone, msg4289 #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)4290 x[i] = easting4291 y[i] = northing4292 i += 14293 4294 #Construct 2 triangles per 'rectangular' element4295 volumes = []4296 for l in range(number_of_longitudes-1): #X direction4297 for k in range(number_of_latitudes-1): #Y direction4298 v1 = vertices[l,k+1]4299 v2 = vertices[l,k]4300 v3 = vertices[l+1,k+1]4301 v4 = vertices[l+1,k]4302 4303 #Note, this is different to the ferrit2sww code4304 #since the order of the lats is 
reversed.4305 volumes.append([v1,v3,v2]) #Upper element4306 volumes.append([v4,v2,v3]) #Lower element4307 4308 volumes = num.array(volumes, num.int) #array default#4309 4310 geo_ref = Geo_reference(refzone, min(x), min(y))4311 geo_ref.write_NetCDF(outfile)4312 4313 # This will put the geo ref in the middle4314 #geo_ref = Geo_reference(refzone, (max(x)+min(x))/2., (max(x)+min(y))/2.)4315 4316 if verbose:4317 log.critical('------------------------------------------------')4318 log.critical('More Statistics:')4319 log.critical(' Extent (/lon):')4320 log.critical(' x in [%f, %f], len(lat) == %d'4321 % (min(x), max(x), len(x)))4322 log.critical(' y in [%f, %f], len(lon) == %d'4323 % (min(y), max(y), len(y)))4324 log.critical('geo_ref: ', geo_ref)4325 4326 z = num.resize(bath_grid,outfile.variables['elevation'][:].shape)4327 outfile.variables['x'][:] = x - geo_ref.get_xllcorner()4328 outfile.variables['y'][:] = y - geo_ref.get_yllcorner()4329 # FIXME (Ole): Remove once viewer has been recompiled and changed4330 # to use elevation instead of z4331 #outfile.variables['z'][:] = z4332 outfile.variables['elevation'][:] = z4333 outfile.variables['volumes'][:] = volumes.astype(num.int32) # On Opteron 644334 4335 stage = outfile.variables['stage']4336 xmomentum = outfile.variables['xmomentum']4337 ymomentum = outfile.variables['ymomentum']4338 4339 outfile.variables['time'][:] = times #Store time relative4340 4341 if verbose: log.critical('Converting quantities')4342 4343 n = number_of_times4344 for j in range(number_of_times):4345 # load in files4346 elevation_meta, elevation_grid = \4347 _read_asc(elevation_dir + os.sep + elevation_files[j])4348 4349 _, u_momentum_grid = _read_asc(ucur_dir + os.sep + ucur_files[j])4350 _, v_momentum_grid = _read_asc(vcur_dir + os.sep + vcur_files[j])4351 4352 #cut matrix to desired size4353 elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]4354 u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]4355 v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]4356 4357 # handle missing values4358 missing = (elevation_grid == elevation_meta['NODATA_value'])4359 if num.sometrue (missing):4360 if fail_on_NaN:4361 msg = 'File %s contains missing values' \4362 % (elevation_files[j])4363 raise DataMissingValuesError, msg4364 else:4365 elevation_grid = elevation_grid*(missing==0) \4366 + missing*elevation_NaN_filler4367 4368 if verbose and j % ((n+10)/10) == 0: log.critical(' Doing %d of %d'4369 % (j, n))4370 4371 i = 04372 for k in range(number_of_latitudes): #Y direction4373 for l in range(number_of_longitudes): #X direction4374 w = zscale*elevation_grid[k,l] + mean_stage4375 stage[j,i] = w4376 h = w - z[i]4377 xmomentum[j,i] = u_momentum_grid[k,l]*h4378 ymomentum[j,i] = v_momentum_grid[k,l]*h4379 i += 14380 4381 outfile.close()4382 4383 4384 ##4385 2762 # @brief Return max&min indexes (for slicing) of area specified. 4386 2763 # @param latitudes_ref ?? … … 5951 4328 5952 4329 5953 # @brief A class to write an SWW file.5954 class Write_sww:5955 from anuga.shallow_water.shallow_water_domain import Domain5956 5957 RANGE = '_range'5958 EXTREMA = ':extrema'5959 5960 ##5961 # brief Instantiate the SWW writer class.5962 def __init__(self, static_quantities, dynamic_quantities):5963 """Initialise Write_sww with two list af quantity names:5964 5965 static_quantities (e.g. 
elevation or friction):5966 Stored once at the beginning of the simulation in a 1D array5967 of length number_of_points5968 dynamic_quantities (e.g stage):5969 Stored every timestep in a 2D array with5970 dimensions number_of_points X number_of_timesteps5971 5972 """5973 self.static_quantities = static_quantities5974 self.dynamic_quantities = dynamic_quantities5975 5976 5977 ##5978 # @brief Store a header in the SWW file.5979 # @param outfile Open handle to the file that will be written.5980 # @param times A list of time slices *or* a start time.5981 # @param number_of_volumes The number of triangles.5982 # @param number_of_points The number of points.5983 # @param description The internal file description string.5984 # @param smoothing True if smoothing is to be used.5985 # @param order5986 # @param sww_precision Data type of the quantity written (netcdf constant)5987 # @param verbose True if this function is to be verbose.5988 # @note If 'times' is a list, the info will be made relative.5989 def store_header(self,5990 outfile,5991 times,5992 number_of_volumes,5993 number_of_points,5994 description='Generated by ANUGA',5995 smoothing=True,5996 order=1,5997 sww_precision=netcdf_float32,5998 verbose=False):5999 """Write an SWW file header.6000 6001 outfile - the open file that will be written6002 times - A list of the time slice times OR a start time6003 Note, if a list is given the info will be made relative.6004 number_of_volumes - the number of triangles6005 """6006 6007 outfile.institution = 'Geoscience Australia'6008 outfile.description = description6009 6010 # For sww compatibility6011 if smoothing is True:6012 # Smoothing to be depreciated6013 outfile.smoothing = 'Yes'6014 outfile.vertices_are_stored_uniquely = 'False'6015 else:6016 # Smoothing to be depreciated6017 outfile.smoothing = 'No'6018 outfile.vertices_are_stored_uniquely = 'True'6019 outfile.order = order6020 6021 try:6022 revision_number = get_revision_number()6023 except:6024 revision_number = None6025 # Allow None to be stored as a string6026 outfile.revision_number = str(revision_number)6027 6028 # This is being used to seperate one number from a list.6029 # what it is actually doing is sorting lists from numeric arrays.6030 if isinstance(times, (list, num.ndarray)):6031 number_of_times = len(times)6032 times = ensure_numeric(times)6033 if number_of_times == 0:6034 starttime = 06035 else:6036 starttime = times[0]6037 times = times - starttime #Store relative times6038 else:6039 number_of_times = 06040 starttime = times6041 6042 6043 outfile.starttime = starttime6044 6045 # dimension definitions6046 outfile.createDimension('number_of_volumes', number_of_volumes)6047 outfile.createDimension('number_of_vertices', 3)6048 outfile.createDimension('numbers_in_range', 2)6049 6050 if smoothing is True:6051 outfile.createDimension('number_of_points', number_of_points)6052 # FIXME(Ole): This will cause sww files for parallel domains to6053 # have ghost nodes stored (but not used by triangles).6054 # To clean this up, we have to change get_vertex_values and6055 # friends in quantity.py (but I can't be bothered right now)6056 else:6057 outfile.createDimension('number_of_points', 3*number_of_volumes)6058 6059 outfile.createDimension('number_of_timesteps', number_of_times)6060 6061 # variable definitions6062 outfile.createVariable('x', sww_precision, ('number_of_points',))6063 outfile.createVariable('y', sww_precision, ('number_of_points',))6064 6065 outfile.createVariable('volumes', netcdf_int, ('number_of_volumes',6066 
6067
6068          # Doing sww_precision instead of Float gives cast errors.
6069          outfile.createVariable('time', netcdf_float,
6070                                 ('number_of_timesteps',))
6071
6072
6073          for q in self.static_quantities:
6074
6075              outfile.createVariable(q, sww_precision,
6076                                     ('number_of_points',))
6077
6078              outfile.createVariable(q + Write_sww.RANGE, sww_precision,
6079                                     ('numbers_in_range',))
6080
6081              # Initialise ranges with small and large sentinels.
6082              # If this was in pure Python we could have used None sensibly
6083              outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min
6084              outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max
6085
6086          #if 'elevation' in self.static_quantities:
6087          #    # FIXME: Backwards compat - get rid of z once old view has retired
6088          #    outfile.createVariable('z', sww_precision,
6089          #                           ('number_of_points',))
6090
6091          for q in self.dynamic_quantities:
6092              outfile.createVariable(q, sww_precision, ('number_of_timesteps',
6093                                                        'number_of_points'))
6094              outfile.createVariable(q + Write_sww.RANGE, sww_precision,
6095                                     ('numbers_in_range',))
6096
6097              # Initialise ranges with small and large sentinels.
6098              # If this was in pure Python we could have used None sensibly
6099              outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min
6100              outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max
6101
6102          if isinstance(times, (list, num.ndarray)):
6103              outfile.variables['time'][:] = times    # Store time relative
6104
6105          if verbose:
6106              log.critical('------------------------------------------------')
6107              log.critical('Statistics:')
6108              log.critical('    t in [%f, %f], len(t) == %d'
6109                           % (num.min(times), num.max(times), len(times.flat)))
6110
6111      ##
6112      # @brief Store triangulation data in the underlying file.
6113      # @param outfile Open handle to underlying file.
6114      # @param points_utm List or array of points in UTM.
6115      # @param volumes
6116      # @param zone
6117      # @param new_origin georeference that the points can be set to.
6118      # @param points_georeference The georeference of the points_utm.
6119      # @param verbose True if this function is to be verbose.
6120      def store_triangulation(self,
6121                              outfile,
6122                              points_utm,
6123                              volumes,
6124                              zone=None,
6125                              new_origin=None,
6126                              points_georeference=None,
6127                              verbose=False):
6128          """
6129          new_origin - qa georeference that the points can be set to. (Maybe
6130          do this before calling this function.)
6131
6132          points_utm - currently a list or array of the points in UTM.
6133          points_georeference - the georeference of the points_utm
6134
6135          How about passing new_origin and current_origin.
6136          If you get both, do a convertion from the old to the new.
6137
6138          If you only get new_origin, the points are absolute,
6139          convert to relative
6140
6141          if you only get the current_origin the points are relative, store
6142          as relative.
6143
6144          if you get no georefs create a new georef based on the minimums of
6145          points_utm. (Another option would be to default to absolute)
6146
6147          Yes, and this is done in another part of the code.
6148          Probably geospatial.
6149
6150          If you don't supply either geo_refs, then supply a zone. If not
          the default zone will be used.
If not6151 the default zone will be used.6152 6153 precon:6154 header has been called.6155 """6156 6157 number_of_points = len(points_utm)6158 volumes = num.array(volumes)6159 points_utm = num.array(points_utm)6160 6161 # Given the two geo_refs and the points, do the stuff6162 # described in the method header6163 # if this is needed else where, pull out as a function6164 points_georeference = ensure_geo_reference(points_georeference)6165 new_origin = ensure_geo_reference(new_origin)6166 if new_origin is None and points_georeference is not None:6167 points = points_utm6168 geo_ref = points_georeference6169 else:6170 if new_origin is None:6171 new_origin = Geo_reference(zone, min(points_utm[:,0]),6172 min(points_utm[:,1]))6173 points = new_origin.change_points_geo_ref(points_utm,6174 points_georeference)6175 geo_ref = new_origin6176 6177 # At this stage I need a georef and points6178 # the points are relative to the georef6179 geo_ref.write_NetCDF(outfile)6180 6181 # This will put the geo ref in the middle6182 #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)6183 6184 x = points[:,0]6185 y = points[:,1]6186 6187 if verbose:6188 log.critical('------------------------------------------------')6189 log.critical('More Statistics:')6190 log.critical(' Extent (/lon):')6191 log.critical(' x in [%f, %f], len(lat) == %d'6192 % (min(x), max(x), len(x)))6193 log.critical(' y in [%f, %f], len(lon) == %d'6194 % (min(y), max(y), len(y)))6195 #log.critical(' z in [%f, %f], len(z) == %d'6196 # % (min(elevation), max(elevation), len(elevation)))6197 log.critical('geo_ref: %s' % str(geo_ref))6198 log.critical('------------------------------------------------')6199 6200 outfile.variables['x'][:] = points[:,0] #- geo_ref.get_xllcorner()6201 outfile.variables['y'][:] = points[:,1] #- geo_ref.get_yllcorner()6202 outfile.variables['volumes'][:] = volumes.astype(num.int32) #On Opteron 646203 6204 6205 6206 # @brief Write the static quantity data to the underlying file.6207 # @param outfile Handle to open underlying file.6208 # @param sww_precision Format of quantity data to write (default Float32).6209 # @param verbose True if this function is to be verbose.6210 # @param **quant6211 def store_static_quantities(self,6212 outfile,6213 sww_precision=num.float32,6214 verbose=False,6215 **quant):6216 """6217 Write the static quantity info.6218 6219 **quant is extra keyword arguments passed in. These must be6220 the numpy arrays to be stored in the sww file at each timestep.6221 6222 The argument sww_precision allows for storing as either6223 * single precision (default): num.float326224 * double precision: num.float64 or num.float6225 6226 Precondition:6227 store_triangulation and6228 store_header have been called.6229 """6230 6231 # The dictionary quant must contain numpy arrays for each name.6232 # These will typically be quantities from Domain such as friction6233 #6234 # Arrays not listed in static_quantitiues will be ignored, silently.6235 #6236 # This method will also write the ranges for each quantity,6237 # e.g. 
6238          for q in self.static_quantities:
6239              if not quant.has_key(q):
6240                  msg = 'Values for quantity %s was not specified in ' % q
6241                  msg += 'store_quantities so they cannot be stored.'
6242                  raise NewQuantity, msg
6243              else:
6244                  q_values = ensure_numeric(quant[q])
6245
6246                  x = q_values.astype(sww_precision)
6247                  outfile.variables[q][:] = x
6248
6249                  # This populates the _range values
6250                  outfile.variables[q + Write_sww.RANGE][0] = num.min(x)
6251                  outfile.variables[q + Write_sww.RANGE][1] = num.max(x)
6252
6253          # FIXME: Hack for backwards compatibility with old viewer
6254          #if 'elevation' in self.static_quantities:
6255          #    outfile.variables['z'][:] = outfile.variables['elevation'][:]
6256
6257
6258
6259
6260
6261      ##
6262      # @brief Write the quantity data to the underlying file.
6263      # @param outfile Handle to open underlying file.
6264      # @param sww_precision Format of quantity data to write (default Float32).
6265      # @param slice_index
6266      # @param time
6267      # @param verbose True if this function is to be verbose.
6268      # @param **quant
6269      def store_quantities(self,
6270                           outfile,
6271                           sww_precision=num.float32,
6272                           slice_index=None,
6273                           time=None,
6274                           verbose=False,
6275                           **quant):
6276          """
6277          Write the quantity info at each timestep.
6278
6279          **quant is extra keyword arguments passed in. These must be
6280          the numpy arrays to be stored in the sww file at each timestep.
6281
6282          if the time array is already been built, use the slice_index
6283          to specify the index.
6284
6285          Otherwise, use time to increase the time dimension
6286
6287          Maybe make this general, but the viewer assumes these quantities,
6288          so maybe we don't want it general - unless the viewer is general
6289
6290          The argument sww_precision allows for storing as either
6291          * single precision (default): num.float32
6292          * double precision: num.float64 or num.float
6293
6294          Precondition:
6295              store_triangulation and
6296              store_header have been called.
6297          """
6298
6299          if time is not None:
6300              file_time = outfile.variables['time']
6301              slice_index = len(file_time)
6302              file_time[slice_index] = time
6303          else:
6304              slice_index = int(slice_index) # Has to be cast in case it was numpy.int
6305
6306          # Write the named dynamic quantities
6307          # The dictionary quant must contain numpy arrays for each name.
6308          # These will typically be the conserved quantities from Domain
6309          # (Typically stage, xmomentum, ymomentum).
6310          #
6311          # Arrays not listed in dynamic_quantitiues will be ignored, silently.
6312          #
6313          # This method will also write the ranges for each quantity,
6314          # e.g. stage_range, xmomentum_range and ymomentum_range
6315          for q in self.dynamic_quantities:
6316              if not quant.has_key(q):
6317                  msg = 'Values for quantity %s was not specified in ' % q
6318                  msg += 'store_quantities so they cannot be stored.'
6319                  raise NewQuantity, msg
6320              else:
6321                  q_values = ensure_numeric(quant[q])
6322
6323                  x = q_values.astype(sww_precision)
6324                  outfile.variables[q][slice_index] = x
6325
6326
6327                  # This updates the _range values
6328                  q_range = outfile.variables[q + Write_sww.RANGE][:]
6329                  q_values_min = num.min(q_values)
6330                  if q_values_min < q_range[0]:
6331                      outfile.variables[q + Write_sww.RANGE][0] = q_values_min
6332                  q_values_max = num.max(q_values)
6333                  if q_values_max > q_range[1]:
6334                      outfile.variables[q + Write_sww.RANGE][1] = q_values_max
6335
6336      ##
6337      # @brief Print the quantities in the underlying file.
6338      # @param outfile UNUSED.
6339      def verbose_quantities(self, outfile):
6340          log.critical('------------------------------------------------')
6341          log.critical('More Statistics:')
6342          for q in self.dynamic_quantities:
6343              log.critical('  %s in [%f, %f]'
6344                           % (q, outfile.variables[q+Write_sww.RANGE][0],
6345                              outfile.variables[q+Write_sww.RANGE][1]))
6346          log.critical('------------------------------------------------')
6347
6348
6349 4330  ##
6350 4331  # @brief Obsolete?
…
7652 5633
7653 5634  ##
7654  # @brief Find all SWW files in a directory with given stem name.
7655  # @param look_in_dir The directory to look in.
7656  # @param base_name The file stem name.
7657  # @param verbose True if this function is to be verbose.
7658  # @return A list of found filename strings.
7659  # @note Will accept 'base_name' with or without '.sww' extension.
7660  # @note If no files found, raises IOError exception.
7661  def get_all_swwfiles(look_in_dir='', base_name='', verbose=False):
7662      '''
7663      Finds all the sww files in a "look_in_dir" which contains a "base_name".
7664      will accept base_name with or without the extension ".sww"
7665
7666      Returns: a list of strings
7667
7668      Usage:     iterate_over = get_all_swwfiles(dir, name)
7669      then
7670                 for swwfile in iterate_over:
7671                     do stuff
7672
7673      Check "export_grids" and "get_maximum_inundation_data" for examples
7674      '''
7675
7676      # plus tests the extension
7677      name, extension = os.path.splitext(base_name)
7678
7679      if extension != '' and extension != '.sww':
7680          msg = 'file %s%s must be a NetCDF sww file!' % (base_name, extension)
7681          raise IOError, msg
7682
7683      if look_in_dir == "":
7684          look_in_dir = "."    # Unix compatibility
7685
7686      dir_ls = os.listdir(look_in_dir)
7687      iterate_over = [x[:-4] for x in dir_ls if name in x and x[-4:] == '.sww']
7688      if len(iterate_over) == 0:
7689          msg = 'No files of the base name %s' % name
7690          raise IOError, msg
7691
7692      if verbose: log.critical('iterate over %s' % iterate_over)
7693
7694      return iterate_over
7695
7696
7697  ##
7698  # @brief Find all files in a directory that contain a string and have extension.
7699  # @param look_in_dir Path to the directory to look in.
7700  # @param base_name Stem filename of the file(s) of interest.
7701  # @param extension Extension of the files to look for.
7702  # @param verbose True if this function is to be verbose.
7703  # @return A list of found filename strings.
7704  # @note If no files found, raises IOError exception.
7705  def get_all_files_with_extension(look_in_dir='',
7706                                   base_name='',
7707                                   extension='.sww',
7708                                   verbose=False):
7709      '''Find all files in a directory with given stem name.
7710      Finds all the sww files in a "look_in_dir" which contains a "base_name".
7711
7712      Returns: a list of strings
7713
7714      Usage:     iterate_over = get_all_swwfiles(dir, name)
7715      then
7716                 for swwfile in iterate_over:
7717                     do stuff
7718
7719      Check "export_grids" and "get_maximum_inundation_data" for examples
7720      '''
7721
7722      # plus tests the extension
7723      name, ext = os.path.splitext(base_name)
7724
7725      if ext != '' and ext != extension:
7726          msg = 'base_name %s must be a file with %s extension!' \
7727                % (base_name, extension)
7728          raise IOError, msg
7729
7730      if look_in_dir == "":
7731          look_in_dir = "."    # Unix compatibility
7732
7733      dir_ls = os.listdir(look_in_dir)
7734      iterate_over = [x[:-4] for x in dir_ls if name in x and x[-4:] == extension]
7735
7736      if len(iterate_over) == 0:
7737          msg = 'No files of the base name %s in %s' % (name, look_in_dir)
7738          raise IOError, msg
7739
7740      if verbose: log.critical('iterate over %s' % iterate_over)
7741
7742      return iterate_over
7743
7744
7745  ##
7746  # @brief Find all files in a directory that contain a given string.
7747  # @param look_in_dir Path to the directory to look in.
7748  # @param base_name String that files must contain.
7749  # @param verbose True if this function is to be verbose.
7750  def get_all_directories_with_name(look_in_dir='', base_name='', verbose=False):
7751      '''
7752      Finds all the directories in a "look_in_dir" which contains a "base_name".
7753
7754      Returns: a list of strings
7755
7756      Usage:     iterate_over = get_all_directories_with_name(dir, name)
7757      then:      for swwfile in iterate_over:
7758                     do stuff
7759
7760      Check "export_grids" and "get_maximum_inundation_data" for examples
7761      '''
7762
7763      if look_in_dir == "":
7764          look_in_dir = "."    # Unix compatibility
7765
7766      dir_ls = os.listdir(look_in_dir)
7767      iterate_over = [x for x in dir_ls if base_name in x]
7768
7769      if len(iterate_over) == 0:
7770          msg = 'No files of the base name %s' % base_name
7771          raise IOError, msg
7772
7773      if verbose: log.critical('iterate over %s' % iterate_over)
7774
7775      return iterate_over
7776
7777
7778  ##
7779 5635  # @brief Convert points to a polygon (?)
7780 5636  # @param points_file The points file.
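The Write_sww docstrings removed above spell out the intended calling order: store_header and store_triangulation first, then the static and dynamic quantity writers. A minimal, hypothetical driver for that interface is sketched below; the module path for the relocated class, the output file name, the UTM zone and all data values are invented for illustration, while the method signatures are taken verbatim from the removed code.

    # Sketch only -- not part of this changeset. Assumes Write_sww moved to
    # anuga.shallow_water.sww_file together with SWW_file.
    import numpy as num
    from Scientific.IO.NetCDF import NetCDFFile
    from anuga.config import netcdf_mode_w
    from anuga.shallow_water.sww_file import Write_sww   # assumed new home

    writer = Write_sww(static_quantities=['elevation'],
                       dynamic_quantities=['stage', 'xmomentum', 'ymomentum'])

    outfile = NetCDFFile('example.sww', netcdf_mode_w)   # invented file name

    # One triangle, three vertices, two timesteps.
    points = num.array([[0.0, 0.0], [1.0, 0.0], [0.0, 1.0]])
    volumes = num.array([[0, 1, 2]])
    times = [0.0, 1.0]

    writer.store_header(outfile, times, number_of_volumes=1,
                        number_of_points=3)
    writer.store_triangulation(outfile, points, volumes, zone=56)
    writer.store_static_quantities(outfile, elevation=num.zeros(3, num.float))

    # The time array was already built by store_header, so use slice_index
    # (per the store_quantities docstring) rather than time=.
    for i in range(len(times)):
        writer.store_quantities(outfile, slice_index=i,
                                stage=num.zeros(3, num.float),
                                xmomentum=num.zeros(3, num.float),
                                ymomentum=num.zeros(3, num.float))
    outfile.close()
-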
anuga_core/source/anuga/shallow_water/shallow_water_domain.py
r7733 r7735

664 664          """
665 665
666              from anuga.shallow_water.data_manager import SWW_file
666              from anuga.shallow_water.sww_file import SWW_file
667 667
668 668          # Initialise writer
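Only the import line changes here; the writer is used exactly as before. For scripts that must run on either side of this revision, a guarded import (illustrative, not part of the changeset) keeps both layouts working:

    # Compatibility shim -- prefer the post-r7735 module, fall back to the
    # pre-r7735 location inside data_manager.
    try:
        from anuga.shallow_water.sww_file import SWW_file
    except ImportError:
        from anuga.shallow_water.data_manager import SWW_file
-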
anuga_core/source/anuga/shallow_water/test_data_manager.py
r7618 r7735

18 18  from anuga.shallow_water import *
19 19  from anuga.shallow_water.data_manager import *
20     from anuga.shallow_water.sww_file import SWW_file
21     from anuga.shallow_water.file_conversion import tsh2sww, \
22                          asc_csiro2sww, pmesh_to_domain_instance, \
23                          dem2pts
20 24  from anuga.config import epsilon, g
21 25  from anuga.utilities.anuga_exceptions import ANUGAError
…
24 28  from anuga.abstract_2d_finite_volumes.util import file_function
25 29  from anuga.utilities.system_tools import get_pathname_from_package
30     from anuga.utilities.file_utils import del_dir, load_csv_as_dict
26 31  from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a
27 32  from anuga.config import netcdf_float
…
4444 4449
4445 4450      import time, os
4446           from Scientific.IO.NetCDF import NetCDFFile
4447 4451
4448 4452      # the memory optimised least squares
…
4469 4473
4470 4474      import time, os
4471           from Scientific.IO.NetCDF import NetCDFFile
4472 4475
4473 4476      from data_manager import _read_asc
…
7710 7713      x = fid.variables['x'][:]+fid.xllcorner   # x-coordinates of vertices
7711 7714      y = fid.variables['y'][:]+fid.yllcorner   # y-coordinates of vertices
7712 7715      elevation = fid.variables['elevation'][:]
7713 7716      time=fid.variables['time'][:]+fid.starttime
7714 7717
…
7736 7739          count = 0
7737 7740
7738 7741          # read in urs test data for stage, e and n velocity
7739 7742          urs_file_name_z = 'z_'+str(source_number)+'_'+str(j)+'.csv'
7740               dict = csv2array(os.path.join(testdir, urs_file_name_z))
7743               dict = load_csv_as_array(os.path.join(testdir, urs_file_name_z))
7741 7744          urs_stage = dict['urs_stage']
7742 7745          urs_file_name_e = 'e_'+str(source_number)+'_'+str(j)+'.csv'
7743               dict = csv2array(os.path.join(testdir, urs_file_name_e))
7746               dict = load_csv_as_array(os.path.join(testdir, urs_file_name_e))
7744 7747          urs_e = dict['urs_e']
7745 7748          urs_file_name_n = 'n_'+str(source_number)+'_'+str(j)+'.csv'
7746               dict = csv2array(os.path.join(testdir, urs_file_name_n))
7749               dict = load_csv_as_array(os.path.join(testdir, urs_file_name_n))
7747 7750          urs_n = dict['urs_n']
7748 7751
7749               # find start and end time for stage
7752               # find start and end time for stage
7750 7753          for i in range(len(urs_stage)):
7751 7754              if urs_stage[i] == 0.0:
…
7774 7777          assert num.allclose(index_start_urs_z,start_times_z[j]/delta_t), msg
7775 7778
7776 7779          msg = 'e velocity start time from urs file is not the same as the '
7777 7780          msg += 'header file for source %i and station %i' %(source_number,j)
7778
7779
7780
7781               assert num.allclose(index_start_urs_e,start_times_e[j]/delta_t), msg
7782
7783               msg = 'n velocity start time from urs file is not the same as the '
7781 7784          msg += 'header file for source %i and station %i' %(source_number,j)
7782
7783
7784
7785               assert num.allclose(index_start_urs_n,start_times_n[j]/delta_t), msg
7786
7787               # get index for start and end time for sts quantities
7785 7788          index_start_stage = 0
7786 7789          index_end_stage = 0
…
7810 7813                              sts_stage[index_start_stage:index_end_stage],
7811 7814                              rtol=1.0e-6, atol=1.0e-5 ), msg
7812 7815
7813 7816          # check that urs e velocity and sts xmomentum are the same
7814 7817          msg = 'urs e velocity is not equivalent to sts x momentum for for source %i and station %i' %(source_number,j)
7815 7818          assert num.allclose(urs_e[index_start_urs_e:index_end_urs_e]*(urs_stage[index_start_urs_e:index_end_urs_e]-elevation[j]),
7816 7819                              sts_xmom[index_start_x:index_end_x],
7817 7820                              rtol=1.0e-5, atol=1.0e-4 ), msg
7818 7821
7819 7822          # check that urs n velocity and sts ymomentum are the same
7820 7823          #print 'urs n velocity', urs_n[index_start_urs_n:index_end_urs_n]*(urs_stage[index_start_urs_n:index_end_urs_n]-elevation[j])
…
7864 7867      x = fid.variables['x'][:]+fid.xllcorner   # x-coordinates of vertices
7865 7868      y = fid.variables['y'][:]+fid.yllcorner   # y-coordinates of vertices
7866 7869      elevation = fid.variables['elevation'][:]
7867 7870      time=fid.variables['time'][:]+fid.starttime
7868 7871
…
7892 7895      assert num.allclose(sts_starttime, starttime), msg
7893 7896
7894 7897      #stations = [1,2,3]
7895 7898      #for j in stations:
7896 7899      for j in range(len(x)):
7897 7900          index_start_urs_z = 0
7898 7901          index_end_urs_z = 0
7899 7902          index_start_urs_e = 0
7900 7903          index_end_urs_e = 0
7901 7904          index_start_urs_n = 0
7902 7905          index_end_urs_n = 0
7903 7906          count = 0
7904 7907
7905 7908          # read in urs test data for stage, e and n velocity
7906 7909          urs_file_name_z = 'z_combined_'+str(j)+'.csv'
7907               dict = csv2array(os.path.join(testdir, urs_file_name_z))
7910               dict = load_csv_as_array(os.path.join(testdir, urs_file_name_z))
7908 7911          urs_stage = dict['urs_stage']
7909 7912          urs_file_name_e = 'e_combined_'+str(j)+'.csv'
7910               dict = csv2array(os.path.join(testdir, urs_file_name_e))
7913               dict = load_csv_as_array(os.path.join(testdir, urs_file_name_e))
7911 7914          urs_e = dict['urs_e']
7912 7915          urs_file_name_n = 'n_combined_'+str(j)+'.csv'
7913               dict = csv2array(os.path.join(testdir, urs_file_name_n))
7916               dict = load_csv_as_array(os.path.join(testdir, urs_file_name_n))
7914 7917          urs_n = dict['urs_n']
7915 7918
7916               # find start and end time for stage
7919               # find start and end time for stage
7917 7920          for i in range(len(urs_stage)):
7918 7921              if urs_stage[i] == 0.0:
…
7937 7940          assert num.allclose(index_start_urs_z,start_times_z/delta_t), msg
7938 7941
7939 7942          msg = 'e velocity start time from urs file is not the same as the '
7940 7943          msg += 'header file at station %i' %(j)
7941
7942
7943
7944               assert num.allclose(index_start_urs_e,start_times_e/delta_t), msg
7945
7946               msg = 'n velocity start time from urs file is not the same as the '
7944 7947          msg += 'header file at station %i' %(j)
7945
7946
7947
7948               assert num.allclose(index_start_urs_n,start_times_n/delta_t), msg
7949
7950               # get index for start and end time for sts quantities
7948 7951          index_start_stage = 0
7949 7952          index_end_stage = 0
…
7963 7966                  index_end_stage = i
7964 7967
7965
7966
7967
7968
7969
7970
7971
7972
7973
7968               index_end_stage = index_start_stage + len(urs_stage[index_start_urs_z:index_end_urs_z])
7969
7970               index_start_x = index_start_stage
7971               index_end_x = index_start_x + len(urs_stage[index_start_urs_e:index_end_urs_e])
7972               sts_xmom = quantities['ymomentum'][:,j]
7973
7974               index_start_y = index_start_stage
7975               index_end_y = index_start_y + len(urs_stage[index_start_urs_n:index_end_urs_n])
7976               sts_ymom = quantities['ymomentum'][:,j]
7974 7977
7975 7978          # check that urs stage and sts stage are the same
7976 7979          msg = 'urs stage is not equal to sts stage for station %i' %j
7977 7980          #print 'urs stage', urs_stage[index_start_urs_z:index_end_urs_z]
7978 7981          #print 'sts stage', sts_stage[index_start_stage:index_end_stage]
7979 7982          #print 'diff', max(urs_stage[index_start_urs_z:index_end_urs_z]-sts_stage[index_start_stage:index_end_stage])
…
7982 7985                              sts_stage[index_start_stage:index_end_stage],
7983 7986                              rtol=1.0e-5, atol=1.0e-4 ), msg
7984 7987
7985 7988          # check that urs e velocity and sts xmomentum are the same
7986 7989          msg = 'urs e velocity is not equivalent to sts xmomentum for station %i' %j
… …
7988 7991                              sts_xmom[index_start_x:index_end_x],
7989 7992                              rtol=1.0e-5, atol=1.0e-4 ), msg
7990 7993
7991 7994          # check that urs n velocity and sts ymomentum are the same
7992 7995          msg = 'urs n velocity is not equivalent to sts ymomentum for station %i' %j
…
7995 7998                              rtol=1.0e-5, atol=1.0e-4 ), msg
7996 7999
7997 8000      fid.close()
7998 8001
7999 8002      os.remove(sts_name_out+'.sts')
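The assertions above rely on the depth-integration identity used throughout these tests: an STS momentum series should equal the URS velocity series times the water depth, stage - elevation. Restated on its own (the function name is invented for illustration):

    import numpy as num

    def check_velocity_against_momentum(velocity, stage, elevation, momentum,
                                        rtol=1.0e-5, atol=1.0e-4):
        # Depth-integrate the velocity and compare it with the stored
        # momentum, using the same tolerances as the tests above.
        depth = stage - elevation
        assert num.allclose(velocity*depth, momentum, rtol=rtol, atol=atol)
-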
anuga_core/source/anuga/shallow_water/test_forcing.py
r7733 r7735

10 10  import numpy as num
11 11
12     ### Helper functions
13     ###
14 12
15 13  def scalar_func_list(t, x, y):
…
21 19      return [17.7]
22 20
23     # Variable windfield implemented using functions
21
24     def speed(t, x, y):
25         """Large speeds halfway between center and edges
23         """
24         Variable windfield implemented using functions
25         Large speeds halfway between center and edges
26 26
27 27      Low speeds at center and edges
…
248 248
249 249      # Convert ASCII file to NetCDF (Which is what we really like!)
250          from data_manager import timefile2netcdf
250          from file_conversion import timefile2netcdf
251 251
252 252
…
340 340
341 341      # Convert ASCII file to NetCDF (Which is what we really like!)
342          from data_manager import timefile2netcdf
342          from file_conversion import timefile2netcdf
343 343
344 344      timefile2netcdf(filename, time_as_seconds=True)
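Both hunks swap only the module that provides timefile2netcdf; the call form is unchanged. Exercised directly against its new absolute path (the file name below is invented for illustration):

    # Sketch only: the relocated converter, called as in the test above.
    from anuga.shallow_water.file_conversion import timefile2netcdf

    timefile2netcdf('gauge_data.txt', time_as_seconds=True)
-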
anuga_core/source/anuga/shallow_water/test_shallow_water_domain.py
r7733 r7735

6070 6070      from anuga.abstract_2d_finite_volumes.mesh_factory import rectangular_cross
6071 6071      from anuga.shallow_water import Domain
6072           from anuga.shallow_water.shallow_water_domain import Reflective_boundary
6072           from anuga.shallow_water.boundaries import Reflective_boundary
6073 6073      from anuga.shallow_water.shallow_water_domain import Dirichlet_boundary
6074 6074      from anuga.shallow_water.shallow_water_domain import Rainfall
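These imports are what the surrounding test exercises. A minimal domain wiring the relocated Reflective_boundary might look like this (sketch only; the mesh dimensions and boundary tag names are assumptions, not taken from this diff):

    from anuga.abstract_2d_finite_volumes.mesh_factory import rectangular_cross
    from anuga.shallow_water import Domain
    from anuga.shallow_water.boundaries import Reflective_boundary

    # 10x10 rectangular cross mesh; tags assumed to follow the factory's
    # defaults ('left', 'right', 'top', 'bottom').
    points, vertices, boundary = rectangular_cross(10, 10)
    domain = Domain(points, vertices, boundary)
    Br = Reflective_boundary(domain)
    domain.set_boundary({'left': Br, 'right': Br, 'top': Br, 'bottom': Br})
-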
anuga_core/source/anuga/shallow_water/tsh2sww.py
r7340 r7735

13 13  from anuga.pyvolution.pmesh2domain import pmesh_to_domain_instance
14 14  import time, os
15     from anuga.pyvolution.data_manager import SWW_file
15     from anuga.pyvolution.sww_file import SWW_file
16 16  from anuga.utilities.numerical_tools import mean
17 17  import anuga.utilities.log as log