Changeset 6080
- Timestamp: Dec 16, 2008, 10:15:44 AM
- Files: 1 edited
Legend:
- Unmodified (no prefix)
- Added (lines prefixed with +)
- Removed (lines prefixed with -)
anuga_core/source/anuga/shallow_water/data_manager.py (r5982 → r6080)

…
      """

  # This file was reverted from changeset:5484 to changeset:5470 on 10th July
  # by Ole.

+ import os, sys
+ import csv
  import exceptions
- class TitleValueError(exceptions.Exception): pass
- class DataMissingValuesError(exceptions.Exception): pass
- class DataFileNotOpenError(exceptions.Exception): pass
- class DataTimeError(exceptions.Exception): pass
- class DataDomainError(exceptions.Exception): pass
- class NewQuantity(exceptions.Exception): pass
-
- import csv
- import os, sys
+ import string
  import shutil
  from struct import unpack
  import array as p_array
- #import time, os
  from os import sep, path, remove, mkdir, access, F_OK, W_OK, getcwd

  from Numeric import concatenate, array, Float, Int, Int32, resize, \
…
       argmax, alltrue, shape, Float32, size

- import string
  from Scientific.IO.NetCDF import NetCDFFile
- #from shutil import copy
  from os.path import exists, basename, join
  from os import getcwd

  from anuga.coordinate_transforms.redfearn import redfearn, \
…
  from anuga.geospatial_data.geospatial_data import Geospatial_data,\
       ensure_absolute
- from anuga.config import minimum_storable_height as default_minimum_storable_height
+ from anuga.config import minimum_storable_height as \
+      default_minimum_storable_height
  from anuga.config import max_float
  from anuga.utilities.numerical_tools import ensure_numeric, mean
…
  from anuga.abstract_2d_finite_volumes.util import get_revision_number, \
       remove_lone_verts, sww2timeseries, get_centroid_values

  from anuga.abstract_2d_finite_volumes.neighbour_mesh import segment_midpoints
  from anuga.load_mesh.loadASCII import export_mesh_file
…

+ ######
+ # Exception classes
+ ######
+
+ class TitleValueError(exceptions.Exception): pass
+ class DataMissingValuesError(exceptions.Exception): pass
+ class DataFileNotOpenError(exceptions.Exception): pass
+ class DataTimeError(exceptions.Exception): pass
+ class DataDomainError(exceptions.Exception): pass
+ class NewQuantity(exceptions.Exception): pass
+
+
+ ######
  # formula mappings
+ ######

  quantity_formula = {'momentum':'(xmomentum**2 + ymomentum**2)**0.5',
…

+ ##
+ # @brief Convert a possible filename into a standard form.
+ # @param s Filename to process.
+ # @return The new filename string.
  def make_filename(s):
      """Transform argument string into a suitable filename
…

+ ##
+ # @brief Check that a specified filesystem directory path exists.
+ # @param path The directory path to check.
+ # @param verbose True if this function is to be verbose.
+ # @note If directory path doesn't exist, it will be created.
  def check_dir(path, verbose=None):
      """Check that specified path exists.
…
      RETURN VALUE:
          Verified path including trailing separator
      """

…
      unix = 1

      # add terminal separator, if it's not already there
      if path[-1] != os.sep:
-         path = path + os.sep # Add separator for directories
-
-     path = os.path.expanduser(path) # Expand ~ or ~user in pathname
-     if not (os.access(path,os.R_OK and os.W_OK) or path == ''):
+         path = path + os.sep
+
+     # expand ~ or ~username in path
+     path = os.path.expanduser(path)
+
+     # create directory if required
+     if not (os.access(path, os.R_OK and os.W_OK) or path == ''):
          try:
-             exitcode =os.mkdir(path)
+             exitcode = os.mkdir(path)

              # Change access rights if possible
-             #
              if unix:
-                 exitcode =os.system('chmod 775 '+path)
+                 exitcode = os.system('chmod 775 ' + path)
              else:
-                 pass # FIXME: What about acces rights under Windows?
+                 pass # FIXME: What about access rights under Windows?

              if verbose: print 'MESSAGE: Directory', path, 'created.'
          except:
              print 'WARNING: Directory', path, 'could not be created.'
…
              path = '/tmp/'
          else:
-             path = 'C:'
-
-         print 'Using directory %s instead' %path
-
-     return(path)
+             path = 'C:' + os.sep
+
+         print "Using directory '%s' instead" % path
+
+     return path


+ ##
+ # @brief Delete directory and all sub-directories.
+ # @param path Path to the directory to delete.
  def del_dir(path):
      """Recursively delete directory path and all its contents
      """
-
-     import os

      if os.path.isdir(path):
          for file in os.listdir(path):
              X = os.path.join(path, file)

              if os.path.isdir(X) and not os.path.islink(X):
…
                  os.remove(X)
              except:
-                 print "Could not remove file %s" %X
+                 print "Could not remove file %s" % X

          os.rmdir(path)


- # ANOTHER OPTION, IF NEED IN THE FUTURE, Nick B 7/2007
- def rmgeneric(path, __func__,verbose=False):
+ ##
+ # @brief ??
+ # @param path
+ # @param __func__
+ # @param verbose True if this function is to be verbose.
+ # @note ANOTHER OPTION, IF NEED IN THE FUTURE, Nick B 7/2007
+ def rmgeneric(path, func, verbose=False):
      ERROR_STR= """Error removing %(path)s, %(error)s """

      try:
-         __func__(path)
+         func(path)
          if verbose: print 'Removed ', path
      except OSError, (errno, strerror):
          print ERROR_STR % {'path' : path, 'error': strerror }


- def removeall(path,verbose=False):
+ ##
+ # @brief Remove directory and all sub-directories.
+ # @param path Filesystem path to directory to remove.
+ # @param verbose True if this function is to be verbose.
+ def removeall(path, verbose=False):
      if not os.path.isdir(path):
          return

-     files=os.listdir(path)
-
-     for x in files:
-         fullpath=os.path.join(path, x)
+     for x in os.listdir(path):
+         fullpath = os.path.join(path, x)
          if os.path.isfile(fullpath):
-             f=os.remove
+             f = os.remove
              rmgeneric(fullpath, f)
          elif os.path.isdir(fullpath):
              removeall(fullpath)
-             f=os.rmdir
-             rmgeneric(fullpath, f,verbose)
+             f = os.rmdir
+             rmgeneric(fullpath, f, verbose)


+ ##
+ # @brief Create a standard filename.
+ # @param datadir Directory where file is to be created.
+ # @param filename Filename 'stem'.
+ # @param format Format of the file, becomes filename extension.
+ # @param size Size of file, becomes part of filename.
+ # @param time Time (float), becomes part of filename.
+ # @return The complete filename path, including directory.
+ # @note The containing directory is created, if necessary.
  def create_filename(datadir, filename, format, size=None, time=None):
-
-     import os
-     #from anuga.config import data_dir
-
      FN = check_dir(datadir) + filename

      if size is not None:
-         FN += '_size%d' %size
+         FN += '_size%d' % size

      if time is not None:
-         FN += '_time%.2f' %time
+         FN += '_time%.2f' % time

      FN += '.' + format
+
      return FN


+ ##
+ # @brief Get all files with a standard name and a given set of attributes.
+ # @param datadir Directory files must be in.
+ # @param filename Filename stem.
+ # @param format Filename extension.
+ # @param size Filename size.
+ # @return A list of filenames (including directory) that match the attributes.
  def get_files(datadir, filename, format, size):
      """Get all file (names) with given name, size and format
…
      import glob

-     import os
-     #from anuga.config import data_dir
-
      dir = check_dir(datadir)
-
-     pattern = dir + os.sep + filename + '_size=%d*.%s' %(size, format)
+     pattern = dir + os.sep + filename + '_size=%d*.%s' % (size, format)
+
      return glob.glob(pattern)
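[Editor's note] A minimal usage sketch for the two helpers above (not part of the changeset; directory and stem names are hypothetical). Note that create_filename() writes '_size%d' while get_files() globs for '_size=%d', so the two patterns do not match each other:

    from anuga.shallow_water.data_manager import create_filename, get_files

    # Creates 'output/' if needed and returns the assembled path:
    fn = create_filename('output', 'runup', 'sww', size=4, time=1.25)
    print fn                                # output/runup_size4_time1.25.sww

    # Globs for 'runup_size=4*.sww' under output/:
    print get_files('output', 'runup', 'sww', 4)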


- # Generic class for storing output to e.g. visualisation or checkpointing
+ ##
+ # @brief Generic class for storing output to e.g. visualisation or checkpointing
  class Data_format:
      """Generic interface to data formats
      """

-     def __init__(self, domain, extension, mode = 'w'):
-         assert mode in ['r', 'w', 'a'], '''Mode %s must be either:''' %mode +\
-                                         ''' 'w' (write)'''+\
-                                         ''' 'r' (read)''' +\
-                                         ''' 'a' (append)'''
+     ##
+     # @brief Instantiate this instance.
+     # @param domain
+     # @param extension
+     # @param mode The mode of the underlying file.
+     def __init__(self, domain, extension, mode='w'):
+         assert mode in ['r', 'w', 'a'], \
+                "Mode %s must be either:\n" % mode + \
+                "   'w' (write)\n" + \
+                "   'r' (read)\n" + \
+                "   'a' (append)"

          #Create filename
…
                                     domain.get_name(), extension)

-         #print 'F', self.filename
          self.timestep = 0
          self.domain = domain

          # Exclude ghosts in case this is a parallel domain
          self.number_of_nodes = domain.number_of_full_nodes
          self.number_of_volumes = domain.number_of_full_triangles
          #self.number_of_volumes = len(domain)

          #FIXME: Should we have a general set_precision function?


- # Class for storing output to e.g. visualisation
+ ##
+ # @brief Class for storing output to e.g. visualisation
  class Data_format_sww(Data_format):
      """Interface to native NetCDF format (.sww) for storing model output
…
      """

-     def __init__(self, domain, mode = 'w',\
-                  max_size = 2000000000,
-                  recursion = False):
+     ##
+     # @brief Instantiate this instance.
+     # @param domain ??
+     # @param mode Mode of the underlying data file.
+     # @param max_size ??
+     # @param recursion ??
+     # @note Prepare the underlying data file if mode is 'w'.
+     def __init__(self, domain, mode='w', max_size=2000000000, recursion=False):
          from Scientific.IO.NetCDF import NetCDFFile
          from Numeric import Int, Float, Float32

          self.precision = Float32 #Use single precision for quantities
+         self.recursion = recursion
+         self.mode = mode
          if hasattr(domain, 'max_size'):
              self.max_size = domain.max_size #file size max is 2Gig
          else:
              self.max_size = max_size
-         self.recursion = recursion
-         self.mode = mode
-
-         Data_format.__init__(self, domain, 'sww', mode)
-
          if hasattr(domain, 'minimum_storable_height'):
              self.minimum_storable_height = domain.minimum_storable_height
…
              self.minimum_storable_height = default_minimum_storable_height

+         # call owning constructor
+         Data_format.__init__(self, domain, 'sww', mode)
+
          # NetCDF file definition
          fid = NetCDFFile(self.filename, mode)
          if mode == 'w':
-             description = 'Output from anuga.abstract_2d_finite_volumes suitable for plotting'
+             description = 'Output from anuga.abstract_2d_finite_volumes ' \
+                           'suitable for plotting'
              self.writer = Write_sww()
              self.writer.store_header(fid,
…
              if domain.quantities_to_be_monitored is not None:
                  fid.createDimension('singleton', 1)
                  fid.createDimension('two', 2)

                  poly = domain.monitor_polygon
…
                      fid.createVariable('extrema.polygon',
                                         self.precision,
-                                        ('polygon_length',
-                                         'two'))
-                     fid.variables['extrema.polygon'][:] = poly
+                                        ('polygon_length', 'two'))
+                     fid.variables['extrema.polygon'][:] = poly

                  interval = domain.monitor_time_interval
                  if interval is not None:
…
                      fid.variables['extrema.time_interval'][:] = interval

                  for q in domain.quantities_to_be_monitored:
-                     #print 'doing', q
-                     fid.createVariable(q+'.extrema', self.precision,
+                     fid.createVariable(q + '.extrema', self.precision,
                                         ('numbers_in_range',))
-                     fid.createVariable(q+'.min_location', self.precision,
+                     fid.createVariable(q + '.min_location', self.precision,
                                         ('numbers_in_range',))
-                     fid.createVariable(q+'.max_location', self.precision,
+                     fid.createVariable(q + '.max_location', self.precision,
                                         ('numbers_in_range',))
-                     fid.createVariable(q+'.min_time', self.precision,
+                     fid.createVariable(q + '.min_time', self.precision,
                                         ('singleton',))
-                     fid.createVariable(q+'.max_time', self.precision,
+                     fid.createVariable(q + '.max_time', self.precision,
                                         ('singleton',))

              fid.close()

+     ##
+     # @brief Store connectivity data into the underlying data file.
      def store_connectivity(self):
          """Specialisation of store_connectivity for net CDF format
…

          from Scientific.IO.NetCDF import NetCDFFile
          from Numeric import concatenate, Int

          domain = self.domain

-         # Get NetCDF
-         fid = NetCDFFile(self.filename, 'a') #Open existing file for append
+         # append to the NetCDF file
+         fid = NetCDFFile(self.filename, 'a')

          # Get the variables
…
          y = fid.variables['y']
          z = fid.variables['elevation']
          volumes = fid.variables['volumes']

          # Get X, Y and bed elevation Z
          Q = domain.quantities['elevation']
-         X,Y,Z,V = Q.get_vertex_values(xy=True,
-                                       precision=self.precision)
-
-         #
+         X,Y,Z,V = Q.get_vertex_values(xy=True, precision=self.precision)
+
+         # store the connectivity data
          points = concatenate( (X[:,NewAxis],Y[:,NewAxis]), axis=1 )
          self.writer.store_triangulation(fid,
…
                                          V.astype(volumes.typecode()),
                                          Z,
                                          points_georeference=\
                                              domain.geo_reference)

-         # Close
          fid.close()

+     ##
+     # @brief Store time and named quantities to the underlying data file.
+     # @param names The names of the quantities to store.
+     # @note If 'names' not supplied, store a standard set of quantities.
      def store_timestep(self, names=None):
          """Store time and named quantities to file
          """

          from Scientific.IO.NetCDF import NetCDFFile
          import types
          from time import sleep
          from os import stat
          from Numeric import choose

          if names is None:
              # Standard shallow water wave equation quantitites in ANUGA
              names = ['stage', 'xmomentum', 'ymomentum']

          # Get NetCDF
          retries = 0
          file_open = False
…
              # This could happen if someone was reading the file.
              # In that case, wait a while and try again
-             msg = 'Warning (store_timestep): File %s could not be opened' \
-                   %self.filename
-             msg += ' - trying step %s again' %self.domain.time
+             msg = 'Warning (store_timestep): File %s could not be opened' \
+                   % self.filename
+             msg += ' - trying step %s again' % self.domain.time
              print msg
              retries += 1
…

          if not file_open:
-             msg = 'File %s could not be opened for append' %self.filename
+             msg = 'File %s could not be opened for append' % self.filename
              raise DataFileNotOpenError, msg

          # Check to see if the file is already too big:
          time = fid.variables['time']
-         i = len(time)+1
+         i = len(time) + 1
          file_size = stat(self.filename)[6]
-         file_size_increase = file_size/i
-         if file_size + file_size_increase > self.max_size*(2**self.recursion):
+         file_size_increase = file_size / i
+         if file_size + file_size_increase > self.max_size * 2**self.recursion:
              # In order to get the file name and start time correct,
              # I change the domain.filename and domain.starttime.
…
              # Write a filename addon that won't break swollen's reader
              # (10.sww is bad)
-             filename_ext = '_time_%s' %self.domain.time
+             filename_ext = '_time_%s' % self.domain.time
              filename_ext = filename_ext.replace('.', '_')

              # Remember the old filename, then give domain a
              # name with the extension
              old_domain_filename = self.domain.get_name()
              if not self.recursion:
-                 self.domain.set_name(old_domain_filename+filename_ext)
+                 self.domain.set_name(old_domain_filename + filename_ext)

              # Change the domain starttime to the current time
…

              # Build a new data_structure.
-             next_data_structure=\
-                 Data_format_sww(self.domain, mode=self.mode,\
-                                 max_size = self.max_size,\
-                                 recursion = self.recursion+1)
+             next_data_structure = Data_format_sww(self.domain, mode=self.mode,
+                                                   max_size=self.max_size,
+                                                   recursion=self.recursion+1)
              if not self.recursion:
-                 print '    file_size = %s'%file_size
-                 print '    saving file to %s'%next_data_structure.filename
+                 print '    file_size = %s' % file_size
+                 print '    saving file to %s' % next_data_structure.filename
+
              #set up the new data_structure
              self.domain.writer = next_data_structure
…
              #restore the old starttime and filename
              self.domain.starttime = old_domain_starttime
              self.domain.set_name(old_domain_filename)
          else:
              self.recursion = False
…
              names = [names]

-         if 'stage' in names and 'xmomentum' in names and\
-            'ymomentum' in names:
+         if 'stage' in names \
+            and 'xmomentum' in names \
+            and 'ymomentum' in names:
              # Get stage, elevation, depth and select only those
              # values where minimum_storable_height is exceeded
              Q = domain.quantities['stage']
-             A, _ = Q.get_vertex_values(xy = False,
-                                        precision = self.precision)
+             A, _ = Q.get_vertex_values(xy=False, precision=self.precision)
              z = fid.variables['elevation']

-             storable_indices = A-z[:] >= self.minimum_storable_height
+             storable_indices = (A-z[:] >= self.minimum_storable_height)
              stage = choose(storable_indices, (z[:], A))

              # Define a zero vector of same size and type as A
              # for use with momenta
              null = zeros(size(A), A.typecode())

              # Get xmomentum where depth exceeds minimum_storable_height
              Q = domain.quantities['xmomentum']
-             xmom, _ = Q.get_vertex_values(xy =False,
-                                           precision =self.precision)
+             xmom, _ = Q.get_vertex_values(xy=False,
+                                           precision=self.precision)
              xmomentum = choose(storable_indices, (null, xmom))

              # Get ymomentum where depth exceeds minimum_storable_height
              Q = domain.quantities['ymomentum']
-             ymom, _ = Q.get_vertex_values(xy =False,
-                                           precision =self.precision)
-             ymomentum = choose(storable_indices, (null, ymom))
-
-             # Write quantities to NetCDF
-             self.writer.store_quantities(fid,
+             ymom, _ = Q.get_vertex_values(xy=False,
+                                           precision=self.precision)
+             ymomentum = choose(storable_indices, (null, ymom))
+
+             # Write quantities to underlying data file
+             self.writer.store_quantities(fid,
                                           time=self.domain.time,
                                           sww_precision=self.precision,
…
                                           ymomentum=ymomentum)
          else:
-             msg = 'Quantities stored must be: stage, xmomentum, ymomentum.'
-             msg += ' Instead I got: ' + str(names)
+             msg = 'Quantities stored must be: stage, xmomentum, ymomentum, '
+             msg += 'but I got: ' + str(names)
              raise Exception, msg

          # Update extrema if requested
…
          if domain.quantities_to_be_monitored is not None:
              for q, info in domain.quantities_to_be_monitored.items():
                  if info['min'] is not None:
                      fid.variables[q + '.extrema'][0] = info['min']
                      fid.variables[q + '.min_location'][:] = \
                          info['min_location']
                      fid.variables[q + '.min_time'][0] = info['min_time']

                  if info['max'] is not None:
                      fid.variables[q + '.extrema'][1] = info['max']
                      fid.variables[q + '.max_location'][:] = \
                          info['max_location']
                      fid.variables[q + '.max_time'][0] = info['max_time']

          # Flush and close
…


- # Class for handling checkpoints data
+ ##
+ # @brief Class for handling checkpoints data
  class Data_format_cpt(Data_format):
      """Interface to native NetCDF format (.cpt)
      """

-     def __init__(self, domain, mode = 'w'):
+     ##
+     # @brief Initialize this instantiation.
+     # @param domain ??
+     # @param mode Mode of underlying data file (default WRITE).
+     def __init__(self, domain, mode='w'):
          from Scientific.IO.NetCDF import NetCDFFile
          from Numeric import Int, Float, Float
…
          Data_format.__init__(self, domain, 'sww', mode)

          # NetCDF file definition
          fid = NetCDFFile(self.filename, mode)
          if mode == 'w':
              #Create new file
…
                                             'number_of_vertices'))

-         fid.createVariable('time', self.precision,
-                            ('number_of_timesteps',))
+         fid.createVariable('time', self.precision, ('number_of_timesteps',))

          #Allocate space for all quantities
…
                                 'number_of_points'))

-         #Close
          fid.close()

+     ##
+     # @brief Store connectivity data to underlying data file.
      def store_checkpoint(self):
-         """
-         Write x,y coordinates of triangles.
+         """Write x,y coordinates of triangles.
          Write connectivity (
          constituting
…

          from Scientific.IO.NetCDF import NetCDFFile
          from Numeric import concatenate
…

          #Get NetCDF
-         fid = NetCDFFile(self.filename, 'a') #Open existing file for append
+         fid = NetCDFFile(self.filename, 'a')

          # Get the variables
…
          # Get X, Y and bed elevation Z
          Q = domain.quantities['elevation']
-         X,Y,Z,V = Q.get_vertex_values(xy=True,
-                                       precision = self.precision)
+         X,Y,Z,V = Q.get_vertex_values(xy=True, precision=self.precision)

          x[:] = X.astype(self.precision)
…
          volumes[:] = V

-         #Close
          fid.close()

+     ##
+     # @brief Store time and named quantities to underlying data file.
+     # @param name
      def store_timestep(self, name):
          """Store time and named quantity to file
          """
+
          from Scientific.IO.NetCDF import NetCDFFile
          from time import sleep
…
          while not file_open and retries < 10:
              try:
-                 fid = NetCDFFile(self.filename, 'a') #Open existing file
+                 fid = NetCDFFile(self.filename, 'a')
              except IOError:
                  #This could happen if someone was reading the file.
                  #In that case, wait a while and try again
-                 msg = 'Warning (store_timestep): File %s could not be opened'\
-                       %self.filename
-                 msg += ' - trying again'
+                 msg = 'Warning (store_timestep): File %s could not be opened' \
+                       ' - trying again' % self.filename
                  print msg
                  retries += 1
…

          if not file_open:
-             msg = 'File %s could not be opened for append' %self.filename
-             raise DataFileNotOPenError, msg
+             msg = 'File %s could not be opened for append' % self.filename
+             raise DataFileNotOpenError, msg

          domain = self.domain
…
          # Get quantity
          Q = domain.quantities[name]
-         A,V = Q.get_vertex_values(xy=False,
-                                   precision = self.precision)
+         A,V = Q.get_vertex_values(xy=False, precision=self.precision)

          stage[i,:] = A.astype(self.precision)
…


- #### NED is national exposure database (name changed to NEXIS)
+ ##
+ # @brief Class for National Exposure Database storage (NEXIS).

  LAT_TITLE = 'LATITUDE'
  LONG_TITLE = 'LONGITUDE'
  X_TITLE = 'x'
  Y_TITLE = 'y'
+
  class Exposure_csv:
+
+     ##
+     # @brief Instantiate this instance.
+     # @param file_name Name of underlying data file.
+     # @param latitude_title ??
+     # @param longitude_title ??
+     # @param is_x_y_locations ??
+     # @param x_title ??
+     # @param y_title ??
+     # @param refine_polygon ??
+     # @param title_check_list ??
      def __init__(self,file_name, latitude_title=LAT_TITLE,
                   longitude_title=LONG_TITLE, is_x_y_locations=None,
…
          each column is a 'set' of data

          Feel free to use/expand it to read other csv files.

          It is not for adding and deleting rows

          Can geospatial handle string attributes? It's not made for them.
          Currently it can't load and save string att's.
…

          The location info is in the geospatial attribute.
          """
+
          self._file_name = file_name
          self._geospatial = None #
…
          #The keys are the column titles.
          #The values are lists of column data

          # self._title_index_dic is a dictionary.
          #The keys are the column titles.
…
              csv2dict(self._file_name, title_check_list=title_check_list)
          try:
              #Have code here that handles caps or lower
              lats = self._attribute_dic[latitude_title]
              longs = self._attribute_dic[longitude_title]
          except KeyError:
              # maybe a warning..
…
              pass
          else:
-             self._geospatial = Geospatial_data(latitudes =lats,
-                                                longitudes =longs)
+             self._geospatial = Geospatial_data(latitudes=lats,
+                                                longitudes=longs)

          if is_x_y_locations is True:
…
          else:
              self._geospatial = Geospatial_data(data_points=points)

          # create a list of points that are in the refining_polygon
          # described by a list of indexes representing the points

+     ##
+     # @brief Create a comparison method.
+     # @param self This object.
+     # @param other The other object.
+     # @return True if objects are 'same'.
      def __cmp__(self, other):
-         #print "self._attribute_dic",self._attribute_dic
-         #print "other._attribute_dic",other._attribute_dic
-         #print "self._title_index_dic", self._title_index_dic
-         #print "other._title_index_dic", other._title_index_dic
-
-         #check that a is an instance of this class
+         #check that 'other' is an instance of this class
          if isinstance(self, type(other)):
              result = cmp(self._attribute_dic, other._attribute_dic)
-             if result <>0:
+             if result <> 0:
                  return result
-             # The order of the columns is important. Therefore..
+
+             # The order of the columns is important. Therefore..
              result = cmp(self._title_index_dic, other._title_index_dic)
-             if result <>0:
+             if result <> 0:
                  return result
-             for self_ls, other_ls in map(None, self._attribute_dic, \
-                                          other._attribute_dic):
+             for self_ls, other_ls in map(None, self._attribute_dic,
+                                          other._attribute_dic):
                  result = cmp(self._attribute_dic[self_ls],
                               other._attribute_dic[other_ls])
-                 if result <>0:
+                 if result <> 0:
                      return result
              return 0
          else:
              return 1

+     ##
+     # @brief Get a list of column values given a column name.
+     # @param column_name The name of the column to get values from.
+     # @param use_refind_polygon Unused??
      def get_column(self, column_name, use_refind_polygon=False):
          """
…
          do this to change a list of strings to a list of floats
          time = [float(x) for x in time]

          Not implemented:
          if use_refind_polygon is True, only return values in the
          refined polygon
          """
+
          if not self._attribute_dic.has_key(column_name):
-             msg = 'There is no column called %s!' %column_name
+             msg = 'There is no column called %s!' % column_name
              raise TitleValueError, msg
+
          return self._attribute_dic[column_name]

-     def get_value(self, value_column_name,
-                   known_column_name,
-                   known_values,
-                   use_refind_polygon=False):
+     ##
+     # @brief ??
+     # @param value_column_name ??
+     # @param known_column_name ??
+     # @param known_values ??
+     # @param use_refind_polygon ??
+     def get_value(self, value_column_name, known_column_name,
+                   known_values, use_refind_polygon=False):
          """
          Do linear interpolation on the known_colum, using the known_value,
          to return a value of the column_value_name.
          """
+
          pass

+     ##
+     # @brief Get a geospatial object that describes the locations.
+     # @param use_refind_polygon Unused??
      def get_location(self, use_refind_polygon=False):
          """
…

          Note, if there is not location info, this returns None.

          Not implemented:
          if use_refind_polygon is True, only return values in the
          refined polygon
          """
+
          return self._geospatial
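[Editor's note] A hedged sketch of reading with Exposure_csv (editor's illustration only; 'exposure.csv' and the 'MAX DEPTH' column are hypothetical, but LATITUDE/LONGITUDE are the default title constants defined above):

    from anuga.shallow_water.data_manager import Exposure_csv

    exposure = Exposure_csv('exposure.csv')
    depths = exposure.get_column('MAX DEPTH')   # values arrive as strings
    depths = [float(x) for x in depths]         # idiom from the docstring
    print exposure.get_location()               # Geospatial_data, or None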

+     ##
+     # @brief Add column to 'end' of CSV data.
+     # @param column_name The new column name.
+     # @param column_values The new column values.
+     # @param overwrite If True, overwrites last column, doesn't add at end.
      def set_column(self, column_name, column_values, overwrite=False):
          """
…

          Set overwrite to True if you want to overwrite a column.

          Note, in column_name white space is removed and case is not checked.
          Precondition
          The column_name and column_values cannot have comma's in it.
          """

+         # sanity checks
          value_row_count = \
-             len(self._attribute_dic[self._title_index_dic.keys()[0]])
-         if len(column_values) <> value_row_count:
+                 len(self._attribute_dic[self._title_index_dic.keys()[0]])
+         if len(column_values) <> value_row_count:
              msg = 'The number of column values must equal the number of rows.'
              raise DataMissingValuesError, msg

+         # check new column name isn't already used, and we aren't overwriting
          if self._attribute_dic.has_key(column_name):
              if not overwrite:
-                 msg = 'Column name %s already in use!' %column_name
+                 msg = 'Column name %s already in use!' % column_name
                  raise TitleValueError, msg
          else:
              # New title.  Add it to the title index.
              self._title_index_dic[column_name] = len(self._title_index_dic)
+
          self._attribute_dic[column_name] = column_values
-         #print "self._title_index_dic[column_name]",self._title_index_dic

+     ##
+     # @brief Save the exposure CSV file.
+     # @param file_name If supplied, use this filename, not original.
      def save(self, file_name=None):
          """
          Save the exposure csv file
          """
+
          if file_name is None:
              file_name = self._file_name

-         fd = open(file_name,'wb')
+         fd = open(file_name, 'wb')
          writer = csv.writer(fd)

          #Write the title to a cvs file
-         line = [None]* len(self._title_index_dic)
+         line = [None] * len(self._title_index_dic)
          for title in self._title_index_dic.iterkeys():
-             line[self._title_index_dic[title]]= title
+             line[self._title_index_dic[title]] = title
          writer.writerow(line)

          # Write the values to a cvs file
          value_row_count = \
-             len(self._attribute_dic[self._title_index_dic.keys()[0]])
+                 len(self._attribute_dic[self._title_index_dic.keys()[0]])
          for row_i in range(value_row_count):
-             line = [None]* len(self._title_index_dic)
+             line = [None] * len(self._title_index_dic)
              for title in self._title_index_dic.iterkeys():
-                 line[self._title_index_dic[title]]= \
+                 line[self._title_index_dic[title]] = \
                      self._attribute_dic[title][row_i]
              writer.writerow(line)


+ ##
+ # @brief Convert CSV file to a dictionary of arrays.
+ # @param file_name The path to the file to read.
  def csv2array(file_name):
-     """Convert CSV files of the form
-
+     """
+     Convert CSV files of the form:
+
      time, discharge, velocity
      0.0,  1.2,       0.0
      0.1,  3.2,       1.1
      ...

      to a dictionary of numeric arrays.

      See underlying function csv2dict for more details.
      """

      X, _ = csv2dict(file_name)

      Y = {}
      for key in X.keys():
          Y[key] = array([float(x) for x in X[key]])

      return Y
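[Editor's note] Illustration of csv2array() on the file layout shown in its docstring ('hydrograph.csv' is a hypothetical file with that layout):

    from anuga.shallow_water.data_manager import csv2array

    quantities = csv2array('hydrograph.csv')
    print quantities['discharge']    # Numeric array of floats; column order
                                     # is tracked by csv2dict, defined below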


+ ##
+ # @brief Read a CSV file and convert to a dictionary of {key: column}.
+ # @param file_name The path to the file to read.
+ # @param title_check_list List of titles that *must* be columns in the file.
+ # @return Two dicts: ({key:column}, {title:index}).
+ # @note WARNING: Values are returned as strings.
  def csv2dict(file_name, title_check_list=None):
      """
-     Load in the csv as a dic, title as key and column info as value ,.
+     Load in the csv as a dic, title as key and column info as value.
      Also, create a dic, title as key and column index as value,
      to keep track of the column order.

      Two dictionaries are returned.

      WARNING: Values are returned as strings.
-     do this to change a list of strings to a list of floats
-     time = [float(x) for x in time]
-
-     """
-
-     #
+     Do this to change a list of strings to a list of floats
+     time = [float(x) for x in time]
+     """
+
      attribute_dic = {}
      title_index_dic = {}
      titles_stripped = [] # list of titles
+
      reader = csv.reader(file(file_name))

…
          titles_stripped.append(title.strip())
          title_index_dic[title.strip()] = i
      title_count = len(titles_stripped)
-     #print "title_index_dic",title_index_dic
+
+     # check required columns
      if title_check_list is not None:
          for title_check in title_check_list:
              #msg = "Reading error. This row is not present ", title_check
              #assert title_index_dic.has_key(title_check), msg
              if not title_index_dic.has_key(title_check):
                  #reader.close()
-                 msg = "Reading error. This row is not present ", \
-                       title_check
+                 msg = "Reading error. This row is not present ", title_check
                  raise IOError, msg

-     #create a dic of colum values, indexed by column title
+     #create a dic of column values, indexed by column title
      for line in reader:
          if len(line) <> title_count:
              raise IOError #FIXME make this nicer
          for i, value in enumerate(line):
              attribute_dic.setdefault(titles_stripped[i], []).append(value)

      return attribute_dic, title_index_dic


- #Auxiliary
- def write_obj(filename,x,y,z):
-     """Store x,y,z vectors into filename (obj format)
+ ##
+ # @brief
+ # @param filename
+ # @param x
+ # @param y
+ # @param z
+ def write_obj(filename, x, y, z):
+     """Store x,y,z vectors into filename (obj format).
+
      Vectors are assumed to have dimension (M,3) where
      M corresponds to the number elements.
…

      e.g. the x coordinate of vertex 1 of element i is in x[i,1]
-
-     """
-     #print 'Writing obj to %s' % filename
+     """

      import os.path
…
      FN = filename + '.obj'

      outfile = open(FN, 'wb')
      outfile.write("# Triangulation as an obj file\n")

      M, N = x.shape
-     assert N==3 #Assuming three vertices per element
+     assert N == 3 #Assuming three vertices per element

      for i in range(M):
          for j in range(N):
-             outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))
+             outfile.write("v %f %f %f\n" % (x[i,j], y[i,j], z[i,j]))

      for i in range(M):
-         base = i*N
-         outfile.write("f %d %d %d\n" % (base+1,base+2,base+3))
+         base = i * N
+         outfile.write("f %d %d %d\n" % (base+1, base+2, base+3))

      outfile.close()
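[Editor's note] A minimal sketch of write_obj() with a single triangle (editor's illustration; 'tri' is a hypothetical output stem):

    from Numeric import array, Float
    from anuga.shallow_water.data_manager import write_obj

    # (M,3) arrays: one row per element, one column per vertex
    x = array([[0.0, 1.0, 0.0]], Float)
    y = array([[0.0, 0.0, 1.0]], Float)
    z = array([[0.0, 0.0, 0.0]], Float)
    write_obj('tri', x, y, z)    # writes tri.obj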


…
  ########################################################

+ ##
+ # @brief Convert SWW data to OBJ data.
+ # @param basefilename Stem of filename, needs size and extension added.
+ # @param size The number of lines to write.
  def sww2obj(basefilename, size):
      """Convert netcdf based data output to obj
      """
+
      from Scientific.IO.NetCDF import NetCDFFile
      from Numeric import Float, zeros

      # Get NetCDF
      FN = create_filename('.', basefilename, 'sww', size)
      print 'Reading from ', FN
      fid = NetCDFFile(FN, 'r')  #Open existing file for read

      # Get the variables
…
              zz[i,j] = z[i+j*M]

      # Write obj for bathymetry
      FN = create_filename('.', basefilename, 'obj', size)
      write_obj(FN,xx,yy,zz)

-     #Now read all the data with variable information, combine with
-     #x,y info and store as obj
-
+     # Now read all the data with variable information, combine with
+     # x,y info and store as obj
      for k in range(len(time)):
          t = time[k]
…
                  zz[i,j] = stage[k,i+j*M]

          #Write obj for variable data
          #FN = create_filename(basefilename, 'obj', size, time=t)
          FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
-         write_obj(FN,xx,yy,zz)
+         write_obj(FN, xx, yy, zz)


+ ##
+ # @brief
+ # @param basefilename Stem of filename, needs size and extension added.
  def dat2obj(basefilename):
      """Convert line based data output to obj
…
      import glob, os
      from anuga.config import data_dir
-
-     #Get bathymetry and x,y's
+     from Numeric import zeros, Float
+
+     # Get bathymetry and x,y's
      lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()
-
-     from Numeric import zeros, Float

      M = len(lines)  #Number of lines
…
      z = zeros((M,3), Float)

-     ##i = 0
      for i, line in enumerate(lines):
          tokens = line.split()
          values = map(float, tokens)

          for j in range(3):
…
              z[i,j] = values[j*3+2]

-     ##i += 1
-
-     #Write obj for bathymetry
-     write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)
-
-     #Now read all the data files with variable information, combine with
-     #x,y info
-     #and store as obj
-
-     files = glob.glob(data_dir+os.sep+basefilename+'*.dat')
+     # Write obj for bathymetry
+     write_obj(data_dir + os.sep + basefilename + '_geometry', x, y, z)
+
+     # Now read all the data files with variable information, combine with
+     # x,y info and store as obj.
+
+     files = glob.glob(data_dir + os.sep + basefilename + '*.dat')

      for filename in files:
          print 'Processing %s' % filename

-         lines = open(data_dir+os.sep+filename,'r').readlines()
+         lines = open(data_dir + os.sep + filename, 'r').readlines()
          assert len(lines) == M
          root, ext = os.path.splitext(filename)

          # Get time from filename
          i0 = filename.find('_time=')
          if i0 == -1:
…
              raise DataTimeError, 'Hmmmm'

-         ##i = 0
          for i, line in enumerate(lines):
              tokens = line.split()
…
              z[i,j] = values[j]

-         ##i += 1
-
-         #Write obj for variable data
-         write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)
+         # Write obj for variable data
+         write_obj(data_dir + os.sep + basefilename + '_time=%.4f' % t, x, y, z)


- def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
+ ##
+ # @brief Filter data file, selecting timesteps first:step:last.
+ # @param filename1 Data file to filter.
+ # @param filename2 File to write filtered timesteps to.
+ # @param first First timestep.
+ # @param last Last timestep.
+ # @param step Timestep stride.
+ def filter_netcdf(filename1, filename2, first=0, last=None, step=1):
      """Read netcdf filename1, pick timesteps first:step:last and save to
      netcdf file filename2
      """
+
      from Scientific.IO.NetCDF import NetCDFFile

      # Get NetCDF
      infile = NetCDFFile(filename1, 'r')  #Open existing file for read
      outfile = NetCDFFile(filename2, 'w')  #Open new file

-     #Copy dimensions
+     # Copy dimensions
      for d in infile.dimensions:
          outfile.createDimension(d, infile.dimensions[d])

+     # Copy variable definitions
      for name in infile.variables:
          var = infile.variables[name]
          outfile.createVariable(name, var.typecode(), var.dimensions)

-     #Copy the static variables
+     # Copy the static variables
      for name in infile.variables:
          if name == 'time' or name == 'stage':
              pass
          else:
-             #Copy
              outfile.variables[name][:] = infile.variables[name][:]

-     #Copy selected timesteps
+     # Copy selected timesteps
      time = infile.variables['time']
      stage = infile.variables['stage']
…
      selection = range(first, last, step)
      for i, j in enumerate(selection):
-         print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
+         print 'Copying timestep %d of %d (%f)' % (j, last-first, time[j])
          newtime[i] = time[j]
          newstage[i,:] = stage[j,:]

-     #Close
+     # Close
      infile.close()
      outfile.close()
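[Editor's note] A hedged call sketch for filter_netcdf() (both filenames are hypothetical). This keeps every fifth of the first hundred timesteps:

    from anuga.shallow_water.data_manager import filter_netcdf

    filter_netcdf('full_run.sww', 'thinned.sww', first=0, last=100, step=5)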


+ ##
+ # @brief Return instance of class of given format using filename.
+ # @param domain Data domain (eg, 'sww', etc).
+ # @param mode The mode to open domain in.
+ # @return A class instance of required domain and mode.
  #Get data objects
  def get_dataobject(domain, mode='w'):
      """Return instance of class of given format using filename
      """

-     cls = eval('Data_format_%s' %domain.format)
+     cls = eval('Data_format_%s' % domain.format)
      return cls(domain, mode)


+ ##
+ # @brief Convert DEM data to PTS data.
+ # @param basename_in Stem of input filename.
+ # @param basename_out Stem of output filename.
+ # @param easting_min
+ # @param easting_max
+ # @param northing_min
+ # @param northing_max
+ # @param use_cache
+ # @param verbose
+ # @return
  def dem2pts(basename_in, basename_out=None,
              easting_min=None, easting_max=None,
…
      """

      kwargs = {'basename_out': basename_out,
                'easting_min': easting_min,
…
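[Editor's note] A hedged call sketch for dem2pts() ('perth' is a hypothetical stem, so perth.dem must already exist; output defaults to perth.pts):

    from anuga.shallow_water.data_manager import dem2pts

    dem2pts('perth',
            easting_min=380000.0, easting_max=390000.0,
            northing_min=6430000.0, northing_max=6440000.0,
            use_cache=False, verbose=True)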

+ ##
+ # @brief
+ # @param basename_in
+ # @param basename_out
+ # @param verbose
+ # @param easting_min
+ # @param easting_max
+ # @param northing_min
+ # @param northing_max
  def _dem2pts(basename_in, basename_out=None, verbose=False,
               easting_min=None, easting_max=None,
…

      # Get NetCDF
-     infile = NetCDFFile(root + '.dem', 'r')  # Open existing netcdf file for read
+     infile = NetCDFFile(root + '.dem', 'r')

      if verbose: print 'Reading DEM from %s' %(root + '.dem')
…
      units = infile.units

      # Get output file
      if basename_out == None:
…

      if verbose: print 'Store to NetCDF file %s' %ptsname
+
      # NetCDF file definition
      outfile = NetCDFFile(ptsname, 'w')
…
      # Create new file
      outfile.institution = 'Geoscience Australia'
-     outfile.description = 'NetCDF pts format for compact and portable storage ' +\
-                           'of spatial point data'
+     outfile.description = 'NetCDF pts format for compact and portable ' \
+                           'storage of spatial point data'
+
      # Assign default values
      if easting_min is None: easting_min = xllcorner
…
      outfile.datum = datum
      outfile.units = units

      # Grid info (FIXME: probably not going to be used, but heck)
…
      for j in range(ncols):
          x = j*cellsize + xllcorner
-         if easting_min <= x <= easting_max and\
-            northing_min <= y <= northing_max:
+         if easting_min <= x <= easting_max \
+            and northing_min <= y <= northing_max:
              thisj = j
              thisi = i
-             if dem_elevation_r[i,j] == NODATA_value: nn += 1
+             if dem_elevation_r[i,j] == NODATA_value:
+                 nn += 1

              if k == 0:
                  i1_0 = i
                  j1_0 = j
+
              k += 1
…

      lenv = index2-index1+1
+
      # Store data
      global_index = 0
      # for i in range(nrows):
-     for i in range(i1_0, thisi+1,1):
-         if verbose and i%((nrows+10)/10)==0:
-             print 'Processing row %d of %d' %(i, nrows)
+     for i in range(i1_0, thisi+1, 1):
+         if verbose and i % ((nrows+10)/10) == 0:
+             print 'Processing row %d of %d' % (i, nrows)

          lower_index = global_index
…
          no_NODATA = sum(v == NODATA_value)
          if no_NODATA > 0:
-             newcols = lenv - no_NODATA # ncols_in_bounding_box - no_NODATA
+             newcols = lenv - no_NODATA  # ncols_in_bounding_box - no_NODATA
          else:
-             newcols = lenv # ncols_in_bounding_box
+             newcols = lenv  # ncols_in_bounding_box

          telev = zeros(newcols, Float)
…
          #for j in range(ncols):
          for j in range(j1_0,index2+1,1):
              x = j*cellsize + xllcorner
-             if easting_min <= x <= easting_max and\
-                northing_min <= y <= northing_max and\
-                dem_elevation_r[i,j] <> NODATA_value:
-                 tpoints[local_index, :] = [x-easting_min,y-northing_min]
+             if easting_min <= x <= easting_max \
+                and northing_min <= y <= northing_max \
+                and dem_elevation_r[i,j] <> NODATA_value:
+                 tpoints[local_index, :] = [x-easting_min, y-northing_min]
                  telev[local_index] = dem_elevation_r[i, j]
                  global_index += 1
…


+ ##
+ # @brief Return block of surface lines for each cross section
+ # @param lines Iterable of text lines to process.
+ # @note BROKEN? UNUSED?
  def _read_hecras_cross_sections(lines):
      """Return block of surface lines for each cross section
…
      reading_surface = False
      for i, line in enumerate(lines):
          if len(line.strip()) == 0:    #Ignore blanks
              continue
…


- def hecras_cross_sections2pts(basename_in,
+ ##
+ # @brief Convert HECRAS (.sdf) file to PTS file.
+ # @param basename_in Stem of input filename.
+ # @param basename_out Stem of output filename.
+ # @param verbose True if this function is to be verbose.
+ def hecras_cross_sections2pts(basename_in,
                                basename_out=None,
…

      Example:

      # RAS export file created on Mon 15Aug2005 11:42
…
      END HEADER:

      Only the SURFACE LINE data of the following form will be utilised
      CROSS-SECTION:
          STREAM ID:Southern-Wungong
…
      root = basename_in

      # Get ASCII file
-     infile = open(root + '.sdf', 'r')  #Open SDF file for read
+     infile = open(root + '.sdf', 'r')

      if verbose: print 'Reading DEM from %s' %(root + '.sdf')
…
      if verbose: print 'Converting to pts format'

+     # Scan through the header, picking up stuff we need.
      i = 0
      while lines[i].strip() == '' or lines[i].strip().startswith('#'):
…
          i += 1

-     #Now read all points
+     # Now read all points
      points = []
      elevation = []
…
          elevation.append(entry[2])

      msg = 'Actual #number_of_cross_sections == %d, Reported as %d'\
            %(j+1, number_of_cross_sections)
      assert j+1 == number_of_cross_sections, msg

-     #Get output file
+     # Get output file, write PTS data
      if basename_out == None:
          ptsname = root + '.pts'
…
      geo.export_points_file(ptsname)
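[Editor's note] A hedged sketch for the converter above ('floodstudy' is a hypothetical stem, so floodstudy.sdf must be a RAS export of the form shown in the docstring):

    from anuga.shallow_water.data_manager import hecras_cross_sections2pts

    hecras_cross_sections2pts('floodstudy', verbose=True)   # -> floodstudy.pts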

- def export_grid(basename_in, extra_name_out = None,
-                 quantities = None, # defaults to elevation
-                 timestep = None,
-                 reduction = None,
-                 cellsize = 10,
-                 number_of_decimal_places = None,
-                 NODATA_value = -9999,
-                 easting_min = None,
-                 easting_max = None,
-                 northing_min = None,
-                 northing_max = None,
-                 verbose = False,
-                 origin = None,
-                 datum = 'WGS84',
-                 format = 'ers'):
-     """
-
-     Wrapper for sww2dem. - see sww2dem to find out what most of the
-     parameters do.
+ ##
+ # @brief
+ # @param basename_in
+ # @param extra_name_out
+ # @param quantities
+ # @param timestep
+ # @param reduction
+ # @param cellsize
+ # @param number_of_decimal_places
+ # @param NODATA_value
+ # @param easting_min
+ # @param easting_max
+ # @param northing_min
+ # @param northing_max
+ # @param verbose
+ # @param origin
+ # @param datum
+ # @param format
+ # @return
+ def export_grid(basename_in, extra_name_out=None,
+                 quantities=None, # defaults to elevation
+                 timestep=None,
+                 reduction=None,
+                 cellsize=10,
+                 number_of_decimal_places=None,
+                 NODATA_value=-9999,
+                 easting_min=None,
+                 easting_max=None,
+                 northing_min=None,
+                 northing_max=None,
+                 verbose=False,
+                 origin=None,
+                 datum='WGS84',
+                 format='ers'):
+     """Wrapper for sww2dem.
+     See sww2dem to find out what most of the parameters do.

      Quantities is a list of quantities. Each quantity will be
…
      This function returns the names of the files produced.

      It will also produce as many output files as there are input sww files.
      """

      if quantities is None:
          quantities = ['elevation']

      if type(quantities) is str:
          quantities = [quantities]
…
      # How many sww files are there?
      dir, base = os.path.split(basename_in)
-     #print "basename_in",basename_in
-     #print "base",base
-
-     iterate_over = get_all_swwfiles(dir,base,verbose)
+
+     iterate_over = get_all_swwfiles(dir, base, verbose)

      if dir == "":
          dir = "." # Unix compatibility

      files_out = []
-     #print 'sww_file',iterate_over
      for sww_file in iterate_over:
          for quantity in quantities:
…
                  basename_out = sww_file + '_' + quantity
              else:
-                 basename_out = sww_file + '_' + quantity + '_' \
-                                + extra_name_out
-                 # print "basename_out", basename_out
-
+                 basename_out = sww_file + '_' + quantity + '_' + extra_name_out
+
              file_out = sww2dem(dir+sep+sww_file, dir+sep+basename_out,
-                                quantity,
+                                quantity,
                                 timestep,
                                 reduction,
…
                                 format)
              files_out.append(file_out)
-     #print "basenames_out after",basenames_out
+
      return files_out
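[Editor's note] A hedged sketch of export_grid(), the sww2dem wrapper just added ('runup' is a hypothetical sww stem). One grid per quantity per matching sww file is written, and the output names are returned:

    from anuga.shallow_water.data_manager import export_grid

    files = export_grid('runup',
                        quantities=['elevation', 'stage'],
                        cellsize=25,
                        format='asc',
                        verbose=True)
    print files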
# Generate figures 1768 1861 swwfiles = {} 1769 1770 if is_parallel is True: 1862 if is_parallel is True: 1771 1863 for i in range(nodes): 1772 print 'Sending node %d of %d' % (i,nodes)1864 print 'Sending node %d of %d' % (i, nodes) 1773 1865 swwfiles = {} 1774 1866 if not reportname == None: 1775 reportname = report_name + '_%s' % (i)1867 reportname = report_name + '_%s' % i 1776 1868 for label_id in production_dirs.keys(): 1777 1869 if label_id == 'boundaries': … … 1779 1871 else: 1780 1872 file_loc = output_dir + label_id + sep 1781 sww_extra = '_P%s_%s' % (i,nodes)1873 sww_extra = '_P%s_%s' % (i, nodes) 1782 1874 swwfile = file_loc + scenario_name + sww_extra + '.sww' 1783 print 'swwfile', swwfile1875 print 'swwfile', swwfile 1784 1876 swwfiles[swwfile] = label_id 1785 1877 … … 1787 1879 gauges_dir_name, 1788 1880 production_dirs, 1789 report =report,1790 reportname =reportname,1791 plot_quantity =plot_quantity,1792 generate_fig =generate_fig,1793 surface =surface,1794 time_min =time_min,1795 time_max =time_max,1796 title_on =title_on,1797 verbose =verbose)1798 else: 1799 for label_id in production_dirs.keys(): 1881 report=report, 1882 reportname=reportname, 1883 plot_quantity=plot_quantity, 1884 generate_fig=generate_fig, 1885 surface=surface, 1886 time_min=time_min, 1887 time_max=time_max, 1888 title_on=title_on, 1889 verbose=verbose) 1890 else: 1891 for label_id in production_dirs.keys(): 1800 1892 if label_id == 'boundaries': 1801 1893 print 'boundaries' … … 1807 1899 swwfile = file_loc + scenario_name + '.sww' 1808 1900 swwfiles[swwfile] = label_id 1809 1901 1810 1902 texname, elev_output = sww2timeseries(swwfiles, 1811 1903 gauges_dir_name, 1812 1904 production_dirs, 1813 report = report, 1814 reportname = reportname, 1815 plot_quantity = plot_quantity, 1816 generate_fig = generate_fig, 1817 surface = surface, 1818 time_min = time_min, 1819 time_max = time_max, 1820 title_on = title_on, 1821 verbose = verbose) 1822 1823 1824 1825 def sww2dem(basename_in, basename_out = None, 1826 quantity = None, # defaults to elevation 1827 timestep = None, 1828 reduction = None, 1829 cellsize = 10, 1830 number_of_decimal_places = None, 1831 NODATA_value = -9999, 1832 easting_min = None, 1833 easting_max = None, 1834 northing_min = None, 1835 northing_max = None, 1836 verbose = False, 1837 origin = None, 1838 datum = 'WGS84', 1839 format = 'ers'): 1840 1905 report=report, 1906 reportname=reportname, 1907 plot_quantity=plot_quantity, 1908 generate_fig=generate_fig, 1909 surface=surface, 1910 time_min=time_min, 1911 time_max=time_max, 1912 title_on=title_on, 1913 verbose=verbose) 1914 1915 1916 ## 1917 # @brief Convert SWW file to DEM file (.asc or .ers). 
1918 # @param basename_in 1919 # @param basename_out 1920 # @param quantity 1921 # @param timestep 1922 # @param reduction 1923 # @param cellsize 1924 # @param number_of_decimal_places 1925 # @param NODATA_value 1926 # @param easting_min 1927 # @param easting_max 1928 # @param northing_min 1929 # @param northing_max 1930 # @param verbose 1931 # @param origin 1932 # @param datum 1933 # @param format 1934 # @return 1935 def sww2dem(basename_in, basename_out=None, 1936 quantity=None, # defaults to elevation 1937 timestep=None, 1938 reduction=None, 1939 cellsize=10, 1940 number_of_decimal_places=None, 1941 NODATA_value=-9999, 1942 easting_min=None, 1943 easting_max=None, 1944 northing_min=None, 1945 northing_max=None, 1946 verbose=False, 1947 origin=None, 1948 datum='WGS84', 1949 format='ers'): 1841 1950 """Read SWW file and convert to Digitial Elevation model format 1842 1951 (.asc or .ers) 1843 1952 1844 1953 Example (ASC): 1845 1846 1954 ncols 3121 1847 1955 nrows 1800 … … 1854 1962 The number of decimal places can be specified by the user to save 1855 1963 on disk space requirements by specifying in the call to sww2dem. 1856 1964 1857 1965 Also write accompanying file with same basename_in but extension .prj 1858 1966 used to fix the UTM zone, datum, false northings and eastings. … … 1885 1993 1886 1994 import sys 1887 from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, \ 1888 sometrue 1889 from Numeric import array2string 1995 from Numeric import array, Float, concatenate, NewAxis, zeros, reshape 1996 from Numeric import array2string, sometrue 1890 1997 1891 1998 from anuga.utilities.polygon import inside_polygon, outside_polygon, \ … … 1897 2004 assert format.lower() in ['asc', 'ers'], msg 1898 2005 1899 1900 2006 false_easting = 500000 1901 2007 false_northing = 10000000 … … 1903 2009 if quantity is None: 1904 2010 quantity = 'elevation' 1905 2011 1906 2012 if reduction is None: 1907 2013 reduction = max 1908 2014 1909 2015 if basename_out is None: 1910 basename_out = basename_in + '_%s' % quantity2016 basename_out = basename_in + '_%s' % quantity 1911 2017 1912 2018 if quantity_formula.has_key(quantity): … … 1915 2021 if number_of_decimal_places is None: 1916 2022 number_of_decimal_places = 3 1917 2023 1918 2024 swwfile = basename_in + '.sww' 1919 2025 demfile = basename_out + '.' 
+ format 1920 2026 # Note the use of a .ers extension is optional (write_ermapper_grid will 1921 2027 # deal with either option 1922 1923 #if verbose: bye= nsuadsfd[0] # uncomment to check catching verbose errors 1924 2028 1925 2029 # Read sww file 1926 if verbose: 1927 print 'Reading from %s' % swwfile1928 print 'Output directory is %s' % basename_out1929 2030 if verbose: 2031 print 'Reading from %s' % swwfile 2032 print 'Output directory is %s' % basename_out 2033 1930 2034 from Scientific.IO.NetCDF import NetCDFFile 1931 2035 fid = NetCDFFile(swwfile) … … 1942 2046 number_of_timesteps = fid.dimensions['number_of_timesteps'] 1943 2047 number_of_points = fid.dimensions['number_of_points'] 1944 2048 1945 2049 if origin is None: 1946 1947 2050 # Get geo_reference 1948 2051 # sww files don't have to have a geo_ref … … 1959 2062 xllcorner = origin[1] 1960 2063 yllcorner = origin[2] 1961 1962 1963 2064 1964 2065 # FIXME: Refactor using code from Interpolation_function.statistics … … 1996 2097 q = fid.variables[name][:].flat 1997 2098 if verbose: print ' %s in [%f, %f]' %(name, min(q), max(q)) 1998 2099 1999 2100 # Get quantity and reduce if applicable 2000 2101 if verbose: print 'Processing quantity %s' %quantity … … 2002 2103 # Turn NetCDF objects into Numeric arrays 2003 2104 try: 2004 q = fid.variables[quantity][:] 2005 2006 2105 q = fid.variables[quantity][:] 2007 2106 except: 2008 2107 quantity_dict = {} 2009 2108 for name in fid.variables.keys(): 2010 quantity_dict[name] = fid.variables[name][:] 2011 #Convert quantity expression to quantities found in sww file 2109 quantity_dict[name] = fid.variables[name][:] 2110 #Convert quantity expression to quantities found in sww file 2012 2111 q = apply_expression_to_dictionary(quantity, quantity_dict) 2013 #print "q.shape",q.shape 2112 2014 2113 if len(q.shape) == 2: 2015 #q has a time component and needs to be reduced along 2016 #the temporal dimension 2114 #q has a time component, must be reduced alongthe temporal dimension 2017 2115 if verbose: print 'Reducing quantity %s' %quantity 2018 q_reduced = zeros( number_of_points, Float)2019 2116 q_reduced = zeros(number_of_points, Float) 2117 2020 2118 if timestep is not None: 2021 2119 for k in range(number_of_points): 2022 q_reduced[k] = q[timestep,k] 2120 q_reduced[k] = q[timestep,k] 2023 2121 else: 2024 2122 for k in range(number_of_points): … … 2032 2130 2033 2131 if verbose: 2034 print 'Processed values for %s are in [%f, %f]' % (quantity, min(q), max(q))2035 2132 print 'Processed values for %s are in [%f, %f]' % \ 2133 (quantity, min(q), max(q)) 2036 2134 2037 2135 #Create grid and update xll/yll corner and x,y 2038 2039 2136 #Relative extent 2040 2137 if easting_min is None: … … 2057 2154 else: 2058 2155 ymax = northing_max - yllcorner 2059 2060 2156 2061 2157 msg = 'xmax must be greater than or equal to xmin.\n' … … 2065 2161 msg = 'yax must be greater than or equal to xmin.\n' 2066 2162 msg += 'I got ymin = %f, ymax = %f' %(ymin, ymax) 2067 assert ymax >= ymin, msg 2068 2163 assert ymax >= ymin, msg 2164 2069 2165 if verbose: print 'Creating grid' 2070 ncols = int((xmax-xmin)/cellsize)+1 2071 nrows = int((ymax-ymin)/cellsize)+1 2072 2166 ncols = int((xmax-xmin)/cellsize) + 1 2167 nrows = int((ymax-ymin)/cellsize) + 1 2073 2168 2074 2169 #New absolute reference and coordinates 2075 newxllcorner = xmin +xllcorner2076 newyllcorner = ymin +yllcorner2077 2078 x = x +xllcorner-newxllcorner2079 y = y +yllcorner-newyllcorner2080 2081 vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), 
axis =1)2170 newxllcorner = xmin + xllcorner 2171 newyllcorner = ymin + yllcorner 2172 2173 x = x + xllcorner - newxllcorner 2174 y = y + yllcorner - newyllcorner 2175 2176 vertex_points = concatenate ((x[:,NewAxis], y[:,NewAxis]), axis=1) 2082 2177 assert len(vertex_points.shape) == 2 2083 2178 2084 grid_points = zeros ( (ncols*nrows, 2), Float ) 2085 2179 grid_points = zeros ((ncols*nrows, 2), Float) 2086 2180 2087 2181 for i in xrange(nrows): 2088 2182 if format.lower() == 'asc': 2089 yg = i *cellsize2183 yg = i * cellsize 2090 2184 else: 2091 2185 #this will flip the order of the y values for ers 2092 yg = (nrows-i) *cellsize2186 yg = (nrows-i) * cellsize 2093 2187 2094 2188 for j in xrange(ncols): 2095 xg = j *cellsize2189 xg = j * cellsize 2096 2190 k = i*ncols + j 2097 2191 2098 grid_points[k, 0] = xg2099 grid_points[k, 1] = yg2192 grid_points[k, 0] = xg 2193 grid_points[k, 1] = yg 2100 2194 2101 2195 #Interpolate … … 2112 2206 grid_values = interp.interpolate(q, grid_points).flat 2113 2207 2114 2115 2208 if verbose: 2116 2209 print 'Interpolated values are in [%f, %f]' %(min(grid_values), … … 2124 2217 grid_values[i] = NODATA_value 2125 2218 2126 2127 2128 2129 2219 if format.lower() == 'ers': 2130 2220 # setup ERS header information 2131 grid_values = reshape(grid_values, (nrows, ncols))2221 grid_values = reshape(grid_values, (nrows, ncols)) 2132 2222 header = {} 2133 2223 header['datum'] = '"' + datum + '"' … … 2148 2238 #header['celltype'] = 'IEEE8ByteReal' #FIXME: Breaks unit test 2149 2239 2150 2151 2240 #Write 2152 2241 if verbose: print 'Writing %s' %demfile 2242 2153 2243 import ermapper_grids 2244 2154 2245 ermapper_grids.write_ermapper_grid(demfile, grid_values, header) 2155 2246 … … 2157 2248 else: 2158 2249 #Write to Ascii format 2159 2160 2250 #Write prj file 2161 2251 prjfile = basename_out + '.prj' … … 2174 2264 prjid.close() 2175 2265 2176 2177 2178 2266 if verbose: print 'Writing %s' %demfile 2179 2267 … … 2187 2275 ascid.write('NODATA_value %d\n' %NODATA_value) 2188 2276 2189 2190 2277 #Get bounding polygon from mesh 2191 2278 #P = interp.mesh.get_boundary_polygon() 2192 2279 #inside_indices = inside_polygon(grid_points, P) 2193 2194 2280 for i in range(nrows): 2195 if verbose and i %((nrows+10)/10)==0:2281 if verbose and i % ((nrows+10)/10) == 0: 2196 2282 print 'Doing row %d of %d' %(i, nrows) 2197 2283 … … 2200 2286 slice = grid_values[base_index:base_index+ncols] 2201 2287 #s = array2string(slice, max_line_width=sys.maxint) 2202 s = array2string(slice, max_line_width=sys.maxint, precision=number_of_decimal_places) 2288 s = array2string(slice, max_line_width=sys.maxint, 2289 precision=number_of_decimal_places) 2203 2290 ascid.write(s[1:-1] + '\n') 2204 2205 #print2206 #for j in range(ncols):2207 # index = base_index+j#2208 # print grid_values[index],2209 # ascid.write('%f ' %grid_values[index])2210 #ascid.write('\n')2211 2291 2212 2292 #Close 2213 2293 ascid.close() 2214 2294 fid.close() 2295 2215 2296 return basename_out 2216 2297 2217 2218 #Backwards compatibility 2298 ################################################################################ 2299 # Obsolete functions - mainatined for backwards compatibility 2300 ################################################################################ 2301 2302 ## 2303 # @brief 2304 # @param basename_in 2305 # @param basename_out 2306 # @param quantity 2307 # @param timestep 2308 # @param reduction 2309 # @param cellsize 2310 # @param verbose 2311 # @param origin 2312 # @note OBSOLETE - use sww2dem() instead. 
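For reference, a hedged usage sketch of the preferred sww2dem() call (the file
stem 'myfile' and the grid settings below are hypothetical):

    from anuga.shallow_water.data_manager import sww2dem

    # Export the maximum stage over all timesteps as an ArcInfo ASCII grid;
    # this reads myfile.sww and writes myfile_stage.asc plus myfile_stage.prj.
    sww2dem('myfile',
            basename_out='myfile_stage',
            quantity='stage',
            reduction=max,
            cellsize=25,
            NODATA_value=-9999,
            format='asc')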
2219 2313 def sww2asc(basename_in, basename_out = None,
2220 2314     quantity = None,
…
2237 2331     format = 'asc')
2238 2332
2239 def sww2ers(basename_in, basename_out = None,
2240     quantity = None,
2241     timestep = None,
2242     reduction = None,
2243     cellsize = 10,
2244     verbose = False,
2245     origin = None,
2246     datum = 'WGS84'):
2333
2334 ##
2335 # @brief Convert SWW file to ERS file.
2336 # @param basename_in
2337 # @param basename_out
2338 # @param quantity
2339 # @param timestep
2340 # @param reduction
2341 # @param cellsize
# @param number_of_decimal_places
2342 # @param verbose
2343 # @param origin
2344 # @param datum
2345 # @note OBSOLETE - use sww2dem() instead.
2346 def sww2ers(basename_in, basename_out=None,
2347             quantity=None,
2348             timestep=None,
2349             reduction=None,
2350             cellsize=10, number_of_decimal_places=None,
2351             verbose=False,
2352             origin=None,
2353             datum='WGS84'):
2247 2354     print 'sww2ers will soon be obsoleted - please use sww2dem'
2248 2355     sww2dem(basename_in,
2249         basename_out = basename_out,
2250         quantity = quantity,
2251         timestep = timestep,
2252         reduction = reduction,
2253         cellsize = cellsize,
2254         number_of_decimal_places = number_of_decimal_places,
2255         verbose = verbose,
2256         origin = origin,
2257         datum = datum,
2258         format = 'ers')
2259 ################################# END COMPATIBILITY ##############
2260
2261
2262
2356             basename_out=basename_out,
2357             quantity=quantity,
2358             timestep=timestep,
2359             reduction=reduction,
2360             cellsize=cellsize,
2361             number_of_decimal_places=number_of_decimal_places,
2362             verbose=verbose,
2363             origin=origin,
2364             datum=datum,
2365             format='ers')
2366
2367 ################################################################################
2368 # End of obsolete functions
2369 ################################################################################
2370
2371
2372 ##
2373 # @brief Convert SWW file to PTS file (at selected points).
2374 # @param basename_in Stem name of input SWW file.
2375 # @param basename_out Stem name of output file.
2376 # @param data_points If given, points where quantity is to be computed.
2377 # @param quantity Name (or expression) of existing quantity(s) (def: elevation).
2378 # @param timestep If given, output quantity at that timestep.
2379 # @param reduction If given, reduce quantity by this factor.
2380 # @param NODATA_value The NODATA value (default -9999).
2381 # @param verbose True if this function is to be verbose.
2382 # @param origin ??
2263 2383 def sww2pts(basename_in, basename_out=None,
2264 2384             data_points=None,
…
2269 2389             verbose=False,
2270 2390             origin=None):
2271     #datum = 'WGS84')
2272
2273
2274 2391     """Read SWW file and convert to interpolated values at selected points
2275 2392
2276     The parameter quantity' must be the name of an existing quantity or
2277     an expression involving existing quantities. The default is
2278     'elevation'.
2279
2280     if timestep (an index) is given, output quantity at that timestep
2393     The parameter 'quantity' must be the name of an existing quantity or
2394     an expression involving existing quantities. The default is 'elevation'.
2395
2396     if timestep (an index) is given, output quantity at that timestep.
2397
2398     if reduction is given use that to reduce quantity over all timesteps.
2399
2284 2400     data_points (Nx2 array) give locations of points where quantity is to be computed.
2285     data_points (Nx2 array) give locations of points where quantity is to
2401     be computed.
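    A minimal usage sketch (the file stem and point coordinates below are
    hypothetical):

        from anuga.shallow_water.data_manager import sww2pts

        sww2pts('myfile',                          # reads myfile.sww
                data_points=[[460000.0, 7985000.0],
                             [460100.0, 7985500.0]],
                quantity='stage',
                reduction=max)                     # writes myfile_stage.pts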
2286 2402 """ 2287 2403 2288 2404 import sys 2289 from Numeric import array, Float, concatenate, NewAxis, zeros, reshape , sometrue2290 from Numeric import array2string 2291 2292 from anuga.utilities.polygon import inside_polygon, outside_polygon,separate_points_by_polygon2293 from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary2294 2405 from Numeric import array, Float, concatenate, NewAxis, zeros, reshape 2406 from Numeric import array2string, sometrue 2407 from anuga.utilities.polygon import inside_polygon, outside_polygon, \ 2408 separate_points_by_polygon 2409 from anuga.abstract_2d_finite_volumes.util import \ 2410 apply_expression_to_dictionary 2295 2411 from anuga.geospatial_data.geospatial_data import Geospatial_data 2296 2412 … … 2302 2418 2303 2419 if basename_out is None: 2304 basename_out = basename_in + '_%s' % quantity2420 basename_out = basename_in + '_%s' % quantity 2305 2421 2306 2422 swwfile = basename_in + '.sww' … … 2308 2424 2309 2425 # Read sww file 2310 if verbose: print 'Reading from %s' % swwfile2426 if verbose: print 'Reading from %s' % swwfile 2311 2427 from Scientific.IO.NetCDF import NetCDFFile 2312 2428 fid = NetCDFFile(swwfile) … … 2320 2436 number_of_points = fid.dimensions['number_of_points'] 2321 2437 if origin is None: 2322 2323 2438 # Get geo_reference 2324 2439 # sww files don't have to have a geo_ref … … 2326 2441 geo_reference = Geo_reference(NetCDFObject=fid) 2327 2442 except AttributeError, e: 2328 geo_reference = Geo_reference() # Default georef object2443 geo_reference = Geo_reference() # Default georef object 2329 2444 2330 2445 xllcorner = geo_reference.get_xllcorner() … … 2335 2450 xllcorner = origin[1] 2336 2451 yllcorner = origin[2] 2337 2338 2339 2452 2340 2453 # FIXME: Refactor using code from file_function.statistics … … 2346 2459 print '------------------------------------------------' 2347 2460 print 'Statistics of SWW file:' 2348 print ' Name: %s' % swwfile2461 print ' Name: %s' % swwfile 2349 2462 print ' Reference:' 2350 print ' Lower left corner: [%f, %f]'\ 2351 %(xllcorner, yllcorner) 2352 print ' Start time: %f' %fid.starttime[0] 2463 print ' Lower left corner: [%f, %f]' % (xllcorner, yllcorner) 2464 print ' Start time: %f' % fid.starttime[0] 2353 2465 print ' Extent:' 2354 print ' x [m] in [%f, %f], len(x) == %d' \2355 % (min(x.flat), max(x.flat), len(x.flat))2356 print ' y [m] in [%f, %f], len(y) == %d' \2357 % (min(y.flat), max(y.flat), len(y.flat))2358 print ' t [s] in [%f, %f], len(t) == %d' \2359 % (min(times), max(times), len(times))2466 print ' x [m] in [%f, %f], len(x) == %d' \ 2467 % (min(x.flat), max(x.flat), len(x.flat)) 2468 print ' y [m] in [%f, %f], len(y) == %d' \ 2469 % (min(y.flat), max(y.flat), len(y.flat)) 2470 print ' t [s] in [%f, %f], len(t) == %d' \ 2471 % (min(times), max(times), len(times)) 2360 2472 print ' Quantities [SI units]:' 2361 2473 for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']: 2362 2474 q = fid.variables[name][:].flat 2363 print ' %s in [%f, %f]' %(name, min(q), max(q)) 2364 2365 2475 print ' %s in [%f, %f]' % (name, min(q), max(q)) 2366 2476 2367 2477 # Get quantity and reduce if applicable 2368 if verbose: print 'Processing quantity %s' % quantity2478 if verbose: print 'Processing quantity %s' % quantity 2369 2479 2370 2480 # Turn NetCDF objects into Numeric arrays … … 2373 2483 quantity_dict[name] = fid.variables[name][:] 2374 2484 2375 2376 2377 # Convert quantity expression to quantities found in sww file 2485 # Convert quantity expression to 
quantities found in sww file 2378 2486 q = apply_expression_to_dictionary(quantity, quantity_dict) 2379 2380 2381 2487 2382 2488 if len(q.shape) == 2: 2383 2489 # q has a time component and needs to be reduced along 2384 2490 # the temporal dimension 2385 if verbose: print 'Reducing quantity %s' % quantity2386 q_reduced = zeros( number_of_points, Float ) 2387 2491 if verbose: print 'Reducing quantity %s' % quantity 2492 2493 q_reduced = zeros(number_of_points, Float) 2388 2494 for k in range(number_of_points): 2389 q_reduced[k] = reduction( q[:,k] ) 2390 2495 q_reduced[k] = reduction(q[:,k]) 2391 2496 q = q_reduced 2392 2497 … … 2395 2500 assert q.shape[0] == number_of_points 2396 2501 2397 2398 2502 if verbose: 2399 print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))2400 2503 print 'Processed values for %s are in [%f, %f]' \ 2504 % (quantity, min(q), max(q)) 2401 2505 2402 2506 # Create grid and update xll/yll corner and x,y 2403 vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis =1)2507 vertex_points = concatenate((x[:, NewAxis], y[:, NewAxis]), axis=1) 2404 2508 assert len(vertex_points.shape) == 2 2405 2509 2406 2510 # Interpolate 2407 2511 from anuga.fit_interpolate.interpolate import Interpolate 2408 interp = Interpolate(vertex_points, volumes, verbose =verbose)2512 interp = Interpolate(vertex_points, volumes, verbose=verbose) 2409 2513 2410 2514 # Interpolate using quantity values … … 2412 2516 interpolated_values = interp.interpolate(q, data_points).flat 2413 2517 2414 2415 2518 if verbose: 2416 print 'Interpolated values are in [%f, %f]' %(min(interpolated_values), 2417 max(interpolated_values)) 2418 2419 # Assign NODATA_value to all points outside bounding polygon (from interpolation mesh) 2519 print 'Interpolated values are in [%f, %f]' % (min(interpolated_values), 2520 max(interpolated_values)) 2521 2522 # Assign NODATA_value to all points outside bounding polygon 2523 # (from interpolation mesh) 2420 2524 P = interp.mesh.get_boundary_polygon() 2421 2525 outside_indices = outside_polygon(data_points, P, closed=True) … … 2424 2528 interpolated_values[i] = NODATA_value 2425 2529 2426 2427 # Store results 2428 G = Geospatial_data(data_points=data_points, 2429 attributes=interpolated_values) 2530 # Store results 2531 G = Geospatial_data(data_points=data_points, attributes=interpolated_values) 2430 2532 2431 2533 G.export_points_file(ptsfile, absolute = True) … … 2434 2536 2435 2537 2436 def convert_dem_from_ascii2netcdf(basename_in, basename_out = None, 2437 use_cache = False, 2438 verbose = False): 2439 """Read Digitial Elevation model from the following ASCII format (.asc) 2538 ## 2539 # @brief Convert ASC file to DEM file. 2540 # @param basename_in Stem of input filename. 2541 # @param basename_out Stem of output filename. 2542 # @param use_cache ?? 2543 # @param verbose True if this function is to be verbose. 2544 # @return 2545 # @note A PRJ file with same stem basename must exist and is used to fix the 2546 # UTM zone, datum, false northings and eastings. 2547 def convert_dem_from_ascii2netcdf(basename_in, basename_out=None, 2548 use_cache=False, 2549 verbose=False): 2550 """Read Digital Elevation model from the following ASCII format (.asc) 2440 2551 2441 2552 Example: 2442 2443 2553 ncols 3121 2444 2554 nrows 1800 … … 2451 2561 Convert basename_in + '.asc' to NetCDF format (.dem) 2452 2562 mimicking the ASCII format closely. 
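    A hedged usage sketch (the stem 'topo' is hypothetical; topo.asc and
    topo.prj must both exist):

        convert_dem_from_ascii2netcdf('topo', use_cache=True, verbose=True)
        # writes topo.dem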
2453 2454 2563 2455 2564 An accompanying file with same basename_in but extension .prj must exist … … 2469 2578 """ 2470 2579 2471 2472 2473 2580 kwargs = {'basename_out': basename_out, 'verbose': verbose} 2474 2581 … … 2476 2583 from caching import cache 2477 2584 result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs, 2478 dependencies =[basename_in + '.asc',2479 2480 verbose =verbose)2585 dependencies=[basename_in + '.asc', 2586 basename_in + '.prj'], 2587 verbose=verbose) 2481 2588 2482 2589 else: … … 2486 2593 2487 2594 2488 2489 2490 2491 2595 ## 2596 # @brief Convert an ASC file to a DEM file. 2597 # @param basename_in Stem of input filename. 2598 # @param basename_out Stem of output filename. 2599 # @param verbose True if this function is to be verbose. 2492 2600 def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None, 2493 2601 verbose = False): 2494 """Read Digitial Elevation model from the following ASCII format (.asc) 2495 2496 Internal function. See public function convert_dem_from_ascii2netcdf for details. 2602 """Read Digital Elevation model from the following ASCII format (.asc) 2603 2604 Internal function. See public function convert_dem_from_ascii2netcdf 2605 for details. 2497 2606 """ 2498 2607 … … 2501 2610 from Numeric import Float, array 2502 2611 2503 #root, ext = os.path.splitext(basename_in)2504 2612 root = basename_in 2505 2613 2506 ###########################################2507 2614 # Read Meta data 2508 if verbose: print 'Reading METADATA from %s' %root + '.prj' 2615 if verbose: print 'Reading METADATA from %s' % root + '.prj' 2616 2509 2617 metadatafile = open(root + '.prj') 2510 2618 metalines = metadatafile.readlines() … … 2543 2651 false_northing = float(L[1].strip()) 2544 2652 2545 #print false_easting, false_northing, zone, datum2546 2547 2548 ###########################################2549 2653 #Read DEM data 2550 2551 2654 datafile = open(basename_in + '.asc') 2552 2655 2553 if verbose: print 'Reading DEM from %s' %(basename_in + '.asc') 2656 if verbose: print 'Reading DEM from %s' % basename_in + '.asc' 2657 2554 2658 lines = datafile.readlines() 2555 2659 datafile.close() … … 2561 2665 2562 2666 # Do cellsize (line 4) before line 2 and 3 2563 cellsize = float(lines[4].split()[1].strip()) 2667 cellsize = float(lines[4].split()[1].strip()) 2564 2668 2565 2669 # Checks suggested by Joaquim Luis … … 2572 2676 xllcorner = float(xref[1].strip()) 2573 2677 else: 2574 msg = 'Unknown keyword: %s' % xref[0].strip()2678 msg = 'Unknown keyword: %s' % xref[0].strip() 2575 2679 raise Exception, msg 2576 2680 … … 2581 2685 yllcorner = float(yref[1].strip()) 2582 2686 else: 2583 msg = 'Unknown keyword: %s' % yref[0].strip()2687 msg = 'Unknown keyword: %s' % yref[0].strip() 2584 2688 raise Exception, msg 2585 2586 2689 2587 2690 NODATA_value = int(lines[5].split()[1].strip()) 2588 2691 2589 2692 assert len(lines) == nrows + 6 2590 2591 2592 ##########################################2593 2594 2693 2595 2694 if basename_out == None: … … 2598 2697 netcdfname = basename_out + '.dem' 2599 2698 2600 if verbose: print 'Store to NetCDF file %s' %netcdfname 2699 if verbose: print 'Store to NetCDF file %s' % netcdfname 2700 2601 2701 # NetCDF file definition 2602 2702 fid = NetCDFFile(netcdfname, 'w') … … 2604 2704 #Create new file 2605 2705 fid.institution = 'Geoscience Australia' 2606 fid.description = 'NetCDF DEM format for compact and portable storage ' +\2706 fid.description = 'NetCDF DEM format for compact and portable storage ' \ 2607 2707 'of 
spatial point data' 2608 2708 … … 2621 2721 fid.units = units 2622 2722 2623 2624 2723 # dimension definitions 2625 2724 fid.createDimension('number_of_rows', nrows) … … 2637 2736 for i, line in enumerate(lines[6:]): 2638 2737 fields = line.split() 2639 if verbose and i%((n+10)/10)==0: 2640 print 'Processing row %d of %d' %(i, nrows) 2641 2738 if verbose and i % ((n+10)/10) == 0: 2739 print 'Processing row %d of %d' % (i, nrows) 2642 2740 elevation[i, :] = array([float(x) for x in fields]) 2643 2741 … … 2645 2743 2646 2744 2647 2648 2649 2650 def ferret2sww(basename_in, basename_out = None, 2651 verbose = False, 2652 minlat = None, maxlat = None, 2653 minlon = None, maxlon = None, 2654 mint = None, maxt = None, mean_stage = 0, 2655 origin = None, zscale = 1, 2656 fail_on_NaN = True, 2657 NaN_filler = 0, 2658 elevation = None, 2659 inverted_bathymetry = True 2745 ## 2746 # @brief Convert 'ferret' file to SWW file. 2747 # @param basename_in Stem of input filename. 2748 # @param basename_out Stem of output filename. 2749 # @param verbose True if this function is to be verbose. 2750 # @param minlat 2751 # @param maxlat 2752 # @param minlon 2753 # @param maxlon 2754 # @param mint 2755 # @param maxt 2756 # @param mean_stage 2757 # @param origin 2758 # @param zscale 2759 # @param fail_on_NaN 2760 # @param NaN_filler 2761 # @param elevation 2762 # @param inverted_bathymetry 2763 def ferret2sww(basename_in, basename_out=None, 2764 verbose=False, 2765 minlat=None, maxlat=None, 2766 minlon=None, maxlon=None, 2767 mint=None, maxt=None, mean_stage=0, 2768 origin=None, zscale=1, 2769 fail_on_NaN=True, 2770 NaN_filler=0, 2771 elevation=None, 2772 inverted_bathymetry=True 2660 2773 ): #FIXME: Bathymetry should be obtained 2661 2774 #from MOST somehow. … … 2712 2825 if minlon != None: 2713 2826 assert maxlon > minlon 2714 2715 2716 2717 #Get NetCDF data 2718 if verbose: print 'Reading files %s_*.nc' %basename_in 2719 #print "basename_in + '_ha.nc'",basename_in + '_ha.nc' 2720 file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm) 2721 file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s) 2722 file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s) 2723 file_e = NetCDFFile(basename_in + '_e.nc', 'r') #Elevation (z) (m) 2827 2828 # Get NetCDF data 2829 if verbose: print 'Reading files %s_*.nc' % basename_in 2830 2831 file_h = NetCDFFile(basename_in + '_ha.nc', 'r') # Wave amplitude (cm) 2832 file_u = NetCDFFile(basename_in + '_ua.nc', 'r') # Velocity (x) (cm/s) 2833 file_v = NetCDFFile(basename_in + '_va.nc', 'r') # Velocity (y) (cm/s) 2834 file_e = NetCDFFile(basename_in + '_e.nc', 'r') # Elevation (z) (m) 2724 2835 2725 2836 if basename_out is None: … … 2736 2847 if dimension[:4] == 'TIME': 2737 2848 dim_h_time = dimension 2738 2739 # print 'long:', dim_h_longitude2740 # print 'lats:', dim_h_latitude2741 # print 'times:', dim_h_time2742 2849 2743 2850 times = file_h.variables[dim_h_time] … … 2764 2871 if dimension[:4] == 'TIME': 2765 2872 dim_u_time = dimension 2766 2873 2767 2874 # get dimensions for file_v 2768 2875 for dimension in file_v.dimensions.keys(): … … 2774 2881 dim_v_time = dimension 2775 2882 2776 2777 2883 # Precision used by most for lat/lon is 4 or 5 decimals 2778 2884 e_lat = around(file_e.variables[dim_e_latitude][:], 5) … … 2790 2896 if mint is None: 2791 2897 jmin = 0 2792 mint = times[0] 2898 mint = times[0] 2793 2899 else: 2794 2900 jmin = searchsorted(times, mint) 2795 2901 2796 2902 if maxt is None: 2797 2903 jmax = len(times) … … 2800 
2906 jmax = searchsorted(times, maxt) 2801 2907 2802 #print "latitudes[:]",latitudes[:]2803 #print "longitudes[:]",longitudes [:]2804 2908 kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:], 2805 2909 longitudes[:], … … 2812 2916 longitudes = longitudes[lmin:lmax] 2813 2917 2814 #print "latitudes[:]",latitudes[:]2815 #print "longitudes[:]",longitudes [:]2816 2817 2918 if verbose: print 'cropping' 2919 2818 2920 zname = 'ELEVATION' 2819 2921 … … 2837 2939 # 'print hmmm' 2838 2940 2839 2840 2841 2941 #Get missing values 2842 2942 nan_ha = file_h.variables['HA'].missing_value[0] … … 2854 2954 if sometrue (missing): 2855 2955 if fail_on_NaN: 2856 msg = 'NetCDFFile %s contains missing values' \2857 % (basename_in+'_ha.nc')2956 msg = 'NetCDFFile %s contains missing values' \ 2957 % basename_in + '_ha.nc' 2858 2958 raise DataMissingValuesError, msg 2859 2959 else: … … 2863 2963 if sometrue (missing): 2864 2964 if fail_on_NaN: 2865 msg = 'NetCDFFile %s contains missing values' \2866 % (basename_in+'_ua.nc')2965 msg = 'NetCDFFile %s contains missing values' \ 2966 % basename_in + '_ua.nc' 2867 2967 raise DataMissingValuesError, msg 2868 2968 else: … … 2872 2972 if sometrue (missing): 2873 2973 if fail_on_NaN: 2874 msg = 'NetCDFFile %s contains missing values' \2875 % (basename_in+'_va.nc')2974 msg = 'NetCDFFile %s contains missing values' \ 2975 % basename_in + '_va.nc' 2876 2976 raise DataMissingValuesError, msg 2877 2977 else: 2878 2978 vspeed = vspeed*(missing==0) + missing*NaN_filler 2879 2979 2880 2881 2980 missing = (elevations == nan_e) 2882 2981 if sometrue (missing): 2883 2982 if fail_on_NaN: 2884 msg = 'NetCDFFile %s contains missing values' \2885 % (basename_in+'_e.nc')2983 msg = 'NetCDFFile %s contains missing values' \ 2984 % basename_in + '_e.nc' 2886 2985 raise DataMissingValuesError, msg 2887 2986 else: 2888 2987 elevations = elevations*(missing==0) + missing*NaN_filler 2889 2890 #######2891 2892 2893 2988 2894 2989 number_of_times = times.shape[0] … … 2904 2999 print 'Statistics:' 2905 3000 print ' Extent (lat/lon):' 2906 print ' lat in [%f, %f], len(lat) == %d'\ 2907 %(min(latitudes.flat), max(latitudes.flat), 2908 len(latitudes.flat)) 2909 print ' lon in [%f, %f], len(lon) == %d'\ 2910 %(min(longitudes.flat), max(longitudes.flat), 2911 len(longitudes.flat)) 2912 print ' t in [%f, %f], len(t) == %d'\ 2913 %(min(times.flat), max(times.flat), len(times.flat)) 3001 print ' lat in [%f, %f], len(lat) == %d' \ 3002 % (min(latitudes.flat), max(latitudes.flat), len(latitudes.flat)) 3003 print ' lon in [%f, %f], len(lon) == %d' \ 3004 % (min(longitudes.flat), max(longitudes.flat), 3005 len(longitudes.flat)) 3006 print ' t in [%f, %f], len(t) == %d' \ 3007 % (min(times.flat), max(times.flat), len(times.flat)) 2914 3008 2915 3009 q = amplitudes.flat 2916 3010 name = 'Amplitudes (ha) [cm]' 2917 print ' %s in [%f, %f]' % (name, min(q), max(q))3011 print ' %s in [%f, %f]' % (name, min(q), max(q)) 2918 3012 2919 3013 q = uspeed.flat 2920 3014 name = 'Speeds (ua) [cm/s]' 2921 print ' %s in [%f, %f]' % (name, min(q), max(q))3015 print ' %s in [%f, %f]' % (name, min(q), max(q)) 2922 3016 2923 3017 q = vspeed.flat 2924 3018 name = 'Speeds (va) [cm/s]' 2925 print ' %s in [%f, %f]' % (name, min(q), max(q))3019 print ' %s in [%f, %f]' % (name, min(q), max(q)) 2926 3020 2927 3021 q = elevations.flat 2928 3022 name = 'Elevations (e) [m]' 2929 print ' %s in [%f, %f]' %(name, min(q), max(q)) 2930 3023 print ' %s in [%f, %f]' % (name, min(q), max(q)) 2931 3024 2932 3025 # print number_of_latitudes, 
number_of_longitudes 2933 number_of_points = number_of_latitudes*number_of_longitudes 2934 number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2 2935 3026 number_of_points = number_of_latitudes * number_of_longitudes 3027 number_of_volumes = (number_of_latitudes-1) * (number_of_longitudes-1) * 2 2936 3028 2937 3029 file_h.close() … … 2940 3032 file_e.close() 2941 3033 2942 2943 3034 # NetCDF file definition 2944 3035 outfile = NetCDFFile(swwname, 'w') 2945 3036 2946 description = 'Converted from Ferret files: %s, %s, %s, %s' \2947 % (basename_in + '_ha.nc',2948 basename_in + '_ua.nc',2949 basename_in + '_va.nc',2950 basename_in + '_e.nc')2951 3037 description = 'Converted from Ferret files: %s, %s, %s, %s' \ 3038 % (basename_in + '_ha.nc', 3039 basename_in + '_ua.nc', 3040 basename_in + '_va.nc', 3041 basename_in + '_e.nc') 3042 2952 3043 # Create new file 2953 3044 starttime = times[0] 2954 3045 2955 3046 sww = Write_sww() 2956 3047 sww.store_header(outfile, times, number_of_volumes, 2957 3048 number_of_points, description=description, 2958 verbose=verbose, sww_precision=Float)3049 verbose=verbose, sww_precision=Float) 2959 3050 2960 3051 # Store … … 2963 3054 y = zeros(number_of_points, Float) #Northing 2964 3055 2965 2966 3056 if verbose: print 'Making triangular grid' 2967 3057 2968 3058 # Check zone boundaries 2969 refzone, _, _ = redfearn(latitudes[0], longitudes[0])3059 refzone, _, _ = redfearn(latitudes[0], longitudes[0]) 2970 3060 2971 3061 vertices = {} 2972 3062 i = 0 2973 for k, lat in enumerate(latitudes): #Y direction 2974 for l, lon in enumerate(longitudes): #X direction 2975 3063 for k, lat in enumerate(latitudes): # Y direction 3064 for l, lon in enumerate(longitudes): # X direction 2976 3065 vertices[l,k] = i 2977 3066 2978 3067 zone, easting, northing = redfearn(lat,lon) 2979 3068 2980 msg = 'Zone boundary crossed at longitude =', lon3069 #msg = 'Zone boundary crossed at longitude =', lon 2981 3070 #assert zone == refzone, msg 2982 3071 #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing) … … 2987 3076 #Construct 2 triangles per 'rectangular' element 2988 3077 volumes = [] 2989 for l in range(number_of_longitudes-1): # X direction2990 for k in range(number_of_latitudes-1): # Y direction3078 for l in range(number_of_longitudes-1): # X direction 3079 for k in range(number_of_latitudes-1): # Y direction 2991 3080 v1 = vertices[l,k+1] 2992 3081 v2 = vertices[l,k] … … 3000 3089 3001 3090 if origin is None: 3002 origin = Geo_reference(refzone, min(x),min(y))3091 origin = Geo_reference(refzone, min(x), min(y)) 3003 3092 geo_ref = write_NetCDF_georeference(origin, outfile) 3004 3093 3005 3094 if elevation is not None: 3006 3095 z = elevation 3007 3096 else: 3008 3097 if inverted_bathymetry: 3009 z = -1 *elevations3098 z = -1 * elevations 3010 3099 else: 3011 3100 z = elevations … … 3014 3103 #FIXME use the Write_sww instance(sww) to write this info 3015 3104 from Numeric import resize 3016 z = resize(z, outfile.variables['z'][:].shape)3105 z = resize(z, outfile.variables['z'][:].shape) 3017 3106 outfile.variables['x'][:] = x - geo_ref.get_xllcorner() 3018 3107 outfile.variables['y'][:] = y - geo_ref.get_yllcorner() … … 3021 3110 outfile.variables['volumes'][:] = volumes.astype(Int32) #For Opteron 64 3022 3111 3023 3024 3025 3112 #Time stepping 3026 3113 stage = outfile.variables['stage'] … … 3029 3116 3030 3117 if verbose: print 'Converting quantities' 3118 3031 3119 n = len(times) 3032 3120 for j in range(n): 3033 if verbose and j%((n+10)/10)==0: 
print ' Doing %d of %d' %(j, n) 3121 if verbose and j % ((n+10)/10) == 0: print ' Doing %d of %d' %(j, n) 3122 3034 3123 i = 0 3035 for k in range(number_of_latitudes): # Y direction3036 for l in range(number_of_longitudes): # X direction3037 w = zscale *amplitudes[j,k,l]/100 + mean_stage3124 for k in range(number_of_latitudes): # Y direction 3125 for l in range(number_of_longitudes): # X direction 3126 w = zscale * amplitudes[j,k,l] / 100 + mean_stage 3038 3127 stage[j,i] = w 3039 3128 h = w - z[i] … … 3053 3142 print ' Name: %s' %swwname 3054 3143 print ' Reference:' 3055 print ' Lower left corner: [%f, %f]' \3056 % (geo_ref.get_xllcorner(), geo_ref.get_yllcorner())3144 print ' Lower left corner: [%f, %f]' \ 3145 % (geo_ref.get_xllcorner(), geo_ref.get_yllcorner()) 3057 3146 print ' Start time: %f' %starttime 3058 3147 print ' Min time: %f' %mint 3059 3148 print ' Max time: %f' %maxt 3060 3149 print ' Extent:' 3061 print ' x [m] in [%f, %f], len(x) == %d' \3062 % (min(x.flat), max(x.flat), len(x.flat))3063 print ' y [m] in [%f, %f], len(y) == %d' \3064 % (min(y.flat), max(y.flat), len(y.flat))3065 print ' t [s] in [%f, %f], len(t) == %d' \3066 % (min(times), max(times), len(times))3150 print ' x [m] in [%f, %f], len(x) == %d' \ 3151 % (min(x.flat), max(x.flat), len(x.flat)) 3152 print ' y [m] in [%f, %f], len(y) == %d' \ 3153 % (min(y.flat), max(y.flat), len(y.flat)) 3154 print ' t [s] in [%f, %f], len(t) == %d' \ 3155 % (min(times), max(times), len(times)) 3067 3156 print ' Quantities [SI units]:' 3068 3157 for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']: 3069 3158 q = outfile.variables[name][:].flat 3070 print ' %s in [%f, %f]' %(name, min(q), max(q)) 3071 3072 3159 print ' %s in [%f, %f]' % (name, min(q), max(q)) 3073 3160 3074 3161 outfile.close() 3075 3162 3076 3163 3077 3078 3079 3164 ## 3165 # @brief Convert time-series text file to TMS file. 3166 # @param filename 3167 # @param quantity_names 3168 # @param time_as_seconds 3080 3169 def timefile2netcdf(filename, quantity_names=None, time_as_seconds=False): 3081 3170 """Template for converting typical text files with time series to 3082 3171 NetCDF tms file. 3083 3172 3084 3085 3173 The file format is assumed to be either two fields separated by a comma: 3086 3174 … … 3093 3181 3094 3182 or time (seconds), value0 value1 value2 ... 3095 3183 3096 3184 0.0, 1.328223 0 0 3097 3185 0.1, 1.292912 0 0 … … 3107 3195 from anuga.utilities.numerical_tools import ensure_numeric 3108 3196 3109 3110 3111 fid = open(filename + '.txt') 3197 file_text = filename + '.txt' 3198 fid = open(file_text) 3112 3199 line = fid.readline() 3113 3200 fid.close() 3114 3201 3115 3202 fields = line.split(',') 3116 msg = 'File %s must have the format date, value0 value1 value2 ...' 3203 msg = "File %s must have the format 'datetime, value0 value1 value2 ...'" \ 3204 % file_text 3117 3205 assert len(fields) == 2, msg 3118 3206 … … 3121 3209 starttime = calendar.timegm(time.strptime(fields[0], time_format)) 3122 3210 except ValueError: 3123 msg = 'First field in file %s must be' % filename3124 msg += ' date-time with format %s.\n' % time_format3125 msg += 'I got %s instead.' % fields[0]3211 msg = 'First field in file %s must be' % file_text 3212 msg += ' date-time with format %s.\n' % time_format 3213 msg += 'I got %s instead.' 
% fields[0]
3126 3214     raise DataTimeError, msg
3127 3215 else:
…
3132 3220     raise DataTimeError, msg
3133 3221
3134
3135 #Split values
3222     # Split values
3136 3223     values = []
3137 3224     for value in fields[1].split():
…
3143 3230     assert len(q.shape) == 1, msg
3144 3231
3145
3146
3147 #Read times proper
3232     # Read times proper
3148 3233     from Numeric import zeros, Float, alltrue
3149 3234     from anuga.config import time_format
3150 3235     import time, calendar
3151 3236
3152     fid = open(filename + '.txt')
3237     fid = open(file_text)
3153 3238     lines = fid.readlines()
3154 3239     fid.close()
…
3157 3242     d = len(q)
3158 3243
3159     T = zeros(N, Float)       # Time
3160     Q = zeros((N, d), Float)  # Values
3244     T = zeros(N, Float)       # Time
3245     Q = zeros((N, d), Float)  # Values
3161 3246
3162 3247     for i, line in enumerate(lines):
…
3171 3256             Q[i, j] = float(value)
3172 3257
3173     msg = 'File %s must list time as a monotonuosly ' % filename
3258     msg = 'File %s must list time as a monotonically ' % filename
3174 3259     msg += 'increasing sequence'
3175     assert alltrue( T[1:] - T[:-1] > 0), msg
3260     assert alltrue(T[1:] - T[:-1] > 0), msg
3176 3261
3177 3262     #Create NetCDF file
…
3180 3265     fid = NetCDFFile(filename + '.tms', 'w')
3181 3266
3182
3183 3267     fid.institution = 'Geoscience Australia'
3184 3268     fid.description = 'Time series'
3185
3186 3269
3187 3270     #Reference point
…
3194 3277     #fid.createDimension('number_of_vertices', 3)
3195 3278
3196
3197 3279     fid.createDimension('number_of_timesteps', len(T))
3198 3280
…
3205 3287         name = quantity_names[i]
3206 3288     except:
3207         name = 'Attribute%d' %i
3289         name = 'Attribute%d' % i
3208 3290
3209 3291     fid.createVariable(name, Float, ('number_of_timesteps',))
…
3213 3295
3214 3296
3297 ##
3298 # @brief Get the extents of a NetCDF data file.
3299 # @param file_name The path to the SWW file.
3300 # @return A list of x, y, z and stage limits (min, max).
3215 3301 def extent_sww(file_name):
3216     """
3217     Read in an sww file.
3218
3219     Input;
3302     """Read in an sww file.
3303
3304     Input:
3220 3305     file_name - the sww file
3221 3306
3222     Output;
3223     z - Vector of bed elevation
3224     volumes - Array.
Each row has 3 values, representing 3225 the vertices that define the volume 3226 time - Vector of the times where there is stage information 3227 stage - array with respect to time and vertices (x,y) 3228 """ 3229 3307 Output: 3308 A list: [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)] 3309 """ 3230 3310 3231 3311 from Scientific.IO.NetCDF import NetCDFFile 3232 3312 3233 #Check contents3234 3313 #Get NetCDF 3235 3314 fid = NetCDFFile(file_name, 'r') … … 3239 3318 y = fid.variables['y'][:] 3240 3319 stage = fid.variables['stage'][:] 3241 #print "stage",stage3242 #print "stage.shap",stage.shape3243 #print "min(stage.flat), mpythonax(stage.flat)",min(stage.flat), max(stage.flat)3244 #print "min(stage)",min(stage)3245 3320 3246 3321 fid.close() 3247 3322 3248 return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)] 3249 3250 3323 return [min(x), max(x), min(y), max(y), min(stage.flat), max(stage.flat)] 3324 3325 3326 ## 3327 # @brief 3328 # @param filename 3329 # @param boundary 3330 # @param t 3331 # @param fail_if_NaN 3332 # @param NaN_filler 3333 # @param verbose 3334 # @param very_verbose 3335 # @return 3251 3336 def sww2domain(filename, boundary=None, t=None, 3252 fail_if_NaN=True ,NaN_filler=0,3253 verbose = False, very_verbose =False):3337 fail_if_NaN=True, NaN_filler=0, 3338 verbose=False, very_verbose=False): 3254 3339 """ 3255 3340 Usage: domain = sww2domain('file.sww',t=time (default = last time in file)) … … 3260 3345 give a different final boundary, or crash. 3261 3346 """ 3262 NaN=9.969209968386869e+0363263 #initialise NaN.3264 3347 3265 3348 from Scientific.IO.NetCDF import NetCDFFile … … 3267 3350 from Numeric import asarray, transpose, resize 3268 3351 3352 # initialise NaN. 3353 NaN = 9.969209968386869e+036 3354 3269 3355 if verbose: print 'Reading from ', filename 3270 fid = NetCDFFile(filename, 'r') #Open existing file for read 3271 time = fid.variables['time'] #Timesteps 3356 3357 fid = NetCDFFile(filename, 'r') # Open existing file for read 3358 time = fid.variables['time'] # Timesteps 3272 3359 if t is None: 3273 3360 t = time[-1] … … 3275 3362 3276 3363 # Get the variables as Numeric arrays 3277 x = fid.variables['x'][:] #x-coordinates of vertices3278 y = fid.variables['y'][:] #y-coordinates of vertices3279 elevation = fid.variables['elevation'] #Elevation3280 stage = fid.variables['stage'] #Water level3281 xmomentum = fid.variables['xmomentum'] #Momentum in the x-direction3282 ymomentum = fid.variables['ymomentum'] #Momentum in the y-direction3364 x = fid.variables['x'][:] # x-coordinates of vertices 3365 y = fid.variables['y'][:] # y-coordinates of vertices 3366 elevation = fid.variables['elevation'] # Elevation 3367 stage = fid.variables['stage'] # Water level 3368 xmomentum = fid.variables['xmomentum'] # Momentum in the x-direction 3369 ymomentum = fid.variables['ymomentum'] # Momentum in the y-direction 3283 3370 3284 3371 starttime = fid.starttime[0] 3285 volumes = fid.variables['volumes'][:] #Connectivity 3286 coordinates = transpose(asarray([x.tolist(),y.tolist()])) 3287 #FIXME (Ole): Something like this might be better: concatenate( (x, y), axis=1 ) 3288 # or concatenate( (x[:,NewAxis],x[:,NewAxis]), axis=1 ) 3372 volumes = fid.variables['volumes'][:] # Connectivity 3373 coordinates = transpose(asarray([x.tolist(), y.tolist()])) 3374 # FIXME (Ole): Something like this might be better: 3375 # concatenate((x, y), axis=1) 3376 # or concatenate((x[:,NewAxis], x[:,NewAxis]), axis=1) 3289 3377 3290 3378 conserved_quantities = [] … … 3293 
3381 3294 3382 # get geo_reference 3295 #sww files don't have to have a geo_ref 3296 try: 3383 try: # sww files don't have to have a geo_ref 3297 3384 geo_reference = Geo_reference(NetCDFObject=fid) 3298 except: # AttributeError, e:3385 except: # AttributeError, e: 3299 3386 geo_reference = None 3300 3387 3301 3388 if verbose: print ' getting quantities' 3389 3302 3390 for quantity in fid.variables.keys(): 3303 3391 dimensions = fid.variables[quantity].dimensions 3304 3392 if 'number_of_timesteps' in dimensions: 3305 3393 conserved_quantities.append(quantity) 3306 interpolated_quantities[quantity]=\ 3307 interpolated_quantity(fid.variables[quantity][:],time_interp) 3308 else: other_quantities.append(quantity) 3394 interpolated_quantities[quantity] = \ 3395 interpolated_quantity(fid.variables[quantity][:], time_interp) 3396 else: 3397 other_quantities.append(quantity) 3309 3398 3310 3399 other_quantities.remove('x') … … 3319 3408 except: 3320 3409 pass 3321 3322 3410 3323 3411 conserved_quantities.remove('time') 3324 3412 3325 3413 if verbose: print ' building domain' 3414 3326 3415 # From domain.Domain: 3327 3416 # domain = Domain(coordinates, volumes,\ … … 3330 3419 # xllcorner=xllcorner, yllcorner=yllcorner) 3331 3420 3332 # From shallow_water.Domain: 3333 coordinates=coordinates.tolist() 3334 volumes=volumes.tolist() 3335 #FIXME:should this be in mesh?(peter row) 3336 if fid.smoothing == 'Yes': unique = False 3337 else: unique = True 3421 # From shallow_water.Domain: 3422 coordinates = coordinates.tolist() 3423 volumes = volumes.tolist() 3424 # FIXME:should this be in mesh? (peter row) 3425 if fid.smoothing == 'Yes': 3426 unique = False 3427 else: 3428 unique = True 3338 3429 if unique: 3339 coordinates,volumes,boundary=weed(coordinates,volumes,boundary) 3340 3430 coordinates, volumes, boundary = weed(coordinates, volumes,boundary) 3341 3431 3342 3432 try: … … 3344 3434 except AssertionError, e: 3345 3435 fid.close() 3346 msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e 3436 msg = 'Domain could not be created: %s. 
' \ 3437 'Perhaps use "fail_if_NaN=False and NaN_filler = ..."' % e 3347 3438 raise DataDomainError, msg 3348 3439 … … 3352 3443 domain.geo_reference = geo_reference 3353 3444 3354 domain.starttime =float(starttime)+float(t)3355 domain.time =0.03445 domain.starttime = float(starttime) + float(t) 3446 domain.time = 0.0 3356 3447 3357 3448 for quantity in other_quantities: … … 3359 3450 NaN = fid.variables[quantity].missing_value 3360 3451 except: 3361 pass #quantity has no missing_value number3452 pass # quantity has no missing_value number 3362 3453 X = fid.variables[quantity][:] 3363 3454 if very_verbose: 3364 print ' ', quantity3365 print ' NaN =', NaN3455 print ' ', quantity 3456 print ' NaN =', NaN 3366 3457 print ' max(X)' 3367 print ' ', max(X)3458 print ' ', max(X) 3368 3459 print ' max(X)==NaN' 3369 print ' ', max(X)==NaN3460 print ' ', max(X)==NaN 3370 3461 print '' 3371 if (max(X)==NaN) or (min(X)==NaN):3462 if max(X) == NaN or min(X) == NaN: 3372 3463 if fail_if_NaN: 3373 msg = 'quantity "%s" contains no_data entry' %quantity3464 msg = 'quantity "%s" contains no_data entry' % quantity 3374 3465 raise DataMissingValuesError, msg 3375 3466 else: 3376 data = (X <>NaN)3377 X = (X*data) +(data==0)*NaN_filler3467 data = (X != NaN) 3468 X = (X*data) + (data==0)*NaN_filler 3378 3469 if unique: 3379 X = resize(X, (len(X)/3,3))3380 domain.set_quantity(quantity, X)3470 X = resize(X, (len(X)/3, 3)) 3471 domain.set_quantity(quantity, X) 3381 3472 # 3382 3473 for quantity in conserved_quantities: … … 3384 3475 NaN = fid.variables[quantity].missing_value 3385 3476 except: 3386 pass #quantity has no missing_value number3477 pass # quantity has no missing_value number 3387 3478 X = interpolated_quantities[quantity] 3388 3479 if very_verbose: 3389 3480 print ' ',quantity 3390 print ' NaN =', NaN3481 print ' NaN =', NaN 3391 3482 print ' max(X)' 3392 print ' ', max(X)3483 print ' ', max(X) 3393 3484 print ' max(X)==NaN' 3394 print ' ', max(X)==NaN3485 print ' ', max(X)==NaN 3395 3486 print '' 3396 if (max(X)==NaN) or (min(X)==NaN):3487 if max(X) == NaN or min(X) == NaN: 3397 3488 if fail_if_NaN: 3398 msg = 'quantity "%s" contains no_data entry' %quantity3489 msg = 'quantity "%s" contains no_data entry' % quantity 3399 3490 raise DataMissingValuesError, msg 3400 3491 else: 3401 data = (X <>NaN)3402 X = (X*data) +(data==0)*NaN_filler3492 data = (X != NaN) 3493 X = (X*data) + (data==0)*NaN_filler 3403 3494 if unique: 3404 X = resize(X, (X.shape[0]/3,3))3405 domain.set_quantity(quantity, X)3495 X = resize(X, (X.shape[0]/3, 3)) 3496 domain.set_quantity(quantity, X) 3406 3497 3407 3498 fid.close() 3499 3408 3500 return domain 3409 3501 3410 3502 3411 def interpolated_quantity(saved_quantity,time_interp): 3412 3413 #given an index and ratio, interpolate quantity with respect to time. 3414 index,ratio = time_interp 3503 ## 3504 # @brief Interpolate a quantity wrt time. 3505 # @param saved_quantity The quantity to interpolate. 3506 # @param time_interp (index, ratio) 3507 # @return A vector of interpolated values. 
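A small sketch of the (index, ratio) contract shared by the two helpers below
(synthetic numbers; Q stands for a (num_times, num_points) quantity array such
as fid.variables['stage'][:]):

    index, ratio = get_time_interp([0.0, 2.0, 4.0], t=2.5)  # -> (1, 0.25)
    q = interpolated_quantity(Q, (index, ratio))  # 0.75*Q[1] + 0.25*Q[2]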
3508 def interpolated_quantity(saved_quantity, time_interp):
3509     '''Given an index and ratio, interpolate quantity with respect to time.'''
3510
3511     index, ratio = time_interp
3512
3415 3513     Q = saved_quantity
3514
3416 3515     if ratio > 0:
3417         q = (1-ratio)*Q[index] + ratio*Q[index+1]
3516         q = (1-ratio)*Q[index] + ratio*Q[index+1]
3418 3517     else:
3419 3518         q = Q[index]
3519
3420 3520     #Return vector of interpolated values
3421 3521     return q
3422 3522
3423 3523
3424 def get_time_interp(time,t=None):
3524 ##
3525 # @brief Find the index and ratio for time interpolation.
3526 # @param time
3527 # @param t
3528 # @return An (index, ratio) tuple.
3529 def get_time_interp(time, t=None):
3425 3530     #Finds the ratio and index for time interpolation.
3426 3531     #It is borrowed from previous abstract_2d_finite_volumes code.
…
3433 3538         tau = t
3434 3539         index = 0
3435         msg = 'Time interval derived from file %s [%s:%s]' \
3436               %('FIXMEfilename', T[0], T[-1])
3437         msg += ' does not match model time: %s' %tau
3540         msg = 'Time interval derived from file %s [%s:%s]' \
3541               % ('FIXMEfilename', T[0], T[-1])
3542         msg += ' does not match model time: %s' % tau
3438 3543         if tau < time[0]: raise DataTimeError, msg
3439 3544         if tau > time[-1]: raise DataTimeError, msg
…
3447 3552         #t is now between index and index+1
3448 3553         ratio = (tau - time[index])/(time[index+1] - time[index])
3449     return (index,ratio)
3450
3451
3452 def weed(coordinates,volumes,boundary = None):
3453     if type(coordinates)==ArrayType:
3554
3555     return (index, ratio)
3556
3557
3558 ##
3559 # @brief Remove duplicate points and remap volumes and boundary accordingly.
3560 # @param coordinates
3561 # @param volumes
3562 # @param boundary
3563 def weed(coordinates, volumes, boundary=None):
3564     if type(coordinates) == ArrayType:
3454 3565         coordinates = coordinates.tolist()
3455 3566     if type(volumes) == ArrayType:
3456 3567         volumes = volumes.tolist()
3457 3568
…
3463 3574         if point_dict.has_key(point):
3464 3575             unique = True
3465             same_point[i] = point
3576             same_point[i] = point
3466 3577             #to change all point i references to point j
3467 3578         else:
3468             point_dict[point] = i
3469             same_point[i] = point
3579             point_dict[point] = i
3580             same_point[i] = point
3470 3581
3471 3582     coordinates = []
…
3474 3585         point = tuple(point)
3475 3586         coordinates.append(list(point))
3476         point_dict[point]=i
3477         i+=1
3587         point_dict[point] = i
3588         i += 1
3478 3589
3479 3590     for volume in volumes:
3481 3591         for i in range(len(volume)):
3482 3592             index = volume[i]
3483             if index > -1:
3484                 volume[i] = point_dict[same_point[index]]
3593             if index > -1:
3594                 volume[i] = point_dict[same_point[index]]
3485 3595
3486 3596     new_boundary = {}
…
3493 3603             #('exterior, pond') or replaced ('pond')(peter row)
3494 3604
3495             if new_boundary.has_key((point0,point1)):
3496                 new_boundary[(point0,point1)]=new_boundary[(point0,point1)]#\
3497                 #+','+label
3498
3499             elif new_boundary.has_key((point1,point0)):
3500                 new_boundary[(point1,point0)]=new_boundary[(point1,point0)]#\
3501                 #+','+label
3502             else: new_boundary[(point0,point1)]=label
3605             if new_boundary.has_key((point0, point1)):
3606                 new_boundary[(point0,point1)] = new_boundary[(point0, point1)]
3607
3608             elif new_boundary.has_key((point1, point0)):
3609                 new_boundary[(point1,point0)] = new_boundary[(point1, point0)]
3610             else: new_boundary[(point0, point1)] = label
3503 3611
3504 3612     boundary = new_boundary
3505 3613
3506     return coordinates,volumes,boundary
3507
3508 3614     return coordinates, volumes, boundary
3615
3616
3617 ##
3618 # @brief Read DEM file, decimate data, write new DEM file.
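# @note Each output cell appears to be obtained by applying 'stencil' to a
#       cellsize_new/cellsize block of input cells; a block containing one or
#       more NODATA_values yields NODATA_value in the output.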
3619 # @param basename_in Stem of the input filename. 3620 # @param stencil 3621 # @param cellsize_new New cell size to resample on. 3622 # @param basename_out Stem of the output filename. 3623 # @param verbose True if this function is to be verbose. 3509 3624 def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None, 3510 3625 verbose=False): … … 3533 3648 #Open existing netcdf file to read 3534 3649 infile = NetCDFFile(inname, 'r') 3535 if verbose: print 'Reading DEM from %s' %inname 3650 3651 if verbose: print 'Reading DEM from %s' % inname 3536 3652 3537 3653 #Read metadata … … 3557 3673 outname = basename_out + '.dem' 3558 3674 3559 if verbose: print 'Write decimated NetCDF file to %s' % outname3675 if verbose: print 'Write decimated NetCDF file to %s' % outname 3560 3676 3561 3677 #Determine some dimensions for decimated grid … … 3572 3688 #Create new file 3573 3689 outfile.institution = 'Geoscience Australia' 3574 outfile.description = 'NetCDF DEM format for compact and portable storage ' +\ 3575 'of spatial point data' 3690 outfile.description = 'NetCDF DEM format for compact and portable ' \ 3691 'storage of spatial point data' 3692 3576 3693 #Georeferencing 3577 3694 outfile.zone = zone … … 3605 3722 for i in range(nrows_new): 3606 3723 if verbose: print 'Processing row %d of %d' %(i, nrows_new) 3724 3607 3725 lower_index = global_index 3608 telev = 3726 telev = zeros(ncols_new, Float) 3609 3727 local_index = 0 3610 3728 trow = i * cellsize_ratio … … 3612 3730 for j in range(ncols_new): 3613 3731 tcol = j * cellsize_ratio 3614 tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil] 3732 tmp = dem_elevation_r[trow:trow+nrows_stencil, 3733 tcol:tcol+ncols_stencil] 3615 3734 3616 3735 #if dem contains 1 or more NODATA_values set value in … … 3629 3748 elevation[lower_index:upper_index] = telev 3630 3749 3631 assert global_index == nrows_new*ncols_new, 'index not equal to number of points' 3750 assert global_index == nrows_new*ncols_new, \ 3751 'index not equal to number of points' 3632 3752 3633 3753 infile.close() … … 3635 3755 3636 3756 3637 3638 3639 def tsh2sww(filename, verbose=False): 3757 ## 3758 # @brief 3759 # @param filename 3760 # @param verbose 3761 def tsh2sww(filename, verbose=False): 3640 3762 """ 3641 3763 to check if a tsh/msh file 'looks' good. 3642 3764 """ 3643 3765 3644 3645 3766 if verbose == True:print 'Creating domain from', filename 3767 3646 3768 domain = pmesh_to_domain_instance(filename, Domain) 3769 3647 3770 if verbose == True:print "Number of triangles = ", len(domain) 3648 3771 … … 3651 3774 file_path, filename = path.split(filename) 3652 3775 filename, ext = path.splitext(filename) 3653 domain.set_name(filename) 3776 domain.set_name(filename) 3654 3777 domain.reduction = mean 3778 3655 3779 if verbose == True:print "file_path",file_path 3656 if file_path == "":file_path = "." 3780 3781 if file_path == "": 3782 file_path = "." 3657 3783 domain.set_datadir(file_path) 3658 3784 … … 3660 3786 print "Output written to " + domain.get_datadir() + sep + \ 3661 3787 domain.get_name() + "." + domain.format 3788 3662 3789 sww = get_dataobject(domain) 3663 3790 sww.store_connectivity() … … 3665 3792 3666 3793 3794 ## 3795 # @brief Convert CSIRO ESRI file to an SWW boundary file. 
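# @note bath_dir is expected to hold a single bathymetry grid, while
#       elevation_dir, ucur_dir and vcur_dir hold one ASC grid per
#       timestep, ordered by filename.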
3796 # @param bath_dir 3797 # @param elevation_dir 3798 # @param ucur_dir 3799 # @param vcur_dir 3800 # @param sww_file 3801 # @param minlat 3802 # @param maxlat 3803 # @param minlon 3804 # @param maxlon 3805 # @param zscale 3806 # @param mean_stage 3807 # @param fail_on_NaN 3808 # @param elevation_NaN_filler 3809 # @param bath_prefix 3810 # @param elevation_prefix 3811 # @param verbose 3812 # @note Also convert latitude and longitude to UTM. All coordinates are 3813 # assumed to be given in the GDA94 datum. 3667 3814 def asc_csiro2sww(bath_dir, 3668 3815 elevation_dir, … … 3670 3817 vcur_dir, 3671 3818 sww_file, 3672 minlat = None, maxlat =None,3673 minlon = None, maxlon =None,3819 minlat=None, maxlat=None, 3820 minlon=None, maxlon=None, 3674 3821 zscale=1, 3675 mean_stage =0,3676 fail_on_NaN =True,3677 elevation_NaN_filler =0,3822 mean_stage=0, 3823 fail_on_NaN=True, 3824 elevation_NaN_filler=0, 3678 3825 bath_prefix='ba', 3679 3826 elevation_prefix='el', … … 3700 3847 The time period is less than 24hrs and uniform. 3701 3848 """ 3849 3702 3850 from Scientific.IO.NetCDF import NetCDFFile 3703 3851 … … 3708 3856 # go in to the bath dir and load the only file, 3709 3857 bath_files = os.listdir(bath_dir) 3710 3711 3858 bath_file = bath_files[0] 3712 3859 bath_dir_file = bath_dir + os.sep + bath_file 3713 bath_metadata, bath_grid = _read_asc(bath_dir_file)3860 bath_metadata, bath_grid = _read_asc(bath_dir_file) 3714 3861 3715 3862 #Use the date.time of the bath file as a basis for … … 3725 3872 vcur_files = os.listdir(vcur_dir) 3726 3873 elevation_files.sort() 3874 3727 3875 # the first elevation file should be the 3728 3876 # file with the same base name as the bath data … … 3731 3879 number_of_latitudes = bath_grid.shape[0] 3732 3880 number_of_longitudes = bath_grid.shape[1] 3733 number_of_volumes = (number_of_latitudes-1) *(number_of_longitudes-1)*23734 3735 longitudes = [bath_metadata['xllcorner'] +x*bath_metadata['cellsize'] \3881 number_of_volumes = (number_of_latitudes-1) * (number_of_longitudes-1) * 2 3882 3883 longitudes = [bath_metadata['xllcorner'] + x*bath_metadata['cellsize'] \ 3736 3884 for x in range(number_of_longitudes)] 3737 latitudes = [bath_metadata['yllcorner'] +y*bath_metadata['cellsize'] \3885 latitudes = [bath_metadata['yllcorner'] + y*bath_metadata['cellsize'] \ 3738 3886 for y in range(number_of_latitudes)] 3739 3887 3740 # reverse order of lat, so the fi st lat represents the first grid row3888 # reverse order of lat, so the first lat represents the first grid row 3741 3889 latitudes.reverse() 3742 3890 3743 3891 kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],longitudes[:], 3744 minlat=minlat, maxlat=maxlat, 3745 minlon=minlon, maxlon=maxlon) 3746 3892 minlat=minlat, maxlat=maxlat, 3893 minlon=minlon, maxlon=maxlon) 3747 3894 3748 3895 bath_grid = bath_grid[kmin:kmax,lmin:lmax] … … 3752 3899 number_of_longitudes = len(longitudes) 3753 3900 number_of_times = len(os.listdir(elevation_dir)) 3754 number_of_points = number_of_latitudes *number_of_longitudes3755 number_of_volumes = (number_of_latitudes-1) *(number_of_longitudes-1)*23901 number_of_points = number_of_latitudes * number_of_longitudes 3902 number_of_volumes = (number_of_latitudes-1) * (number_of_longitudes-1) * 2 3756 3903 3757 3904 #Work out the times 3758 3905 if len(elevation_files) > 1: 3759 3906 # Assume: The time period is less than 24hrs. 
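        # The trailing three characters of each elevation filename are read as
        # an hour index, so hypothetical files 'el_010' and 'el_011' would be
        # one hour apart, giving time_period = 1*60*60 = 3600 seconds.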
3760 time_period = (int(elevation_files[1][-3:]) -\3761 int(elevation_files[0][-3:]))*60*603907 time_period = (int(elevation_files[1][-3:]) \ 3908 - int(elevation_files[0][-3:])) * 60*60 3762 3909 times = [x*time_period for x in range(len(elevation_files))] 3763 3910 else: 3764 3911 times = [0.0] 3765 3766 3912 3767 3913 if verbose: … … 3769 3915 print 'Statistics:' 3770 3916 print ' Extent (lat/lon):' 3771 print ' lat in [%f, %f], len(lat) == %d'\ 3772 %(min(latitudes), max(latitudes), 3773 len(latitudes)) 3774 print ' lon in [%f, %f], len(lon) == %d'\ 3775 %(min(longitudes), max(longitudes), 3776 len(longitudes)) 3777 print ' t in [%f, %f], len(t) == %d'\ 3778 %(min(times), max(times), len(times)) 3917 print ' lat in [%f, %f], len(lat) == %d' \ 3918 % (min(latitudes), max(latitudes), len(latitudes)) 3919 print ' lon in [%f, %f], len(lon) == %d' \ 3920 % (min(longitudes), max(longitudes), len(longitudes)) 3921 print ' t in [%f, %f], len(t) == %d' \ 3922 % (min(times), max(times), len(times)) 3779 3923 3780 3924 ######### WRITE THE SWW FILE ############# 3925 3781 3926 # NetCDF file definition 3782 3927 outfile = NetCDFFile(sww_file, 'w') … … 3786 3931 outfile.description = 'Converted from XXX' 3787 3932 3788 3789 3933 #For sww compatibility 3790 3934 outfile.smoothing = 'Yes' … … 3794 3938 outfile.starttime = starttime = times[0] 3795 3939 3796 3797 3940 # dimension definitions 3798 3941 outfile.createDimension('number_of_volumes', number_of_volumes) 3799 3800 3942 outfile.createDimension('number_of_vertices', 3) 3801 3943 outfile.createDimension('number_of_points', number_of_points) … … 3814 3956 'number_of_vertices')) 3815 3957 3816 outfile.createVariable('time', precision, 3817 ('number_of_timesteps',)) 3818 3819 outfile.createVariable('stage', precision, 3820 ('number_of_timesteps', 3821 'number_of_points')) 3822 3823 outfile.createVariable('xmomentum', precision, 3824 ('number_of_timesteps', 3825 'number_of_points')) 3826 3827 outfile.createVariable('ymomentum', precision, 3828 ('number_of_timesteps', 3829 'number_of_points')) 3958 outfile.createVariable('time', precision, ('number_of_timesteps',)) 3959 3960 outfile.createVariable('stage', precision, ('number_of_timesteps', 3961 'number_of_points')) 3962 3963 outfile.createVariable('xmomentum', precision, ('number_of_timesteps', 3964 'number_of_points')) 3965 3966 outfile.createVariable('ymomentum', precision, ('number_of_timesteps', 3967 'number_of_points')) 3830 3968 3831 3969 #Store 3832 3970 from anuga.coordinate_transforms.redfearn import redfearn 3971 3833 3972 x = zeros(number_of_points, Float) #Easting 3834 3973 y = zeros(number_of_points, Float) #Northing 3835 3974 3836 3975 if verbose: print 'Making triangular grid' 3976 3837 3977 #Get zone of 1st point. 
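    # redfearn() maps (lat, lon) to UTM (zone, easting, northing); only the
    # zone of the first grid point is kept here as the reference zone.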
3838 refzone, _, _ = redfearn(latitudes[0], longitudes[0])
3978 refzone, _, _ = redfearn(latitudes[0], longitudes[0])
3839 3979 
3840 3980 vertices = {}
…
3842 3982 for k, lat in enumerate(latitudes):
3843 3983 for l, lon in enumerate(longitudes):
3844 
3845 3984 vertices[l,k] = i
3846 3985 
3847 zone, easting, northing = redfearn(lat, lon)
3848 
3849 msg = 'Zone boundary crossed at longitude =', lon
3986 zone, easting, northing = redfearn(lat, lon)
3987 
3988 #msg = 'Zone boundary crossed at longitude =', lon
3850 3989 #assert zone == refzone, msg
3851 3990 #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
…
3853 3992 y[i] = northing
3854 3993 i += 1
3855 
3856 3994 
3857 3995 #Construct 2 triangles per 'rectangular' element
…
3871 4009 volumes = array(volumes)
3872 4010 
3873 geo_ref = Geo_reference(refzone, min(x),min(y))
4011 geo_ref = Geo_reference(refzone, min(x), min(y))
3874 4012 geo_ref.write_NetCDF(outfile)
3875 4013 
3876 4014 # This will put the geo ref in the middle
3877 #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
3878 
4015 #geo_ref = Geo_reference(refzone, (max(x)+min(x))/2., (max(x)+min(y))/2.)
3879 4016 
3880 4017 if verbose:
…
3882 4019 print 'More Statistics:'
3883 4020 print ' Extent (/lon):'
3884 print ' x in [%f, %f], len(lat) == %d'\
3885 %(min(x), max(x),
3886 len(x))
3887 print ' y in [%f, %f], len(lon) == %d'\
3888 %(min(y), max(y),
3889 len(y))
3890 print 'geo_ref: ',geo_ref
4021 print ' x in [%f, %f], len(lat) == %d' \
4022 % (min(x), max(x), len(x))
4023 print ' y in [%f, %f], len(lon) == %d' \
4024 % (min(y), max(y), len(y))
4025 print 'geo_ref: ', geo_ref
3891 4026 
3892 4027 z = resize(bath_grid,outfile.variables['z'][:].shape)
3893 4028 outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
3894 4029 outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
3895 outfile.variables['z'][:] = z # FIXME (Ole): Remove once viewer has been recompiled and changed to use elevation instead of z
3896 outfile.variables['elevation'][:] = z
4030 # FIXME (Ole): Remove once viewer has been recompiled and changed
4031 # to use elevation instead of z
4032 outfile.variables['z'][:] = z
4033 outfile.variables['elevation'][:] = z
3897 4034 outfile.variables['volumes'][:] = volumes.astype(Int32) # On Opteron 64
3898 4035 
…
3904 4041 
3905 4042 if verbose: print 'Converting quantities'
4043 
3906 4044 n = number_of_times
3907 4045 for j in range(number_of_times):
3908 4046 # load in files
3909 4047 elevation_meta, elevation_grid = \
3910 _read_asc(elevation_dir + os.sep + elevation_files[j])
4048 _read_asc(elevation_dir + os.sep + elevation_files[j])
3911 4049 
3912 _, u_momentum_grid = _read_asc(ucur_dir + os.sep + ucur_files[j])
3913 _, v_momentum_grid = _read_asc(vcur_dir + os.sep + vcur_files[j])
4050 _, u_momentum_grid = _read_asc(ucur_dir + os.sep + ucur_files[j])
4051 _, v_momentum_grid = _read_asc(vcur_dir + os.sep + vcur_files[j])
3914 4052 
3915 4053 #cut matrix to desired size
…
3917 4055 u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
3918 4056 v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
3919 4057 
3920 4058 # handle missing values
3921 4059 missing = (elevation_grid == elevation_meta['NODATA_value'])
3922 4060 if sometrue (missing):
3923 4061 if fail_on_NaN:
3924 msg = 'File %s contains missing values' \
3925 % (elevation_files[j])
4062 msg = 'File %s contains missing values' \
4063 % (elevation_files[j])
3926 4064 raise DataMissingValuesError, msg
3927 4065 else:
3928 elevation_grid = elevation_grid*(missing==0) +\
3929 missing*elevation_NaN_filler
3930 
3931 
3932 if verbose and j%((n+10)/10)==0: print ' Doing %d of %d' %(j, n)
4066 elevation_grid = elevation_grid*(missing==0) \
4067 + missing*elevation_NaN_filler
4068 
4069 if verbose and j % ((n+10)/10) == 0: print ' Doing %d of %d' % (j, n)
4070 
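Editor's note: the masking idiom in the hunk just above fills NODATA cells without a loop: multiplying by (missing==0) zeroes the bad cells, and missing*filler adds the replacement value. A standalone check of the arithmetic with plain Python lists (made-up values; the real code operates on Numeric arrays):

    NODATA = -9999
    filler = 0.5
    grid = [3.0, NODATA, 7.0]

    missing = [int(v == NODATA) for v in grid]      # 1 where data is absent
    filled = [v*(m == 0) + m*filler for v, m in zip(grid, missing)]

    assert filled == [3.0, 0.5, 7.0]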
3933 4071 i = 0
3934 4072 for k in range(number_of_latitudes): #Y direction
…
3940 4078 ymomentum[j,i] = v_momentum_grid[k,l]*h
3941 4079 i += 1
4080 
3942 4081 outfile.close()
3943 4082 
4083 
4084 ##
4085 # @brief Return max&min indexes (for slicing) of area specified.
4086 # @param latitudes_ref ??
4087 # @param longitudes_ref ??
4088 # @param minlat Minimum latitude of specified area.
4089 # @param maxlat Maximum latitude of specified area.
4090 # @param minlon Minimum longitude of specified area.
4091 # @param maxlon Maximum longitude of specified area.
4092 # @return Tuple (lat_min_index, lat_max_index, lon_min_index, lon_max_index)
3944 4093 def _get_min_max_indexes(latitudes_ref,longitudes_ref,
3945 minlat=None, maxlat=None,
3946 minlon=None, maxlon=None):
3947 """
3948 return max, min indexes (for slicing) of the lat and long arrays to cover the area
3949 specified with min/max lat/long
4094 minlat=None, maxlat=None,
4095 minlon=None, maxlon=None):
4096 """
4097 Return max, min indexes (for slicing) of the lat and long arrays to cover
4098 the area specified with min/max lat/long.
3950 4099 
3951 4100 Think of the latitudes and longitudes describing a 2d surface.
…
3958 4107 assert latitudes are sorted, ascending or decending
3959 4108 """
4109 
3960 4110 latitudes = latitudes_ref[:]
3961 4111 longitudes = longitudes_ref[:]
…
3969 4119 #print len(latitudes),len(longitudes)
3970 4120 #print latitudes[len(latitudes)-1],longitudes[len(longitudes)-1]
3971 4121 
3972 4122 lat_ascending = True
3973 4123 if not allclose(sort(latitudes), latitudes):
3974 4124 lat_ascending = False
3975 # reverse order of lat, so it's in ascending order
4125 # reverse order of lat, so it's in ascending order
3976 4126 latitudes = latitudes[::-1]
3977 4127 assert allclose(sort(latitudes), latitudes)
3978 #print "latitudes in funct", latitudes
3979 4128 
3980 4129 largest_lat_index = len(latitudes)-1
4130 
3981 4131 #Cut out a smaller extent.
3982 4132 if minlat == None:
…
3987 4137 lat_min_index = 0
3988 
3989 
3990 4138 if maxlat == None:
3991 4139 lat_max_index = largest_lat_index #len(latitudes)
…
4009 4158 # Reversing the indexes, if the lat array is decending
4010 4159 if lat_ascending is False:
4011 lat_min_index, lat_max_index = largest_lat_index - lat_max_index
4160 lat_min_index, lat_max_index = largest_lat_index - lat_max_index, \
4012 4161 largest_lat_index - lat_min_index
4013 4162 lat_max_index = lat_max_index + 1 # taking into account how slicing works
…
4017 4166 
4018 4167 
4168 ##
4169 # @brief Read an ASC file.
4170 # @param filename Path to the file to read.
4171 # @param verbose True if this function is to be verbose.
4019 4172 def _read_asc(filename, verbose=False):
4020 4173 """Read esri file from the following ASCII format (.asc)
…
4029 4182 NODATA_value -9999
4030 4183 138.3698 137.4194 136.5062 135.5558 ..........
4031 
4032 4184 """
4033 4185 
4034 4186 datafile = open(filename)
4035 4187 
4036 if verbose: print 'Reading DEM from %s' %(filename)
4188 if verbose: print 'Reading DEM from %s' % filename
4189 
4037 4190 lines = datafile.readlines()
4038 4191 datafile.close()
…
4065 4218 
4066 4219 
4067 
4068 
4069 4220 #### URS 2 SWW ###
4221 
4222 # Definitions of various NetCDF dimension names, etc.
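Editor's note on the _read_asc routine above: the six header lines of the .asc sample shown in its docstring can be parsed by simple splitting. A standalone sketch of that idea (not the module's implementation), reusing the docstring's own values:

    header = ['ncols 3121',
              'nrows 1800',
              'xllcorner 722000',
              'yllcorner 5893000',
              'cellsize 25',
              'NODATA_value -9999']

    metadata = {}
    for line in header:
        name, value = line.split()
        metadata[name] = float(value)

    assert metadata['cellsize'] == 25.0
    assert metadata['NODATA_value'] == -9999.0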
4070 4223 lon_name = 'LON'
4071 4224 lat_name = 'LAT'
4072 4225 time_name = 'TIME'
4073 precision = Float # So if we want to change the precision its done here
4226 precision = Float # So if we want to change the precision its done here
4227 
4228 ##
4229 # @brief Class for a NetCDF data file writer.
4074 4230 class Write_nc:
4075 """
4076 Write an nc file.
4077 
4231 """Write an nc file.
4232 
4078 4233 Note, this should be checked to meet cdc netcdf conventions for gridded
4079 4234 data. http://www.cdc.noaa.gov/cdc/conventions/cdc_netcdf_standard.shtml
4080 
4081 """
4235 """
4236 
4237 ##
4238 # @brief Instantiate a Write_nc instance.
4239 # @param quantity_name
4240 # @param file_name
4241 # @param time_step_count The number of time steps.
4242 # @param time_step The time_step size.
4243 # @param lon
4244 # @param lat
4082 4245 def __init__(self,
4083 4246 quantity_name,
…
4087 4250 lon,
4088 4251 lat):
4089 """
4252 """Instantiate a Write_nc instance (NetCDF file writer).
4253 
4090 4254 time_step_count is the number of time steps.
4091 4255 time_step is the time step size
4092 
4093 pre-condition: quantity_name must be 'HA' 'UA'or 'VA'.
4256 
4257 pre-condition: quantity_name must be 'HA', 'UA' or 'VA'.
4094 4258 """
4259 
4095 4260 self.quantity_name = quantity_name
4096 4261 quantity_units = {'HA':'CENTIMETERS',
4097 'UA':'CENTIMETERS/SECOND',
4098 'VA':'CENTIMETERS/SECOND'}
4099 
4100 multiplier_dic = {'HA':100.0, # To convert from m to cm
4101 'UA':100.0, # m/s to cm/sec
4102 'VA':-100.0} # MUX files have positve x in the
4103 # Southern direction. This corrects for it, when writing nc files.
4104 
4262 'UA':'CENTIMETERS/SECOND',
4263 'VA':'CENTIMETERS/SECOND'}
4264 
4265 multiplier_dic = {'HA':100.0, # To convert from m to cm
4266 'UA':100.0, # and m/s to cm/sec
4267 'VA':-100.0} # MUX files have positive x in the
4268 # Southern direction. This corrects
4269 # for it, when writing nc files.
4270 
4105 4271 self.quantity_multiplier = multiplier_dic[self.quantity_name]
4106 4272 
4107 4273 #self.file_name = file_name
4108 4274 self.time_step_count = time_step_count
…
4111 4277 # NetCDF file definition
4112 4278 self.outfile = NetCDFFile(file_name, 'w')
4113 outfile = self.outfile
4279 outfile = self.outfile
4114 4280 
4115 4281 #Create new file
4116 4282 nc_lon_lat_header(outfile, lon, lat)
4117 4283 
4118 4284 # TIME
4119 4285 outfile.createDimension(time_name, None)
…
4123 4289 outfile.createVariable(self.quantity_name, precision,
4124 4290 (time_name, lat_name, lon_name))
4125 outfile.variables[self.quantity_name].missing_value =-1.e+034
4126 outfile.variables[self.quantity_name].units = \
4291 outfile.variables[self.quantity_name].missing_value = -1.e+034
4292 outfile.variables[self.quantity_name].units = \
4127 4293 quantity_units[self.quantity_name]
4128 4294 outfile.variables[lon_name][:]= ensure_numeric(lon)
…
4131 4297 #Assume no one will be wanting to read this, while we are writing
4132 4298 #outfile.close()
4133 4299 
4300 ##
4301 # @brief Write a time-step of quantity data.
4302 # @param quantity_slice The data to be stored for this time-step.
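Editor's note: store_timestep (below) leans on the unlimited TIME dimension created in __init__ above; each call appends one record, and the record index doubles as the time counter. A standalone sketch of that behaviour with a plain list standing in for the NetCDF variable (the step value is made up):

    time_step = 60.0     # hypothetical step, in seconds
    time = []            # stands in for outfile.variables[time_name]

    for _ in range(3):   # three store_timestep calls
        i = len(time)
        time.append(i * time_step)

    assert time == [0.0, 60.0, 120.0]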
4134 4303 def store_timestep(self, quantity_slice):
4135 """
4136 Write a time slice of quantity info
4304 """Write a time slice of quantity info
4305 
4137 4306 quantity_slice is the data to be stored at this time step
4138 4307 """
4139 
4140 outfile = self.outfile
4141 
4308 
4142 4309 # Get the variables
4143 time = outfile.variables[time_name]
4144 quantity = outfile.variables[self.quantity_name]
4145 
4310 time = self.outfile.variables[time_name]
4311 quantity = self.outfile.variables[self.quantity_name]
4312 
4313 # get index of slice to write
4146 4314 i = len(time)
4147 4315 
4148 4316 #Store time
4149 time[i] = i*self.time_step #self.domain.time
4150 quantity[i,:] = quantity_slice* self.quantity_multiplier
4151 
4317 time[i] = i * self.time_step #self.domain.time
4318 quantity[i,:] = quantity_slice * self.quantity_multiplier
4319 
4320 ##
4321 # @brief Close file underlying the class instance.
4152 4322 def close(self):
4153 4323 self.outfile.close()
4154 4324 
4325 
4326 ##
4327 # @brief Convert URS file to SWW file.
4328 # @param basename_in Stem of the input filename.
4329 # @param basename_out Stem of the output filename.
4330 # @param verbose True if this function is to be verbose.
4331 # @param remove_nc_files
4332 # @param minlat Sets extent of area to be used. If not supplied, full extent.
4333 # @param maxlat Sets extent of area to be used. If not supplied, full extent.
4334 # @param minlon Sets extent of area to be used. If not supplied, full extent.
4335 # @param maxlon Sets extent of area to be used. If not supplied, full extent.
4336 # @param mint
4337 # @param maxt
4338 # @param mean_stage
4339 # @param origin A 3-tuple with geo referenced UTM coordinates
4340 # @param zscale
4341 # @param fail_on_NaN
4342 # @param NaN_filler
4343 # @param elevation
4344 # @note Also convert latitude and longitude to UTM. All coordinates are
4345 # assumed to be given in the GDA94 datum.
4155 4346 def urs2sww(basename_in='o', basename_out=None, verbose=False,
4156 4347 remove_nc_files=True,
4157 4348 minlat=None, maxlat=None,
4158 minlon= None, maxlon=None,
4349 minlon=None, maxlon=None,
4159 4350 mint=None, maxt=None,
4160 4351 mean_stage=0,
4161 origin =None,
4352 origin=None,
4162 4353 zscale=1,
4163 4354 fail_on_NaN=True,
4164 4355 NaN_filler=0,
4165 4356 elevation=None):
4166 """
4357 """Convert a URS file to an SWW file.
4167 4358 Convert URS C binary format for wave propagation to
4168 4359 sww format native to abstract_2d_finite_volumes.
…
4179 4370 min's and max's: If omitted - full extend is used.
4180 4371 To include a value min may equal it, while max must exceed it.
4181 Lat and lon are assumed to be in decimal degrees.
4372 Lat and lon are assumed to be in decimal degrees.
4182 4373 NOTE: minlon is the most east boundary.
4183 4374 
4184 4375 origin is a 3-tuple with geo referenced
4185 4376 UTM coordinates (zone, easting, northing)
…
4187 4378 since all of anuga should be able to handle an arbitary origin.
4188 4379 
4189 
4190 4380 URS C binary format has data orgainised as TIME, LONGITUDE, LATITUDE
4191 4381 which means that latitude is the fastest
…
4194 4384 In URS C binary the latitudes and longitudes are in assending order.
4195 4385 """
4386 
4196 4387 if basename_out == None:
4197 4388 basename_out = basename_in
4389 
4198 4390 files_out = urs2nc(basename_in, basename_out)
4391 
4199 4392 ferret2sww(basename_out,
4200 4393 minlat=minlat,
…
4211 4404 inverted_bathymetry=True,
4212 4405 verbose=verbose)
4213 #print "files_out",files_out
4406 
4214 4407 if remove_nc_files:
4215 4408 for file_out in files_out:
4216 4409 os.remove(file_out)
4217 
4218 def urs2nc(basename_in = 'o', basename_out = 'urs'):
4219 """
4220 Convert the 3 urs files to 4 nc files.
4410 
4411 
4412 ##
4413 # @brief Convert 3 URS files back to 4 NC files.
4414 # @param basename_in Stem of the input filenames.
4415 # @param basename_out Stem of the output filenames.
4416 # @note The name of the urs file names must be:
4417 # [basename_in]-z-mux
4418 # [basename_in]-e-mux
4419 # [basename_in]-n-mux
4420 def urs2nc(basename_in='o', basename_out='urs'):
4421 """Convert the 3 urs files to 4 nc files.
4221 4422 
4222 4423 The name of the urs file names must be;
…
4224 4425 [basename_in]-e-mux
4225 4426 [basename_in]-n-mux
4226 
4227 """
4427 """
4428 
4229 4429 files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
4230 4430 basename_in + EAST_VELOCITY_LABEL,
4231 4431 basename_in + NORTH_VELOCITY_LABEL]
4232 files_out = [basename_out +'_ha.nc',
4233 basename_out +'_ua.nc',
4234 basename_out +'_va.nc']
4235 quantities = ['HA', 'UA','VA']
4432 files_out = [basename_out + '_ha.nc',
4433 basename_out + '_ua.nc',
4434 basename_out + '_va.nc']
4435 quantities = ['HA', 'UA', 'VA']
4236 4436 
4237 4437 #if os.access(files_in[0]+'.mux', os.F_OK) == 0 :
4238 4438 for i, file_name in enumerate(files_in):
4239 4439 if os.access(file_name, os.F_OK) == 0:
4240 4440 if os.access(file_name + '.mux', os.F_OK) == 0 :
4241 msg = 'File %s does not exist or is not accessible' % file_name
4441 msg = 'File %s does not exist or is not accessible' % file_name
4242 4442 raise IOError, msg
4243 4443 else:
4244 4444 files_in[i] += '.mux'
4245 4445 print "file_name", file_name
4446 
4246 4447 hashed_elevation = None
4247 4448 for file_in, file_out, quantity in map(None, files_in,
…
4249 4450 quantities):
4250 4451 lonlatdep, lon, lat, depth = _binary_c2nc(file_in,
4251 file_out,
4252 quantity)
4452 file_out,
4453 quantity)
4253 #print "lonlatdep", lonlatdep
4254 4454 if hashed_elevation == None:
4255 elevation_file = basename_out +'_e.nc'
4455 elevation_file = basename_out + '_e.nc'
4256 4456 write_elevation_nc(elevation_file,
4257 lon,
4258 lat,
4259 depth)
4457 lon,
4458 lat,
4459 depth)
4260 4460 hashed_elevation = myhash(lonlatdep)
4261 4461 else:
4262 4462 msg = "The elevation information in the mux files is inconsistent"
4263 4463 assert hashed_elevation == myhash(lonlatdep), msg
4464 
4264 4465 files_out.append(elevation_file)
4466 
4265 4467 return files_out
4468 
4469 
4470 ##
4471 # @brief Convert a quantity URS file to a NetCDF file.
4472 # @param file_in Path to input URS file.
4473 # @param file_out Path to the output file.
4474 # @param quantity Name of the quantity to be written to the output file.
4475 # @return A tuple (lonlatdep, lon, lat, depth).
4266 4476 def _binary_c2nc(file_in, file_out, quantity):
4267 """
4268 Reads in a quantity urs file and writes a quantity nc file.
4269 additionally, returns the depth and lat, long info,
4477 """Reads in a quantity urs file and writes a quantity nc file.
4478 Additionally, returns the depth and lat, long info,
4270 4479 so it can be written to a file.
4272 4480 """
4481 
4273 columns = 3 # long, lat , depth
4482 columns = 3 # long, lat , depth
4274 4483 mux_file = open(file_in, 'rb')
4275 4484 
4276 4485 # Number of points/stations
4277 (points_num,) = unpack('i',mux_file.read(4))
4486 (points_num,) = unpack('i', mux_file.read(4))
4278 4487 
4279 4488 # nt, int - Number of time steps
4280 (time_step_count,) = unpack('i',mux_file.read(4))
4489 (time_step_count,) = unpack('i', mux_file.read(4))
4281 4490 
4282 4491 #dt, float - time step, seconds
4283 4492 (time_step,) = unpack('f', mux_file.read(4))
4284 4493 
4285 4494 msg = "Bad data in the mux file."
4286 4495 if points_num < 0:
…
4293 4502 mux_file.close()
4294 4503 raise ANUGAError, msg
4295 4504 
4296 4505 lonlatdep = p_array.array('f')
4297 4506 lonlatdep.read(mux_file, columns * points_num)
4298 lonlatdep = array(lonlatdep, typecode=Float)
4507 lonlatdep = array(lonlatdep, typecode=Float)
4299 4508 lonlatdep = reshape(lonlatdep, (points_num, columns))
4300 4509 
4301 4510 lon, lat, depth = lon_lat2grid(lonlatdep)
4302 4511 lon_sorted = list(lon)
…
4306 4515 msg = "Longitudes in mux file are not in ascending order"
4307 4516 raise IOError, msg
4517 
4308 4518 lat_sorted = list(lat)
4309 4519 lat_sorted.sort()
4310 4520 
4311 if not lat == lat_sorted:
4312 msg = "Latitudes in mux file are not in ascending order"
4313 
4521 # UNUSED?
4522 ## if not lat == lat_sorted:
4523 ## msg = "Latitudes in mux file are not in ascending order"
4524 
4314 4525 nc_file = Write_nc(quantity,
4315 4526 file_out,
…
4320 4531 
4321 4532 for i in range(time_step_count):
4322 #Read in a time slice from mux file
4533 #Read in a time slice from mux file
4323 4534 hz_p_array = p_array.array('f')
4324 4535 hz_p_array.read(mux_file, points_num)
4325 4536 hz_p = array(hz_p_array, typecode=Float)
4326 4537 hz_p = reshape(hz_p, (len(lon), len(lat)))
4327 hz_p = transpose(hz_p) #mux has lat varying fastest, nc has long v.f.
4538 hz_p = transpose(hz_p) # mux has lat varying fastest, nc has long v.f.
4328 4539 
4329 4540 #write time slice to nc file
4330 4541 nc_file.store_timestep(hz_p)
4542 
4331 4543 mux_file.close()
4332 4544 nc_file.close()
4333 4545 
4334 4546 return lonlatdep, lon, lat, depth
4335 
4336 
4547 
4548 
4549 ##
4550 # @brief Write an NC elevation file.
4551 # @param file_out Path to the output file.
4552 # @param lon ??
4553 # @param lat ??
4554 # @param depth_vector The elevation data to write.
4337 4555 def write_elevation_nc(file_out, lon, lat, depth_vector):
4338 """
4339 Write an nc elevation file.
4340 """
4341 
4556 """Write an nc elevation file."""
4557 
4342 4558 # NetCDF file definition
4343 4559 outfile = NetCDFFile(file_out, 'w')
…
4345 4561 #Create new file
4346 4562 nc_lon_lat_header(outfile, lon, lat)
4347 4563 
4348 4564 # ELEVATION
4349 4565 zname = 'ELEVATION'
4350 4566 outfile.createVariable(zname, precision, (lat_name, lon_name))
4351 outfile.variables[zname].units ='CENTIMETERS'
4352 outfile.variables[zname].missing_value =-1.e+034
4353 
4354 outfile.variables[lon_name][:] = ensure_numeric(lon)
4355 outfile.variables[lat_name][:] = ensure_numeric(lat)
4356 
4357 depth = reshape(depth_vector, (len(lat), len(lon)))
4358 outfile.variables[zname][:] = depth
4359 
4567 outfile.variables[zname].units = 'CENTIMETERS'
4568 outfile.variables[zname].missing_value = -1.e+034
4569 
4570 outfile.variables[lon_name][:] = ensure_numeric(lon)
4571 outfile.variables[lat_name][:] = ensure_numeric(lat)
4572 
4573 depth = reshape(depth_vector, (len(lat), len(lon)))
4574 outfile.variables[zname][:] = depth
4575 
4360 4576 outfile.close()
4361 4577 
4578 
4579 ##
4580 # @brief Write lat/lon headers to a NetCDF file.
4581 # @param outfile Handle to open file to write to.
4582 # @param lon An iterable of the longitudes.
4583 # @param lat An iterable of the latitudes.
4584 # @note Defines lat/long dimensions and variables. Sets various attributes:
4585 # .point_spacing and .units
4586 # and writes lat/lon data.
4587 
4362 4588 def nc_lon_lat_header(outfile, lon, lat):
4363 """
4589 """Write lat/lon headers to a NetCDF file.
4590 
4364 4591 outfile is the netcdf file handle.
4365 4592 lon - a list/array of the longitudes
4366 4593 lat - a list/array of the latitudes
4367 4594 """
4368 4595 
4369 4596 outfile.institution = 'Geoscience Australia'
4370 4597 outfile.description = 'Converted from URS binary C'
4371 4598 
4372 4599 # Longitude
4373 4600 outfile.createDimension(lon_name, len(lon))
4374 4601 outfile.createVariable(lon_name, precision, (lon_name,))
4375 outfile.variables[lon_name].point_spacing ='uneven'
4376 outfile.variables[lon_name].units ='degrees_east'
4602 outfile.variables[lon_name].point_spacing = 'uneven'
4603 outfile.variables[lon_name].units = 'degrees_east'
4377 4604 outfile.variables[lon_name].assignValue(lon)
4378 
4379 
4605 
4380 4606 # Latitude
4381 4607 outfile.createDimension(lat_name, len(lat))
4382 4608 outfile.createVariable(lat_name, precision, (lat_name,))
4383 outfile.variables[lat_name].point_spacing ='uneven'
4384 outfile.variables[lat_name].units ='degrees_north'
4609 outfile.variables[lat_name].point_spacing = 'uneven'
4610 outfile.variables[lat_name].units = 'degrees_north'
4385 4611 outfile.variables[lat_name].assignValue(lat)
4386 4612 
4387 4613 
4388 4614 ##
4615 # @brief
4616 # @param long_lat_dep
4617 # @return A tuple (long, lat, quantity).
4618 # @note The latitude is the fastest varying dimension - in mux files.
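Editor's note: the @note above is the key to lon_lat2grid (next): mux data is 'latitude fastest', while the returned quantity vector is 'longitude fastest'. The index arithmetic can be checked standalone with 2 longitudes x 3 latitudes (labels are made up; 'dXY' means longitude X, latitude Y):

    lenlat, lenlong = 3, 2
    mux_values = ['d00', 'd01', 'd02', 'd10', 'd11', 'd12']   # lat varies fastest

    quantity = [None] * (lenlat*lenlong)
    for lat_i in range(lenlat):
        for long_i in range(lenlong):
            q_index = lat_i*lenlong + long_i      # long varies fastest
            lld_index = long_i*lenlat + lat_i     # lat varies fastest
            quantity[q_index] = mux_values[lld_index]

    assert quantity == ['d00', 'd10', 'd01', 'd11', 'd02', 'd12']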
4389 4619 def lon_lat2grid(long_lat_dep):
4390 4620 """
…
4397 4627 The latitude is the fastest varying dimension - in mux files
4398 4628 """
4629 
4399 4630 LONG = 0
4400 4631 LAT = 1
…
4402 4633 
4403 4634 long_lat_dep = ensure_numeric(long_lat_dep, Float)
4404 4635 
4405 4636 num_points = long_lat_dep.shape[0]
4406 4637 this_rows_long = long_lat_dep[0,LONG]
…
4410 4641 while long_lat_dep[i,LONG] == this_rows_long and i < num_points:
4411 4642 i += 1
4643 
4412 4644 # determine the lats and longsfrom the grid
4413 lat = long_lat_dep[:i, LAT]
4645 lat = long_lat_dep[:i, LAT]
4414 4646 long = long_lat_dep[::i, LONG]
4415 4647 
4416 4648 lenlong = len(long)
4417 4649 lenlat = len(lat)
4418 #print 'len lat', lat, len(lat)
4419 #print 'len long', long, len(long)
4420 
4421 msg = 'Input data is not gridded'
4650 
4651 msg = 'Input data is not gridded'
4422 4652 assert num_points % lenlat == 0, msg
4423 4653 assert num_points % lenlong == 0, msg
4424 
4425 # Test that data is gridded
4654 
4655 # Test that data is gridded
4426 4656 for i in range(lenlong):
4427 4657 msg = 'Data is not gridded. It must be for this operation'
4428 first = i *lenlat
4658 first = i * lenlat
4429 4659 last = first + lenlat
4430 4660 
4431 4661 assert allclose(long_lat_dep[first:last,LAT], lat), msg
4432 4662 assert allclose(long_lat_dep[first:last,LONG], long[i]), msg
4433 
4434 
4435 # print 'range long', min(long), max(long)
4436 # print 'range lat', min(lat), max(lat)
4437 # print 'ref long', min(long_lat_dep[:,0]), max(long_lat_dep[:,0])
4438 # print 'ref lat', min(long_lat_dep[:,1]), max(long_lat_dep[:,1])
4439 
4440 
4441 
4663 
4442 4664 msg = 'Out of range latitudes/longitudes'
4443 4665 for l in lat:assert -90 < l < 90 , msg
…
4448 4670 # FIXME - make this faster/do this a better way
4449 4671 # use numeric transpose, after reshaping the quantity vector
4450 # quantity = zeros(len(long_lat_dep), Float)
4451 4672 quantity = zeros(num_points, Float)
4452 
4453 # print 'num',num_points
4673 
4454 4674 for lat_i, _ in enumerate(lat):
4455 4675 for long_i, _ in enumerate(long):
4456 q_index = lat_i*lenlong+long_i
4457 lld_index = long_i*lenlat+lat_i
4458 # print 'lat_i', lat_i, 'long_i',long_i, 'q_index', q_index, 'lld_index', lld_index
4676 q_index = lat_i*lenlong + long_i
4677 lld_index = long_i*lenlat + lat_i
4459 4678 temp = long_lat_dep[lld_index, QUANTITY]
4460 4679 quantity[q_index] = temp
4461 4680 
4462 4681 return long, lat, quantity
4463 4682 
4464 #### END URS 2 SWW ###
4465 
4466 #### URS UNGRIDDED 2 SWW ###
4683 ################################################################################
4684 # END URS 2 SWW
4685 ################################################################################
4686 
4687 ################################################################################
4688 # URS UNGRIDDED 2 SWW
4689 ################################################################################
4467 4690 
4468 4691 ### PRODUCING THE POINTS NEEDED FILE ###
…
4475 4698 #LONG_AMOUNT = 3600
4476 4699 
4700 
4701 ##
4702 # @brief
4703 # @param file_name
4704 # @param boundary_polygon
4705 # @param zone
4706 # @param ll_lat
4707 # @param ll_long
4708 # @param grid_spacing
4709 # @param lat_amount
4710 # @param long_amount
4711 # @param isSouthernHemisphere
4712 # @param export_csv
4713 # @param use_cache
4714 # @param verbose True if this function is to be verbose.
4715 # @return
4477 4716 def URS_points_needed_to_file(file_name, boundary_polygon, zone,
4478 4717 ll_lat, ll_long,
4479 grid_spacing,
4718 grid_spacing,
4480 4719 lat_amount, long_amount,
4481 4720 isSouthernHemisphere=True,
…
4489 4728 1st line is the number of points,
4490 4729 each line after represents a point,in lats and longs.
4491 4730 
4492 4731 Note: The polygon cannot cross zones or hemispheres.
4493 4732 
4494 4733 A work-a-round for different zones or hemispheres is to run this twice,
4495 4734 once for each zone, and then combine the output.
4496 4735 
4497 4736 file_name - name of the urs file produced for David.
4498 4737 boundary_polygon - a list of points that describes a polygon.
…
4501 4740 
4502 4741 This is info about the URS model that needs to be inputted.
4503 4742 
4504 4743 ll_lat - lower left latitude, in decimal degrees
4505 4744 ll-long - lower left longitude, in decimal degrees
4506 grid_spacing - in deciamal degrees
4507 lat_amount - number of latitudes
4508 long_amount- number of longs
4745 grid_spacing - in decimal degrees
4746 lat_amount - number of latitudes
4747 long_amount - number of longs
4509 
4748 
4510 4749 Don't add the file extension. It will be added.
4511 4750 """
4751 
4512 4752 geo = URS_points_needed(boundary_polygon, zone, ll_lat, ll_long,
4513 grid_spacing,
4753 grid_spacing,
4514 4754 lat_amount, long_amount, isSouthernHemisphere,
4515 4755 use_cache, verbose)
4756 
4516 4757 if not file_name[-4:] == ".urs":
4517 4758 file_name += ".urs"
4759 
4518 4760 geo.export_points_file(file_name, isSouthHemisphere=isSouthernHemisphere)
4761 
4519 4762 if export_csv:
4520 4763 if file_name[-4:] == ".urs":
4521 4764 file_name = file_name[:-4] + ".csv"
4522 4765 geo.export_points_file(file_name)
4766 
4523 4767 return geo
4524 4768 
4769 
4770 ##
4771 # @brief
4772 # @param boundary_polygon
4773 # @param zone
4774 # @param ll_lat
4775 # @param ll_long
4776 # @param grid_spacing
4777 # @param lat_amount
4778 # @param long_amount
4779 # @param isSouthHemisphere
4780 # @param use_cache
4781 # @param verbose
4525 4782 def URS_points_needed(boundary_polygon, zone, ll_lat,
4526 ll_long, grid_spacing,
4783 ll_long, grid_spacing,
4527 4784 lat_amount, long_amount, isSouthHemisphere=True,
4528 4785 use_cache=False, verbose=False):
4529 4786 args = (boundary_polygon,
4530 4787 zone, ll_lat,
4531 ll_long, grid_spacing,
4788 ll_long, grid_spacing,
4532 4789 lat_amount, long_amount, isSouthHemisphere)
4533 kwargs = {}
4790 kwargs = {}
4791 
4534 4792 if use_cache is True:
4535 4793 try:
4536 4794 from anuga.caching import cache
4537 4795 except:
4538 msg = 'Caching was requested, but caching module' +\
4796 msg = 'Caching was requested, but caching module' \
4539 4797 'could not be imported'
4540 4798 raise msg
4541 4799 
4542 
4543 4800 geo = cache(_URS_points_needed,
4544 args, kwargs,
4545 verbose=verbose,
4546 compression=False)
4801 args, kwargs,
4802 verbose=verbose,
4803 compression=False)
4547 4804 else:
4548 4805 geo = apply(_URS_points_needed, args, kwargs)
…
4550 4807 return geo
4551 4808 
4809 
4810 ##
4811 # @brief
4812 # @param boundary_polygon An iterable of points that describe a polygon.
4813 # @param zone
4814 # @param ll_lat Lower left latitude, in decimal degrees
4815 # @param ll_long Lower left longitude, in decimal degrees
4816 # @param grid_spacing Grid spacing in decimal degrees.
4817 # @param lat_amount
4818 # @param long_amount
4819 # @param isSouthHemisphere
4553 4820 def _URS_points_needed(boundary_polygon,
4554 zone, ll_lat,
4555 ll_long, grid_spacing,
4556 lat_amount, long_amount,
4821 zone, ll_lat,
4822 ll_long, grid_spacing,
4823 lat_amount, long_amount,
4557 4824 isSouthHemisphere):
4558 4825 """
…
4563 4830 ll_lat - lower left latitude, in decimal degrees
4564 4831 ll-long - lower left longitude, in decimal degrees
4565 grid_spacing - in deciamal degrees
4566 
4567 """
4568 
4832 grid_spacing - in decimal degrees
4833 
4834 """
4835 
4569 4836 from sets import ImmutableSet
4570 4837 
4571 4838 msg = "grid_spacing can not be zero"
4572 assert not grid_spacing == 0, msg
4839 assert not grid_spacing == 0, msg
4840 
4573 4841 a = boundary_polygon
4842 
4574 4843 # List of segments. Each segment is two points.
4575 4844 segs = [i and [a[i-1], a[i]] or [a[len(a)-1], a[0]] for i in range(len(a))]
4845 
4576 4846 # convert the segs to Lat's and longs.
4577 
4578 4847 # Don't assume the zone of the segments is the same as the lower left
4579 4848 # corner of the lat long data!! They can easily be in different zones
4580 
4581 4849 lat_long_set = ImmutableSet()
4582 4850 for seg in segs:
4583 points_lat_long = points_needed(seg, ll_lat, ll_long, grid_spacing,
4584 lat_amount, long_amount, zone, isSouthHemisphere)
4851 points_lat_long = points_needed(seg, ll_lat, ll_long, grid_spacing,
4852 lat_amount, long_amount, zone,
4853 isSouthHemisphere)
4585 4854 lat_long_set |= ImmutableSet(points_lat_long)
4855 
4586 4856 if lat_long_set == ImmutableSet([]):
4587 msg = """URS region specified and polygon does not overlap."""
4857 msg = "URS region specified and polygon does not overlap."
4588 4858 raise ValueError, msg
4589 4859 
…
4591 4861 # these points. There should be.
4592 4862 geo = Geospatial_data(data_points=list(lat_long_set),
4593 points_are_lats_longs=True)
4863 points_are_lats_longs=True)
4864 
4594 4865 return geo
4595 
4866 
4867 
4868 ##
4869 # @brief Get the points that are needed to interpolate any point on a segment.
4870 # @param seg Two points in the UTM.
4871 # @param ll_lat Lower left latitude, in decimal degrees
4872 # @param ll_long Lower left longitude, in decimal degrees
4873 # @param grid_spacing
4874 # @param lat_amount
4875 # @param long_amount
4876 # @param zone
4877 # @param isSouthHemisphere
4878 # @return A list of points.
4596 def points_needed(seg, ll_lat, ll_long, grid_spacing,
4879 def points_needed(seg, ll_lat, ll_long, grid_spacing,
4597 4880 lat_amount, long_amount, zone,
4598 4881 isSouthHemisphere):
…
4602 4885 interpolate any point on the segment.
4603 4886 """
4887 
4604 4888 from math import sqrt
4605 #print "zone",zone
4889 
4606 4890 geo_reference = Geo_reference(zone=zone)
4607 #print "seg", seg
4608 geo = Geospatial_data(seg,geo_reference=geo_reference)
4891 geo = Geospatial_data(seg, geo_reference=geo_reference)
4609 4892 seg_lat_long = geo.get_data_points(as_lat_long=True,
4610 4893 isSouthHemisphere=isSouthHemisphere)
4611 #print "seg_lat_long", seg_lat_long
4894 
4612 4895 # 1.415 = 2^0.5, rounded up....
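Editor's note: the buffer computed just below widens the segment's extent before it is snapped onto the URS grid; the snapping itself (further down in points_needed) rounds the lower bound up and the upper bound down. A standalone check of that rounding trick with made-up values:

    ll_long, grid_spacing = 150.0, 0.25
    min_long, max_long = 150.6, 151.3    # hypothetical buffered extent

    first_col = int(round((min_long - ll_long)/grid_spacing + 0.5))   # round up
    last_col = int(round((max_long - ll_long)/grid_spacing))          # round down

    assert first_col == 3    # 150.75, first grid longitude inside the extent
    assert last_col == 5     # 151.25, last grid longitude inside the extent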
4613 4896 sqrt_2_rounded_up = 1.415
4614 4897 buffer = sqrt_2_rounded_up * grid_spacing
4615 4898 
4616 4899 max_lat = max(seg_lat_long[0][0], seg_lat_long[1][0]) + buffer
4617 4900 max_long = max(seg_lat_long[0][1], seg_lat_long[1][1]) + buffer
…
4619 4902 min_long = min(seg_lat_long[0][1], seg_lat_long[1][1]) - buffer
4620 4903 
4621 #print "min_long", min_long
4622 #print "ll_long", ll_long
4623 #print "grid_spacing", grid_spacing
4624 first_row = (min_long - ll_long)/grid_spacing
4904 first_row = (min_long - ll_long) / grid_spacing
4905 
4625 4906 # To round up
4626 4907 first_row_long = int(round(first_row + 0.5))
4627 #print "first_row", first_row_long
4628 
4629 last_row = (max_long - ll_long)/grid_spacing # round down
4908 
4909 last_row = (max_long - ll_long) / grid_spacing # round down
4630 4910 last_row_long = int(round(last_row))
4631 #print "last_row",last_row_long
4632 
4633 first_row = (min_lat - ll_lat)/grid_spacing
4911 
4912 first_row = (min_lat - ll_lat) / grid_spacing
4634 4913 # To round up
4635 4914 first_row_lat = int(round(first_row + 0.5))
4636 #print "first_row", first_row_lat
4637 
4638 last_row = (max_lat - ll_lat)/grid_spacing # round down
4915 
4916 last_row = (max_lat - ll_lat) / grid_spacing # round down
4639 4917 last_row_lat = int(round(last_row))
4640 #print "last_row",last_row_lat
4641 
4642 # to work out the max distance -
4643 # 111120 - horizontal distance between 1 deg latitude.
4644 #max_distance = sqrt_2_rounded_up * 111120 * grid_spacing
4918 
4645 4919 max_distance = 157147.4112 * grid_spacing
4646 #print "max_distance", max_distance #2619.12 m for 1 minute
4920 points_lat_long = []
4647 4921 
4648 4922 # Create a list of the lat long points to include.
4649 4923 for index_lat in range(first_row_lat, last_row_lat + 1):
4650 4924 for index_long in range(first_row_long, last_row_long + 1):
4651 #print "index_lat", index_lat
4652 #print "index_long", index_long
4653 4925 lat = ll_lat + index_lat*grid_spacing
4654 4926 long = ll_long + index_long*grid_spacing
…
4657 4929 if keep_point(lat, long, seg, max_distance):
4658 4930 points_lat_long.append((lat, long)) #must be hashable
4659 #print "points_lat_long", points_lat_long
4660 
4661 4931 
4662 4932 # Now that we have these points, lets throw ones out that are too far away
4663 4933 return points_lat_long
4664 4934 
4935 
4936 ##
4937 # @brief
4938 # @param lat
4939 # @param long
4940 # @param seg Two points in UTM.
4941 # @param max_distance
4664 4942 def keep_point(lat, long, seg, max_distance):
4665 4943 """
4666 4944 seg is two points, UTM
4667 4945 """
4946 
4668 4947 from math import sqrt
4948 
4669 4949 _ , x0, y0 = redfearn(lat, long)
4670 4950 x1 = seg[0][0]
…
4678 4957 (x2_1)*(x2_1)+(y2_1)*(y2_1))
4679 4958 except ZeroDivisionError:
4680 #print "seg", seg
4681 #print "x0", x0
4682 #print "y0", y0
4683 if sqrt((x2_1)*(x2_1)+(y2_1)*(y2_1)) == 0 and \
4684 abs((x2_1)*(y1-y0)-(x1-x0)*(y2_1)) == 0:
4959 if sqrt((x2_1)*(x2_1)+(y2_1)*(y2_1)) == 0 \
4960 and abs((x2_1)*(y1-y0)-(x1-x0)*(y2_1)) == 0:
4685 4961 return True
4686 4962 else:
4687 4963 return False
4688 
4689 if d <= max_distance:
4690 return True
4691 else:
4692 return False
4693 
4694 #### CONVERTING UNGRIDDED URS DATA TO AN SWW FILE ####
4695 
4964 
4965 return d <= max_distance
4966 
4967 
4968 ################################################################################
4969 # CONVERTING UNGRIDDED URS DATA TO AN SWW FILE
4970 ################################################################################
4971 
4696 4972 WAVEHEIGHT_MUX_LABEL = '-z-mux'
4697 4973 EAST_VELOCITY_LABEL = '-e-mux'
4698 NORTH_VELOCITY_LABEL = '-n-mux'
4974 NORTH_VELOCITY_LABEL = '-n-mux'
4975 
4976 ##
4977 # @brief Convert URS file(s) (wave prop) to an SWW file.
4978 # @param basename_in Stem of the input filenames.
4979 # @param basename_out Path to the output SWW file.
4980 # @param verbose True if this function is to be verbose.
4981 # @param mint
4982 # @param maxt
4983 # @param mean_stage
4984 # @param origin Tuple with geo-ref UTM coordinates (zone, easting, northing).
4985 # @param hole_points_UTM
4986 # @param zscale
4987 # @note Also convert latitude and longitude to UTM. All coordinates are
4988 # assumed to be given in the GDA94 datum.
4989 # @note Input filename stem has suffixes '-z-mux', '-e-mux' and '-n-mux'
4990 # added for relative height, x-velocity and y-velocity respectively.
4699 4991 def urs_ungridded2sww(basename_in='o', basename_out=None, verbose=False,
4700 4992 mint=None, maxt=None,
…
4703 4995 hole_points_UTM=None,
4704 4996 zscale=1):
4705 """
4997 """
4706 4998 Convert URS C binary format for wave propagation to
4707 4999 sww format native to abstract_2d_finite_volumes.
…
4719 5010 min's and max's: If omitted - full extend is used.
4720 5011 To include a value min ans max may equal it.
4721 Lat and lon are assumed to be in decimal degrees.
4722 
5012 Lat and lon are assumed to be in decimal degrees.
5013 
4723 5014 origin is a 3-tuple with geo referenced
4724 5015 UTM coordinates (zone, easting, northing)
…
4727 5018 The mux point info is NOT relative to this origin.
4728 5019 
4729 
4730 URS C binary format has data orgainised as TIME, LONGITUDE, LATITUDE
5020 URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
4731 5021 which means that latitude is the fastest
…
4738 5028 function used, and the different grid structure between urs2sww
4739 5029 and this function.
4740 
5030 
4741 5031 Interpolating data that has an underlying gridded source can
4742 5032 easily end up with different values, depending on the underlying
…
4750 5040 The grid can be
4751 5041 -
4752 |\| 
5042 |\| A
4753 5043 -
4754 5044 or;
4755 -
4756 |/| B
4757 5045 -
4758 If a point is just below the center of the midpoint, it will have a
4759 +ve value in grid A and a -ve value in grid B.
4760 """
5046 |/| B
5047 -
5048 
5049 If a point is just below the center of the midpoint, it will have a
5050 +ve value in grid A and a -ve value in grid B.
5051 """
5052 
4761 5053 from anuga.mesh_engine.mesh_engine import NoTrianglesError
4762 5054 from anuga.pmesh.mesh import Mesh
…
4767 5059 quantities = ['HA','UA','VA']
4768 5060 
4769 # instanciate urs_points of the three mux files.
5061 # instantiate urs_points of the three mux files.
4770 5062 mux = {}
4771 5063 for quantity, file in map(None, quantities, files_in):
4772 5064 mux[quantity] = Urs_points(file)
4773 5065 
4774 5066 # Could check that the depth is the same. (hashing)
4775 5067 
4776 5068 # handle to a mux file to do depth stuff
4777 5069 a_mux = mux[quantities[0]]
4778 5070 
4779 5071 # Convert to utm
4780 5072 lat = a_mux.lonlatdep[:,1]
4781 5073 long = a_mux.lonlatdep[:,0]
4782 points_utm, zone = convert_from_latlon_to_utm( \
4783 latitudes=lat, longitudes=long)
4784 #print "points_utm", points_utm
4785 #print "zone", zone
4786 
4787 elevation = a_mux.lonlatdep[:,2] * -1 #
4788 
4789 # grid ( create a mesh from the selected points)
5074 points_utm, zone = convert_from_latlon_to_utm(latitudes=lat,
5075 longitudes=long)
5076 
5077 
5078 elevation = a_mux.lonlatdep[:,2] * -1
5079 
5080 # grid (create a mesh from the selected points)
4790 5081 # This mesh has a problem. Triangles are streched over ungridded areas.
4791 # If these areas could be described as holes in pmesh, that would be great
5082 # If these areas could be described as holes in pmesh, that would be great.
4792 5083 
4793 5084 # I can't just get the user to selection a point in the middle.
…
4798 5088 mesh.add_vertices(points_utm)
4799 5089 mesh.auto_segment(smooth_indents=True, expand_pinch=True)
5090 
4800 5091 # To try and avoid alpha shape 'hugging' too much
4801 mesh.auto_segment( mesh.shape.get_alpha()*1.1)
5092 mesh.auto_segment(mesh.shape.get_alpha() * 1.1)
4802 5093 if hole_points_UTM is not None:
4803 5094 point = ensure_absolute(hole_points_UTM)
4804 5095 mesh.add_hole(point[0], point[1])
5096 
4805 5097 try:
4806 5098 mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
4807 5099 except NoTrianglesError:
4808 # This is a bit of a hack, going in and changing the
4809 # data structure.
5100 # This is a bit of a hack, going in and changing the data structure.
4810 5101 mesh.holes = []
4811 5102 mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
5103 
4812 5104 mesh_dic = mesh.Mesh2MeshList()
4813 5105 
4814 5106 #mesh.export_mesh_file(basename_in + '_168.tsh')
4815 #import sys; sys.exit()
5107 #import sys; sys.exit()
4816 5108 # These are the times of the mux file
4817 5109 mux_times = []
4818 5110 for i in range(a_mux.time_step_count):
4819 mux_times.append(a_mux.time_step * i)
5111 mux_times.append(a_mux.time_step * i)
4820 mux_times_start_i, mux_times_fin_i= mux2sww_time(mux_times, mint, maxt)
5112 (mux_times_start_i, mux_times_fin_i) = mux2sww_time(mux_times, mint, maxt)
4821 5113 times = mux_times[mux_times_start_i:mux_times_fin_i]
4822 5114 
4823 5115 if mux_times_start_i == mux_times_fin_i:
4824 5116 # Close the mux files
4825 5117 for quantity, file in map(None, quantities, files_in):
4826 5118 mux[quantity].close()
4827 msg="Due to mint and maxt there's no time info in the boundary SWW."
5119 msg = "Due to mint and maxt there's no time info in the boundary SWW."
4828 5120 raise Exception, msg
4829 5121 
4830 5122 # If this raise is removed there is currently no downstream errors
4831 5123 
4832 5124 points_utm=ensure_numeric(points_utm)
4833 assert ensure_numeric(mesh_dic['generatedpointlist']) ==\
4834 ensure_numeric(points_utm)
4835 
5125 assert ensure_numeric(mesh_dic['generatedpointlist']) \
5126 == ensure_numeric(points_utm)
5127 
4836 5128 volumes = mesh_dic['generatedtrianglelist']
4837 
4838 # write sww intro and grid stuff.
5129 
5130 # write sww intro and grid stuff.
4839 5131 if basename_out is None:
4840 5132 swwname = basename_in + '.sww'
…
4843 5135 
4844 5136 if verbose: print 'Output to ', swwname
5137 
4845 5138 outfile = NetCDFFile(swwname, 'w')
5139 
4846 5140 # For a different way of doing this, check out tsh2sww
4847 5141 # work out sww_times and the index range this covers
4848 5142 sww = Write_sww()
4849 5143 sww.store_header(outfile, times, len(volumes), len(points_utm),
4850 verbose=verbose, sww_precision=Float)
5144 verbose=verbose, sww_precision=Float)
4851 5145 outfile.mean_stage = mean_stage
4852 5146 outfile.zscale = zscale
…
4855 5149 elevation, zone, new_origin=origin,
4856 5150 verbose=verbose)
4857 5151 
4858 5152 if verbose: print 'Converting quantities'
5153 
5154 # Read in a time slice from each mux file and write it to the SWW file
4859 5155 j = 0
4860 # Read in a time slice from each mux file and write it to the sww file
4861 5156 for ha, ua, va in map(None, mux['HA'], mux['UA'], mux['VA']):
4862 5157 if j >= mux_times_start_i and j < mux_times_fin_i:
…
4864 5159 h = stage - elevation
4865 5160 xmomentum = ua*h
4866 ymomentum = -1*va*h # -1 since in mux files south is positive.
5161 ymomentum = -1 * va * h # -1 since in mux files south is positive.
4867 sww.store_quantities(outfile,
4868 slice_index=j-mux_times_start_i,
5162 sww.store_quantities(outfile,
5163 slice_index=j-mux_times_start_i,
4869 5164 verbose=verbose,
4870 5165 stage=stage,
…
4873 5168 sww_precision=Float)
4874 5169 j += 1
5170 
4875 5171 if verbose: sww.verbose_quantities(outfile)
5172 
4876 5173 outfile.close()
4877 #
5174 
4878 5175 # Do some conversions while writing the sww file
4879 5176 
4880 5177 ##################################
4881 # READ MUX2 FILES line of points #
4882 ##################################
5178 ################################################################################
5179 # READ MUX2 FILES line of points
5180 ################################################################################
4883 5181 
4884 5182 WAVEHEIGHT_MUX2_LABEL = '-z-mux2'
4885 EAST_VELOCITY_MUX2_LABEL = '-e-mux2'
4886 NORTH_VELOCITY_MUX2_LABEL = '-n-mux2'
4887 
5183 EAST_VELOCITY_MUX2_LABEL = '-e-mux2'
5184 NORTH_VELOCITY_MUX2_LABEL = '-n-mux2'
5185 
5186 ##
5187 # @brief
5188 # @param filenames List of mux2 format input filenames.
5189 # @param weights Weights associated with each source file.
5190 # @param permutation The gauge numbers for which data is extracted.
5191 # @param verbose True if this function is to be verbose.
5192 # @return (times, latitudes, longitudes, elevation, quantity, starttime)
4888 5193 def read_mux2_py(filenames,
4889 5194 weights=None,
4890 5195 permutation=None,
4891 5196 verbose=False):
4892 """Access the mux files specified in the filenames list. Combine the
4893 data found therin as a weighted linear sum as specifed by the weights.
4894 If permutation is None or empty extract timeseries data for all gauges within the
4895 files.
4896 
4897 Input:
4898 filenames: List of filenames specifiying the file containing the
4899 timeseries data (mux2 format) for each source
4900 weights: Weighs associated with each source (defaults to 1 for each source)
4901 permutation: Specifies the gauge numbers that for which data is to be
4902 extracted
4903 """
5197 """Access the mux files specified in the filenames list. Combine the
5198 data found therein as a weighted linear sum as specified by the weights.
5199 If permutation is None or empty extract timeseries data for all gauges
5200 within the files.
5201 
5202 Input:
5203 filenames: List of filenames specifying the file containing the
5204 timeseries data (mux2 format) for each source
5205 weights: Weights associated with each source
5206 (defaults to 1 for each source)
5207 permutation: Specifies the gauge numbers for which data is to be
5208 extracted
5209 """
4904 5210 
4905 5211 from Numeric import ones,Float,compress,zeros,arange
4906 5212 from urs_ext import read_mux2
4907 5213 
4908 numSrc=len(filenames)
4909 
4910 file_params=-1*ones(3,Float) #[nsta,dt,nt]
4911 
5214 numSrc = len(filenames)
5215 
5216 file_params = -1 * ones(3, Float) # [nsta,dt,nt]
5217 
4912 5218 # Convert verbose to int C flag
4913 5219 if verbose:
…
4918 5224 if weights is None:
4919 5225 weights = ones(numSrc)
4920 5226 
4921 5227 if permutation is None:
4922 permutation = ensure_numeric([], Float)
4923 
4924 # print 'filenames', filenames
4925 #print 'weights', weights
4926 
4927 # Call underlying C implementation urs2sts_ext.c