[1758] | 1 | """This module contains various auxiliary function used by pyvolution. |
---|
| 2 | |
---|
| 3 | It is also a clearing house for functions that may later earn a module |
---|
| 4 | of their own. |
---|
| 5 | """ |
---|
| 6 | |
---|
[3514] | 7 | import anuga.utilities.polygon |
---|
[3931] | 8 | import sys |
---|
[3994] | 9 | import os |
---|
[3931] | 10 | |
---|
[4634] | 11 | from os import remove, mkdir, access, F_OK, W_OK, sep,mkdir |
---|
| 12 | from os.path import exists, basename, split,join |
---|
[1932] | 13 | from warnings import warn |
---|
[3931] | 14 | from shutil import copy |
---|
[1911] | 15 | |
---|
[4480] | 16 | from anuga.utilities.numerical_tools import ensure_numeric |
---|
[4634] | 17 | from Numeric import arange, choose, zeros, Float, array |
---|
[4480] | 18 | |
---|
[3514] | 19 | from anuga.geospatial_data.geospatial_data import ensure_absolute |
---|
[1911] | 20 | |
---|
[1758] | 21 | def file_function(filename, |
---|
[3900] | 22 | domain=None, |
---|
| 23 | quantities=None, |
---|
| 24 | interpolation_points=None, |
---|
| 25 | time_thinning=1, |
---|
| 26 | verbose=False, |
---|
| 27 | use_cache=False): |
---|
[1835] | 28 | """Read time history of spatial data from NetCDF file and return |
---|
| 29 | a callable object. |
---|
[1758] | 30 | |
---|
[1905] | 31 | Input variables: |
---|
| 32 | |
---|
| 33 | filename - Name of sww or tms file |
---|
| 34 | |
---|
| 35 | If the file has extension 'sww' then it is assumed to be spatio-temporal |
---|
| 36 | or temporal and the callable object will have the form f(t,x,y) or f(t) |
---|
| 37 | depending on whether the file contains spatial data |
---|
[1758] | 38 | |
---|
[1905] | 39 | If the file has extension 'tms' then it is assumed to be temporal only |
---|
| 40 | and the callable object will have the form f(t) |
---|
[1835] | 41 | |
---|
[1905] | 42 | Either form will return interpolated values based on the input file |
---|
| 43 | using the underlying interpolation_function. |
---|
[1835] | 44 | |
---|
[1905] | 45 | domain - Associated domain object |
---|
| 46 | If domain is specified, model time (domain.starttime) |
---|
| 47 | will be checked and possibly modified. |
---|
[1835] | 48 | |
---|
[1905] | 49 | All times are assumed to be in UTC |
---|
| 50 | |
---|
| 51 | All spatial information is assumed to be in absolute UTM coordinates. |
---|
[1835] | 52 | |
---|
[1905] | 53 | quantities - the name of the quantity to be interpolated or a |
---|
| 54 | list of quantity names. The resulting function will return |
---|
[4341] | 55 | a tuple of values - one for each quantity |
---|
| 56 | If quantities are None, domain's conserved quantities |
---|
| 57 | are used. |
---|
[1835] | 58 | |
---|
[3262] | 59 | interpolation_points - list of absolute UTM coordinates for points (N x 2) |
---|
| 60 | or geospatial object or points file name at which values are sought |
---|
[1905] | 61 | |
---|
[1986] | 62 | use_cache: True means that caching of intermediate result of |
---|
| 63 | Interpolation_function is attempted |
---|
[1905] | 64 | |
---|
| 65 | |
---|
[1835] | 66 | See Interpolation function for further documentation |
---|
[1758] | 67 | """ |
---|
[1986] | 68 | |
---|
| 69 | |
---|
[4341] | 70 | #FIXME (OLE): Should check origin of domain against that of file |
---|
| 71 | #In fact, this is where origin should be converted to that of domain |
---|
| 72 | #Also, check that file covers domain fully. |
---|
| 73 | |
---|
| 74 | #Take into account: |
---|
| 75 | #- domain's georef |
---|
| 76 | #- sww file's georef |
---|
| 77 | #- interpolation points as absolute UTM coordinates |
---|
| 78 | |
---|
| 79 | |
---|
| 80 | |
---|
| 81 | |
---|
| 82 | # Use domain's conserved_quantity names as defaults |
---|
| 83 | if domain is not None: |
---|
| 84 | if quantities is None: |
---|
| 85 | quantities = domain.conserved_quantities |
---|
| 86 | |
---|
| 87 | domain_starttime = domain.starttime |
---|
| 88 | else: |
---|
| 89 | domain_starttime = None |
---|
| 90 | |
---|
| 91 | |
---|
[3900] | 92 | # Build arguments and keyword arguments for use with caching or apply. |
---|
| 93 | args = (filename,) |
---|
[4341] | 94 | |
---|
| 95 | |
---|
| 96 | # FIXME (Ole): Caching this function will not work well |
---|
| 97 | # if domain is passed in as instances change hash code. |
---|
| 98 | # Instead we pass in those attributes that are needed (and return them |
---|
| 99 | # if modified) |
---|
| 100 | kwargs = {'quantities': quantities, |
---|
[3900] | 101 | 'interpolation_points': interpolation_points, |
---|
[4341] | 102 | 'domain_starttime': domain_starttime, |
---|
[3900] | 103 | 'time_thinning': time_thinning, |
---|
[4338] | 104 | 'verbose': verbose} |
---|
[3900] | 105 | |
---|
[4338] | 106 | |
---|
| 107 | # Call underlying engine with or without caching |
---|
| 108 | if use_cache is True: |
---|
| 109 | try: |
---|
| 110 | from caching import cache |
---|
| 111 | except: |
---|
| 112 | msg = 'Caching was requested, but caching module'+\ |
---|
| 113 | 'could not be imported' |
---|
| 114 | raise msg |
---|
| 115 | |
---|
[4341] | 116 | f, starttime = cache(_file_function, |
---|
| 117 | args, kwargs, |
---|
| 118 | dependencies=[filename], |
---|
| 119 | compression=False, |
---|
| 120 | verbose=verbose) |
---|
[4338] | 121 | |
---|
| 122 | else: |
---|
[4341] | 123 | f, starttime = apply(_file_function, |
---|
| 124 | args, kwargs) |
---|
[1986] | 125 | |
---|
[3900] | 126 | |
---|
| 127 | #FIXME (Ole): Pass cache arguments, such as compression, in some sort of |
---|
| 128 | #structure |
---|
| 129 | |
---|
[4487] | 130 | f.starttime = starttime |
---|
| 131 | |
---|
[4341] | 132 | if domain is not None: |
---|
| 133 | #Update domain.startime if it is *earlier* than starttime from file |
---|
| 134 | if starttime > domain.starttime: |
---|
| 135 | msg = 'WARNING: Start time as specified in domain (%f)'\ |
---|
| 136 | %domain.starttime |
---|
| 137 | msg += ' is earlier than the starttime of file %s (%f).'\ |
---|
| 138 | %(filename, starttime) |
---|
| 139 | msg += ' Modifying domain starttime accordingly.' |
---|
| 140 | |
---|
| 141 | if verbose: print msg |
---|
| 142 | domain.starttime = starttime #Modifying model time |
---|
| 143 | if verbose: print 'Domain starttime is now set to %f'\ |
---|
| 144 | %domain.starttime |
---|
| 145 | |
---|
[1986] | 146 | return f |
---|
| 147 | |
---|
| 148 | |
---|
| 149 | |
---|
| 150 | def _file_function(filename, |
---|
[3900] | 151 | quantities=None, |
---|
| 152 | interpolation_points=None, |
---|
[4341] | 153 | domain_starttime=None, |
---|
| 154 | time_thinning=1, |
---|
[4338] | 155 | verbose=False): |
---|
[1986] | 156 | """Internal function |
---|
[1835] | 157 | |
---|
[1986] | 158 | See file_function for documentatiton |
---|
| 159 | """ |
---|
| 160 | |
---|
[1758] | 161 | |
---|
| 162 | assert type(filename) == type(''),\ |
---|
| 163 | 'First argument to File_function must be a string' |
---|
| 164 | |
---|
| 165 | try: |
---|
| 166 | fid = open(filename) |
---|
| 167 | except Exception, e: |
---|
| 168 | msg = 'File "%s" could not be opened: Error="%s"'\ |
---|
| 169 | %(filename, e) |
---|
| 170 | raise msg |
---|
| 171 | |
---|
| 172 | line = fid.readline() |
---|
| 173 | fid.close() |
---|
| 174 | |
---|
| 175 | |
---|
| 176 | if line[:3] == 'CDF': |
---|
[4341] | 177 | return get_netcdf_file_function(filename, |
---|
| 178 | quantities, |
---|
[1986] | 179 | interpolation_points, |
---|
[4341] | 180 | domain_starttime, |
---|
[3900] | 181 | time_thinning=time_thinning, |
---|
[4338] | 182 | verbose=verbose) |
---|
[1758] | 183 | else: |
---|
| 184 | raise 'Must be a NetCDF File' |
---|
| 185 | |
---|
| 186 | |
---|
| 187 | |
---|
[1986] | 188 | def get_netcdf_file_function(filename, |
---|
| 189 | quantity_names=None, |
---|
| 190 | interpolation_points=None, |
---|
[4341] | 191 | domain_starttime=None, |
---|
[3900] | 192 | time_thinning=1, |
---|
[4338] | 193 | verbose=False): |
---|
[1758] | 194 | """Read time history of spatial data from NetCDF sww file and |
---|
| 195 | return a callable object f(t,x,y) |
---|
| 196 | which will return interpolated values based on the input file. |
---|
| 197 | |
---|
[4341] | 198 | Model time (domain_starttime) |
---|
| 199 | will be checked, possibly modified and returned |
---|
[1758] | 200 | |
---|
| 201 | All times are assumed to be in UTC |
---|
| 202 | |
---|
| 203 | See Interpolation function for further documetation |
---|
| 204 | |
---|
| 205 | """ |
---|
| 206 | |
---|
| 207 | |
---|
| 208 | #FIXME: Check that model origin is the same as file's origin |
---|
| 209 | #(both in UTM coordinates) |
---|
| 210 | #If not - modify those from file to match domain |
---|
[4341] | 211 | #(origin should be passed in) |
---|
[1758] | 212 | #Take this code from e.g. dem2pts in data_manager.py |
---|
| 213 | #FIXME: Use geo_reference to read and write xllcorner... |
---|
| 214 | |
---|
| 215 | |
---|
| 216 | import time, calendar, types |
---|
[3514] | 217 | from anuga.config import time_format |
---|
[1758] | 218 | from Scientific.IO.NetCDF import NetCDFFile |
---|
[1884] | 219 | from Numeric import array, zeros, Float, alltrue, concatenate, reshape |
---|
[1758] | 220 | |
---|
| 221 | #Open NetCDF file |
---|
| 222 | if verbose: print 'Reading', filename |
---|
| 223 | fid = NetCDFFile(filename, 'r') |
---|
| 224 | |
---|
| 225 | if type(quantity_names) == types.StringType: |
---|
| 226 | quantity_names = [quantity_names] |
---|
| 227 | |
---|
| 228 | if quantity_names is None or len(quantity_names) < 1: |
---|
[1819] | 229 | #If no quantities are specified get quantities from file |
---|
| 230 | #x, y, time are assumed as the independent variables so |
---|
| 231 | #they are excluded as potentiol quantities |
---|
[1758] | 232 | quantity_names = [] |
---|
| 233 | for name in fid.variables.keys(): |
---|
| 234 | if name not in ['x', 'y', 'time']: |
---|
| 235 | quantity_names.append(name) |
---|
| 236 | |
---|
[1819] | 237 | if len(quantity_names) < 1: |
---|
| 238 | msg = 'ERROR: At least one independent value must be specified' |
---|
| 239 | raise msg |
---|
| 240 | |
---|
| 241 | |
---|
[1884] | 242 | if interpolation_points is not None: |
---|
[3262] | 243 | interpolation_points = ensure_absolute(interpolation_points) |
---|
[3080] | 244 | msg = 'Points must by N x 2. I got %d' %interpolation_points.shape[1] |
---|
| 245 | assert interpolation_points.shape[1] == 2, msg |
---|
[1884] | 246 | |
---|
| 247 | |
---|
[1819] | 248 | #Now assert that requested quantitites (and the independent ones) |
---|
| 249 | #are present in file |
---|
[1758] | 250 | missing = [] |
---|
[1835] | 251 | for quantity in ['time'] + quantity_names: |
---|
[1758] | 252 | if not fid.variables.has_key(quantity): |
---|
| 253 | missing.append(quantity) |
---|
| 254 | |
---|
| 255 | if len(missing) > 0: |
---|
| 256 | msg = 'Quantities %s could not be found in file %s'\ |
---|
| 257 | %(str(missing), filename) |
---|
| 258 | fid.close() |
---|
[3649] | 259 | raise Exception, msg |
---|
[1758] | 260 | |
---|
| 261 | #Decide whether this data has a spatial dimension |
---|
| 262 | spatial = True |
---|
| 263 | for quantity in ['x', 'y']: |
---|
| 264 | if not fid.variables.has_key(quantity): |
---|
| 265 | spatial = False |
---|
| 266 | |
---|
[1835] | 267 | if filename[-3:] == 'tms' and spatial is True: |
---|
| 268 | msg = 'Files of type tms must not contain spatial information' |
---|
| 269 | raise msg |
---|
[1758] | 270 | |
---|
[1835] | 271 | if filename[-3:] == 'sww' and spatial is False: |
---|
| 272 | msg = 'Files of type sww must contain spatial information' |
---|
| 273 | raise msg |
---|
[1758] | 274 | |
---|
| 275 | #Get first timestep |
---|
| 276 | try: |
---|
| 277 | starttime = fid.starttime[0] |
---|
| 278 | except ValueError: |
---|
| 279 | msg = 'Could not read starttime from file %s' %filename |
---|
| 280 | raise msg |
---|
| 281 | |
---|
| 282 | #Get variables |
---|
[3850] | 283 | #if verbose: print 'Get variables' |
---|
[1758] | 284 | time = fid.variables['time'][:] |
---|
| 285 | |
---|
[3900] | 286 | # Get time independent stuff |
---|
[1758] | 287 | if spatial: |
---|
| 288 | #Get origin |
---|
| 289 | xllcorner = fid.xllcorner[0] |
---|
| 290 | yllcorner = fid.yllcorner[0] |
---|
[1859] | 291 | zone = fid.zone[0] |
---|
[1758] | 292 | |
---|
| 293 | x = fid.variables['x'][:] |
---|
| 294 | y = fid.variables['y'][:] |
---|
| 295 | triangles = fid.variables['volumes'][:] |
---|
| 296 | |
---|
| 297 | x = reshape(x, (len(x),1)) |
---|
| 298 | y = reshape(y, (len(y),1)) |
---|
| 299 | vertex_coordinates = concatenate((x,y), axis=1) #m x 2 array |
---|
| 300 | |
---|
[1884] | 301 | if interpolation_points is not None: |
---|
| 302 | #Adjust for georef |
---|
| 303 | interpolation_points[:,0] -= xllcorner |
---|
| 304 | interpolation_points[:,1] -= yllcorner |
---|
| 305 | |
---|
[1758] | 306 | |
---|
[1884] | 307 | |
---|
| 308 | |
---|
[4341] | 309 | if domain_starttime is not None: |
---|
[1758] | 310 | |
---|
[4341] | 311 | #If domain_startime is *later* than starttime, |
---|
[1758] | 312 | #move time back - relative to domain's time |
---|
[4341] | 313 | if domain_starttime > starttime: |
---|
| 314 | time = time - domain_starttime + starttime |
---|
[1758] | 315 | |
---|
[1859] | 316 | #FIXME Use method in geo to reconcile |
---|
| 317 | #if spatial: |
---|
| 318 | #assert domain.geo_reference.xllcorner == xllcorner |
---|
| 319 | #assert domain.geo_reference.yllcorner == yllcorner |
---|
| 320 | #assert domain.geo_reference.zone == zone |
---|
| 321 | |
---|
[1758] | 322 | if verbose: |
---|
| 323 | print 'File_function data obtained from: %s' %filename |
---|
| 324 | print ' References:' |
---|
| 325 | #print ' Datum ....' #FIXME |
---|
| 326 | if spatial: |
---|
| 327 | print ' Lower left corner: [%f, %f]'\ |
---|
| 328 | %(xllcorner, yllcorner) |
---|
| 329 | print ' Start time: %f' %starttime |
---|
| 330 | |
---|
| 331 | |
---|
[3900] | 332 | # Produce values for desired data points at |
---|
| 333 | # each timestep for each quantity |
---|
[1758] | 334 | quantities = {} |
---|
| 335 | for i, name in enumerate(quantity_names): |
---|
| 336 | quantities[name] = fid.variables[name][:] |
---|
| 337 | fid.close() |
---|
| 338 | |
---|
[4396] | 339 | |
---|
[2750] | 340 | #from least_squares import Interpolation_function |
---|
[3514] | 341 | from anuga.fit_interpolate.interpolate import Interpolation_function |
---|
[1758] | 342 | |
---|
| 343 | if not spatial: |
---|
| 344 | vertex_coordinates = triangles = interpolation_points = None |
---|
[1986] | 345 | |
---|
[4341] | 346 | |
---|
| 347 | # Return Interpolation_function instance as well as |
---|
| 348 | # starttime for use to possible modify that of domain |
---|
[4338] | 349 | return Interpolation_function(time, |
---|
| 350 | quantities, |
---|
| 351 | quantity_names, |
---|
| 352 | vertex_coordinates, |
---|
| 353 | triangles, |
---|
| 354 | interpolation_points, |
---|
| 355 | time_thinning=time_thinning, |
---|
[4341] | 356 | verbose=verbose), starttime |
---|
[1986] | 357 | |
---|
[4341] | 358 | # NOTE (Ole): Caching Interpolation function is too slow as |
---|
| 359 | # the very long parameters need to be hashed. |
---|
[1758] | 360 | |
---|
| 361 | |
---|
[4341] | 362 | |
---|
| 363 | |
---|
| 364 | |
---|
| 365 | |
---|
def multiple_replace(text, dictionary):
    """Replace whole words in text according to a substitution mapping.

    text: String to be modified
    dictionary: Mapping of words that are to be substituted

    Python Cookbook 3.14 page 88 and page 90
    """

    import re

    # Build one regular expression that matches any dictionary key as an
    # entire word; the \b anchors prevent partial-word matches
    escaped = map(re.escape, dictionary.keys())
    pattern = r'\b' + r'\b|\b'.join(escaped) + r'\b'
    regex = re.compile(pattern)

    # Each match is replaced by the corresponding dictionary value
    def lookup(match):
        return dictionary[match.group(0)]

    return regex.sub(lookup, text)
---|
[1919] | 385 | |
---|
| 386 | |
---|
[1927] | 387 | |
---|
| 388 | |
---|
| 389 | def apply_expression_to_dictionary(expression, dictionary):#dictionary): |
---|
| 390 | """Apply arbitrary expression to values of dictionary |
---|
| 391 | |
---|
[1919] | 392 | Given an expression in terms of the keys, replace key by the |
---|
| 393 | corresponding values and evaluate. |
---|
[1927] | 394 | |
---|
[1919] | 395 | |
---|
[1927] | 396 | expression: Arbitrary, e.g. arithmetric, expression relating keys |
---|
| 397 | from dictionary. |
---|
| 398 | |
---|
| 399 | dictionary: Mapping between symbols in expression and objects that |
---|
| 400 | will be evaluated by expression. |
---|
| 401 | Values in dictionary must support operators given in |
---|
[2314] | 402 | expression e.g. by overloading |
---|
| 403 | |
---|
| 404 | due to a limitation with Numeric, this can not evaluate 0/0 |
---|
| 405 | In general, the user can fix by adding 1e-30 to the numerator. |
---|
| 406 | SciPy core can handle this situation. |
---|
[1919] | 407 | """ |
---|
| 408 | |
---|
| 409 | import types |
---|
[1924] | 410 | import re |
---|
[1919] | 411 | |
---|
| 412 | assert isinstance(expression, basestring) |
---|
[1927] | 413 | assert type(dictionary) == types.DictType |
---|
[1919] | 414 | |
---|
[1927] | 415 | #Convert dictionary values to textual representations suitable for eval |
---|
| 416 | D = {} |
---|
| 417 | for key in dictionary: |
---|
| 418 | D[key] = 'dictionary["%s"]' %key |
---|
[1924] | 419 | |
---|
[1927] | 420 | #Perform substitution of variables |
---|
| 421 | expression = multiple_replace(expression, D) |
---|
[1924] | 422 | |
---|
[1919] | 423 | #Evaluate and return |
---|
| 424 | try: |
---|
| 425 | return eval(expression) |
---|
| 426 | except NameError, e: |
---|
| 427 | msg = 'Expression "%s" could not be evaluated: %s' %(expression, e) |
---|
| 428 | raise NameError, msg |
---|
[2314] | 429 | except ValueError, e: |
---|
| 430 | msg = 'Expression "%s" could not be evaluated: %s' %(expression, e) |
---|
| 431 | raise ValueError, msg |
---|
[1919] | 432 | |
---|
| 433 | |
---|
def get_textual_float(value, format = '%.2f'):
    """Get textual representation of floating point numbers
    and accept None as valid entry

    format is a string - default = '%.2f'

    Scalars are formatted with 'format'; sequences of more than one
    element are rendered recursively as '(v1, v2, ...)'.
    Raises ValueError for values that are neither None, numeric nor
    a sequence of numerics.
    """

    if value is None:
        return 'None'

    try:
        float(value)
    except (TypeError, ValueError):
        # Not a scalar - maybe this is a vector (sequence of numbers)
        if len(value) > 1:
            s = '('
            for v in value:
                s += get_textual_float(v, format) + ', '

            s = s[:-2] + ')' # Strip trailing comma and close
            return s
        else:
            # Was an invalid string raise; raise a real exception instead
            msg = 'Illegal input to get_textual_float: %s' %str(value)
            raise ValueError(msg)
    else:
        return format %float(value)
---|
| 459 | |
---|
| 460 | |
---|
| 461 | |
---|
[2516] | 462 | #################################### |
---|
| 463 | ####OBSOLETE STUFF |
---|
[1919] | 464 | |
---|
| 465 | |
---|
def angle(v1, v2):
    """Temporary Interface to new location"""

    import anuga.utilities.numerical_tools as NT

    # Deprecation shim: emit a warning, then delegate
    warn('angle has moved from util.py. '
         'Please use "from anuga.utilities.numerical_tools import angle"',
         DeprecationWarning)

    return NT.angle(v1, v2)
---|
[2526] | 476 | |
---|
def anglediff(v0, v1):
    """Temporary Interface to new location"""

    import anuga.utilities.numerical_tools as NT

    # Deprecation shim: emit a warning, then delegate
    warn('anglediff has moved from util.py. '
         'Please use "from anuga.utilities.numerical_tools import anglediff"',
         DeprecationWarning)

    return NT.anglediff(v0, v1)
---|
| 487 | |
---|
| 488 | |
---|
def mean(x):
    """Temporary Interface to new location"""

    import anuga.utilities.numerical_tools as NT

    # Deprecation shim: emit a warning, then delegate
    warn('mean has moved from util.py. '
         'Please use "from anuga.utilities.numerical_tools import mean"',
         DeprecationWarning)

    return NT.mean(x)
---|
| 499 | |
---|
def point_on_line(*args, **kwargs):
    """Temporary Interface to new location"""

    msg = 'point_on_line has moved from util.py. '
    msg += 'Please use "from anuga.utilities.polygon import point_on_line"'
    warn(msg, DeprecationWarning)

    # Fully qualified reference: the bare name 'utilities' is not bound in
    # this module (only 'anuga.utilities.polygon' is imported), so the old
    # 'utilities.polygon...' call raised NameError
    return anuga.utilities.polygon.point_on_line(*args, **kwargs)
---|
| 508 | |
---|
| 509 | def inside_polygon(*args, **kwargs): |
---|
| 510 | """Temporary Interface to new location""" |
---|
[1758] | 511 | |
---|
[2516] | 512 | print 'inside_polygon has moved from util.py. ', |
---|
[3514] | 513 | print 'Please use "from anuga.utilities.polygon import inside_polygon"' |
---|
[1758] | 514 | |
---|
[2516] | 515 | return utilities.polygon.inside_polygon(*args, **kwargs) |
---|
| 516 | |
---|
| 517 | def outside_polygon(*args, **kwargs): |
---|
| 518 | """Temporary Interface to new location""" |
---|
[1758] | 519 | |
---|
[2516] | 520 | print 'outside_polygon has moved from util.py. ', |
---|
[3514] | 521 | print 'Please use "from anuga.utilities.polygon import outside_polygon"' |
---|
[2516] | 522 | |
---|
| 523 | return utilities.polygon.outside_polygon(*args, **kwargs) |
---|
| 524 | |
---|
| 525 | |
---|
| 526 | def separate_points_by_polygon(*args, **kwargs): |
---|
| 527 | """Temporary Interface to new location""" |
---|
| 528 | |
---|
| 529 | print 'separate_points_by_polygon has moved from util.py. ', |
---|
[3514] | 530 | print 'Please use "from anuga.utilities.polygon import separate_points_by_polygon"' |
---|
[2516] | 531 | |
---|
| 532 | return utilities.polygon.separate_points_by_polygon(*args, **kwargs) |
---|
| 533 | |
---|
| 534 | |
---|
| 535 | |
---|
| 536 | def read_polygon(*args, **kwargs): |
---|
| 537 | """Temporary Interface to new location""" |
---|
| 538 | |
---|
| 539 | print 'read_polygon has moved from util.py. ', |
---|
[3514] | 540 | print 'Please use "from anuga.utilities.polygon import read_polygon"' |
---|
[2516] | 541 | |
---|
| 542 | return utilities.polygon.read_polygon(*args, **kwargs) |
---|
| 543 | |
---|
| 544 | |
---|
| 545 | def populate_polygon(*args, **kwargs): |
---|
| 546 | """Temporary Interface to new location""" |
---|
| 547 | |
---|
| 548 | print 'populate_polygon has moved from util.py. ', |
---|
[3514] | 549 | print 'Please use "from anuga.utilities.polygon import populate_polygon"' |
---|
[2516] | 550 | |
---|
| 551 | return utilities.polygon.populate_polygon(*args, **kwargs) |
---|
| 552 | |
---|
[3299] | 553 | ##################### end of obsolete stuff ? ############ |
---|
[2701] | 554 | |
---|
[4304] | 555 | def start_screen_catcher(dir_name, myid='', numprocs='', extra_info='', |
---|
| 556 | print_to_screen=False, verbose=False): |
---|
[4510] | 557 | """Temporary Interface to new location""" |
---|
[4512] | 558 | from anuga.shallow_water.data_manager import start_screen_catcher as dm_start_screen_catcher |
---|
[2701] | 559 | |
---|
[4510] | 560 | print 'start_screen_catcher has moved from util.py. ', |
---|
| 561 | print 'Please use "from anuga.shallow_water.data_manager import start_screen_catcher"' |
---|
[3931] | 562 | |
---|
[4512] | 563 | return dm_start_screen_catcher(dir_name, myid='', numprocs='', extra_info='', |
---|
[4510] | 564 | print_to_screen=False, verbose=False) |
---|
[2701] | 565 | |
---|
def get_revision_number():
    """Get the version number of the SVN
    NOTE: This requires that the command svn is on the system PATH
    (simply aliasing svn to the binary will not work)

    Lookup order: first try the stored file anuga.stored_version_info
    (written by store_version_info); if that import fails, shell out to
    'svn info'. The revision is parsed from the 'Revision:' line.
    Raises Exception when neither source yields version text, and when
    the revision field is not an integer.
    """

    # Create dummy info
    #info = 'Revision: Version info could not be obtained.'
    #info += 'A command line version of svn must be availbable '
    #info += 'on the system PATH, access to the subversion '
    #info += 'repository is necessary and the output must '
    #info += 'contain a line starting with "Revision:"'


    #FIXME (Ole): Change this so that svn info is attempted first.
    # If that fails, try to read a stored file with that same info (this would be created by e.g. the release script). Failing that, throw an exception.

    #FIXME (Ole): Move this and store_version_info to utilities


    try:
        from anuga.stored_version_info import version_info
    except:
        # Stored info unavailable - prepare a diagnostic message used by
        # both failure paths below, then fall back to the svn command
        msg = 'No version info stored and command "svn" is not '
        msg += 'recognised on the system PATH.\n\n'
        msg += 'If ANUGA has been installed from a distribution e.g. as '
        msg += 'obtained from SourceForge,\n'
        msg += 'the version info should be '
        msg += 'available in the automatically generated file '
        msg += 'stored_version_info.py\n'
        msg += 'in the anuga root directory.\n'
        msg += 'If run from a Subversion sandpit, '
        msg += 'ANUGA will try to obtain the version info '
        msg += 'by using the command: "svn info".\n'
        msg += 'In this case, make sure svn is accessible on the system path. '
        msg += 'Simply aliasing svn to the binary will not work. '
        msg += 'Good luck!'

        # No file available - try using Subversion
        try:
            # The null stuff is so this section fails quitly.
            # This could cause the svn info command to fail due to
            # the redirection being bad on some platforms.
            # If that occurs then change this code.
            if sys.platform[0:3] == 'win':
                fid = os.popen('svn info 2> null')
            else:
                fid = os.popen('svn info 2>/dev/null')

        except:
            # NOTE(review): os.popen rarely raises here; failures usually
            # surface as empty output, handled below - TODO confirm
            raise Exception(msg)
        else:
            #print 'Got version from svn'
            version_info = fid.read()

            if version_info == '':
                # svn produced no output - treat as not available
                raise Exception(msg)
    else:
        pass
        #print 'Got version from file'


    # Scan for the 'Revision:' line; if none is found, 'line' holds the
    # last line examined and the assert below fires with a diagnostic
    for line in version_info.split('\n'):
        if line.startswith('Revision:'):
            break

    fields = line.split(':')
    msg = 'Keyword "Revision" was not found anywhere in text: %s' %version_info
    assert fields[0].startswith('Revision'), msg

    try:
        revision_number = int(fields[1])
    except:
        msg = 'Revision number must be an integer. I got %s' %fields[1]
        msg += 'Check that the command svn is on the system path'
        raise Exception(msg)

    return revision_number
---|
| 644 | |
---|
| 645 | |
---|
def store_version_info(destination_path='.', verbose=False):
    """Obtain current version from Subversion and store it.

    Title: store_version_info()

    Author: Ole Nielsen (Ole.Nielsen@ga.gov.au)

    CreationDate: January 2006

    Description:
        This function obtains current version from Subversion and stores it
        is a Python file named 'stored_version_info.py' for use with
        get_version_info()

    If svn is not available on the system PATH, an Exception is thrown
    """

    # Note (Ole): This function should not be unit tested as it will only
    # work when running out of the sandpit. End users downloading the
    # ANUGA distribution would see a failure.
    #
    # FIXME: This function should really only be used by developers (
    # (e.g. for creating new ANUGA releases), so maybe it should move
    # to somewhere else.

    # Implicit relative import (Python 2 semantics); provides
    # config.version_filename used below
    import config

    try:
        fid = os.popen('svn info')
    except:
        # NOTE(review): os.popen rarely raises here; a missing svn usually
        # yields empty output instead - TODO confirm
        msg = 'Command "svn" is not recognised on the system PATH'
        raise Exception(msg)
    else:
        txt = fid.read()
        fid.close()


    # Determine absolute filename
    if destination_path[-1] != os.sep:
        destination_path += os.sep

    filename = destination_path + config.version_filename

    fid = open(filename, 'w')

    # Write a module docstring followed by the raw 'svn info' output
    # captured in a triple-quoted string for get_revision_number to parse
    docstring = 'Stored version info.\n\n'
    docstring += 'This file provides the version for distributions '
    docstring += 'that are not accessing Subversion directly.\n'
    docstring += 'The file is automatically generated and should not '
    docstring += 'be modified manually.\n'
    fid.write('"""%s"""\n\n' %docstring)

    fid.write('version_info = """\n%s"""' %txt)
    fid.close()


    if verbose is True:
        print 'Version info stored to %s' %filename
---|
| 704 | |
---|
[2929] | 705 | |
---|
def sww2timeseries(swwfiles,
                   gauge_filename,
                   production_dirs,
                   report = None,
                   reportname = None,
                   plot_quantity = None,
                   generate_fig = False,
                   surface = None,
                   time_min = None,
                   time_max = None,
                   time_thinning = 1,
                   time_unit = None,
                   title_on = None,
                   use_cache = False,
                   verbose = False):
    """Extract time series at gauge locations from sww files, write them to
    csv and (optionally) plot them.

    This is the public front end; all of the work is delegated to
    _sww2timeseries, which has the same signature.

    Input variables:

    swwfiles       - dictionary mapping each sww filename to a label_id
                     (used when generating the latex output; typically the
                     timestamped directory name, so output from different
                     simulations of one scenario can be told apart).
                     All conserved quantities must have been stored and
                     each sww file is assumed to use the same timestep.

    gauge_filename - name of a csv file of gauge points with columns
                     easting, northing, name, elevation.

    production_dirs - mapping of production directory to label, e.g.
                     {20061101_121212: '1 in 10000',
                      'boundaries': 'urs boundary'}
                     #FIXME: Is it a list or a dictionary
                     # This is probably obsolete by now

    report         - if True, write figures to a report_figures directory
                     in the relevant production directory; if False the
                     figures stay with the sww file. Defaults to False.

    reportname     - name of the report, when one is generated.

    plot_quantity  - list of quantities to plot. Each entry must be an
                     existing quantity name or one of:
                     'stage', 'depth', 'speed' (absolute momentum divided
                     pointwise by depth), 'bearing' (angle of the momentum
                     vector from North), 'momentum'
                     (sqrt(xmomentum^2 + ymomentum^2)), 'xmomentum',
                     'ymomentum'.

    generate_fig   - if True produce figures as well as csv files;
                     if False only the csv files are created.

    surface        - if True also generate a 3d solution-surface plot in
                     the current working directory. Defaults to False.

    time_min       - start of the time range to plot
                     (default: first time found in the sww file).

    time_max       - end of the time range to plot
                     (default: last time found in the sww file).

    title_on       - if True, titles are placed on the exported graphics.

    Output:

    - time series stored as csv for later use; one file per gauge,
      named gauges_timeseries followed by the gauge name.
    - a latex file generated in the directory the script is run from
      (usually the production scenario directory),
      named latexoutputlabel_id.tex

    It is assumed that all the conserved quantities and elevation were
    stored during the scenario run, i.e.
    ['stage', 'elevation', 'xmomentum', 'ymomentum'];
    otherwise sww2timeseries will not work.

    Usage example

    texname = sww2timeseries({project.boundary_name + '.sww': ''},
                             project.polygons_dir + sep + 'boundary_extent.csv',
                             project.anuga_dir,
                             report = False,
                             plot_quantity = ['stage', 'speed', 'bearing'],
                             time_min = None,
                             time_max = None,
                             title_on = True,
                             verbose = True)
    """

    # Forward everything by keyword so the mapping to the worker is explicit
    return _sww2timeseries(swwfiles,
                           gauge_filename,
                           production_dirs,
                           report = report,
                           reportname = reportname,
                           plot_quantity = plot_quantity,
                           generate_fig = generate_fig,
                           surface = surface,
                           time_min = time_min,
                           time_max = time_max,
                           time_thinning = time_thinning,
                           time_unit = time_unit,
                           title_on = title_on,
                           use_cache = use_cache,
                           verbose = verbose)
| 837 | |
---|
[3080] | 838 | def _sww2timeseries(swwfiles, |
---|
[2795] | 839 | gauge_filename, |
---|
[3080] | 840 | production_dirs, |
---|
[2839] | 841 | report = None, |
---|
[3141] | 842 | reportname = None, |
---|
[2795] | 843 | plot_quantity = None, |
---|
[4226] | 844 | generate_fig = False, |
---|
[3190] | 845 | surface = None, |
---|
[2795] | 846 | time_min = None, |
---|
| 847 | time_max = None, |
---|
[4571] | 848 | time_thinning = 1, |
---|
[4487] | 849 | time_unit = None, |
---|
[2795] | 850 | title_on = None, |
---|
[4304] | 851 | use_cache = False, |
---|
[2836] | 852 | verbose = False): |
---|
[2839] | 853 | |
---|
[2795] | 854 | assert type(gauge_filename) == type(''),\ |
---|
[3944] | 855 | 'Gauge filename must be a string' |
---|
[2795] | 856 | |
---|
| 857 | try: |
---|
| 858 | fid = open(gauge_filename) |
---|
| 859 | except Exception, e: |
---|
| 860 | msg = 'File "%s" could not be opened: Error="%s"'\ |
---|
| 861 | %(gauge_filename, e) |
---|
| 862 | raise msg |
---|
[2839] | 863 | |
---|
| 864 | if report is None: |
---|
| 865 | report = False |
---|
[2795] | 866 | |
---|
| 867 | if plot_quantity is None: |
---|
[4248] | 868 | plot_quantity = ['depth', 'speed'] |
---|
[2795] | 869 | else: |
---|
[3262] | 870 | assert type(plot_quantity) == list,\ |
---|
| 871 | 'plot_quantity must be a list' |
---|
[2795] | 872 | check_list(plot_quantity) |
---|
| 873 | |
---|
[3190] | 874 | if surface is None: |
---|
| 875 | surface = False |
---|
[4487] | 876 | |
---|
| 877 | if time_unit is None: |
---|
| 878 | time_unit = 'hours' |
---|
| 879 | |
---|
[2795] | 880 | if title_on is None: |
---|
| 881 | title_on = True |
---|
| 882 | |
---|
| 883 | if verbose: print '\n Gauges obtained from: %s \n' %gauge_filename |
---|
[3080] | 884 | gauges, locations, elev = get_gauges_from_file(gauge_filename) |
---|
[2795] | 885 | |
---|
| 886 | sww_quantity = ['stage', 'elevation', 'xmomentum', 'ymomentum'] |
---|
| 887 | |
---|
[3080] | 888 | file_loc = [] |
---|
| 889 | f_list = [] |
---|
| 890 | label_id = [] |
---|
[3094] | 891 | leg_label = [] |
---|
| 892 | themaxT = 0.0 |
---|
| 893 | theminT = 0.0 |
---|
[4487] | 894 | |
---|
[3080] | 895 | for swwfile in swwfiles.keys(): |
---|
[2795] | 896 | |
---|
[3080] | 897 | try: |
---|
| 898 | fid = open(swwfile) |
---|
| 899 | except Exception, e: |
---|
| 900 | msg = 'File "%s" could not be opened: Error="%s"'\ |
---|
| 901 | %(swwfile, e) |
---|
| 902 | raise msg |
---|
[2884] | 903 | |
---|
[4571] | 904 | print 'swwfile', swwfile |
---|
| 905 | |
---|
| 906 | # Extract parent dir name and use as label |
---|
| 907 | path, _ = os.path.split(swwfile) |
---|
| 908 | _, label = os.path.split(path) |
---|
| 909 | |
---|
| 910 | #print 'label', label |
---|
| 911 | leg_label.append(label) |
---|
| 912 | |
---|
| 913 | |
---|
| 914 | |
---|
[3080] | 915 | f = file_function(swwfile, |
---|
| 916 | quantities = sww_quantity, |
---|
| 917 | interpolation_points = gauges, |
---|
[4571] | 918 | time_thinning = time_thinning, |
---|
[4487] | 919 | verbose = verbose, |
---|
[4304] | 920 | use_cache = use_cache) |
---|
[3944] | 921 | |
---|
| 922 | # determine which gauges are contained in sww file |
---|
| 923 | count = 0 |
---|
| 924 | gauge_index = [] |
---|
| 925 | for k, g in enumerate(gauges): |
---|
| 926 | if f(0.0, point_id = k)[2] > 1.0e6: |
---|
| 927 | count += 1 |
---|
| 928 | if count == 1: print 'Gauges not contained here:' |
---|
| 929 | print locations[k] |
---|
| 930 | else: |
---|
| 931 | gauge_index.append(k) |
---|
| 932 | |
---|
| 933 | if len(gauge_index) > 0: |
---|
| 934 | print 'Gauges contained here: \n', |
---|
| 935 | else: |
---|
| 936 | print 'No gauges contained here. \n' |
---|
| 937 | for i in range(len(gauge_index)): |
---|
| 938 | print locations[gauge_index[i]] |
---|
| 939 | |
---|
[3080] | 940 | index = swwfile.rfind(sep) |
---|
| 941 | file_loc.append(swwfile[:index+1]) |
---|
| 942 | label_id.append(swwfiles[swwfile]) |
---|
[4571] | 943 | |
---|
[3094] | 944 | |
---|
[3080] | 945 | f_list.append(f) |
---|
[3094] | 946 | maxT = max(f.get_time()) |
---|
| 947 | minT = min(f.get_time()) |
---|
| 948 | if maxT > themaxT: themaxT = maxT |
---|
| 949 | if minT > theminT: theminT = minT |
---|
| 950 | |
---|
[2795] | 951 | if time_min is None: |
---|
[3094] | 952 | time_min = theminT # min(T) |
---|
[2795] | 953 | else: |
---|
[3094] | 954 | if time_min < theminT: # min(T): |
---|
[2795] | 955 | msg = 'Minimum time entered not correct - please try again' |
---|
| 956 | raise Exception, msg |
---|
| 957 | |
---|
| 958 | if time_max is None: |
---|
[3094] | 959 | time_max = themaxT # max(T) |
---|
[2795] | 960 | else: |
---|
[3094] | 961 | if time_max > themaxT: # max(T): |
---|
[2795] | 962 | msg = 'Maximum time entered not correct - please try again' |
---|
| 963 | raise Exception, msg |
---|
| 964 | |
---|
[3944] | 965 | if verbose and len(gauge_index) > 0: print 'Inputs OK - going to generate figures' |
---|
[2905] | 966 | |
---|
[3944] | 967 | if len(gauge_index) <> 0: |
---|
| 968 | texfile, elev_output = generate_figures(plot_quantity, file_loc, report, reportname, surface, |
---|
| 969 | leg_label, f_list, gauges, locations, elev, gauge_index, |
---|
[4487] | 970 | production_dirs, time_min, time_max, time_unit, |
---|
| 971 | title_on, label_id, generate_fig, verbose) |
---|
[3944] | 972 | else: |
---|
| 973 | texfile = '' |
---|
| 974 | elev_output = [] |
---|
| 975 | |
---|
| 976 | return texfile, elev_output |
---|
[4663] | 977 | |
---|
def get_gauges_from_file(filename):
    """Read gauge information from a csv file.

    The file either starts with a header line containing (at least) the
    columns easting, northing, name and elevation, followed by one gauge
    per line, or it has no header and exactly two columns interpreted as
    easting and northing.

    Returns a triple (gauges, gaugelocation, elev) where
      gauges        - list of [easting, northing] pairs (floats)
      gaugelocation - list of gauge names (or 0..N-1 for headerless files)
      elev          - list of elevations (-9999 for headerless files)

    Raises Exception if the header is present but incomplete.
    """
    fid = open(filename)
    lines = fid.readlines()
    fid.close()

    gauges = []
    gaugelocation = []
    elev = []

    # Check header information
    line1 = lines[0]
    line11 = line1.split(',')

    # A header is present when the first field is not a number.
    # (The previous test, isinstance(line11[0], str), was always true
    # since split() returns strings, which made the headerless branch
    # below unreachable.)
    try:
        float(line11[0])
    except ValueError:
        header = True
    else:
        header = False

    if header:
        # Locate the required columns by name (case/whitespace tolerant)
        east_index = None
        north_index = None
        name_index = None
        elev_index = None
        for i in range(len(line11)):
            field = line11[i].strip('\n').strip('\r').strip(' ').lower()
            if field == 'easting': east_index = i
            if field == 'northing': north_index = i
            if field == 'name': name_index = i
            if field == 'elevation': elev_index = i

        if east_index is None or north_index is None:
            msg = 'WARNING: %s does not contain correct header information' %(filename)
            msg += 'The header must be: easting, northing, name, elevation'
            raise Exception(msg)

        if elev_index is None:
            raise Exception('Header in %s must contain an elevation column' %filename)

        if name_index is None:
            raise Exception('Header in %s must contain a name column' %filename)

        lines = lines[1:] # Remove header from data
    else:
        # No header, assume that this is a simple easting, northing file

        msg = 'There was no header in file %s and the number of columns is %d' %(filename, len(line11))
        msg += '- I was assuming two columns corresponding to Easting and Northing'
        assert len(line11) == 2, msg

        east_index = 0
        north_index = 1

        # Fabricate placeholder elevations and names
        N = len(lines)
        elev = [-9999]*N
        gaugelocation = list(range(N))

    # Read in gauge data
    for line in lines:
        fields = line.split(',')

        gauges.append([float(fields[east_index]), float(fields[north_index])])

        if len(fields) > 2:
            elev.append(float(fields[elev_index]))
            loc = fields[name_index]
            gaugelocation.append(loc.strip('\n'))

    return gauges, gaugelocation, elev
[2795] | 1046 | |
---|
[4571] | 1047 | |
---|
| 1048 | |
---|
def check_list(quantity):
    """Check that the entries in the quantity list are valid quantity names.

    Each entry is lower-cased *in place* and then checked against the set
    of known quantities. Raises Exception listing any unknown entries.
    Returns None.
    """
    all_quantity = ['stage', 'depth', 'momentum', 'xmomentum',
                    'ymomentum', 'speed', 'bearing', 'elevation']

    # Normalise in place so callers also see the lower-cased names
    for i, name in enumerate(quantity):
        quantity[i] = name.lower()

    # The built-in set type exists from Python 2.4 onwards, so the
    # deprecated sets.Set fallback is no longer needed.
    unknown = list(set(quantity).difference(set(all_quantity)))
    if len(unknown) != 0:
        msg = 'Quantities %s do not exist - please try again' %unknown
        raise Exception(msg)

    return
| 1072 | |
---|
def calc_bearing(uh, vh):
    """Return the bearing of the velocity vector (uh, vh).

    The bearing is measured in degrees clockwise from North and lies in
    [0, 360). For the zero vector 0.0 is returned.

    The previous implementation used atan plus manual quadrant fix-ups,
    which mishandled vectors with uh < 0 and axis-aligned vectors (e.g.
    due East returned 0.0) and could leave the result unbound; atan2
    handles every quadrant directly.
    """
    from math import atan2, degrees

    # atan2(x-component, y-component) measures the angle from North
    angle = degrees(atan2(uh, vh))
    if angle < 0:
        angle += 360.0

    return angle
| 1092 | |
---|
[3190] | 1093 | def generate_figures(plot_quantity, file_loc, report, reportname, surface, |
---|
[3944] | 1094 | leg_label, f_list, gauges, locations, elev, gauge_index, |
---|
[4487] | 1095 | production_dirs, time_min, time_max, time_unit, |
---|
| 1096 | title_on, label_id, generate_fig, verbose): |
---|
[3944] | 1097 | """ Generate figures based on required quantities and gauges for each sww file |
---|
| 1098 | """ |
---|
[2795] | 1099 | from math import sqrt, atan, degrees |
---|
[3190] | 1100 | from Numeric import ones, allclose, zeros, Float, ravel |
---|
[2836] | 1101 | from os import sep, altsep, getcwd, mkdir, access, F_OK, environ |
---|
[2795] | 1102 | |
---|
[4226] | 1103 | if generate_fig is True: |
---|
| 1104 | from pylab import ion, hold, plot, axis, figure, legend, savefig, \ |
---|
| 1105 | xlabel, ylabel, title, close, subplot |
---|
| 1106 | |
---|
| 1107 | if surface is True: |
---|
| 1108 | import pylab as p1 |
---|
| 1109 | import mpl3d.mplot3d as p3 |
---|
[3944] | 1110 | |
---|
[3080] | 1111 | if report == True: |
---|
[2951] | 1112 | texdir = getcwd()+sep+'report'+sep |
---|
| 1113 | if access(texdir,F_OK) == 0: |
---|
| 1114 | mkdir (texdir) |
---|
[3080] | 1115 | if len(label_id) == 1: |
---|
| 1116 | label_id1 = label_id[0].replace(sep,'') |
---|
| 1117 | label_id2 = label_id1.replace('_','') |
---|
[3141] | 1118 | texfile = texdir+reportname+'%s' %(label_id2) |
---|
| 1119 | texfile2 = reportname+'%s' %(label_id2) |
---|
| 1120 | texfilename = texfile + '.tex' |
---|
[3080] | 1121 | if verbose: print '\n Latex output printed to %s \n' %texfilename |
---|
| 1122 | fid = open(texfilename, 'w') |
---|
| 1123 | else: |
---|
[3141] | 1124 | texfile = texdir+reportname |
---|
| 1125 | texfile2 = reportname |
---|
[3080] | 1126 | texfilename = texfile + '.tex' |
---|
| 1127 | if verbose: print '\n Latex output printed to %s \n' %texfilename |
---|
| 1128 | fid = open(texfilename, 'w') |
---|
[2795] | 1129 | else: |
---|
[2839] | 1130 | texfile = '' |
---|
[3629] | 1131 | texfile2 = '' |
---|
[2918] | 1132 | |
---|
[3080] | 1133 | p = len(f_list) |
---|
| 1134 | n = [] |
---|
| 1135 | n0 = 0 |
---|
| 1136 | for i in range(len(f_list)): |
---|
| 1137 | n.append(len(f_list[i].get_time())) |
---|
| 1138 | if n[i] > n0: n0 = n[i] |
---|
| 1139 | n0 = int(n0) |
---|
[2981] | 1140 | m = len(locations) |
---|
[3080] | 1141 | model_time = zeros((n0,m,p), Float) |
---|
| 1142 | stages = zeros((n0,m,p), Float) |
---|
| 1143 | elevations = zeros((n0,m,p), Float) |
---|
| 1144 | momenta = zeros((n0,m,p), Float) |
---|
| 1145 | xmom = zeros((n0,m,p), Float) |
---|
| 1146 | ymom = zeros((n0,m,p), Float) |
---|
| 1147 | speed = zeros((n0,m,p), Float) |
---|
| 1148 | bearings = zeros((n0,m,p), Float) |
---|
| 1149 | depths = zeros((n0,m,p), Float) |
---|
[3190] | 1150 | eastings = zeros((n0,m,p), Float) |
---|
[2981] | 1151 | min_stages = [] |
---|
| 1152 | max_stages = [] |
---|
[4633] | 1153 | min_momentums = [] |
---|
[2981] | 1154 | max_momentums = [] |
---|
[4627] | 1155 | max_xmomentums = [] |
---|
| 1156 | max_ymomentums = [] |
---|
| 1157 | min_xmomentums = [] |
---|
| 1158 | min_ymomentums = [] |
---|
[3080] | 1159 | max_speeds = [] |
---|
[4633] | 1160 | min_speeds = [] |
---|
[4145] | 1161 | max_depths = [] |
---|
[3190] | 1162 | model_time_plot3d = zeros((n0,m), Float) |
---|
| 1163 | stages_plot3d = zeros((n0,m), Float) |
---|
| 1164 | eastings_plot3d = zeros((n0,m),Float) |
---|
[4487] | 1165 | if time_unit is 'mins': scale = 60.0 |
---|
| 1166 | if time_unit is 'hours': scale = 3600.0 |
---|
[3080] | 1167 | ##### loop over each swwfile ##### |
---|
| 1168 | for j, f in enumerate(f_list): |
---|
[4487] | 1169 | starttime = f.starttime |
---|
[3080] | 1170 | if verbose: print 'swwfile %d of %d' %(j, len(f_list)) |
---|
[3221] | 1171 | comparefile = file_loc[j]+sep+'gauges_maxmins'+'.csv' |
---|
| 1172 | fid_compare = open(comparefile, 'w') |
---|
[4320] | 1173 | file0 = file_loc[j]+'gauges_t0.csv' |
---|
| 1174 | fid_0 = open(file0, 'w') |
---|
[3080] | 1175 | ##### loop over each gauge ##### |
---|
[3944] | 1176 | for k in gauge_index: |
---|
| 1177 | g = gauges[k] |
---|
[3080] | 1178 | if verbose: print 'Gauge %d of %d' %(k, len(gauges)) |
---|
| 1179 | min_stage = 10 |
---|
| 1180 | max_stage = 0 |
---|
[4627] | 1181 | max_momentum = max_xmomentum = max_ymomentum = 0 |
---|
[4633] | 1182 | min_momentum = min_xmomentum = min_ymomentum = 100 |
---|
[4145] | 1183 | max_speed = 0 |
---|
[4633] | 1184 | min_speed = 0 |
---|
| 1185 | max_depth = 0 |
---|
[4571] | 1186 | gaugeloc = str(locations[k]) |
---|
| 1187 | thisfile = file_loc[j]+sep+'gauges_time_series'+'_'\ |
---|
| 1188 | +gaugeloc+'.csv' |
---|
[3080] | 1189 | fid_out = open(thisfile, 'w') |
---|
[4304] | 1190 | s = 'Time, Stage, Momentum, Speed, Elevation, xmom, ymom \n' |
---|
[3080] | 1191 | fid_out.write(s) |
---|
| 1192 | #### generate quantities ####### |
---|
| 1193 | for i, t in enumerate(f.get_time()): |
---|
| 1194 | if time_min <= t <= time_max: |
---|
| 1195 | w = f(t, point_id = k)[0] |
---|
| 1196 | z = f(t, point_id = k)[1] |
---|
| 1197 | uh = f(t, point_id = k)[2] |
---|
[3176] | 1198 | vh = f(t, point_id = k)[3] |
---|
[3080] | 1199 | depth = w-z |
---|
[4248] | 1200 | m = sqrt(uh*uh + vh*vh) |
---|
[3176] | 1201 | if depth < 0.001: |
---|
[3080] | 1202 | vel = 0.0 |
---|
| 1203 | else: |
---|
[4248] | 1204 | vel = m / (depth + 1.e-6/depth) |
---|
[4487] | 1205 | #bearing = calc_bearing(uh, vh) |
---|
| 1206 | model_time[i,k,j] = (t + starttime)/scale #t/60.0 |
---|
[3190] | 1207 | stages[i,k,j] = w |
---|
[3080] | 1208 | elevations[i,k,j] = z |
---|
| 1209 | xmom[i,k,j] = uh |
---|
| 1210 | ymom[i,k,j] = vh |
---|
| 1211 | momenta[i,k,j] = m |
---|
| 1212 | speed[i,k,j] = vel |
---|
[4248] | 1213 | #bearings[i,k,j] = bearing |
---|
[3190] | 1214 | depths[i,k,j] = depth |
---|
| 1215 | thisgauge = gauges[k] |
---|
| 1216 | eastings[i,k,j] = thisgauge[0] |
---|
[4304] | 1217 | s = '%.2f, %.2f, %.2f, %.2f, %.2f, %.2f, %.2f\n' %(t, w, m, vel, z, uh, vh) |
---|
[3080] | 1218 | fid_out.write(s) |
---|
[4320] | 1219 | if t == 0: |
---|
| 1220 | s = '%.2f, %.2f, %.2f\n' %(g[0], g[1], w) |
---|
| 1221 | fid_0.write(s) |
---|
[3221] | 1222 | if t/60.0 <= 13920: tindex = i |
---|
[3080] | 1223 | if w > max_stage: max_stage = w |
---|
| 1224 | if w < min_stage: min_stage = w |
---|
| 1225 | if m > max_momentum: max_momentum = m |
---|
[4633] | 1226 | if m < min_momentum: min_momentum = m |
---|
[4627] | 1227 | if uh > max_xmomentum: max_xmomentum = uh |
---|
| 1228 | if vh > max_ymomentum: max_ymomentum = vh |
---|
| 1229 | if uh < min_xmomentum: min_xmomentum = uh |
---|
| 1230 | if vh < min_ymomentum: min_ymomentum = vh |
---|
[3221] | 1231 | if vel > max_speed: max_speed = vel |
---|
[4633] | 1232 | if vel < min_speed: min_speed = vel |
---|
[4145] | 1233 | if z > 0 and depth > max_depth: max_depth = depth |
---|
[3221] | 1234 | |
---|
| 1235 | |
---|
[3996] | 1236 | s = '%.2f, %.2f, %.2f, %.2f, %s\n' %(max_stage, min_stage, z, thisgauge[0], leg_label[j]) |
---|
[3221] | 1237 | fid_compare.write(s) |
---|
[3080] | 1238 | max_stages.append(max_stage) |
---|
| 1239 | min_stages.append(min_stage) |
---|
| 1240 | max_momentums.append(max_momentum) |
---|
[4627] | 1241 | max_xmomentums.append(max_xmomentum) |
---|
| 1242 | max_ymomentums.append(max_ymomentum) |
---|
| 1243 | min_xmomentums.append(min_xmomentum) |
---|
| 1244 | min_ymomentums.append(min_ymomentum) |
---|
[4633] | 1245 | min_momentums.append(min_momentum) |
---|
[4627] | 1246 | max_depths.append(max_depth) |
---|
[4145] | 1247 | max_speeds.append(max_speed) |
---|
[4633] | 1248 | min_speeds.append(min_speed) |
---|
[3080] | 1249 | #### finished generating quantities for each swwfile ##### |
---|
[3221] | 1250 | |
---|
[3190] | 1251 | model_time_plot3d[:,:] = model_time[:,:,j] |
---|
| 1252 | stages_plot3d[:,:] = stages[:,:,j] |
---|
| 1253 | eastings_plot3d[:,] = eastings[:,:,j] |
---|
[3080] | 1254 | |
---|
[4221] | 1255 | if surface is True: |
---|
[3190] | 1256 | print 'Printing surface figure' |
---|
| 1257 | for i in range(2): |
---|
| 1258 | fig = p1.figure(10) |
---|
| 1259 | ax = p3.Axes3D(fig) |
---|
| 1260 | if len(gauges) > 80: |
---|
[3221] | 1261 | ax.plot_surface(model_time[:,:,j],eastings[:,:,j],stages[:,:,j]) |
---|
[3190] | 1262 | else: |
---|
[3909] | 1263 | #ax.plot_wireframe(model_time[:,:,j],eastings[:,:,j],stages[:,:,j]) |
---|
| 1264 | ax.plot3D(ravel(eastings[:,:,j]),ravel(model_time[:,:,j]),ravel(stages[:,:,j])) |
---|
[3221] | 1265 | ax.set_xlabel('time') |
---|
| 1266 | ax.set_ylabel('x') |
---|
[3190] | 1267 | ax.set_zlabel('stage') |
---|
| 1268 | fig.add_axes(ax) |
---|
| 1269 | p1.show() |
---|
| 1270 | surfacefig = 'solution_surface%s' %leg_label[j] |
---|
| 1271 | p1.savefig(surfacefig) |
---|
| 1272 | p1.close() |
---|
[3221] | 1273 | |
---|
[3080] | 1274 | #### finished generating quantities for all swwfiles ##### |
---|
[3190] | 1275 | |
---|
[3221] | 1276 | # x profile for given time |
---|
[4221] | 1277 | if surface is True: |
---|
[3221] | 1278 | figure(11) |
---|
| 1279 | plot(eastings[tindex,:,j],stages[tindex,:,j]) |
---|
| 1280 | xlabel('x') |
---|
| 1281 | ylabel('stage') |
---|
| 1282 | profilefig = 'solution_xprofile' |
---|
| 1283 | savefig('profilefig') |
---|
[4145] | 1284 | |
---|
[4248] | 1285 | elev_output = [] |
---|
[4226] | 1286 | if generate_fig is True: |
---|
[4487] | 1287 | depth_axis = axis([starttime/scale, time_max/scale, -0.1, max(max_depths)*1.1]) |
---|
| 1288 | stage_axis = axis([starttime/scale, time_max/scale, min(min_stages), max(max_stages)*1.1]) |
---|
[4627] | 1289 | vel_axis = axis([starttime/scale, time_max/scale, min(min_speeds), max(max_speeds)*1.1]) |
---|
| 1290 | mom_axis = axis([starttime/scale, time_max/scale, min(min_momentums), max(max_momentums)*1.1]) |
---|
| 1291 | xmom_axis = axis([starttime/scale, time_max/scale, min(min_xmomentums), max(max_xmomentums)*1.1]) |
---|
| 1292 | ymom_axis = axis([starttime/scale, time_max/scale, min(min_ymomentums), max(max_ymomentums)*1.1]) |
---|
[4226] | 1293 | cstr = ['g', 'r', 'b', 'c', 'm', 'y', 'k'] |
---|
| 1294 | nn = len(plot_quantity) |
---|
| 1295 | no_cols = 2 |
---|
[4248] | 1296 | |
---|
[3080] | 1297 | if len(label_id) > 1: graphname_report = [] |
---|
[4226] | 1298 | pp = 1 |
---|
| 1299 | div = 11. |
---|
| 1300 | cc = 0 |
---|
| 1301 | for k in gauge_index: |
---|
| 1302 | g = gauges[k] |
---|
| 1303 | count1 = 0 |
---|
| 1304 | if report == True and len(label_id) > 1: |
---|
| 1305 | s = '\\begin{figure}[ht] \n \\centering \n \\begin{tabular}{cc} \n' |
---|
[3080] | 1306 | fid.write(s) |
---|
[4226] | 1307 | if len(label_id) > 1: graphname_report = [] |
---|
| 1308 | #### generate figures for each gauge #### |
---|
| 1309 | for j, f in enumerate(f_list): |
---|
| 1310 | ion() |
---|
| 1311 | hold(True) |
---|
| 1312 | count = 0 |
---|
| 1313 | where1 = 0 |
---|
| 1314 | where2 = 0 |
---|
| 1315 | word_quantity = '' |
---|
| 1316 | if report == True and len(label_id) == 1: |
---|
| 1317 | s = '\\begin{figure}[hbt] \n \\centering \n \\begin{tabular}{cc} \n' |
---|
| 1318 | fid.write(s) |
---|
| 1319 | |
---|
| 1320 | for which_quantity in plot_quantity: |
---|
| 1321 | count += 1 |
---|
| 1322 | where1 += 1 |
---|
| 1323 | figure(count, frameon = False) |
---|
| 1324 | if which_quantity == 'depth': |
---|
[4078] | 1325 | plot(model_time[0:n[j]-1,k,j], depths[0:n[j]-1,k,j], '-', c = cstr[j]) |
---|
[4226] | 1326 | units = 'm' |
---|
| 1327 | axis(depth_axis) |
---|
| 1328 | if which_quantity == 'stage': |
---|
[4320] | 1329 | if elevations[0,k,j] <= 0: |
---|
[4226] | 1330 | plot(model_time[0:n[j]-1,k,j], stages[0:n[j]-1,k,j], '-', c = cstr[j]) |
---|
| 1331 | axis(stage_axis) |
---|
| 1332 | else: |
---|
| 1333 | plot(model_time[0:n[j]-1,k,j], depths[0:n[j]-1,k,j], '-', c = cstr[j]) |
---|
[4274] | 1334 | #axis(depth_axis) |
---|
[4226] | 1335 | units = 'm' |
---|
| 1336 | if which_quantity == 'momentum': |
---|
| 1337 | plot(model_time[0:n[j]-1,k,j], momenta[0:n[j]-1,k,j], '-', c = cstr[j]) |
---|
| 1338 | axis(mom_axis) |
---|
| 1339 | units = 'm^2 / sec' |
---|
| 1340 | if which_quantity == 'xmomentum': |
---|
| 1341 | plot(model_time[0:n[j]-1,k,j], xmom[0:n[j]-1,k,j], '-', c = cstr[j]) |
---|
[4627] | 1342 | axis(xmom_axis) |
---|
[4226] | 1343 | units = 'm^2 / sec' |
---|
| 1344 | if which_quantity == 'ymomentum': |
---|
| 1345 | plot(model_time[0:n[j]-1,k,j], ymom[0:n[j]-1,k,j], '-', c = cstr[j]) |
---|
[4627] | 1346 | axis(ymom_axis) |
---|
[4226] | 1347 | units = 'm^2 / sec' |
---|
| 1348 | if which_quantity == 'speed': |
---|
| 1349 | plot(model_time[0:n[j]-1,k,j], speed[0:n[j]-1,k,j], '-', c = cstr[j]) |
---|
| 1350 | axis(vel_axis) |
---|
| 1351 | units = 'm / sec' |
---|
| 1352 | if which_quantity == 'bearing': |
---|
| 1353 | due_east = 90.0*ones(shape(model_time[0:n[j]-1,k,j],Float)) |
---|
| 1354 | due_west = 270.0*ones(shape(model_time[0:n[j]-1,k,j],Float)) |
---|
| 1355 | plot(model_time[0:n[j]-1,k,j], bearings, '-', |
---|
| 1356 | model_time[0:n[j]-1,k,j], due_west, '-.', |
---|
| 1357 | model_time[0:n[j]-1,k,j], due_east, '-.') |
---|
| 1358 | units = 'degrees from North' |
---|
| 1359 | ax = axis([time_min, time_max, 0.0, 360.0]) |
---|
| 1360 | legend(('Bearing','West','East')) |
---|
[2795] | 1361 | |
---|
[4487] | 1362 | if time_unit is 'mins': xlabel('time (mins)') |
---|
| 1363 | if time_unit is 'hours': xlabel('time (hours)') |
---|
| 1364 | #if which_quantity == 'stage' and elevations[0:n[j]-1,k,j] > 0: |
---|
| 1365 | # ylabel('%s (%s)' %('depth', units)) |
---|
| 1366 | #else: |
---|
| 1367 | # ylabel('%s (%s)' %(which_quantity, units)) |
---|
| 1368 | #ylabel('%s (%s)' %('wave height', units)) |
---|
| 1369 | ylabel('%s (%s)' %(which_quantity, units)) |
---|
[4226] | 1370 | if len(label_id) > 1: legend((leg_label),loc='upper right') |
---|
[2795] | 1371 | |
---|
[4571] | 1372 | #gaugeloc1 = gaugeloc.replace(' ','') |
---|
[4226] | 1373 | #gaugeloc2 = gaugeloc1.replace('_','') |
---|
[4571] | 1374 | gaugeloc2 = str(locations[k]).replace(' ','') |
---|
| 1375 | graphname = '%sgauge%s_%s' %(file_loc[j], |
---|
| 1376 | gaugeloc2, |
---|
| 1377 | which_quantity) |
---|
[2795] | 1378 | |
---|
[4226] | 1379 | if report == True and len(label_id) > 1: |
---|
| 1380 | figdir = getcwd()+sep+'report_figures'+sep |
---|
| 1381 | if access(figdir,F_OK) == 0 : |
---|
| 1382 | mkdir (figdir) |
---|
| 1383 | latex_file_loc = figdir.replace(sep,altsep) |
---|
| 1384 | graphname_latex = '%sgauge%s%s' %(latex_file_loc, gaugeloc2, which_quantity) # storing files in production directory |
---|
| 1385 | graphname_report_input = '%sgauge%s%s' %('..'+altsep+'report_figures'+altsep, gaugeloc2, which_quantity) # giving location in latex output file |
---|
| 1386 | graphname_report.append(graphname_report_input) |
---|
| 1387 | |
---|
| 1388 | savefig(graphname_latex) # save figures in production directory for report generation |
---|
[2839] | 1389 | |
---|
[4226] | 1390 | if report == True: |
---|
[2839] | 1391 | |
---|
[4226] | 1392 | figdir = getcwd()+sep+'report_figures'+sep |
---|
| 1393 | if access(figdir,F_OK) == 0 : |
---|
| 1394 | mkdir (figdir) |
---|
| 1395 | latex_file_loc = figdir.replace(sep,altsep) |
---|
[2981] | 1396 | |
---|
[4226] | 1397 | if len(label_id) == 1: |
---|
| 1398 | graphname_latex = '%sgauge%s%s%s' %(latex_file_loc, gaugeloc2, which_quantity, label_id2) # storing files in production directory |
---|
| 1399 | graphname_report = '%sgauge%s%s%s' %('..'+altsep+'report_figures'+altsep, gaugeloc2, which_quantity, label_id2) # giving location in latex output file |
---|
| 1400 | s = '\includegraphics[width=0.49\linewidth, height=50mm]{%s%s}' %(graphname_report, '.png') |
---|
| 1401 | fid.write(s) |
---|
| 1402 | if where1 % 2 == 0: |
---|
| 1403 | s = '\\\\ \n' |
---|
| 1404 | where1 = 0 |
---|
| 1405 | else: |
---|
| 1406 | s = '& \n' |
---|
| 1407 | fid.write(s) |
---|
| 1408 | savefig(graphname_latex) |
---|
| 1409 | |
---|
| 1410 | if title_on == True: |
---|
| 1411 | title('%s scenario: %s at %s gauge' %(label_id, which_quantity, gaugeloc2)) |
---|
[4320] | 1412 | #title('Gauge %s (MOST elevation %.2f, ANUGA elevation %.2f)' %(gaugeloc2, elevations[10,k,0], elevations[10,k,1] )) |
---|
[3080] | 1413 | |
---|
[4226] | 1414 | savefig(graphname) # save figures with sww file |
---|
[3080] | 1415 | |
---|
[4226] | 1416 | if report == True and len(label_id) == 1: |
---|
| 1417 | for i in range(nn-1): |
---|
| 1418 | if nn > 2: |
---|
| 1419 | if plot_quantity[i] == 'stage' and elevations[0,k,j] > 0: |
---|
| 1420 | word_quantity += 'depth' + ', ' |
---|
| 1421 | else: |
---|
| 1422 | word_quantity += plot_quantity[i] + ', ' |
---|
| 1423 | else: |
---|
| 1424 | if plot_quantity[i] == 'stage' and elevations[0,k,j] > 0: |
---|
| 1425 | word_quantity += 'depth' + ', ' |
---|
| 1426 | else: |
---|
| 1427 | word_quantity += plot_quantity[i] |
---|
| 1428 | |
---|
| 1429 | if plot_quantity[nn-1] == 'stage' and elevations[0,k,j] > 0: |
---|
| 1430 | word_quantity += ' and ' + 'depth' |
---|
| 1431 | else: |
---|
| 1432 | word_quantity += ' and ' + plot_quantity[nn-1] |
---|
| 1433 | caption = 'Time series for %s at %s location (elevation %.2fm)' %(word_quantity, locations[k], elev[k]) #gaugeloc.replace('_',' ')) |
---|
| 1434 | if elev[k] == 0.0: |
---|
| 1435 | caption = 'Time series for %s at %s location (elevation %.2fm)' %(word_quantity, locations[k], elevations[0,k,j]) |
---|
| 1436 | east = gauges[0] |
---|
| 1437 | north = gauges[1] |
---|
| 1438 | elev_output.append([locations[k],east,north,elevations[0,k,j]]) |
---|
| 1439 | label = '%sgauge%s' %(label_id2, gaugeloc2) |
---|
| 1440 | s = '\end{tabular} \n \\caption{%s} \n \label{fig:%s} \n \end{figure} \n \n' %(caption, label) |
---|
| 1441 | fid.write(s) |
---|
| 1442 | cc += 1 |
---|
| 1443 | if cc % 6 == 0: fid.write('\\clearpage \n') |
---|
| 1444 | savefig(graphname_latex) |
---|
| 1445 | |
---|
| 1446 | if report == True and len(label_id) > 1: |
---|
[3080] | 1447 | for i in range(nn-1): |
---|
| 1448 | if nn > 2: |
---|
[4078] | 1449 | if plot_quantity[i] == 'stage' and elevations[0,k,j] > 0: |
---|
[4226] | 1450 | word_quantity += 'depth' + ',' |
---|
[4078] | 1451 | else: |
---|
| 1452 | word_quantity += plot_quantity[i] + ', ' |
---|
[3080] | 1453 | else: |
---|
[4078] | 1454 | if plot_quantity[i] == 'stage' and elevations[0,k,j] > 0: |
---|
[4226] | 1455 | word_quantity += 'depth' |
---|
[4078] | 1456 | else: |
---|
| 1457 | word_quantity += plot_quantity[i] |
---|
[4226] | 1458 | where1 = 0 |
---|
| 1459 | count1 += 1 |
---|
| 1460 | index = j*len(plot_quantity) |
---|
| 1461 | for which_quantity in plot_quantity: |
---|
| 1462 | where1 += 1 |
---|
| 1463 | s = '\includegraphics[width=0.49\linewidth, height=50mm]{%s%s}' %(graphname_report[index], '.png') |
---|
| 1464 | index += 1 |
---|
| 1465 | fid.write(s) |
---|
| 1466 | if where1 % 2 == 0: |
---|
| 1467 | s = '\\\\ \n' |
---|
| 1468 | where1 = 0 |
---|
| 1469 | else: |
---|
| 1470 | s = '& \n' |
---|
| 1471 | fid.write(s) |
---|
| 1472 | word_quantity += ' and ' + plot_quantity[nn-1] |
---|
| 1473 | label = 'gauge%s' %(gaugeloc2) |
---|
| 1474 | caption = 'Time series for %s at %s location (elevation %.2fm)' %(word_quantity, locations[k], elev[k]) |
---|
[3150] | 1475 | if elev[k] == 0.0: |
---|
[4226] | 1476 | caption = 'Time series for %s at %s location (elevation %.2fm)' %(word_quantity, locations[k], elevations[0,k,j]) |
---|
| 1477 | thisgauge = gauges[k] |
---|
| 1478 | east = thisgauge[0] |
---|
| 1479 | north = thisgauge[1] |
---|
| 1480 | elev_output.append([locations[k],east,north,elevations[0,k,j]]) |
---|
| 1481 | |
---|
[3080] | 1482 | s = '\end{tabular} \n \\caption{%s} \n \label{fig:%s} \n \end{figure} \n \n' %(caption, label) |
---|
[2795] | 1483 | fid.write(s) |
---|
[4226] | 1484 | if float((k+1)/div - pp) == 0.: |
---|
| 1485 | fid.write('\\clearpage \n') |
---|
| 1486 | pp += 1 |
---|
[2981] | 1487 | |
---|
[4226] | 1488 | #### finished generating figures ### |
---|
[2795] | 1489 | |
---|
[4226] | 1490 | close('all') |
---|
| 1491 | |
---|
[3150] | 1492 | return texfile2, elev_output |
---|
[3931] | 1493 | |
---|
[3983] | 1494 | # FIXME (DSG): Add unit test, make general, not just 2 files, |
---|
| 1495 | # but any number of files. |
---|
[3931] | 1496 | def copy_code_files(dir_name, filename1, filename2): |
---|
[4510] | 1497 | """Temporary Interface to new location""" |
---|
[3931] | 1498 | |
---|
[4634] | 1499 | from anuga.shallow_water.data_manager import \ |
---|
| 1500 | copy_code_files as dm_copy_code_files |
---|
[4510] | 1501 | print 'copy_code_files has moved from util.py. ', |
---|
[4634] | 1502 | print 'Please use "from anuga.shallow_water.data_manager \ |
---|
| 1503 | import copy_code_files"' |
---|
[4510] | 1504 | |
---|
[4512] | 1505 | return dm_copy_code_files(dir_name, filename1, filename2) |
---|
[4487] | 1506 | |
---|
[4510] | 1507 | |
---|
def add_directories(root_directory, directories):
    """Build a nested directory chain under root_directory.

    Add the first directory in directories to root_directory, then add
    the second directory to the first directory and so on, creating any
    level that does not yet exist.

    Return the path of the final directory.

    This is handy for specifying and creating a directory
    where data will go.
    """
    current_path = root_directory
    for sub_directory in directories:
        current_path = os.path.join(current_path, sub_directory)
        # Create this level only if it is not already present.
        if not access(current_path, F_OK):
            mkdir(current_path)
    return current_path
---|
[3931] | 1525 | |
---|
[4306] | 1526 | def get_data_from_file(filename,separator_value = ','): |
---|
[4510] | 1527 | """Temporary Interface to new location""" |
---|
[4634] | 1528 | from anuga.shallow_water.data_manager import \ |
---|
| 1529 | get_data_from_file as dm_get_data_from_file |
---|
| 1530 | print 'get_data_from_file has moved from util.py' |
---|
| 1531 | print 'Please use "from anuga.shallow_water.data_manager \ |
---|
| 1532 | import get_data_from_file"' |
---|
[4306] | 1533 | |
---|
[4512] | 1534 | return dm_get_data_from_file(filename,separator_value = ',') |
---|
[3931] | 1535 | |
---|
[4468] | 1536 | def store_parameters(verbose=False,**kwargs): |
---|
[4510] | 1537 | """Temporary Interface to new location""" |
---|
[4512] | 1538 | |
---|
[4634] | 1539 | from anuga.shallow_water.data_manager \ |
---|
| 1540 | import store_parameters as dm_store_parameters |
---|
| 1541 | print 'store_parameters has moved from util.py.' |
---|
| 1542 | print 'Please use "from anuga.shallow_water.data_manager \ |
---|
| 1543 | import store_parameters"' |
---|
[4468] | 1544 | |
---|
[4512] | 1545 | return dm_store_parameters(verbose=False,**kwargs) |
---|
[4487] | 1546 | |
---|
def remove_lone_verts(verts, triangles, number_of_full_nodes=None):
    """Remove vertices that are not associated with any triangle.

    verts is a list/array of points.
    triangles is a list of 3 element tuples.  Each tuple represents a
    triangle; its entries are indices into verts.

    number_of_full_nodes relate to parallelism when a mesh has an
    extra layer of ghost points.

    Returns the pruned verts array and the triangles array rewritten
    to index into it.
    """
    verts = ensure_numeric(verts)
    triangles = ensure_numeric(triangles)

    N = len(verts)
    if N == 0:
        # Nothing to prune; also avoids max() on an empty loners array.
        return verts, triangles

    # Initialise loners so the index of the first loner is easy to find:
    # entry i starts at 2*N-i (always > N) and is overwritten with i as
    # soon as vertex i appears in a triangle.  Afterwards
    # loners[i] >= N  <=>  vertex i is unused.
    loners = arange(2*N, N, -1)  # if N=3 [6,5,4]
    for t in triangles:
        for vert in t:
            loners[vert] = vert  # all non-loners will have loners[i]=i

    lone_start = 2*N - max(loners)  # The index of the first loner

    if lone_start-1 == N:
        # No loners
        pass
    elif min(loners[lone_start:N]) > N:
        # All the loners are at the end of the vert array
        verts = verts[0:lone_start]
    else:
        # Change the loners list so it can be used to modify triangles
        # and remove the loners from verts by compacting in place.
        # Could've used X=compress(less(loners,N),loners)
        # verts=take(verts,X) to remove the loners from verts
        # but I think it would use more memory.
        #
        # BUG FIX: compaction must start at the first loner
        # (new_i = lone_start), not at 0 -- starting at 0 clobbered the
        # leading, still-used vertices.
        new_i = lone_start
        for i in range(lone_start, N):
            if loners[i] >= N:
                # Loner: skip; this slot is overwritten by the next
                # used vertex.
                pass
            else:
                loners[i] = new_i
                verts[new_i] = verts[i]
                new_i += 1
        verts = verts[0:new_i]

    # Remap triangle vertex ids through the old->new index table.
    triangles = choose(triangles, loners)
    return verts, triangles
---|
[4568] | 1602 | |
---|
[4480] | 1603 | |
---|
def get_centroid_values(x, triangles):
    """Compute centroid values from vertex values

    x: Values at vertices of triangular mesh
    triangles: Nx3 integer array pointing to vertex information
    for each of the N triangels. Elements of triangles are
    indices into x
    """

    n_triangles = triangles.shape[0]
    centroid_values = zeros(n_triangles, Float)  # Space for centroid info

    for tri_id in range(n_triangles):
        # Unpack the three vertex indices of this triangle and average
        # their values to obtain the centroid value.
        v0, v1, v2 = triangles[tri_id]
        centroid_values[tri_id] = (x[v0] + x[v1] + x[v2])/3

    return centroid_values
---|
| 1626 | |
---|
def make_plots_from_csv_file(directories_dic={},
                            output_dir='',
                            base_name=None,
                            plot_numbers='0',
                            quantities=['Stage'],
                            extra_plot_name='',
                            assess_all_csv_files=True,
                            create_latex=False,
                            verbose=False):

    """Read in csv files that have the right header information and
    plot time series such as Stage, Speed, etc. Will also plot several
    time series on one plot. Filenames must follow this convention,
    <base_name><plot_number>.csv eg gauge_timeseries3.csv

    For example:
    if "directories_dic" defines 4 directories and in each directories
    there is a csv files corresponding to the right "plot_numbers",
    this will create a plot with 4 lines one for each directory AND
    one plot for each "quantities".

    Inputs:
        directories_dic: dictionary of directory with values (plot
                         legend name for directory), (start time of
                         the time series) and the (value to add to
                         stage if needed). For example
                        {dir1:['Anuga_ons',5000, 0],
                         dir2:['b_emoth',5000,1.5],
                         dir3:['b_ons',5000,1.5]}

        output_dir: directory for the plot outputs

        base_name: common name to all the csv files to be read

        plot_numbers: a String list of numbers to plot. For example
                      [0-4,10,15-17] will read and attempt to plot
                       the follow 0,1,2,3,4,10,15,16,17
        quantities: Currently limited to "Stage", "Speed", and
                    "Momentum", should be changed to incorporate
                    any quantity read from CSV file....

        extra_plot_name: A string that is appended to the end of the
                         output filename.

        assess_all_csv_files: if true it will read ALL csv file with
                             "base_name", regardless of 'plot_numbers'
                              and determine a uniform set of axes for
                              Stage, Speed and Momentum. IF FALSE it
                              will only read the csv file within the
                             'plot_numbers'

        create_latex: NOT IMPLEMENTED YET!! sorry Jane....

        OUTPUTS: None, it saves the plots to
              <output_dir><base_name><plot_number><extra_plot_name>.png
    """
    # NOTE(review): mutable default arguments (directories_dic={},
    # quantities=['Stage']) are shared across calls -- safe only while
    # callers never mutate them; verify before relying on that.
    import pylab# import plot, xlabel, ylabel, savefig, \
                #ion, hold, axis, close, figure, legend
    from os import sep
    from anuga.shallow_water.data_manager import \
         get_all_files_with_extension, csv2dict
    #find all the files that meet the specs
    #FIXME remove all references to word that such as Stage
    #(with a Capital letter) could use the header info from
    #the dictionary that is returned from the csv2dict this could
    #be very good and very flexable.... but this works for now!

    #FIXME plot all available data from the csv file, currently will
    #only plot Stage,Speed and Momentum and only if the header is
    #named like wise

#    quantities_dic={'time':'time (hour)'}
#    if 'time' in quantities:
#        quantities_dic['time']='time (hour)'
#    if 'Time' in quantities:
#        quantities_dic['Time']='time (hour)'
#    if 'stage' in quantities:
#        quantities_dic['stage']='wave height (m)'
#    if 'Stage' in quantities:
#        quantities_dic['Stage']='wave height (m)'
#    if 'speed' in quantities:
#        quantities_dic['speed']='speed (m/s)'
#    if 'Speed' in quantities:
#        quantities_dic['Speed']='speed (m/s)'
#    if 'stage' or 'Stage' in quantities:
#        quantities_dic['stage']='speed (m/s)'
#    if 'stage' or 'Stage' in quantities:
#        quantities_dic['stage']='speed (m/s)'

    # Axis labels shared by all generated plots; time is converted from
    # seconds to hours before plotting.
    seconds_in_hour = 3600
    time_label = 'time (hour)'
    stage_label = 'wave height (m)'
    speed_label = 'speed (m/s)'
    momentum_label = 'momentum (m^2/sec)'

    if extra_plot_name != '':
        extra_plot_name='_'+extra_plot_name

    #finds all the files that fit the specs and return a list of them
    #so to help find a uniform max and min for the plots...
    list_filenames=[]
    if verbose: print 'Determining files to access for axes ranges \n'
    for i,directory in enumerate(directories_dic.keys()):
        list_filenames.append(get_all_files_with_extension(directory,
                                                           base_name,'.csv'))
#    print 'list_filenames',list_filenames

    #use all the files to get the values for the plot axis
    max_st=max_sp=max_mom=min_st=min_sp=min_mom=max_t=min_t=0.
    max_start_time= 0.
    min_start_time = 100000


    new_plot_numbers=[]
    #change plot_numbers to list, eg ['0-4','10']
    #to ['0','1','2','3','4','10']
    for i, num_string in enumerate(plot_numbers):
        if '-' in num_string:
            # Expand a 'start-end' range (inclusive) into single numbers
            start = int(num_string[:num_string.rfind('-')])
            end = int(num_string[num_string.rfind('-')+1:])+1
            for x in range(start, end):
                new_plot_numbers.append(str(x))
        else:
            new_plot_numbers.append(num_string)
#    print 'new_plot_numbers',new_plot_numbers

    if verbose: print 'Determining uniform axes \n'
    #this entire loop is to determine the min and max range for the
    #axes of the plot
    for i, directory in enumerate(directories_dic.keys()):

        if assess_all_csv_files==False:
            which_csv_to_assess = new_plot_numbers
        else:
            which_csv_to_assess = list_filenames[i]

        for j, filename in enumerate(which_csv_to_assess):
            if assess_all_csv_files==False:
#                dir_filename=directory+sep+base_name+filename
                dir_filename=join(directory,base_name+filename)
            else:
                dir_filename=join(directory,filename)
#            print'dir_filename',dir_filename
            attribute_dic, title_index_dic = csv2dict(dir_filename+
                                                      '.csv')

            directory_start_time = directories_dic[directory][1]
            directory_add_tide = directories_dic[directory][2]

            # Track running min/max of each quantity across every file
            # so all plots share uniform axes.
            time = [float(x) for x in attribute_dic["Time"]]
            min_t, max_t = get_min_max_values(time,min_t,max_t)

            stage = [float(x) for x in attribute_dic["Stage"]]
            stage =array(stage)+directory_add_tide
            min_st, max_st = get_min_max_values(stage,min_st,max_st)

            speed = [float(x) for x in attribute_dic["Speed"]]
            min_sp, max_sp = get_min_max_values(speed,min_sp,max_sp)

            momentum = [float(x) for x in attribute_dic["Momentum"]]
            min_mom, max_mom = get_min_max_values(momentum,
                                                  min_mom,
                                                  max_mom)

#            print 'min_sp, max_sp',min_sp, max_sp,
#            print directory_start_time
            if min_start_time > directory_start_time:
                min_start_time = directory_start_time
            if max_start_time < directory_start_time:
                max_start_time = directory_start_time
#            print 'start_time' , max_start_time, min_start_time

    # Uniform axis tuples (xmin, xmax, ymin, ymax), x in hours.
    stage_axis = (min_start_time/seconds_in_hour,
                  (max_t+max_start_time)/seconds_in_hour,
                  min_st, max_st)
    speed_axis = (min_start_time/seconds_in_hour,
                  (max_t+max_start_time)/seconds_in_hour,
                  min_sp, max_sp)
    momentum_axis = (min_start_time/seconds_in_hour,
                     (max_t+max_start_time)/seconds_in_hour,
                     min_mom, max_mom)
#    ion()
#    pylab.hold()


    # One line colour per directory (cycles are not handled; more than
    # 7 directories would exceed this list).
    cstr = ['b', 'r', 'g', 'c', 'm', 'y', 'k']

#    if verbose: print 'Now start to plot \n'

    i_max = len(directories_dic.keys())
    legend_list_dic =[]
    legend_list =[]
    for i, directory in enumerate(directories_dic.keys()):
        if verbose: print'Plotting in %s', directory
        for j, number in enumerate(new_plot_numbers):
            if verbose: print'Starting %s',base_name+number
            directory_name = directories_dic[directory][0]
            directory_start_time = directories_dic[directory][1]
            directory_add_tide = directories_dic[directory][2]

            #create an if about the start time and tide hieght
            #if they don't exist
            file=directory+sep+base_name+number
            #print 'i %s,j %s, number %s, file %s' %(i,j,number,file)
            attribute_dic, title_index_dic = csv2dict(file+'.csv')
            #get data from dict in to list
            t = [float(x) for x in attribute_dic["Time"]]

            #do maths to list by changing to array
            t=(array(t)+directory_start_time)/seconds_in_hour
            stage = [float(x) for x in attribute_dic["Stage"]]
            speed = [float(x) for x in attribute_dic["Speed"]]
            momentum = [float(x) for x in attribute_dic["Momentum"]]

            #Can add tide so plots are all on the same tide height
            stage =array(stage)+directory_add_tide

            #finds the maximum elevation
            max_ele=-100000
            min_ele=100000
            elevation = [float(x) for x in attribute_dic["Elevation"]]

            min_ele, max_ele = get_min_max_values(elevation,
                                                  min_ele,
                                                  max_ele)
            if min_ele != max_ele:
                # NOTE(review): dir_filename is stale here -- it is the
                # last file visited by the axes-determination loop above,
                # not the file currently being plotted ('file' would be
                # the right name to report).  Flagging, not fixing.
                print "Note! Elevation changes in %s" %dir_filename
#            print 'min_ele, max_ele',min_ele, max_ele


            #populates the legend_list_dic with dir_name and the elevation
            if i==0:
                legend_list_dic.append({directory_name:max_ele})
            else:
                legend_list_dic[j][directory_name]=max_ele

            # creates a list for the legend after at "legend_dic" has been fully populated
            # only runs on the last iteration for all the gauges(csv) files
            # empties the list before creating it
            if i==i_max-1:
                legend_list=[]
                for k, l in legend_list_dic[j].iteritems():
                    legend_list.append('%s (elevation = %sm)'%(k,l))
                    #print k,l, legend_list_dic[j]

            # Each quantity gets its own figure family (100+j, 200+j,
            # 300+j) so successive directories draw onto the same axes.
            if "Stage" in quantities:
                pylab.figure(100+j)
                pylab.plot(t, stage, c = cstr[i], linewidth=1)
                pylab.xlabel(time_label)
                pylab.ylabel(stage_label)
                pylab.axis(stage_axis)
                pylab.legend(legend_list,loc='upper right')
                figname = '%sstage_%s%s.png' %(output_dir+sep,
                                               base_name+number,
                                               extra_plot_name)
                pylab.savefig(figname)
            if "Speed" in quantities:
                pylab.figure(200+j)
                pylab.plot(t, speed, c = cstr[i], linewidth=1)
                pylab.xlabel(time_label)
                pylab.ylabel(speed_label)
                pylab.axis(speed_axis)
                pylab.legend(legend_list,loc='upper right')
                figname = '%sspeed_%s%s.png' %(output_dir+sep,
                                               base_name+number,
                                               extra_plot_name)
                pylab.savefig(figname)
            if "Momentum" in quantities:
                pylab.figure(300+j)
                pylab.plot(t, momentum, c = cstr[i], linewidth=1)
                pylab.xlabel(time_label)
                pylab.ylabel(momentum_label)
                pylab.axis(momentum_axis)
                pylab.legend(legend_list,loc='upper right')
                figname = '%smomentum_%s%s.png' %(output_dir+sep,
                                                  base_name+number,
                                                  extra_plot_name)
                pylab.savefig(figname)
    if verbose: print 'Closing all plots'
    pylab.close('all')
    del pylab
    if verbose: print 'Finished closing plots'
---|
| 1910 | |
---|
def get_min_max_values(list=None, min1=100, max1=-100):
    """Return the running min and max of the values it was provided.

    list: sequence of comparable values (must be non-empty).
    min1, max1: current running minimum/maximum; they are only lowered/
                raised if the sequence extends beyond them.
    NOTE: the default min and max may need to change depending on
    your list.

    Returns (min1, max1) updated against the sequence.
    Raises ValueError if list is None or empty.

    NOTE: the parameter name 'list' shadows the builtin; it is kept for
    backward compatibility with keyword callers.
    """
    # BUG FIX: was 'if list == None: print ...' which printed a message
    # and then crashed on max(None) anyway -- fail explicitly instead.
    if list is None or len(list) == 0:
        raise ValueError('List must be provided and non-empty')

    return min(min1, min(list)), max(max1, max(list))
---|
| 1924 | |
---|
[4643] | 1925 | |
---|
def get_runup_data_for_locations_from_file(gauge_filename,
                                           sww_filename,
                                           runup_filename,
                                           size=10,
                                           verbose=False):

    '''Read a csv file with the header x,y, then for each point look in
    a square of side 2*size centred on it for the maximum inundation
    height in 'sww_filename', and report the findings in
    'runup_filename' (csv with header easting,northing,runup).

    size: half-width of the square search polygon around each gauge
          point (same units as the x,y coordinates).

    WARNING: NO TESTS!
    '''

    from anuga.shallow_water.data_manager import get_all_directories_with_name,\
                                                 get_maximum_inundation_data,\
                                                 csv2dict

    # Truncate/create the output file and write the header.
    # NOTE(review): 'file' shadows the builtin; left as-is.
    file = open(runup_filename,"w")
    file.write("easting,northing,runup \n ")
    file.close()

    #read gauge csv file to dictionary
    attribute_dic, title_index_dic = csv2dict(gauge_filename)
    northing = [float(x) for x in attribute_dic["y"]]
    easting = [float(x) for x in attribute_dic["x"]]

    print 'Reading %s' %sww_filename

    runup_locations=[]
    for i, x in enumerate(northing):
#        print 'easting,northing',i,easting[i],northing[i]
        # Axis-aligned square polygon of side 2*size around gauge i
        # (coordinates truncated to ints).
        poly = [[int(easting[i]+size),int(northing[i]+size)],
                [int(easting[i]+size),int(northing[i]-size)],
                [int(easting[i]-size),int(northing[i]-size)],
                [int(easting[i]-size),int(northing[i]+size)]]

        run_up, x_y = get_maximum_inundation_data(filename=sww_filename,
                                              polygon=poly,
                                              verbose=False)
        #if no runup will return 0 instead of NONE
        if run_up==None: run_up=0
        if x_y==None: x_y=[0,0]

        if verbose:print 'maximum inundation runup near %s is %s meters' %(x_y,run_up)

        #writes to file
        # NOTE(review): the output file is reopened in append mode for
        # every gauge -- presumably so partial results survive a crash;
        # confirm before consolidating into a single open.
        file = open(runup_filename,"a")
        temp = '%s,%s,%s \n' %(x_y[0], x_y[1], run_up)
        file.write(temp)
        file.close()
---|