source: anuga_core/source/anuga/abstract_2d_finite_volumes/util.py @ 7675

Last change on this file since 7675 was 7675, checked in by hudson, 14 years ago

Ticket 113 is complete, and all tests pass.
A centroid list is built by Interpolation_function as it calculates the interpolation matrix, and this is passed out as extra quantities which are output into the gauge.csv file.

File size: 82.4 KB
RevLine 
[5897]1"""This module contains various auxiliary function used by pyvolution.
2
3It is also a clearing house for functions that may later earn a module
4of their own.
5"""
6
7import anuga.utilities.polygon
8import sys
9import os
10
11from os import remove, mkdir, access, F_OK, R_OK, W_OK, sep,getcwd
12from os.path import exists, basename, split,join
13from warnings import warn
14from shutil import copy
15
16from anuga.utilities.numerical_tools import ensure_numeric
[6070]17from Scientific.IO.NetCDF import NetCDFFile
[5897]18   
19from anuga.geospatial_data.geospatial_data import ensure_absolute
20from math import sqrt, atan, degrees
21
[6070]22# FIXME (Ole): Temporary short cuts -
23# FIXME (Ole): remove and update scripts where they are used
[5897]24from anuga.utilities.system_tools import get_revision_number
25from anuga.utilities.system_tools import store_version_info
26
[6086]27from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a
[7317]28import anuga.utilities.log as log
[5897]29
[7276]30import numpy as num
[6086]31
[6145]32
[6070]33##
34# @brief Read time history of data from NetCDF file, return callable object.
35# @param filename  Name of .sww or .tms file.
36# @param domain Associated domain object.
37# @param quantities Name of quantity to be interpolated or a list of names.
38# @param interpolation_points List of absolute UTM coordinates for points
39#                             (N x 2) or geospatial object or
40#                             points file name at which values are sought.
41# @param time_thinning
42# @param verbose True if this function is to be verbose.
43# @param use_cache True means that caching of intermediate result is attempted.
44# @param boundary_polygon
[7672]45# @param output_centroids if True, data for the centroid of the triangle will be output
[6070]46# @return A callable object.
def file_function(filename,
                  domain=None,
                  quantities=None,
                  interpolation_points=None,
                  time_thinning=1,
                  time_limit=None,
                  verbose=False,
                  use_cache=False,
                  boundary_polygon=None,
                  output_centroids=False):
    """Read time history of spatial data from NetCDF file and return
    a callable object.

    Input variables:
    
    filename - Name of sww, tms or sts file
       
       If the file has extension 'sww' then it is assumed to be spatio-temporal
       or temporal and the callable object will have the form f(t,x,y) or f(t)
       depending on whether the file contains spatial data

       If the file has extension 'tms' then it is assumed to be temporal only
       and the callable object will have the form f(t)

       Either form will return interpolated values based on the input file
       using the underlying interpolation_function.

    domain - Associated domain object   
       If domain is specified, model time (domain.starttime)
       will be checked and possibly modified.
    
       All times are assumed to be in UTC
       
       All spatial information is assumed to be in absolute UTM coordinates.

    quantities - the name of the quantity to be interpolated or a
                 list of quantity names. The resulting function will return
                 a tuple of values - one for each quantity
                 If quantities are None, the default quantities are
                 ['stage', 'xmomentum', 'ymomentum']

    interpolation_points - list of absolute UTM coordinates for points (N x 2)
    or geospatial object or points file name at which values are sought

    time_thinning - use only every time_thinning'th timestep from the file

    time_limit - ignore timesteps beyond this (absolute) time

    verbose - True if this function is to be verbose

    use_cache: True means that caching of intermediate result of
               Interpolation_function is attempted

    boundary_polygon - required for sts files (defines the boundary)

    output_centroids - if True, data for the centroid of the triangle will
                       be output

    See Interpolation function in anuga.fit_interpolate.interpolation for
    further documentation
    """

    # FIXME (OLE): Should check origin of domain against that of file
    # In fact, this is where origin should be converted to that of domain
    # Also, check that file covers domain fully.

    # Take into account:
    # - domain's georef
    # - sww file's georef
    # - interpolation points as absolute UTM coordinates

    if quantities is None:
        if verbose:
            msg = 'Quantities specified in file_function are None,'
            msg += ' so I will use stage, xmomentum, and ymomentum in that order'
            log.critical(msg)
        quantities = ['stage', 'xmomentum', 'ymomentum']

    # Use domain's starttime if available
    if domain is not None:   
        domain_starttime = domain.get_starttime()
    else:
        domain_starttime = None

    # Build arguments and keyword arguments for use with caching or
    # direct call.
    args = (filename,)

    # FIXME (Ole): Caching this function will not work well
    # if domain is passed in as instances change hash code.
    # Instead we pass in those attributes that are needed (and return them
    # if modified)
    kwargs = {'quantities': quantities,
              'interpolation_points': interpolation_points,
              'domain_starttime': domain_starttime,
              'time_thinning': time_thinning,
              'time_limit': time_limit,
              'verbose': verbose,
              'boundary_polygon': boundary_polygon,
              'output_centroids': output_centroids}

    # Call underlying engine with or without caching
    if use_cache is True:
        try:
            from caching import cache
        except ImportError:
            msg = 'Caching was requested, but caching module '\
                  'could not be imported'
            # Was 'raise msg' - raising strings is not legal
            raise Exception(msg)

        f, starttime = cache(_file_function,
                             args, kwargs,
                             dependencies=[filename],
                             compression=False,                 
                             verbose=verbose)
    else:
        # apply() is deprecated - call directly with argument unpacking
        f, starttime = _file_function(*args, **kwargs)

    #FIXME (Ole): Pass cache arguments, such as compression, in some sort of
    #structure

    f.starttime = starttime
    f.filename = filename

    if domain is not None:
        #Update domain.starttime if it is *earlier* than starttime from file
        if starttime > domain.starttime:
            msg = 'WARNING: Start time as specified in domain (%f)'\
                  %domain.starttime
            msg += ' is earlier than the starttime of file %s (%f).'\
                     %(filename, starttime)
            msg += ' Modifying domain starttime accordingly.'

            if verbose: log.critical(msg)

            domain.set_starttime(starttime) #Modifying model time

            if verbose: log.critical('Domain starttime is now set to %f'
                                     % domain.starttime)
    return f
184
185
[6070]186##
187# @brief ??
188# @param filename  Name of .sww or .tms file.
189# @param domain Associated domain object.
190# @param quantities Name of quantity to be interpolated or a list of names.
191# @param interpolation_points List of absolute UTM coordinates for points
192#                             (N x 2) or geospatial object or
193#                             points file name at which values are sought.
194# @param time_thinning
195# @param verbose True if this function is to be verbose.
196# @param use_cache True means that caching of intermediate result is attempted.
197# @param boundary_polygon
[5897]198def _file_function(filename,
199                   quantities=None,
200                   interpolation_points=None,
201                   domain_starttime=None,
202                   time_thinning=1,
[6173]203                   time_limit=None,
[5897]204                   verbose=False,
[7672]205                   boundary_polygon=None,
206                                   output_centroids=False):
[5897]207    """Internal function
208   
209    See file_function for documentatiton
210    """
211
212    assert type(filename) == type(''),\
213               'First argument to File_function must be a string'
214
215    try:
216        fid = open(filename)
217    except Exception, e:
[6070]218        msg = 'File "%s" could not be opened: Error="%s"' % (filename, e)
[5897]219        raise msg
220
[6070]221    # read first line of file, guess file type
[5897]222    line = fid.readline()
223    fid.close()
224
225    if line[:3] == 'CDF':
226        return get_netcdf_file_function(filename,
227                                        quantities,
228                                        interpolation_points,
229                                        domain_starttime,
230                                        time_thinning=time_thinning,
[6173]231                                        time_limit=time_limit,
[5897]232                                        verbose=verbose,
[7672]233                                        boundary_polygon=boundary_polygon,
[7673]234                                        output_centroids=output_centroids)
[5897]235    else:
[6070]236        # FIXME (Ole): Could add csv file here to address Ted Rigby's
237        # suggestion about reading hydrographs.
[5897]238        # This may also deal with the gist of ticket:289
239        raise 'Must be a NetCDF File'
240
241
[6070]242##
243# @brief ??
244# @param filename  Name of .sww or .tms file.
245# @param quantity_names Name of quantity to be interpolated or a list of names.
246# @param interpolation_points List of absolute UTM coordinates for points
247#                             (N x 2) or geospatial object or
248#                             points file name at which values are sought.
249# @param domain_starttime Start time from domain object.
250# @param time_thinning ??
251# @param verbose True if this function is to be verbose.
252# @param boundary_polygon ??
253# @return A callable object.
def get_netcdf_file_function(filename,
                             quantity_names=None,
                             interpolation_points=None,
                             domain_starttime=None,                           
                             time_thinning=1,                 
                             time_limit=None,           
                             verbose=False,
                             boundary_polygon=None,
                             output_centroids=False):
    """Read time history of spatial data from NetCDF sww file and
    return a callable object f(t,x,y)
    which will return interpolated values based on the input file.

    Model time (domain_starttime)
    will be checked, possibly modified and returned
    
    All times are assumed to be in UTC

    See Interpolation function for further documentation
    """

    # FIXME: Check that model origin is the same as file's origin
    # (both in UTM coordinates)
    # If not - modify those from file to match domain
    # (origin should be passed in)
    # Take this code from e.g. dem2pts in data_manager.py
    # FIXME: Use geo_reference to read and write xllcorner...

    import time, calendar, types
    from anuga.config import time_format

    # Open NetCDF file
    if verbose: log.critical('Reading %s' % filename)

    fid = NetCDFFile(filename, netcdf_mode_r)

    # Allow a single quantity name to be passed as a plain string
    if type(quantity_names) == types.StringType:
        quantity_names = [quantity_names]       

    if quantity_names is None or len(quantity_names) < 1:
        msg = 'No quantities are specified in file_function'
        raise Exception(msg)
 
    if interpolation_points is not None:
        interpolation_points = ensure_absolute(interpolation_points)
        msg = 'Points must by N x 2. I got %d' % interpolation_points.shape[1]
        assert interpolation_points.shape[1] == 2, msg

    # Now assert that requested quantitites (and the independent ones)
    # are present in file
    missing = []
    for quantity in ['time'] + quantity_names:
        if not fid.variables.has_key(quantity):
            missing.append(quantity)

    if len(missing) > 0:
        msg = 'Quantities %s could not be found in file %s'\
              % (str(missing), filename)
        fid.close()
        raise Exception(msg)

    # Decide whether this data has a spatial dimension
    spatial = True
    for quantity in ['x', 'y']:
        if not fid.variables.has_key(quantity):
            spatial = False

    # Sanity-check file type against the presence of spatial data.
    # NOTE: these were string 'raise's before - now proper exceptions.
    if filename[-3:] == 'tms' and spatial is True:
        msg = 'Files of type tms must not contain spatial  information'
        raise Exception(msg)

    if filename[-3:] == 'sww' and spatial is False:
        msg = 'Files of type sww must contain spatial information'       
        raise Exception(msg)

    if filename[-3:] == 'sts' and spatial is False:
        #What if mux file only contains one point
        msg = 'Files of type sts must contain spatial information'       
        raise Exception(msg)

    if filename[-3:] == 'sts' and boundary_polygon is None:
        #What if mux file only contains one point
        msg = 'Files of type sts require boundary polygon'       
        raise Exception(msg)

    # Get first timestep
    try:
        starttime = fid.starttime[0]
    except ValueError:
        msg = 'Could not read starttime from file %s' % filename
        raise Exception(msg)

    # Get variables
    time = fid.variables['time'][:]   
    # FIXME(Ole): Is time monotoneous?

    # Apply time limit if requested
    upper_time_index = len(time)   
    msg = 'Time vector obtained from file %s has length 0' % filename
    assert upper_time_index > 0, msg
   
    if time_limit is not None:
        # Adjust given time limit to given start time
        time_limit = time_limit - starttime

        # Find limit point
        for i, t in enumerate(time):
            if t > time_limit:
                upper_time_index = i
                break
                
        msg = 'Time vector is zero. Requested time limit is %f' % time_limit
        assert upper_time_index > 0, msg

        if time_limit < time[-1] and verbose is True:
            log.critical('Limited time vector from %.2fs to %.2fs'
                         % (time[-1], time_limit))

    time = time[:upper_time_index]

    # Get time independent stuff
    if spatial:
        # Get origin
        xllcorner = fid.xllcorner[0]
        yllcorner = fid.yllcorner[0]
        zone = fid.zone[0]       

        x = fid.variables['x'][:]
        y = fid.variables['y'][:]
        if filename.endswith('sww'):
            triangles = fid.variables['volumes'][:]

        x = num.reshape(x, (len(x),1))
        y = num.reshape(y, (len(y),1))
        vertex_coordinates = num.concatenate((x,y), axis=1) #m x 2 array

        if boundary_polygon is not None:
            # Remove sts points that do not lie on boundary
            # FIXME(Ole): Why don't we just remove such points from the list of points and associated data?
            # I am actually convinced we can get rid of neighbour_gauge_id altogether as the sts file is produced using the ordering file.
            # All sts points are therefore always present in the boundary. In fact, they *define* parts of the boundary.
            boundary_polygon=ensure_numeric(boundary_polygon)
            boundary_polygon[:,0] -= xllcorner
            boundary_polygon[:,1] -= yllcorner
            temp=[]
            boundary_id=[]
            gauge_id=[]
            for i in range(len(boundary_polygon)):
                for j in range(len(x)):
                    if num.allclose(vertex_coordinates[j],
                                    boundary_polygon[i], 1e-4):
                        #FIXME:
                        #currently gauges lat and long is stored as float and
                        #then cast to double. This cuases slight repositioning
                        #of vertex_coordinates.
                        temp.append(boundary_polygon[i])
                        gauge_id.append(j)
                        boundary_id.append(i)
                        break
            gauge_neighbour_id=[]
            for i in range(len(boundary_id)-1):
                if boundary_id[i]+1==boundary_id[i+1]:
                    gauge_neighbour_id.append(i+1)
                else:
                    gauge_neighbour_id.append(-1)
            if boundary_id[len(boundary_id)-1]==len(boundary_polygon)-1 \
               and boundary_id[0]==0:
                gauge_neighbour_id.append(0)
            else:
                gauge_neighbour_id.append(-1)
            gauge_neighbour_id=ensure_numeric(gauge_neighbour_id)

            if len(num.compress(gauge_neighbour_id>=0,gauge_neighbour_id)) \
               != len(temp)-1:
                msg='incorrect number of segments'
                raise Exception(msg)
            vertex_coordinates=ensure_numeric(temp)
            if len(vertex_coordinates)==0:
                msg = 'None of the sts gauges fall on the boundary'
                raise Exception(msg)
        else:
            gauge_neighbour_id=None

        if interpolation_points is not None:
            # Adjust for georef
            interpolation_points[:,0] -= xllcorner
            interpolation_points[:,1] -= yllcorner       
    else:
        gauge_neighbour_id=None
       
    if domain_starttime is not None:
        # If domain_startime is *later* than starttime,
        # move time back - relative to domain's time
        if domain_starttime > starttime:
            time = time - domain_starttime + starttime

        # FIXME Use method in geo to reconcile
        # if spatial:
        # assert domain.geo_reference.xllcorner == xllcorner
        # assert domain.geo_reference.yllcorner == yllcorner
        # assert domain.geo_reference.zone == zone       
        
    if verbose:
        log.critical('File_function data obtained from: %s' % filename)
        log.critical('  References:')
        if spatial:
            log.critical('    Lower left corner: [%f, %f]'
                         % (xllcorner, yllcorner))
        log.critical('    Start time:   %f' % starttime)
       
   
    # Produce values for desired data points at
    # each timestep for each quantity
    quantities = {}
    for i, name in enumerate(quantity_names):
        quantities[name] = fid.variables[name][:]
        if boundary_polygon is not None:
            #removes sts points that do not lie on boundary
            quantities[name] = num.take(quantities[name], gauge_id, axis=1)
            
    # Close sww, tms or sts netcdf file         
    fid.close()

    from anuga.fit_interpolate.interpolate import Interpolation_function

    if not spatial:
        vertex_coordinates = triangles = interpolation_points = None
    if filename[-3:] == 'sts':#added
        triangles = None
        #vertex coordinates is position of urs gauges

    if verbose:
        log.critical('Calling interpolation function')
        
    # Return Interpolation_function instance as well as
    # starttime for use to possible modify that of domain
    return (Interpolation_function(time,
                                   quantities,
                                   quantity_names,
                                   vertex_coordinates,
                                   triangles,
                                   interpolation_points,
                                   time_thinning=time_thinning,
                                   verbose=verbose,
                                   gauge_neighbour_id=gauge_neighbour_id,
                                   output_centroids=output_centroids),
            starttime)

    # NOTE (Ole): Caching Interpolation function is too slow as
    # the very long parameters need to be hashed.
510
[6070]511##
512# @brief Replace multiple substrings in a string.
513# @param text The string to operate on.
514# @param dictionary A dict containing replacements, key->value.
515# @return The new string.
def multiple_replace(text, dictionary):
    """Replace every whole-word occurrence of each dictionary key in
    'text' with the corresponding dictionary value.

    text:       String to be modified
    dictionary: Mapping of words that are to be substituted

    Python Cookbook 3.14 page 88 and page 90
    http://code.activestate.com/recipes/81330/
    """

    import re

    # Build one alternation pattern matching any key as an entire word
    escaped = [re.escape(word) for word in dictionary.keys()]
    pattern = re.compile(r'\b' + r'\b|\b'.join(escaped) + r'\b')

    # Each match is replaced by its value from the dictionary
    def lookup(match):
        return dictionary[match.group(0)]

    return pattern.sub(lookup, text)
536
537
[6070]538##
539# @brief Apply arbitrary expressions to the values of a dict.
540# @param expression A string expression to apply.
541# @param dictionary The dictionary to apply the expression to.
542def apply_expression_to_dictionary(expression, dictionary):
[5897]543    """Apply arbitrary expression to values of dictionary
544
545    Given an expression in terms of the keys, replace key by the
546    corresponding values and evaluate.
547
548    expression: Arbitrary, e.g. arithmetric, expression relating keys
549                from dictionary.
550
551    dictionary: Mapping between symbols in expression and objects that
552                will be evaluated by expression.
553                Values in dictionary must support operators given in
554                expression e.g. by overloading
555
[7276]556    Due to a limitation with numeric, this can not evaluate 0/0
[5897]557    In general, the user can fix by adding 1e-30 to the numerator.
558    SciPy core can handle this situation.
559    """
560
561    import types
562    import re
563
564    assert isinstance(expression, basestring)
565    assert type(dictionary) == types.DictType
566
567    #Convert dictionary values to textual representations suitable for eval
568    D = {}
569    for key in dictionary:
[6070]570        D[key] = 'dictionary["%s"]' % key
[5897]571
572    #Perform substitution of variables   
573    expression = multiple_replace(expression, D)
574
575    #Evaluate and return
576    try:
577        return eval(expression)
578    except NameError, e:
[6070]579        msg = 'Expression "%s" could not be evaluated: %s' % (expression, e)
[5897]580        raise NameError, msg
581    except ValueError, e:
[6070]582        msg = 'Expression "%s" could not be evaluated: %s' % (expression, e)
[5897]583        raise ValueError, msg
584
[6070]585
586##
587# @brief Format a float into a string.
588# @param value Float value to format.
589# @param format The format to use (%.2f is default).
590# @return The formatted float as a string.
def get_textual_float(value, format = '%.2f'):
    """Get textual representation of floating point numbers
    and accept None as valid entry

    value:  None, a number, or a sequence of numbers
    format: format string for one number - default = '%.2f'

    Returns 'None' for None, the formatted number for a scalar, or a
    parenthesised comma-separated list for a sequence.

    Raises ValueError if value is none of the above.
    """

    if value is None:
        return 'None'
    else:
        try:
            float(value)
        except (TypeError, ValueError):
            # Maybe this is a vector
            if len(value) > 1:
                s = '('
                for v in value:
                    s += get_textual_float(v, format) + ', '

                s = s[:-2] + ')' # Strip trailing comma and close
                return s
            else:
                # Was a string 'raise' which is illegal - use a real exception
                raise ValueError('Illegal input to get_textual_float: %s'
                                 % str(value))
        else:
            return format % float(value)
[5897]616
617
[6070]618#################################################################################
619# OBSOLETE STUFF
620#################################################################################
[5897]621
[6070]622# @note TEMP
def angle(v1, v2):
    """Deprecated - use anuga.utilities.numerical_tools.angle instead."""

    import anuga.utilities.numerical_tools as NT

    warn('angle has moved from util.py.  '
         'Please use "from anuga.utilities.numerical_tools import angle"',
         DeprecationWarning)

    return NT.angle(v1, v2)
[6070]633
[5897]634   
[6070]635# @note TEMP
def anglediff(v0, v1):
    """Deprecated - use anuga.utilities.numerical_tools.anglediff instead."""

    import anuga.utilities.numerical_tools as NT

    warn('anglediff has moved from util.py.  '
         'Please use "from anuga.utilities.numerical_tools import anglediff"',
         DeprecationWarning)

    return NT.anglediff(v0, v1)
646
647   
[6070]648# @note TEMP
def mean(x):
    """Deprecated - use anuga.utilities.numerical_tools.mean instead."""

    import anuga.utilities.numerical_tools as NT

    warn('mean has moved from util.py.  '
         'Please use "from anuga.utilities.numerical_tools import mean"',
         DeprecationWarning)

    return NT.mean(x)
659
[6070]660
661# @note TEMP
def point_on_line(*args, **kwargs):
    """Temporary Interface to new location"""

    msg = 'point_on_line has moved from util.py.  '
    msg += 'Please use "from anuga.utilities.polygon import point_on_line"'
    warn(msg, DeprecationWarning) 

    # Must be fully qualified: only 'anuga.utilities.polygon' is imported
    # at module level; the bare name 'utilities' was undefined (NameError).
    return anuga.utilities.polygon.point_on_line(*args, **kwargs)
[6070]670
[5897]671   
[6070]672# @note TEMP
def inside_polygon(*args, **kwargs):
    """Temporary Interface to new location"""

    log.critical('inside_polygon has moved from util.py.')
    log.critical('Please use '
                 '"from anuga.utilities.polygon import inside_polygon"')

    # Must be fully qualified: bare 'utilities' was an undefined name.
    return anuga.utilities.polygon.inside_polygon(*args, **kwargs)
[6070]681
[5897]682   
[6070]683# @note TEMP
def outside_polygon(*args, **kwargs):
    """Temporary Interface to new location"""

    log.critical('outside_polygon has moved from util.py.')
    log.critical('Please use '
                 '"from anuga.utilities.polygon import outside_polygon"')

    # Must be fully qualified: bare 'utilities' was an undefined name.
    return anuga.utilities.polygon.outside_polygon(*args, **kwargs)
692
693
[6070]694# @note TEMP
def separate_points_by_polygon(*args, **kwargs):
    """Temporary Interface to new location"""

    log.critical('separate_points_by_polygon has moved from util.py.')
    log.critical('Please use "from anuga.utilities.polygon import '
                 'separate_points_by_polygon"')

    # Must be fully qualified: bare 'utilities' was an undefined name.
    return anuga.utilities.polygon.separate_points_by_polygon(*args, **kwargs)
703
704
[6070]705# @note TEMP
def read_polygon(*args, **kwargs):
    """Temporary Interface to new location"""

    log.critical('read_polygon has moved from util.py.')
    log.critical('Please use '
                 '"from anuga.utilities.polygon import read_polygon"')

    # Must be fully qualified: bare 'utilities' was an undefined name.
    return anuga.utilities.polygon.read_polygon(*args, **kwargs)
714
715
[6070]716# @note TEMP
def populate_polygon(*args, **kwargs):
    """Temporary Interface to new location"""

    log.critical('populate_polygon has moved from util.py.')
    log.critical('Please use '
                 '"from anuga.utilities.polygon import populate_polygon"')

    # Must be fully qualified: bare 'utilities' was an undefined name.
    return anuga.utilities.polygon.populate_polygon(*args, **kwargs)
725
726
[6070]727#################################################################################
728# End of obsolete stuff ?
729#################################################################################
730
731# @note TEMP
def start_screen_catcher(dir_name, myid='', numprocs='', extra_info='',
                         verbose=False):
    """Temporary Interface to new location"""
    from anuga.shallow_water.data_manager import start_screen_catcher \
         as dm_start_screen_catcher

    log.critical('start_screen_catcher has moved from util.py.')
    log.critical('Please use "from anuga.shallow_water.data_manager import '
                 'start_screen_catcher"')

    # Forward the caller's actual arguments - previously the keyword
    # arguments were hard-coded to their defaults and silently ignored.
    return dm_start_screen_catcher(dir_name, myid=myid, numprocs=numprocs,
                                   extra_info=extra_info, verbose=verbose)
[5897]744
745
[6070]746##
747# @brief Read a .sww file and plot the time series.
[7672]748# @note This function is deprecated - use gauge.sww2timeseries instead.
749#
def sww2timeseries(swwfiles,
                   gauge_filename,
                   production_dirs,
                   report=None,
                   reportname=None,
                   plot_quantity=None,
                   generate_fig=False,
                   surface=None,
                   time_min=None,
                   time_max=None,
                   time_thinning=1,                   
                   time_unit=None,
                   title_on=None,
                   use_cache=False,
                   verbose=False):
    """Deprecated wrapper - delegates to gauge.sww2timeseries.

    NOTE(review): 'gauge' is not imported in the visible part of this
    module - presumably bound elsewhere in the file; confirm.
    """
    return gauge.sww2timeseries(swwfiles, gauge_filename, production_dirs,
                                report, reportname, plot_quantity,
                                generate_fig, surface, time_min, time_max,
                                time_thinning, time_unit, title_on,
                                use_cache, verbose)
[5897]768
769
770
[6070]771##
772# @brief Read gauge info from a file.
773# @param filename The name of the file to read.
774# @return A (gauges, gaugelocation, elev) tuple.
def get_gauges_from_file(filename):
    """Deprecated interface: read gauge info from file 'filename'.

    Returns whatever gauge.get_from_file returns - per the header
    comment above, a (gauges, gaugelocation, elev) tuple.
    """
    # NOTE(review): 'gauge' is not imported in the visible part of this
    # module - presumably bound elsewhere in the file; confirm.
    return gauge.get_from_file(filename)
[6070]777
778
779##
780# @brief Check that input quantities in quantity list are legal.
781# @param quantity Quantity list to check.
782# @note Raises an exception of list is not legal.
def check_list(quantity):
    """Check that input quantities in quantity list are possible.

    quantity: list of quantity name strings. NOTE: modified in place -
              all names are converted to lower case.

    Raises Exception if any name is not a recognised quantity.
    """
    all_quantity = ['stage', 'depth', 'momentum', 'xmomentum',
                    'ymomentum', 'speed', 'bearing', 'elevation']

    # convert all quantity names to lowercase
    for i, j in enumerate(quantity):
        quantity[i] = quantity[i].lower()

    # check that all names in 'quantity' appear in 'all_quantity'
    p = list(set(quantity).difference(set(all_quantity)))
    if len(p) != 0:
        msg = 'Quantities %s do not exist - please try again' % p
        # Parenthesized raise works in both Python 2 and 3
        raise Exception(msg)
800
[6070]801
802##
803# @brief Calculate velocity bearing from North.
804# @param uh ??
805# @param vh ??
806# @return The calculated bearing.
def calc_bearing(uh, vh):
    """ Calculate velocity bearing from North

    uh: x component of momentum/velocity.
    vh: y component of momentum/velocity.

    The tiny constant added to uh guards against division by zero.
    """
    #FIXME (Ole): I reckon we should refactor this one to use
    #             the function angle() in utilities/numerical_tools
    #
    #             It will be a simple matter of
    # * converting from radians to degrees
    # * moving the reference direction from [1,0] to North
    # * changing from counter clockwise to clocwise.

    # atan result lies strictly in (-90, 90) degrees
    angle = degrees(atan(vh/(uh+1.e-15)))

    if angle == 0:
        bearing = 0.0
    elif angle > 0:
        # positive angle: vh and uh share a sign; vh picks the quadrant
        if vh > 0:
            bearing = 90.0 - abs(angle)
        else:
            bearing = 270.0 - abs(angle)
    else:
        # negative angle: vh and uh have opposite signs
        if vh < 0:
            bearing = 90.0 - angle
        else:
            bearing = 270.0 - angle

    return bearing
834
[6070]835
836##
837# @brief Generate figures from quantities and gauges for each sww file.
838# @param plot_quantity  ??
839# @param file_loc ??
840# @param report ??
841# @param reportname ??
842# @param surface ??
843# @param leg_label ??
844# @param f_list ??
845# @param gauges ??
846# @param locations ??
847# @param elev ??
848# @param gauge_index ??
849# @param production_dirs ??
850# @param time_min ??
851# @param time_max ??
852# @param time_unit ??
853# @param title_on ??
854# @param label_id ??
855# @param generate_fig ??
856# @param verbose??
857# @return (texfile2, elev_output)
def generate_figures(plot_quantity, file_loc, report, reportname, surface,
                     leg_label, f_list, gauges, locations, elev, gauge_index,
                     production_dirs, time_min, time_max, time_unit,
                     title_on, label_id, generate_fig, verbose):
    """ Generate figures based on required quantities and gauges for
    each sww file

    plot_quantity: list of quantity names to plot ('stage', 'depth',
        'momentum', 'xmomentum', 'ymomentum', 'speed', 'bearing').
    file_loc: list of output directories, one per entry in f_list.
    report: if True, LaTeX figure fragments are written under ./report/.
    surface: if True, also produce 3D surface plots (needs pylab + mpl3d).
    leg_label: legend/label strings, one per entry in f_list.
    f_list: interpolation functions, one per sww file; each is called as
        f(t, point_id=k) and indexed [0..3] for stage, elevation,
        xmomentum, ymomentum (see the inner loop below).
    gauges: gauge coordinate pairs; locations: gauge names;
        elev: gauge elevations (indexed by gauge_index).
    gauge_index: indices into 'gauges' selecting which gauges to process.
    time_min, time_max: time window processed.
    time_unit: 'mins' or 'hours' - selects the time scale factor.
    title_on: if True, add titles to the figures.
    label_id: list of scenario labels; its length switches between the
        single-scenario and multi-scenario report layouts.
    generate_fig: if False, only the CSV time series files are written.

    Returns (texfile2, elev_output): the LaTeX file name ('' when no
    report was requested) and a list of [location, east, north, elevation]
    rows collected while writing captions.
    """
    from os import sep, altsep, getcwd, mkdir, access, F_OK, environ

    if generate_fig is True:
        from pylab import ion, hold, plot, axis, figure, legend, savefig, \
             xlabel, ylabel, title, close, subplot

        if surface is True:
            import pylab as p1
            import mpl3d.mplot3d as p3

    if report == True:
        texdir = getcwd()+sep+'report'+sep
        if access(texdir,F_OK) == 0:
            mkdir (texdir)
        if len(label_id) == 1:
            label_id1 = label_id[0].replace(sep,'')
            label_id2 = label_id1.replace('_','')
            texfile = texdir + reportname + '%s' % label_id2
            texfile2 = reportname + '%s' % label_id2
            texfilename = texfile + '.tex'
            fid = open(texfilename, 'w')

            if verbose: log.critical('Latex output printed to %s' % texfilename)
        else:
            texfile = texdir+reportname
            texfile2 = reportname
            texfilename = texfile + '.tex'
            fid = open(texfilename, 'w')

            if verbose: log.critical('Latex output printed to %s' % texfilename)
    else:
        texfile = ''
        texfile2 = ''

    # n[i] is the number of time steps in sww file i; n0 is the maximum
    p = len(f_list)
    n = []
    n0 = 0
    for i in range(len(f_list)):
        n.append(len(f_list[i].get_time()))
        if n[i] > n0: n0 = n[i]
    n0 = int(n0)
    m = len(locations)
    # quantity arrays are indexed [time step, gauge, sww file]
    model_time = num.zeros((n0, m, p), num.float)
    stages = num.zeros((n0, m, p), num.float)
    elevations = num.zeros((n0, m, p), num.float)
    momenta = num.zeros((n0, m, p), num.float)
    xmom = num.zeros((n0, m, p), num.float)
    ymom = num.zeros((n0, m, p), num.float)
    speed = num.zeros((n0, m, p), num.float)
    bearings = num.zeros((n0, m, p), num.float)
    due_east = 90.0*num.ones((n0, 1), num.float)
    due_west = 270.0*num.ones((n0, 1), num.float)
    depths = num.zeros((n0, m, p), num.float)
    eastings = num.zeros((n0, m, p), num.float)
    min_stages = []
    max_stages = []
    min_momentums = []
    max_momentums = []
    max_xmomentums = []
    max_ymomentums = []
    min_xmomentums = []
    min_ymomentums = []
    max_speeds = []
    min_speeds = []
    max_depths = []
    model_time_plot3d = num.zeros((n0, m), num.float)
    stages_plot3d = num.zeros((n0, m), num.float)
    eastings_plot3d = num.zeros((n0, m),num.float)
    # NOTE(review): 'is' comparison on string literals relies on interning -
    # should be '=='; also if time_unit is neither 'mins' nor 'hours',
    # 'scale' is never bound and the loop below raises NameError.
    if time_unit is 'mins': scale = 60.0
    if time_unit is 'hours': scale = 3600.0

    ##### loop over each swwfile #####
    for j, f in enumerate(f_list):
        if verbose: log.critical('swwfile %d of %d' % (j, len(f_list)))

        starttime = f.starttime
        comparefile = file_loc[j] + sep + 'gauges_maxmins' + '.csv'
        fid_compare = open(comparefile, 'w')
        file0 = file_loc[j] + 'gauges_t0.csv'
        fid_0 = open(file0, 'w')

        ##### loop over each gauge #####
        for k in gauge_index:
            if verbose: log.critical('Gauge %d of %d' % (k, len(gauges)))

            g = gauges[k]
            min_stage = 10
            max_stage = 0
            max_momentum = max_xmomentum = max_ymomentum = 0
            min_momentum = min_xmomentum = min_ymomentum = 100
            max_speed = 0
            min_speed = 0
            max_depth = 0
            gaugeloc = str(locations[k])
            thisfile = file_loc[j] + sep + 'gauges_time_series' + '_' \
                       + gaugeloc + '.csv'
            # only the first sww file opens the per-gauge CSV; later files
            # append to the same (still open) fid_out
            if j == 0:
                fid_out = open(thisfile, 'w')
                s = 'Time, Stage, Momentum, Speed, Elevation, xmom, ymom, Bearing \n'
                fid_out.write(s)

            #### generate quantities #######
            for i, t in enumerate(f.get_time()):
                if time_min <= t <= time_max:
                    w = f(t, point_id = k)[0]
                    z = f(t, point_id = k)[1]
                    uh = f(t, point_id = k)[2]
                    vh = f(t, point_id = k)[3]
                    depth = w-z
                    m = sqrt(uh*uh + vh*vh)
                    if depth < 0.001:
                        vel = 0.0
                    else:
                        vel = m / (depth + 1.e-6/depth)
                    bearing = calc_bearing(uh, vh)
                    model_time[i,k,j] = (t + starttime)/scale #t/60.0
                    stages[i,k,j] = w
                    elevations[i,k,j] = z
                    xmom[i,k,j] = uh
                    ymom[i,k,j] = vh
                    momenta[i,k,j] = m
                    speed[i,k,j] = vel
                    bearings[i,k,j] = bearing
                    depths[i,k,j] = depth
                    thisgauge = gauges[k]
                    eastings[i,k,j] = thisgauge[0]
                    s = '%.2f, %.2f, %.2f, %.2f, %.2f, %.2f, %.2f, %.2f,\n' \
                            % (t, w, m, vel, z, uh, vh, bearing)
                    fid_out.write(s)
                    if t == 0:
                        s = '%.2f, %.2f, %.2f\n' % (g[0], g[1], w)
                        fid_0.write(s)
                    # NOTE(review): 13920 looks like a hard-coded time cutoff
                    # in minutes - confirm its origin before changing.
                    if t/60.0 <= 13920: tindex = i
                    if w > max_stage: max_stage = w
                    if w < min_stage: min_stage = w
                    if m > max_momentum: max_momentum = m
                    if m < min_momentum: min_momentum = m
                    if uh > max_xmomentum: max_xmomentum = uh
                    if vh > max_ymomentum: max_ymomentum = vh
                    if uh < min_xmomentum: min_xmomentum = uh
                    if vh < min_ymomentum: min_ymomentum = vh
                    if vel > max_speed: max_speed = vel
                    if vel < min_speed: min_speed = vel
                    if z > 0 and depth > max_depth: max_depth = depth


            s = '%.2f, %.2f, %.2f, %.2f, %s\n' \
                    % (max_stage, min_stage, z, thisgauge[0], leg_label[j])
            fid_compare.write(s)
            max_stages.append(max_stage)
            min_stages.append(min_stage)
            max_momentums.append(max_momentum)
            max_xmomentums.append(max_xmomentum)
            max_ymomentums.append(max_ymomentum)
            min_xmomentums.append(min_xmomentum)
            min_ymomentums.append(min_ymomentum)
            min_momentums.append(min_momentum)
            max_depths.append(max_depth)
            max_speeds.append(max_speed)
            min_speeds.append(min_speed)
            #### finished generating quantities for each swwfile #####

        model_time_plot3d[:,:] = model_time[:,:,j]
        stages_plot3d[:,:] = stages[:,:,j]
        eastings_plot3d[:,] = eastings[:,:,j]

        if surface is True:
            log.critical('Printing surface figure')
            for i in range(2):
                fig = p1.figure(10)
                ax = p3.Axes3D(fig)
                if len(gauges) > 80:
                    ax.plot_surface(model_time[:,:,j],
                                    eastings[:,:,j],
                                    stages[:,:,j])
                else:
                    ax.plot3D(num.ravel(eastings[:,:,j]),
                              num.ravel(model_time[:,:,j]),
                              num.ravel(stages[:,:,j]))
                ax.set_xlabel('time')
                ax.set_ylabel('x')
                ax.set_zlabel('stage')
                fig.add_axes(ax)
                p1.show()
                surfacefig = 'solution_surface%s' % leg_label[j]
                p1.savefig(surfacefig)
                p1.close()

    #### finished generating quantities for all swwfiles #####

    # x profile for given time
    # NOTE(review): 'tindex' and 'j' leak out of the loops above - this
    # relies on the last values set there.
    if surface is True:
        figure(11)
        plot(eastings[tindex,:,j], stages[tindex,:,j])
        xlabel('x')
        ylabel('stage')
        profilefig = 'solution_xprofile'
        savefig('profilefig')

    elev_output = []
    if generate_fig is True:
        # common axis limits shared by all gauges/files for each quantity
        depth_axis = axis([starttime/scale, time_max/scale, -0.1,
                           max(max_depths)*1.1])
        stage_axis = axis([starttime/scale, time_max/scale,
                           min(min_stages), max(max_stages)*1.1])
        vel_axis = axis([starttime/scale, time_max/scale,
                         min(min_speeds), max(max_speeds)*1.1])
        mom_axis = axis([starttime/scale, time_max/scale,
                         min(min_momentums), max(max_momentums)*1.1])
        xmom_axis = axis([starttime/scale, time_max/scale,
                          min(min_xmomentums), max(max_xmomentums)*1.1])
        ymom_axis = axis([starttime/scale, time_max/scale,
                          min(min_ymomentums), max(max_ymomentums)*1.1])
        cstr = ['g', 'r', 'b', 'c', 'm', 'y', 'k']
        nn = len(plot_quantity)
        no_cols = 2

        if len(label_id) > 1: graphname_report = []
        pp = 1
        div = 11.
        cc = 0
        for k in gauge_index:
            g = gauges[k]
            count1 = 0
            if report == True and len(label_id) > 1:
                s = '\\begin{figure}[ht] \n' \
                    '\\centering \n' \
                    '\\begin{tabular}{cc} \n'
                fid.write(s)
            if len(label_id) > 1: graphname_report = []

            #### generate figures for each gauge ####
            for j, f in enumerate(f_list):
                ion()
                hold(True)
                count = 0
                where1 = 0
                where2 = 0
                word_quantity = ''
                if report == True and len(label_id) == 1:
                    s = '\\begin{figure}[hbt] \n' \
                        '\\centering \n' \
                        '\\begin{tabular}{cc} \n'
                    fid.write(s)

                for which_quantity in plot_quantity:
                    count += 1
                    where1 += 1
                    figure(count, frameon = False)
                    if which_quantity == 'depth':
                        plot(model_time[0:n[j]-1,k,j],
                             depths[0:n[j]-1,k,j], '-', c = cstr[j])
                        units = 'm'
                        axis(depth_axis)
                    if which_quantity == 'stage':
                        # submerged gauges plot stage, dry-land gauges depth
                        if elevations[0,k,j] <= 0:
                            plot(model_time[0:n[j]-1,k,j],
                                 stages[0:n[j]-1,k,j], '-', c = cstr[j])
                            axis(stage_axis)
                        else:
                            plot(model_time[0:n[j]-1,k,j],
                                 depths[0:n[j]-1,k,j], '-', c = cstr[j])
                            #axis(depth_axis)
                        units = 'm'
                    if which_quantity == 'momentum':
                        plot(model_time[0:n[j]-1,k,j],
                             momenta[0:n[j]-1,k,j], '-', c = cstr[j])
                        axis(mom_axis)
                        units = 'm^2 / sec'
                    if which_quantity == 'xmomentum':
                        plot(model_time[0:n[j]-1,k,j],
                             xmom[0:n[j]-1,k,j], '-', c = cstr[j])
                        axis(xmom_axis)
                        units = 'm^2 / sec'
                    if which_quantity == 'ymomentum':
                        plot(model_time[0:n[j]-1,k,j],
                             ymom[0:n[j]-1,k,j], '-', c = cstr[j])
                        axis(ymom_axis)
                        units = 'm^2 / sec'
                    if which_quantity == 'speed':
                        plot(model_time[0:n[j]-1,k,j],
                             speed[0:n[j]-1,k,j], '-', c = cstr[j])
                        axis(vel_axis)
                        units = 'm / sec'
                    if which_quantity == 'bearing':
                        plot(model_time[0:n[j]-1,k,j],bearings[0:n[j]-1,k,j],'-',
                             model_time[0:n[j]-1,k,j], due_west[0:n[j]-1], '-.',
                             model_time[0:n[j]-1,k,j], due_east[0:n[j]-1], '-.')
                        units = 'degrees from North'
                        #ax = axis([time_min, time_max, 0.0, 360.0])
                        legend(('Bearing','West','East'))

                    if time_unit is 'mins': xlabel('time (mins)')
                    if time_unit is 'hours': xlabel('time (hours)')
                    #if which_quantity == 'stage' \
                    #   and elevations[0:n[j]-1,k,j] > 0:
                    #    ylabel('%s (%s)' %('depth', units))
                    #else:
                    #    ylabel('%s (%s)' %(which_quantity, units))
                        #ylabel('%s (%s)' %('wave height', units))
                    ylabel('%s (%s)' %(which_quantity, units))
                    if len(label_id) > 1: legend((leg_label),loc='upper right')

                    #gaugeloc1 = gaugeloc.replace(' ','')
                    #gaugeloc2 = gaugeloc1.replace('_','')
                    gaugeloc2 = str(locations[k]).replace(' ','')
                    graphname = '%sgauge%s_%s' %(file_loc[j],
                                                 gaugeloc2,
                                                 which_quantity)

                    if report == True and len(label_id) > 1:
                        figdir = getcwd()+sep+'report_figures'+sep
                        if access(figdir,F_OK) == 0 :
                            mkdir (figdir)
                        latex_file_loc = figdir.replace(sep,altsep)
                        # storing files in production directory
                        graphname_latex = '%sgauge%s%s' \
                                          % (latex_file_loc, gaugeloc2,
                                             which_quantity)
                        # giving location in latex output file
                        graphname_report_input = '%sgauge%s%s' % \
                                                 ('..' + altsep +
                                                      'report_figures' + altsep,
                                                  gaugeloc2, which_quantity)
                        graphname_report.append(graphname_report_input)

                        # save figures in production directory for report
                        savefig(graphname_latex)

                    if report == True:
                        figdir = getcwd() + sep + 'report_figures' + sep
                        if access(figdir,F_OK) == 0:
                            mkdir(figdir)
                        latex_file_loc = figdir.replace(sep,altsep)

                        if len(label_id) == 1:
                            # storing files in production directory
                            graphname_latex = '%sgauge%s%s%s' % \
                                              (latex_file_loc, gaugeloc2,
                                               which_quantity, label_id2)
                            # giving location in latex output file
                            graphname_report = '%sgauge%s%s%s' % \
                                               ('..' + altsep +
                                                    'report_figures' + altsep,
                                                gaugeloc2, which_quantity,
                                                label_id2)
                            s = '\includegraphics' \
                                '[width=0.49\linewidth, height=50mm]{%s%s}' % \
                                (graphname_report, '.png')
                            fid.write(s)
                            if where1 % 2 == 0:
                                s = '\\\\ \n'
                                where1 = 0
                            else:
                                s = '& \n'
                            fid.write(s)
                            savefig(graphname_latex)

                    if title_on == True:
                        title('%s scenario: %s at %s gauge' % \
                              (label_id, which_quantity, gaugeloc2))
                        #title('Gauge %s (MOST elevation %.2f, ' \
                        #      'ANUGA elevation %.2f)' % \
                        #      (gaugeloc2, elevations[10,k,0],
                        #       elevations[10,k,1]))

                    savefig(graphname) # save figures with sww file

                if report == True and len(label_id) == 1:
                    # build the caption word list, substituting 'depth' for
                    # 'stage' at dry-land gauges
                    for i in range(nn-1):
                        if nn > 2:
                            if plot_quantity[i] == 'stage' \
                               and elevations[0,k,j] > 0:
                                word_quantity += 'depth' + ', '
                            else:
                                word_quantity += plot_quantity[i] + ', '
                        else:
                            if plot_quantity[i] == 'stage' \
                               and elevations[0,k,j] > 0:
                                word_quantity += 'depth' + ', '
                            else:
                                word_quantity += plot_quantity[i]

                    if plot_quantity[nn-1] == 'stage' and elevations[0,k,j] > 0:
                        word_quantity += ' and ' + 'depth'
                    else:
                        word_quantity += ' and ' + plot_quantity[nn-1]
                    caption = 'Time series for %s at %s location ' \
                              '(elevation %.2fm)' % \
                              (word_quantity, locations[k], elev[k])
                    if elev[k] == 0.0:
                        caption = 'Time series for %s at %s location ' \
                                  '(elevation %.2fm)' % \
                                  (word_quantity, locations[k],
                                   elevations[0,k,j])
                        east = gauges[0]
                        north = gauges[1]
                        elev_output.append([locations[k], east, north,
                                            elevations[0,k,j]])
                    label = '%sgauge%s' % (label_id2, gaugeloc2)
                    s = '\end{tabular} \n' \
                        '\\caption{%s} \n' \
                        '\label{fig:%s} \n' \
                        '\end{figure} \n \n' % (caption, label)
                    fid.write(s)
                    cc += 1
                    if cc % 6 == 0: fid.write('\\clearpage \n')
                    savefig(graphname_latex)

            if report == True and len(label_id) > 1:
                for i in range(nn-1):
                    if nn > 2:
                        if plot_quantity[i] == 'stage' and elevations[0,k,j] > 0:
                            word_quantity += 'depth' + ','
                        else:
                            word_quantity += plot_quantity[i] + ', '
                    else:
                        if plot_quantity[i] == 'stage' and elevations[0,k,j] > 0:
                            word_quantity += 'depth'
                        else:
                            word_quantity += plot_quantity[i]
                    where1 = 0
                    count1 += 1
                    index = j*len(plot_quantity)
                    for which_quantity in plot_quantity:
                        where1 += 1
                        s = '\includegraphics' \
                            '[width=0.49\linewidth, height=50mm]{%s%s}' % \
                            (graphname_report[index], '.png')
                        index += 1
                        fid.write(s)
                        if where1 % 2 == 0:
                            s = '\\\\ \n'
                            where1 = 0
                        else:
                            s = '& \n'
                        fid.write(s)
                word_quantity += ' and ' + plot_quantity[nn-1]
                label = 'gauge%s' %(gaugeloc2)
                caption = 'Time series for %s at %s location ' \
                          '(elevation %.2fm)' % \
                          (word_quantity, locations[k], elev[k])
                if elev[k] == 0.0:
                        caption = 'Time series for %s at %s location ' \
                                  '(elevation %.2fm)' % \
                                  (word_quantity, locations[k],
                                   elevations[0,k,j])
                        thisgauge = gauges[k]
                        east = thisgauge[0]
                        north = thisgauge[1]
                        elev_output.append([locations[k], east, north,
                                            elevations[0,k,j]])

                s = '\end{tabular} \n' \
                    '\\caption{%s} \n' \
                    '\label{fig:%s} \n' \
                    '\end{figure} \n \n' % (caption, label)
                fid.write(s)
                if float((k+1)/div - pp) == 0.:
                    fid.write('\\clearpage \n')
                    pp += 1
                #### finished generating figures ###

            close('all')

    return texfile2, elev_output
1331
[6070]1332
[5897]1333# FIXME (DSG): Add unit test, make general, not just 2 files,
1334# but any number of files.
[6070]1335##
1336# @brief ??
1337# @param dir_name ??
1338# @param filename1 ??
1339# @param filename2 ??
1340# @return ??
1341# @note TEMP
def copy_code_files(dir_name, filename1, filename2):
    """Deprecated interface - copy_code_files now lives in
    anuga.shallow_water.data_manager.  Logs a notice and delegates.
    """
    from anuga.shallow_water import data_manager

    log.critical('copy_code_files has moved from util.py.')
    log.critical('Please use "from anuga.shallow_water.data_manager import '
                 'copy_code_files"')

    return data_manager.copy_code_files(dir_name, filename1, filename2)
1352
1353
[6070]1354##
1355# @brief Create a nested sub-directory path.
1356# @param root_directory The base diretory path.
1357# @param directories An iterable of sub-directory names.
1358# @return The final joined directory path.
1359# @note If each sub-directory doesn't exist, it will be created.
def add_directories(root_directory, directories):
    """
    Add the first sub-directory in 'directories' to root_directory.
    Then add the second sub-directory to the accumulating path and so on.

    root_directory: the base directory path.
    directories: an iterable of sub-directory names.

    Return the path of the final directory.

    Each sub-directory is created if it does not already exist.
    This is handy for specifying and creating a directory where data will go.
    """
    # use 'path' rather than 'dir' to avoid shadowing the builtin dir()
    path = root_directory
    for new_dir in directories:
        path = os.path.join(path, new_dir)
        if not access(path, F_OK):
            mkdir(path)
    return path
1375
[6070]1376
1377##
1378# @brief
1379# @param filename
1380# @param separator_value
1381# @return
1382# @note TEMP
def get_data_from_file(filename, separator_value=','):
    """Temporary Interface to new location.

    Delegates to anuga.shallow_water.data_manager.get_data_from_file,
    forwarding the caller's separator.
    """
    from anuga.shallow_water.data_manager import \
                        get_data_from_file as dm_get_data_from_file
    log.critical('get_data_from_file has moved from util.py')
    log.critical('Please use "from anuga.shallow_water.data_manager import '
                 'get_data_from_file"')

    # Bug fix: previously the separator was hard-coded to ',' here,
    # silently ignoring the caller's separator_value argument.
    return dm_get_data_from_file(filename, separator_value=separator_value)
1392
[6070]1393
1394##
1395# @brief
1396# @param verbose
1397# @param kwargs
1398# @return
1399# @note TEMP
def store_parameters(verbose=False,**kwargs):
    """Temporary Interface to new location.

    Delegates to anuga.shallow_water.data_manager.store_parameters,
    forwarding the caller's verbose flag and keyword arguments.
    """
    from anuga.shallow_water.data_manager \
                    import store_parameters as dm_store_parameters
    log.critical('store_parameters has moved from util.py.')
    log.critical('Please use "from anuga.shallow_water.data_manager '
                 'import store_parameters"')

    # Bug fix: previously verbose was hard-coded to False here,
    # silently ignoring the caller's verbose argument.
    return dm_store_parameters(verbose=verbose, **kwargs)
1410
[6070]1411
1412##
1413# @brief Remove vertices that are not associated with any triangle.
1414# @param verts An iterable (or array) of points.
1415# @param triangles An iterable of 3 element tuples.
1416# @param number_of_full_nodes ??
1417# @return (verts, triangles) where 'verts' has been updated.
def remove_lone_verts(verts, triangles, number_of_full_nodes=None):
    """Removes vertices that are not associated with any triangles.

    verts is a list/array of points.
    triangles is a list of 3 element tuples.  Each tuple represents a triangle.
    number_of_full_nodes relate to parallelism when a mesh has an
        extra layer of ghost points.

    Returns (verts, triangles) with unreferenced vertices removed and
    triangle indices rewritten to the compacted vertex numbering.
    """
    # NOTE(review): number_of_full_nodes is not used anywhere in this
    # function body - confirm whether it should affect the compaction.

    verts = ensure_numeric(verts)
    triangles = ensure_numeric(triangles)
    
    N = len(verts)
    
    # initialise the array to easily find the index of the first loner
    # ie, if N=3 -> [6,5,4]
    # After the scan, entries still >= N mark lone (unreferenced) vertices.
    loners=num.arange(2*N, N, -1)
    for t in triangles:
        for vert in t:
            loners[vert]= vert # all non-loners will have loners[i]=i

    lone_start = 2*N - max(loners) # The index of the first loner

    if lone_start-1 == N:
        # no loners
        pass
    elif min(loners[lone_start:N]) > N:
        # All the loners are at the end of the vert array
        verts = verts[0:lone_start]
    else:
        # change the loners list so it can be used to modify triangles
        # Remove the loners from verts
        # Could've used X=compress(less(loners,N),loners)
        # verts=num.take(verts,X,axis=0)  to Remove the loners from verts
        # but I think it would use more memory
        new_i = lone_start      # point at first loner - 'shuffle down' target
        for i in range(lone_start, N):
            if loners[i] >= N:  # [i] is a loner, leave alone
                pass
            else:               # a non-loner, move down
                loners[i] = new_i
                verts[new_i] = verts[i]
                new_i += 1
        verts = verts[0:new_i]

        # Modify the triangles
        # loners now maps old vertex index -> new index, so choose()
        # rewrites every triangle entry to the compacted numbering.
        triangles = num.choose(triangles,loners)
    return verts, triangles
1466
[6072]1467
1468##
1469# @brief Compute centroid values from vertex values
1470# @param x Values at vertices of triangular mesh.
1471# @param triangles Nx3 integer array pointing to vertex information.
1472# @return [N] array of centroid values.
def get_centroid_values(x, triangles):
    """Compute centroid values from vertex values
    
    x: Values at vertices of triangular mesh
    triangles: Nx3 integer array pointing to vertex information
    for each of the N triangels. Elements of triangles are
    indices into x

    Returns an array of N centroid values, each the mean of the
    corresponding triangle's three vertex values.
    """

    # Use builtin float rather than num.float: num.float was merely an
    # alias of the builtin and has been removed from recent numpy.
    xc = num.zeros(triangles.shape[0], float)  # Space for centroid info

    for k in range(triangles.shape[0]):
        # Indices of the triangle's three vertices
        i0, i1, i2 = triangles[k]

        xc[k] = (x[i0] + x[i1] + x[i2])/3

    return xc
1493
[6072]1494
[6070]1495# @note TEMP
def make_plots_from_csv_file(directories_dic={dir:['gauge', 0, 0]},
                                output_dir='',
                                base_name='',
                                plot_numbers=['3-5'],
                                quantities=['speed','stage','momentum'],
                                assess_all_csv_files=True,
                                extra_plot_name='test'):
    """Deprecated entry point, kept only so that old callers fail loudly.

    Always raises Exception directing users to csv2timeseries_graphs,
    which replaced this function.  All arguments are ignored.
    """

    # Python-2-only 'raise Exception, msg' replaced with call syntax that
    # parses on all Python versions; the unreachable call to
    # csv2timeseries_graphs that followed the raise has been removed.
    msg = 'make_plots_from_csv_file has been replaced by csv2timeseries_graphs '
    msg += 'Please use "from anuga.abstract_2d_finite_volumes.util import ' \
           'csv2timeseries_graphs"'
    raise Exception(msg)
[6072]1516
1517
1518##
1519# @brief Plot time series from CSV files.
1520# @param directories_dic
1521# @param output_dir
1522# @param base_name
1523# @param plot_numbers
1524# @param quantities
1525# @param extra_plot_name
1526# @param assess_all_csv_files
1527# @param create_latex
1528# @param verbose
1529# @note Assumes that 'elevation' is in the CSV file(s).
def csv2timeseries_graphs(directories_dic={},
                          output_dir='',
                          base_name=None,
                          plot_numbers='',
                          quantities=['stage'],
                          extra_plot_name='',
                          assess_all_csv_files=True,
                          create_latex=False,
                          verbose=False):

    """
    Read in csv files that have the right header information and
    plot time series such as Stage, Speed, etc. Will also plot several
    time series on one plot. Filenames must follow this convention,
    <base_name><plot_number>.csv eg gauge_timeseries3.csv

    NOTE: relies that 'elevation' is in the csv file!

    Each file represents a location and within each file there are
    time, quantity columns.

    For example:
    if "directories_dic" defines 4 directories and in each directories
    there is a csv files corresponding to the right "plot_numbers",
    this will create a plot with 4 lines one for each directory AND
    one plot for each "quantities".  ??? FIXME: unclear.

    Usage:
        csv2timeseries_graphs(directories_dic={'slide'+sep:['Slide',0, 0],
                                       'fixed_wave'+sep:['Fixed Wave',0,0]},
                            output_dir='fixed_wave'+sep,
                            base_name='gauge_timeseries_',
                            plot_numbers='',
                            quantities=['stage','speed'],
                            extra_plot_name='',
                            assess_all_csv_files=True,
                            create_latex=False,
                            verbose=True)
            this will create one plot for stage with both 'slide' and
            'fixed_wave' lines on it for stage and speed for each csv
            file with 'gauge_timeseries_' as the prefix. The graphs
            will be in the output directory 'fixed_wave' and the graph
            axis will be determined by assessing all the

    ANOTHER EXAMPLE
        new_csv2timeseries_graphs(directories_dic={'slide'+sep:['Slide',0, 0],
                                       'fixed_wave'+sep:['Fixed Wave',0,0]},
                            output_dir='fixed_wave'+sep,
                            base_name='gauge_timeseries_',
                            plot_numbers=['1-3'],
                            quantities=['stage','speed'],
                            extra_plot_name='',
                            assess_all_csv_files=False,
                            create_latex=False,
                            verbose=True)
        This will plot csv files called gauge_timeseries_1.csv and
        gauge_timeseries3.csv from both 'slide' and 'fixed_wave' directories
        to 'fixed_wave'. There will be 4 plots created two speed and two stage
        one for each csv file. There will be two lines on each of these plots.
        And the axis will have been determined from only these files, had
        assess_all_csv_files = True all csv file with 'gauges_timeseries_' prefix
        would of been assessed.

    ANOTHER EXAMPLE
         csv2timeseries_graphs({'J:'+sep+'anuga_validation'+sep:['new',20,-.1],
                                   'J:'+sep+'conical_island'+sep:['test',0,0]},
                                   output_dir='',
                                   plot_numbers=['1','3'],
                                   quantities=['stage','depth','bearing'],
                                   base_name='gauge_b',
                                   assess_all_csv_files=True,
                                  verbose=True)

            This will produce one plot for each quantity (therefore 3) in the
            current directory, each plot will have 2 lines on them. The first
            plot named 'new' will have the time offseted by 20secs and the stage
            height adjusted by -0.1m

    Inputs:
        directories_dic: dictionary of directory with values (plot
                         legend name for directory), (start time of
                         the time series) and the (value to add to
                         stage if needed). For example
                         {dir1:['Anuga_ons',5000, 0],
                          dir2:['b_emoth',5000,1.5],
                          dir3:['b_ons',5000,1.5]}
                         Having multiple directories defined will plot them on
                         one plot, therefore there will be 3 lines on each of
                         these plot. If you only want one line per plot call
                         csv2timeseries_graph separately for each directory,
                         eg only have one directory in the 'directories_dic' in
                         each call.

        output_dir: directory for the plot outputs. Only important to define when
                    you have more than one directory in your directories_dic, if
                    you have not defined it and you have multiple directories in
                    'directories_dic' there will be plots in each directory,
                    however only one directory will contain the complete
                    plot/graphs.

        base_name: Is used a couple of times.
                   1) to find the csv files to be plotted if there is no
                      'plot_numbers' then csv files with 'base_name' are plotted
                   2) in the title of the plots, the length of base_name is
                      removed from the front of the filename to be used in the
                      title.
                   This could be changed if needed.
                   Note is ignored if assess_all_csv_files=True

        plot_numbers: a String list of numbers to plot. For example
                      [0-4,10,15-17] will read and attempt to plot
                      the follow 0,1,2,3,4,10,15,16,17
                      NOTE: if no plot numbers this will create one plot per
                            quantity, per gauge

        quantities: Will get available quantities from the header in the csv
                    file.  Quantities must be one of these.
                    NOTE: ALL QUANTITY NAMES MUST BE lower case!

        extra_plot_name: A string that is appended to the end of the
                         output filename.

        assess_all_csv_files: if true it will read ALL csv file with
                             "base_name", regardless of 'plot_numbers'
                              and determine a uniform set of axes for
                              Stage, Speed and Momentum. IF FALSE it
                              will only read the csv file within the
                             'plot_numbers'

        create_latex: NOT IMPLEMENTED YET!! sorry Jane....

    OUTPUTS: saves the plots to
              <output_dir><base_name><plot_number><extra_plot_name>.png
    """

    try: 
        import pylab
    except ImportError:
        msg='csv2timeseries_graphs needs pylab to be installed correctly'
        # NOTE(review): raising a string only works on very old Python 2;
        # on later versions this raise itself fails, and the 'return'
        # below is unreachable either way.
        raise msg
            #ANUGA don't need pylab to work so the system doesn't
            #rely on pylab being installed
        return

    from os import sep
    from anuga.shallow_water.data_manager import \
                               get_all_files_with_extension, csv2dict

    seconds_in_hour = 3600
    seconds_in_minutes = 60

    # Axis labels keyed by quantity name; note time is plotted in minutes.
    quantities_label={}
#    quantities_label['time'] = 'time (hours)'
    quantities_label['time'] = 'time (minutes)'
    quantities_label['stage'] = 'wave height (m)'
    quantities_label['speed'] = 'speed (m/s)'
    quantities_label['momentum'] = 'momentum (m^2/sec)'
    quantities_label['depth'] = 'water depth (m)'
    quantities_label['xmomentum'] = 'momentum (m^2/sec)'
    quantities_label['ymomentum'] = 'momentum (m^2/sec)'
    quantities_label['bearing'] = 'degrees (o)'
    quantities_label['elevation'] = 'elevation (m)'

    if extra_plot_name != '':
        extra_plot_name = '_' + extra_plot_name

    new_plot_numbers=[]
    #change plot_numbers to list, eg ['0-4','10']
    #to ['0','1','2','3','4','10']
    for i, num_string in enumerate(plot_numbers):
        if '-' in num_string: 
            start = int(num_string[:num_string.rfind('-')])
            end = int(num_string[num_string.rfind('-') + 1:]) + 1
            for x in range(start, end):
                new_plot_numbers.append(str(x))
        else:
            new_plot_numbers.append(num_string)

    #finds all the files that fit the specs provided and return a list of them
    #so to help find a uniform max and min for the plots...
    list_filenames=[]
    all_csv_filenames=[]
    if verbose: log.critical('Determining files to access for axes ranges.')

    for i,directory in enumerate(directories_dic.keys()):
        all_csv_filenames.append(get_all_files_with_extension(directory,
                                                              base_name, '.csv'))

        filenames=[]
        if plot_numbers == '': 
            list_filenames.append(get_all_files_with_extension(directory,
                                                               base_name,'.csv'))
        else:
            for number in new_plot_numbers:
                filenames.append(base_name + number)
            list_filenames.append(filenames)

    #use all the files to get the values for the plot axis
    max_start_time= -1000.
    min_start_time = 100000 

    if verbose: log.critical('Determining uniform axes')

    #this entire loop is to determine the min and max range for the
    #axes of the plots

    # NOTE(review): this mutates the caller's 'quantities' list in place,
    # and 'quantities' has a mutable default shared across calls.
#    quantities.insert(0,'elevation')
    quantities.insert(0,'time')

    directory_quantity_value={}
#    quantity_value={}
    min_quantity_value={}
    max_quantity_value={}

    for i, directory in enumerate(directories_dic.keys()):
        filename_quantity_value = {}
        if assess_all_csv_files == False:
            which_csv_to_assess = list_filenames[i]
        else:
            #gets list of filenames for directory "i"
            which_csv_to_assess = all_csv_filenames[i]

        for j, filename in enumerate(which_csv_to_assess):
            quantity_value = {}

            dir_filename = join(directory,filename)
            attribute_dic, title_index_dic = csv2dict(dir_filename + '.csv')
            directory_start_time = directories_dic[directory][1]
            directory_add_tide = directories_dic[directory][2]

            if verbose: log.critical('reading: %s.csv' % dir_filename)

            #add time to get values
            for k, quantity in enumerate(quantities):
                quantity_value[quantity] = [float(x) for
                                                x in attribute_dic[quantity]]

                #add tide to stage if provided
                if quantity == 'stage':
                     quantity_value[quantity] = num.array(quantity_value[quantity],
                                                          num.float) + directory_add_tide

                #condition to find max and mins for all the plots
                # populate the list with something when i=0 and j=0 and
                # then compare to the other values to determine abs max and min
                if i==0 and j==0:
                    min_quantity_value[quantity], \
                        max_quantity_value[quantity] = \
                            get_min_max_values(quantity_value[quantity])

                    if quantity != 'time':
                        min_quantity_value[quantity] = \
                            min_quantity_value[quantity] *1.1
                        max_quantity_value[quantity] = \
                            max_quantity_value[quantity] *1.1
                else:
                    # NOTE(review): shadows the builtin min/max for the
                    # remainder of this branch.
                    min, max = get_min_max_values(quantity_value[quantity])

                    # min and max are multipled by "1+increase_axis" to get axes
                    # that are slighty bigger than the max and mins
                    # so the plots look good.

                    increase_axis = (max-min)*0.05
                    if min <= min_quantity_value[quantity]:
                        if quantity == 'time': 
                            min_quantity_value[quantity] = min
                        else:
                            if round(min,2) == 0.00:
                                min_quantity_value[quantity] = -increase_axis
#                                min_quantity_value[quantity] = -2.
                                #min_quantity_value[quantity] = \
                                #    -max_quantity_value[quantity]*increase_axis
                            else:
#                                min_quantity_value[quantity] = \
#                                    min*(1+increase_axis)
                                min_quantity_value[quantity]=min-increase_axis

                    if max > max_quantity_value[quantity]: 
                        if quantity == 'time': 
                            max_quantity_value[quantity] = max
                        else:
                            max_quantity_value[quantity] = max + increase_axis
#                            max_quantity_value[quantity]=max*(1+increase_axis)

            #set the time... ???
            if min_start_time > directory_start_time: 
                min_start_time = directory_start_time
            if max_start_time < directory_start_time: 
                max_start_time = directory_start_time

            filename_quantity_value[filename]=quantity_value

        directory_quantity_value[directory]=filename_quantity_value

    #final step to unifrom axis for the graphs
    quantities_axis={}

    for i, quantity in enumerate(quantities):
        quantities_axis[quantity] = (float(min_start_time) \
                                         / float(seconds_in_minutes),
                                     (float(max_quantity_value['time']) \
                                          + float(max_start_time)) \
                                              / float(seconds_in_minutes),
                                     min_quantity_value[quantity],
                                     max_quantity_value[quantity])

        if verbose and (quantity != 'time' and quantity != 'elevation'): 
            log.critical('axis for quantity %s are x:(%s to %s)%s '
                         'and y:(%s to %s)%s' 
                         % (quantity, quantities_axis[quantity][0],
                            quantities_axis[quantity][1],
                            quantities_label['time'],
                            quantities_axis[quantity][2],
                            quantities_axis[quantity][3],
                            quantities_label[quantity]))

    # One line colour per directory, cycled in directory order.
    cstr = ['b', 'r', 'g', 'c', 'm', 'y', 'k']

    if verbose: log.critical('Now start to plot')

    i_max = len(directories_dic.keys())
    legend_list_dic = {}
    legend_list = []
    for i, directory in enumerate(directories_dic.keys()):
        if verbose: log.critical('Plotting in %s %s'
                                 % (directory, new_plot_numbers))

        # FIXME THIS SORT IS VERY IMPORTANT
        # Without it the assigned plot numbers may not work correctly
        # there must be a better way
        list_filenames[i].sort()
        for j, filename in enumerate(list_filenames[i]):
            if verbose: log.critical('Starting %s' % filename)

            directory_name = directories_dic[directory][0]
            directory_start_time = directories_dic[directory][1]
            directory_add_tide = directories_dic[directory][2]

            # create an if about the start time and tide height if don't exist
            attribute_dic, title_index_dic = csv2dict(directory + sep
                                                      + filename + '.csv')
            #get data from dict in to list
            #do maths to list by changing to array
            t = (num.array(directory_quantity_value[directory][filename]['time'])
                     + directory_start_time) / seconds_in_minutes

            #finds the maximum elevation, used only as a test
            # and as info in the graphs
            max_ele=-100000
            min_ele=100000
            elevation = [float(x) for x in attribute_dic["elevation"]]

            min_ele, max_ele = get_min_max_values(elevation)

            if min_ele != max_ele:
                # NOTE(review): 'dir_filename' here is stale - it still
                # holds the last file of the axes-scan loop above, not
                # the current 'filename', so this message can name the
                # wrong file.
                log.critical("Note! Elevation changes in %s" % dir_filename)

            # creates a dictionary with keys that is the filename and attributes
            # are a list of lists containing 'directory_name' and 'elevation'.
            # This is used to make the contents for the legends in the graphs,
            # this is the name of the model and the elevation.  All in this
            # great one liner from DG. If the key 'filename' doesn't exist it
            # creates the entry if the entry exist it appends to the key.

            legend_list_dic.setdefault(filename,[]) \
                .append([directory_name, round(max_ele, 3)])

            # creates a LIST for the legend on the last iteration of the
            # directories which is when "legend_list_dic" has been fully
            # populated. Creates a list of strings which is used in the legend
            # only runs on the last iteration for all the gauges(csv) files
            # empties the list before creating it

            if i == i_max - 1:
                legend_list = []

                for name_and_elevation in legend_list_dic[filename]:
                    legend_list.append('%s (elevation = %sm)'\
                                       % (name_and_elevation[0],
                                          name_and_elevation[1]))

            #skip time and elevation so it is not plotted!
            for k, quantity in enumerate(quantities):
                if quantity != 'time' and quantity != 'elevation':
                    # Figure number encodes (quantity, gauge) so repeated
                    # directories draw onto the same figure.
                    pylab.figure(int(k*100+j))
                    pylab.ylabel(quantities_label[quantity])
                    pylab.plot(t,
                               directory_quantity_value[directory]\
                                                       [filename][quantity],
                               c = cstr[i], linewidth=1)
                    pylab.xlabel(quantities_label['time'])
                    pylab.axis(quantities_axis[quantity])
                    pylab.legend(legend_list,loc='upper right')

                    pylab.title('%s at %s gauge'
                                % (quantity, filename[len(base_name):]))

                    if output_dir == '':
                        figname = '%s%s%s_%s%s.png' \
                                  % (directory, sep, filename, quantity,
                                     extra_plot_name)
                    else:
                        figname = '%s%s%s_%s%s.png' \
                                  % (output_dir, sep, filename, quantity,
                                     extra_plot_name)

                    if verbose: log.critical('saving figure here %s' % figname)

                    pylab.savefig(figname)

    if verbose: log.critical('Closing all plots')

    pylab.close('all')
    del pylab

    if verbose: log.critical('Finished closing plots')
[5897]1946
[6072]1947##
1948# @brief Return min and max of an iterable.
1949# @param list The iterable to return min & max of.
1950# @return (min, max) of 'list'.
def get_min_max_values(list=None):
    """Return (min, max) of the iterable provided.

    list: a non-empty iterable of mutually comparable values.

    Raises ValueError if no iterable is supplied.  (The old code used
    '== None', merely logged the problem, and then crashed with an
    opaque TypeError from min(None).)
    """

    if list is None:
        raise ValueError('List must be provided')

    return min(list), max(list)
1959
1960
[6072]1961##
1962# @brief Get runup around a point in a CSV file.
1963# @param gauge_filename gauge file name.
1964# @param sww_filename SWW file name.
1965# @param runup_filename Name of file to report into.
1966# @param size ??
1967# @param verbose ??
def get_runup_data_for_locations_from_file(gauge_filename,
                                           sww_filename,
                                           runup_filename,
                                           size=10,
                                           verbose=False):
    """this will read a csv file with the header x,y. Then look in a square
    'size'x2 around this position for the 'max_inundaiton_height' in the
    'sww_filename' and report the findings in the 'runup_filename'.

    Each output row is 'easting,northing,runup'; a point with no runup
    is reported as 0 at location (0, 0).

    WARNING: NO TESTS!
    """

    from anuga.shallow_water.data_manager import get_all_directories_with_name,\
                                                 get_maximum_inundation_data,\
                                                 csv2dict

    # Start a fresh output file containing just the header line.
    file = open(runup_filename, "w")
    file.write("easting,northing,runup \n ")
    file.close()

    # Read gauge csv file to dictionary of columns.
    attribute_dic, title_index_dic = csv2dict(gauge_filename)
    northing = [float(x) for x in attribute_dic["y"]]
    easting = [float(x) for x in attribute_dic["x"]]

    log.critical('Reading %s' % sww_filename)

    for i, x in enumerate(northing):
        # Axis-aligned square of side 2*size centred on the gauge point.
        poly = [[int(easting[i] + size), int(northing[i] + size)],
                [int(easting[i] + size), int(northing[i] - size)],
                [int(easting[i] - size), int(northing[i] - size)],
                [int(easting[i] - size), int(northing[i] + size)]]

        run_up, x_y = get_maximum_inundation_data(filename=sww_filename,
                                                  polygon=poly,
                                                  verbose=False)

        # If no runup was found, report 0 at (0, 0) instead of None.
        if run_up is None: run_up = 0
        if x_y is None: x_y = [0, 0]

        if verbose:
            log.critical('maximum inundation runup near %s is %s meters'
                         % (x_y, run_up))

        # Re-open in append mode per point so partial results survive a
        # crash during a long extraction run.
        file = open(runup_filename, "a")
        file.write('%s,%s,%s \n' % (x_y[0], x_y[1], run_up))
        file.close()
2019
[6072]2020##
2021# @brief ??
[6314]2022# @param  ??
[6072]2023# @param gauge_file ??
2024# @param out_name ??
2025# @param quantities ??
2026# @param verbose ??
2027# @param use_cache ??
def sww2csv_gauges(sww_file,
                   gauge_file,
                   out_name='gauge_',
                   quantities=['stage', 'depth', 'elevation',
                               'xmomentum', 'ymomentum'],
                   verbose=False,
                   use_cache=True):
    """Interpolate quantities from an sww file at gauge locations and
    write one csv time series file per gauge.

    sww_file: path to any sww file.

    gauge_file: csv file of gauge locations, e.g.
            name, easting, northing, elevation
            point1, 100.3, 50.2, 10.0
            point2, 10.3, 70.3, 78.0
        Column order may vary, but the (all lower case) column names
        'name', 'easting', 'northing' and 'elevation' must be present.

    out_name: prefix for each output file name (default 'gauge_').

    quantities: names of the quantities to extract.  If the output is
        to be fed to csv2timeseries_graphs afterwards, be sure to
        include 'depth' and 'elevation' - 'depth' is useful for gauges
        on land and 'elevation' appears in the plot legends.

    Outputs one csv file per gauge, written next to the sww file and
    named <out_name><name>.csv, e.g. gauge_point1.csv by default or
    myfile_2_point1.csv when out_name='myfile_2_'.  Each file carries
    a header row.  Note that any 'speed' column is really speed, not
    velocity.

    Usage: sww2csv_gauges(sww_file='test1.sww',
                          quantities = ['stage', 'elevation','depth','bearing'],
                          gauge_file='gauge.txt')

    All of the actual work is delegated to gauge.sww2csv.
    """

    # Deferred import: the gauge module does the heavy lifting.
    import gauge

    gauge.sww2csv(sww_file, gauge_file, out_name, quantities, verbose,
                  use_cache)
2083       
2084       
[6072]2085##
2086# @brief Get a wave height at a certain depth given wave height at another depth.
2087# @param d1 The first depth.
2088# @param d2 The second depth.
2089# @param h1 Wave ampitude at d1
2090# @param verbose True if this function is to be verbose.
2091# @return The wave height at d2.
def greens_law(d1, d2, h1, verbose=False):
    """Green's Law

    Approximate the wave amplitude at one depth from the amplitude at
    another, using the fourth root of the depth ratio:

        h2 = h1 (d1/d2)^(1/4), where d2 cannot equal 0.

    Inputs:

    d1, d2 - the two depths (both strictly positive)
    h1     - the wave amplitude at d1 (strictly positive)

    Returns h2, the derived amplitude at d2.

    Note, wave amplitude is equal to stage.
    """

    d1 = ensure_numeric(d1)
    d2 = ensure_numeric(d2)
    h1 = ensure_numeric(h1)

    # Validate all three inputs with a single data-driven loop; the
    # error text is identical to the historical per-input messages.
    for value, label in ((d1, 'the first depth, d1'),
                         (d2, 'the second depth, d2'),
                         (h1, 'the wave amplitude, h1')):
        if value <= 0.0:
            msg = '%s (%f), must be strictly positive' % (label, value)
            raise Exception(msg)

    h2 = h1 * (d1 / d2) ** 0.25

    assert h2 > 0

    return h2
2132       
2133
[6072]2134##
2135# @brief Get the square-root of a value.
2136# @param s The value to get the square-root of.
2137# @return The square-root of 's'.
def square_root(s):
    """Return the square root of s (thin wrapper around math.sqrt)."""
    root = sqrt(s)
    return root
[6072]2140
2141
Note: See TracBrowser for help on using the repository browser.