source: anuga_core/source/anuga/abstract_2d_finite_volumes/util.py @ 7693

Last change on this file since 7693 was 7693, checked in by hudson, 14 years ago

ticket 292: sww2csv_gauges needs to ignore gauges that are not in the domain.
Now, a warning message is output, instead of generating a meaningless file.

File size: 42.5 KB
RevLine 
[5897]1"""This module contains various auxiliary function used by pyvolution.
2
3It is also a clearing house for functions that may later earn a module
4of their own.
5"""
6
7import anuga.utilities.polygon
8import sys
9import os
10
11from os import remove, mkdir, access, F_OK, R_OK, W_OK, sep,getcwd
12from os.path import exists, basename, split,join
13from warnings import warn
14from shutil import copy
15
[7693]16from anuga.utilities.numerical_tools import ensure_numeric, angle
[7679]17
[5897]18from math import sqrt, atan, degrees
19
[6070]20# FIXME (Ole): Temporary short cuts -
21# FIXME (Ole): remove and update scripts where they are used
[5897]22from anuga.utilities.system_tools import get_revision_number
23from anuga.utilities.system_tools import store_version_info
24
[7317]25import anuga.utilities.log as log
[5897]26
[7276]27import numpy as num
[6086]28
[6145]29
def file_function(filename,
                  domain=None,
                  quantities=None,
                  interpolation_points=None,
                  time_thinning=1,
                  time_limit=None,
                  verbose=False,
                  use_cache=False,
                  boundary_polygon=None,
                  output_centroids=False):
    """Deprecated shim: delegate to the relocated implementation.

    All arguments are forwarded unchanged (positionally) to
    file_function.file_function.
    """
    from file_function import file_function as _impl

    forwarded = (filename, domain, quantities, interpolation_points,
                 time_thinning, time_limit, verbose, use_cache,
                 boundary_polygon, output_centroids)
    return _impl(*forwarded)
[5897]44
[6070]45##
46# @brief Replace multiple substrings in a string.
47# @param text The string to operate on.
48# @param dictionary A dict containing replacements, key->value.
49# @return The new string.
def multiple_replace(text, dictionary):
    """Replace every whole-word occurrence of each dictionary key in text.

    text:       String to be modified
    dictionary: Mapping of words that are to be substituted

    Python Cookbook 3.14 page 88 and page 90
    http://code.activestate.com/recipes/81330/
    """

    import re

    # Build one alternation pattern from all keys; each alternative is
    # anchored with \b on both sides so only entire words match.
    escaped_words = map(re.escape, dictionary.keys())
    pattern = re.compile(r'\b' + r'\b|\b'.join(escaped_words) + r'\b')

    # Each matched word is looked up in the dictionary for its replacement.
    def lookup(match):
        return dictionary[match.group(0)]

    return pattern.sub(lookup, text)
70
71
[6070]72##
73# @brief Apply arbitrary expressions to the values of a dict.
74# @param expression A string expression to apply.
75# @param dictionary The dictionary to apply the expression to.
def apply_expression_to_dictionary(expression, dictionary):
    """Apply arbitrary expression to values of dictionary

    Given an expression in terms of the keys, replace key by the
    corresponding values and evaluate.

    expression: Arbitrary, e.g. arithmetric, expression relating keys
                from dictionary.

    dictionary: Mapping between symbols in expression and objects that
                will be evaluated by expression.
                Values in dictionary must support operators given in
                expression e.g. by overloading

    Raises NameError or ValueError (re-raised from eval) when the
    substituted expression cannot be evaluated.

    Due to a limitation with numeric, this can not evaluate 0/0
    In general, the user can fix by adding 1e-30 to the numerator.
    SciPy core can handle this situation.

    NOTE(review): this uses eval() on the (substituted) expression string;
    do not pass expressions built from untrusted input.
    """

    import types
    import re
    # NOTE(review): 're' appears unused in this function - confirm before
    # removing, in case of doctest/exec side usage elsewhere.

    assert isinstance(expression, basestring)
    assert type(dictionary) == types.DictType

    #Convert dictionary values to textual representations suitable for eval
    # e.g. key 'h' becomes the string 'dictionary["h"]'
    D = {}
    for key in dictionary:
        D[key] = 'dictionary["%s"]' % key

    #Perform substitution of variables (whole-word replacement so that
    #e.g. key 'x' does not clobber 'xmomentum')
    expression = multiple_replace(expression, D)

    #Evaluate and return
    try:
        return eval(expression)
    except NameError, e:
        msg = 'Expression "%s" could not be evaluated: %s' % (expression, e)
        raise NameError, msg
    except ValueError, e:
        msg = 'Expression "%s" could not be evaluated: %s' % (expression, e)
        raise ValueError, msg
118
[6070]119
120##
121# @brief Format a float into a string.
122# @param value Float value to format.
123# @param format The format to use (%.2f is default).
124# @return The formatted float as a string.
def get_textual_float(value, format = '%.2f'):
    """Get textual representation of floating point numbers
    and accept None as valid entry

    value:  a float, None, or a sequence of floats (formatted recursively
            as '(v1, v2, ...)')
    format: a format string - default = '%.2f'

    Returns the formatted string.
    Raises ValueError if value is neither convertible to float nor a
    sequence of more than one element.
    """

    if value is None:
        return 'None'
    else:
        try:
            float(value)
        except (TypeError, ValueError):
            # Not a scalar - maybe this is a vector
            if len(value) > 1:
                s = '('
                for v in value:
                    s += get_textual_float(v, format) + ', '

                s = s[:-2] + ')' # Strip trailing comma and close
                return s
            else:
                # Was previously a string exception ("raise 'msg', value"),
                # which is illegal in Python 2.6+; raise a real exception.
                raise ValueError('Illegal input to get_textual_float: %s'
                                 % str(value))
        else:
            return format % float(value)
[5897]150
151
[6070]152#################################################################################
153# OBSOLETE STUFF
154#################################################################################
[5897]155
[6070]156# @note TEMP
def angle(v1, v2):
    """Deprecated: use anuga.utilities.numerical_tools.angle instead."""

    from anuga.utilities import numerical_tools

    warn('angle has moved from util.py.  '
         'Please use "from anuga.utilities.numerical_tools import angle"',
         DeprecationWarning)

    return numerical_tools.angle(v1, v2)
[6070]167
[5897]168   
[6070]169# @note TEMP
def anglediff(v0, v1):
    """Deprecated: use anuga.utilities.numerical_tools.anglediff instead."""

    from anuga.utilities import numerical_tools

    warn('anglediff has moved from util.py.  '
         'Please use "from anuga.utilities.numerical_tools import anglediff"',
         DeprecationWarning)

    return numerical_tools.anglediff(v0, v1)
180
[6070]181# @note TEMP
def point_on_line(*args, **kwargs):
    """Temporary interface to new location.

    Forwards all arguments to anuga.utilities.polygon.point_on_line.
    """

    msg = 'point_on_line has moved from util.py.  '
    msg += 'Please use "from anuga.utilities.polygon import point_on_line"'
    warn(msg, DeprecationWarning)

    # 'import anuga.utilities.polygon' at the top of this file binds the
    # name 'anuga', not 'utilities', so the previous bare
    # 'utilities.polygon.point_on_line' raised NameError when called.
    return anuga.utilities.polygon.point_on_line(*args, **kwargs)
[6070]190
[5897]191   
[6070]192# @note TEMP
def inside_polygon(*args, **kwargs):
    """Temporary interface to new location.

    Forwards all arguments to anuga.utilities.polygon.inside_polygon.
    """

    log.critical('inside_polygon has moved from util.py.')
    log.critical('Please use '
                 '"from anuga.utilities.polygon import inside_polygon"')

    # 'import anuga.utilities.polygon' binds 'anuga', not 'utilities';
    # the former bare 'utilities.polygon...' reference raised NameError.
    return anuga.utilities.polygon.inside_polygon(*args, **kwargs)
[6070]201
[5897]202   
[6070]203# @note TEMP
def outside_polygon(*args, **kwargs):
    """Temporary interface to new location.

    Forwards all arguments to anuga.utilities.polygon.outside_polygon.
    """

    log.critical('outside_polygon has moved from util.py.')
    log.critical('Please use '
                 '"from anuga.utilities.polygon import outside_polygon"')

    # 'import anuga.utilities.polygon' binds 'anuga', not 'utilities';
    # the former bare 'utilities.polygon...' reference raised NameError.
    return anuga.utilities.polygon.outside_polygon(*args, **kwargs)
212
[7685]213   
[6070]214# @note TEMP
def read_polygon(*args, **kwargs):
    """Temporary interface to new location.

    Forwards all arguments to anuga.utilities.polygon.read_polygon.
    """

    log.critical('read_polygon has moved from util.py.')
    log.critical('Please use '
                 '"from anuga.utilities.polygon import read_polygon"')

    # 'import anuga.utilities.polygon' binds 'anuga', not 'utilities';
    # the former bare 'utilities.polygon...' reference raised NameError.
    return anuga.utilities.polygon.read_polygon(*args, **kwargs)
223
224
[6070]225# @note TEMP
def populate_polygon(*args, **kwargs):
    """Temporary interface to new location.

    Forwards all arguments to anuga.utilities.polygon.populate_polygon.
    """

    log.critical('populate_polygon has moved from util.py.')
    log.critical('Please use '
                 '"from anuga.utilities.polygon import populate_polygon"')

    # 'import anuga.utilities.polygon' binds 'anuga', not 'utilities';
    # the former bare 'utilities.polygon...' reference raised NameError.
    return anuga.utilities.polygon.populate_polygon(*args, **kwargs)
234
235
[6070]236#################################################################################
237# End of obsolete stuff ?
238#################################################################################
239
240# @note TEMP
def start_screen_catcher(dir_name, myid='', numprocs='', extra_info='',
                         verbose=False):
    """Temporary interface to new location.

    Forwards all arguments to
    anuga.shallow_water.data_manager.start_screen_catcher.
    """
    from anuga.shallow_water.data_manager import start_screen_catcher \
         as dm_start_screen_catcher

    log.critical('start_screen_catcher has moved from util.py.')
    log.critical('Please use "from anuga.shallow_water.data_manager import '
                 'start_screen_catcher"')

    # Forward the caller's arguments (previously they were discarded and
    # hard-coded defaults myid='', numprocs='', extra_info='',
    # verbose=False were passed instead).
    return dm_start_screen_catcher(dir_name, myid=myid, numprocs=numprocs,
                                   extra_info=extra_info, verbose=verbose)
[5897]253
254
255
[6070]256##
257# @brief Read gauge info from a file.
258# @param filename The name of the file to read.
259# @return A (gauges, gaugelocation, elev) tuple.
def get_gauges_from_file(filename):
    """Read gauge info from a file.

    filename: the name of the gauge file to read.

    Returns a (gauges, gaugelocation, elev) tuple.

    NOTE(review): 'gauge_get_from_file' is not defined or imported in the
    visible portion of this module - confirm it is brought into scope
    elsewhere (e.g. a star import), otherwise this raises NameError.
    """
    return gauge_get_from_file(filename)
[6070]262
263
264##
265# @brief Check that input quantities in quantity list are legal.
266# @param quantity Quantity list to check.
267# @note Raises an exception of list is not legal.
def check_list(quantity):
    """Check that input quantities in quantity list are possible.

    quantity: list of quantity name strings.  Modified IN PLACE so that
              every name is lower-cased before checking.

    Raises Exception if any name is not one of the known quantities.
    """
    all_quantity = ['stage', 'depth', 'momentum', 'xmomentum',
                    'ymomentum', 'speed', 'bearing', 'elevation']

    # convert all quantity names to lowercase (in place - callers may
    # rely on the normalised list)
    for i, name in enumerate(quantity):
        quantity[i] = name.lower()

    # check that all names in 'quantity' appear in 'all_quantity'
    p = list(set(quantity).difference(set(all_quantity)))
    if len(p) != 0:
        msg = 'Quantities %s do not exist - please try again' % p
        # raise Exception(msg) is valid in both Python 2 and 3, unlike
        # the former 'raise Exception, msg' form.
        raise Exception(msg)
285
[6070]286
287##
288# @brief Calculate velocity bearing from North.
289# @param uh ??
290# @param vh ??
291# @return The calculated bearing.
def calc_bearing(uh, vh):
    """Calculate velocity bearing from North.

    uh: x component of the velocity (or momentum) vector.
    vh: y component of the velocity (or momentum) vector.

    Returns the bearing in degrees.
    """
    # Import the real angle() directly: the deprecated module-level
    # wrapper angle() defined earlier in this file shadows the import at
    # the top of the module, so calling the bare name here would emit a
    # DeprecationWarning on every call while returning the same value.
    from anuga.utilities.numerical_tools import angle as nt_angle

    # Reference direction [0, -1]; see the FIXME history above - result
    # is converted from radians to degrees.
    return degrees(nt_angle([uh, vh], [0, -1]))
[5897]304
305
306# FIXME (DSG): Add unit test, make general, not just 2 files,
307# but any number of files.
[6070]308##
309# @brief ??
310# @param dir_name ??
311# @param filename1 ??
312# @param filename2 ??
313# @return ??
314# @note TEMP
def copy_code_files(dir_name, filename1, filename2):
    """Deprecated shim: forwards to
    anuga.shallow_water.data_manager.copy_code_files.
    """

    from anuga.shallow_water import data_manager

    log.critical('copy_code_files has moved from util.py.')
    log.critical('Please use "from anuga.shallow_water.data_manager import '
                 'copy_code_files"')

    return data_manager.copy_code_files(dir_name, filename1, filename2)
325
326
[6070]327##
328# @brief Create a nested sub-directory path.
329# @param root_directory The base diretory path.
330# @param directories An iterable of sub-directory names.
331# @return The final joined directory path.
332# @note If each sub-directory doesn't exist, it will be created.
def add_directories(root_directory, directories):
    """Create (if needed) a nested sub-directory path under root_directory.

    root_directory: the base directory path.
    directories:    an iterable of sub-directory names; the first is joined
                    to root_directory, the second to the accumulating path,
                    and so on.  Each level is created if it does not exist.

    Return the path of the final directory.

    This is handy for specifying and creating a directory where data will go.
    """
    # 'path' instead of the former 'dir', which shadowed the builtin.
    path = root_directory
    for new_dir in directories:
        path = os.path.join(path, new_dir)
        if not access(path, F_OK):
            mkdir(path)
    return path
348
[6070]349
350##
351# @brief
352# @param filename
353# @param separator_value
354# @return
355# @note TEMP
def get_data_from_file(filename, separator_value=','):
    """Temporary interface to new location.

    Forwards to anuga.shallow_water.data_manager.get_data_from_file.
    """
    from anuga.shallow_water.data_manager import \
                        get_data_from_file as dm_get_data_from_file
    log.critical('get_data_from_file has moved from util.py')
    log.critical('Please use "from anuga.shallow_water.data_manager import '
                 'get_data_from_file"')

    # Forward the caller's separator (previously hard-coded to ',',
    # silently ignoring the separator_value argument).
    return dm_get_data_from_file(filename, separator_value=separator_value)
365
[6070]366
367##
368# @brief
369# @param verbose
370# @param kwargs
371# @return
372# @note TEMP
def store_parameters(verbose=False, **kwargs):
    """Temporary interface to new location.

    Forwards to anuga.shallow_water.data_manager.store_parameters.
    """
    from anuga.shallow_water.data_manager \
                    import store_parameters as dm_store_parameters
    log.critical('store_parameters has moved from util.py.')
    log.critical('Please use "from anuga.shallow_water.data_manager '
                 'import store_parameters"')

    # Forward the caller's verbose flag (previously hard-coded to False,
    # silently ignoring the verbose argument).
    return dm_store_parameters(verbose=verbose, **kwargs)
383
[6070]384
385##
386# @brief Remove vertices that are not associated with any triangle.
387# @param verts An iterable (or array) of points.
388# @param triangles An iterable of 3 element tuples.
389# @param number_of_full_nodes ??
390# @return (verts, triangles) where 'verts' has been updated.
def remove_lone_verts(verts, triangles, number_of_full_nodes=None):
    """Removes vertices that are not associated with any triangles.

    verts is a list/array of points.
    triangles is a list of 3 element tuples.  Each tuple represents a triangle.
    number_of_full_nodes relate to parallelism when a mesh has an
        extra layer of ghost points.
        NOTE(review): this argument is accepted but never used in this
        function - confirm whether callers rely on it.

    Returns (verts, triangles) where lone vertices have been removed from
    verts and the indices in triangles renumbered accordingly.
    """

    verts = ensure_numeric(verts)
    triangles = ensure_numeric(triangles)
    
    N = len(verts)
    
    # initialise the array to easily find the index of the first loner
    # ie, if N=3 -> [6,5,4]
    # Every entry starts > N; entries for referenced vertices are then
    # overwritten with their own index (< N), so values > N mark loners.
    loners=num.arange(2*N, N, -1)
    for t in triangles:
        for vert in t:
            loners[vert]= vert # all non-loners will have loners[i]=i

    lone_start = 2*N - max(loners) # The index of the first loner

    if lone_start-1 == N:
        # no loners
        pass
    elif min(loners[lone_start:N]) > N:
        # All the loners are at the end of the vert array
        verts = verts[0:lone_start]
    else:
        # change the loners list so it can be used to modify triangles
        # Remove the loners from verts
        # Could've used X=compress(less(loners,N),loners)
        # verts=num.take(verts,X,axis=0)  to Remove the loners from verts
        # but I think it would use more memory
        new_i = lone_start    # point at first loner - 'shuffle down' target
        for i in range(lone_start, N):
            if loners[i] >= N:    # [i] is a loner, leave alone
                pass
            else:        # a non-loner, move down
                loners[i] = new_i
                verts[new_i] = verts[i]
                new_i += 1
        verts = verts[0:new_i]

        # Modify the triangles
        # Remap each vertex index through 'loners' so triangles refer to
        # the shuffled-down vertex positions.
        triangles = num.choose(triangles,loners)
    return verts, triangles
439
[6072]440
441##
442# @brief Compute centroid values from vertex values
443# @param x Values at vertices of triangular mesh.
444# @param triangles Nx3 integer array pointing to vertex information.
445# @return [N] array of centroid values.
def get_centroid_values(x, triangles):
    """Compute centroid values from vertex values
    
    x: Values at vertices of triangular mesh
    triangles: Nx3 integer array pointing to vertex information
    for each of the N triangles. Elements of triangles are
    indices into x

    Returns an array of N centroid values (average of the three
    vertex values of each triangle).
    """

    # Builtin 'float' is the type the old 'num.float' alias pointed at;
    # the alias was removed in numpy >= 1.24, the builtin works everywhere.
    xc = num.zeros(triangles.shape[0], float) # Space for centroid info

    for k in range(triangles.shape[0]):
        # Indices of vertices
        i0 = triangles[k][0]
        i1 = triangles[k][1]
        i2 = triangles[k][2]

        xc[k] = (x[i0] + x[i1] + x[i2])/3

    return xc
466
[6072]467
[6070]468# @note TEMP
def make_plots_from_csv_file(directories_dic={dir:['gauge', 0, 0]},
                                output_dir='',
                                base_name='',
                                plot_numbers=['3-5'],
                                quantities=['speed','stage','momentum'],
                                assess_all_csv_files=True,
                                extra_plot_name='test'):
    """Obsolete: replaced by csv2timeseries_graphs.

    Always raises Exception directing callers to the replacement; the
    arguments are accepted only for signature compatibility.
    """
    msg = 'make_plots_from_csv_file has been replaced by csv2timeseries_graphs '
    msg += 'Please use "from anuga.abstract_2d_finite_volumes.util import ' \
           'csv2timeseries_graphs"'
    # raise Exception(msg) is valid in both Python 2 and 3, unlike the
    # former 'raise Exception, msg' form.  The old call to
    # csv2timeseries_graphs after the raise was unreachable and is removed.
    raise Exception(msg)
[6072]489
490
491##
492# @brief Plot time series from CSV files.
493# @param directories_dic
494# @param output_dir
495# @param base_name
496# @param plot_numbers
497# @param quantities
498# @param extra_plot_name
499# @param assess_all_csv_files
500# @param create_latex
501# @param verbose
502# @note Assumes that 'elevation' is in the CSV file(s).
[5897]503def csv2timeseries_graphs(directories_dic={},
[6072]504                          output_dir='',
505                          base_name=None,
506                          plot_numbers='',
507                          quantities=['stage'],
508                          extra_plot_name='',
509                          assess_all_csv_files=True,
510                          create_latex=False,
511                          verbose=False):
[5897]512                               
513    """
514    Read in csv files that have the right header information and
515    plot time series such as Stage, Speed, etc. Will also plot several
516    time series on one plot. Filenames must follow this convention,
517    <base_name><plot_number>.csv eg gauge_timeseries3.csv
518   
519    NOTE: relies that 'elevation' is in the csv file!
520
521    Each file represents a location and within each file there are
522    time, quantity columns.
523   
524    For example:   
525    if "directories_dic" defines 4 directories and in each directories
526    there is a csv files corresponding to the right "plot_numbers",
527    this will create a plot with 4 lines one for each directory AND
528    one plot for each "quantities".  ??? FIXME: unclear.
529   
530    Usage:
531        csv2timeseries_graphs(directories_dic={'slide'+sep:['Slide',0, 0],
532                                       'fixed_wave'+sep:['Fixed Wave',0,0]},
533                            output_dir='fixed_wave'+sep,
534                            base_name='gauge_timeseries_',
535                            plot_numbers='',
536                            quantities=['stage','speed'],
537                            extra_plot_name='',
538                            assess_all_csv_files=True,                           
539                            create_latex=False,
540                            verbose=True)
541            this will create one plot for stage with both 'slide' and
542            'fixed_wave' lines on it for stage and speed for each csv
543            file with 'gauge_timeseries_' as the prefix. The graghs
544            will be in the output directory 'fixed_wave' and the graph
545            axis will be determined by assessing all the
546   
547    ANOTHER EXAMPLE
548        new_csv2timeseries_graphs(directories_dic={'slide'+sep:['Slide',0, 0],
549                                       'fixed_wave'+sep:['Fixed Wave',0,0]},
550                            output_dir='fixed_wave'+sep,
551                            base_name='gauge_timeseries_',
552                            plot_numbers=['1-3'],
553                            quantities=['stage','speed'],
554                            extra_plot_name='',
555                            assess_all_csv_files=False,                           
556                            create_latex=False,
557                            verbose=True)
558        This will plot csv files called gauge_timeseries_1.csv and
559        gauge_timeseries3.csv from both 'slide' and 'fixed_wave' directories
560        to 'fixed_wave'. There will be 4 plots created two speed and two stage
561        one for each csv file. There will be two lines on each of these plots.
562        And the axis will have been determined from only these files, had
563        assess_all_csv_files = True all csv file with 'gauges_timeseries_' prefix
564        would of been assessed.
565   
566    ANOTHER EXAMPLE   
567         csv2timeseries_graphs({'J:'+sep+'anuga_validation'+sep:['new',20,-.1],
568                                   'J:'+sep+'conical_island'+sep:['test',0,0]},
569                                   output_dir='',
570                                   plot_numbers=['1','3'],
571                                   quantities=['stage','depth','bearing'],
572                                   base_name='gauge_b',
573                                   assess_all_csv_files=True,
574                                  verbose=True)   
575       
[6072]576            This will produce one plot for each quantity (therefore 3) in the
577            current directory, each plot will have 2 lines on them. The first
578            plot named 'new' will have the time offseted by 20secs and the stage
579            height adjusted by -0.1m
[5897]580       
581    Inputs:
582        directories_dic: dictionary of directory with values (plot
583                         legend name for directory), (start time of
584                         the time series) and the (value to add to
585                         stage if needed). For example
[6072]586                         {dir1:['Anuga_ons',5000, 0],
587                          dir2:['b_emoth',5000,1.5],
588                          dir3:['b_ons',5000,1.5]}
[5897]589                         Having multiple directories defined will plot them on
[6072]590                         one plot, therefore there will be 3 lines on each of
591                         these plot. If you only want one line per plot call
592                         csv2timeseries_graph separately for each directory,
593                         eg only have one directory in the 'directories_dic' in
594                         each call.
[5897]595                         
[6072]596        output_dir: directory for the plot outputs. Only important to define when
597                    you have more than one directory in your directories_dic, if
598                    you have not defined it and you have multiple directories in
599                    'directories_dic' there will be plots in each directory,
600                    however only one directory will contain the complete
601                    plot/graphs.
[5897]602       
[6072]603        base_name: Is used a couple of times.
604                   1) to find the csv files to be plotted if there is no
605                      'plot_numbers' then csv files with 'base_name' are plotted
606                   2) in the title of the plots, the length of base_name is
607                      removed from the front of the filename to be used in the
608                      title.
[5897]609                   This could be changed if needed.
610                   Note is ignored if assess_all_csv_files=True
611       
612        plot_numbers: a String list of numbers to plot. For example
613                      [0-4,10,15-17] will read and attempt to plot
[6072]614                      the follow 0,1,2,3,4,10,15,16,17
615                      NOTE: if no plot numbers this will create one plot per
616                            quantity, per gauge
617
618        quantities: Will get available quantities from the header in the csv
619                    file.  Quantities must be one of these.
[5897]620                    NOTE: ALL QUANTITY NAMES MUST BE lower case!
621                   
622        extra_plot_name: A string that is appended to the end of the
623                         output filename.
624                   
625        assess_all_csv_files: if true it will read ALL csv file with
626                             "base_name", regardless of 'plot_numbers'
627                              and determine a uniform set of axes for
628                              Stage, Speed and Momentum. IF FALSE it
629                              will only read the csv file within the
630                             'plot_numbers'
631                             
632        create_latex: NOT IMPLEMENTED YET!! sorry Jane....
633       
634    OUTPUTS: saves the plots to
635              <output_dir><base_name><plot_number><extra_plot_name>.png
636    """
[6072]637
[5897]638    try: 
639        import pylab
640    except ImportError:
641        msg='csv2timeseries_graphs needs pylab to be installed correctly'
642        raise msg
643            #ANUGA don't need pylab to work so the system doesn't
644            #rely on pylab being installed
645        return
646
647    from os import sep
648    from anuga.shallow_water.data_manager import \
649                               get_all_files_with_extension, csv2dict
650
651    seconds_in_hour = 3600
652    seconds_in_minutes = 60
653   
654    quantities_label={}
655#    quantities_label['time'] = 'time (hours)'
656    quantities_label['time'] = 'time (minutes)'
657    quantities_label['stage'] = 'wave height (m)'
658    quantities_label['speed'] = 'speed (m/s)'
659    quantities_label['momentum'] = 'momentum (m^2/sec)'
660    quantities_label['depth'] = 'water depth (m)'
661    quantities_label['xmomentum'] = 'momentum (m^2/sec)'
662    quantities_label['ymomentum'] = 'momentum (m^2/sec)'
663    quantities_label['bearing'] = 'degrees (o)'
664    quantities_label['elevation'] = 'elevation (m)'
665   
666    if extra_plot_name != '':
[6072]667        extra_plot_name = '_' + extra_plot_name
[5897]668
669    new_plot_numbers=[]
670    #change plot_numbers to list, eg ['0-4','10']
671    #to ['0','1','2','3','4','10']
672    for i, num_string in enumerate(plot_numbers):
673        if '-' in num_string: 
674            start = int(num_string[:num_string.rfind('-')])
[6072]675            end = int(num_string[num_string.rfind('-') + 1:]) + 1
[5897]676            for x in range(start, end):
677                new_plot_numbers.append(str(x))
678        else:
679            new_plot_numbers.append(num_string)
680
681    #finds all the files that fit the specs provided and return a list of them
682    #so to help find a uniform max and min for the plots...
683    list_filenames=[]
684    all_csv_filenames=[]
[7317]685    if verbose: log.critical('Determining files to access for axes ranges.')
[5897]686   
687    for i,directory in enumerate(directories_dic.keys()):
688        all_csv_filenames.append(get_all_files_with_extension(directory,
[6072]689                                                              base_name, '.csv'))
[5897]690
691        filenames=[]
692        if plot_numbers == '': 
693            list_filenames.append(get_all_files_with_extension(directory,
[6072]694                                                               base_name,'.csv'))
[5897]695        else:
696            for number in new_plot_numbers:
[6072]697                filenames.append(base_name + number)
[5897]698            list_filenames.append(filenames)
699
700    #use all the files to get the values for the plot axis
701    max_start_time= -1000.
702    min_start_time = 100000 
703   
[7317]704    if verbose: log.critical('Determining uniform axes')
[6072]705
[5897]706    #this entire loop is to determine the min and max range for the
707    #axes of the plots
708
709#    quantities.insert(0,'elevation')
710    quantities.insert(0,'time')
711
712    directory_quantity_value={}
713#    quantity_value={}
714    min_quantity_value={}
715    max_quantity_value={}
716
717    for i, directory in enumerate(directories_dic.keys()):
[6072]718        filename_quantity_value = {}
719        if assess_all_csv_files == False:
[5897]720            which_csv_to_assess = list_filenames[i]
721        else:
722            #gets list of filenames for directory "i"
723            which_csv_to_assess = all_csv_filenames[i]
724       
725        for j, filename in enumerate(which_csv_to_assess):
[6072]726            quantity_value = {}
[5897]727
[6072]728            dir_filename = join(directory,filename)
729            attribute_dic, title_index_dic = csv2dict(dir_filename + '.csv')
[5897]730            directory_start_time = directories_dic[directory][1]
731            directory_add_tide = directories_dic[directory][2]
732
[7317]733            if verbose: log.critical('reading: %s.csv' % dir_filename)
[6072]734
[5897]735            #add time to get values
736            for k, quantity in enumerate(quantities):
[6072]737                quantity_value[quantity] = [float(x) for
738                                                x in attribute_dic[quantity]]
[5897]739
740                #add tide to stage if provided
741                if quantity == 'stage':
[7276]742                     quantity_value[quantity] = num.array(quantity_value[quantity],
743                                                          num.float) + directory_add_tide
[5897]744
745                #condition to find max and mins for all the plots
746                # populate the list with something when i=0 and j=0 and
747                # then compare to the other values to determine abs max and min
748                if i==0 and j==0:
749                    min_quantity_value[quantity], \
[6072]750                        max_quantity_value[quantity] = \
751                            get_min_max_values(quantity_value[quantity])
[5897]752
753                    if quantity != 'time':
[6072]754                        min_quantity_value[quantity] = \
755                            min_quantity_value[quantity] *1.1
756                        max_quantity_value[quantity] = \
757                            max_quantity_value[quantity] *1.1
[5897]758                else:
759                    min, max = get_min_max_values(quantity_value[quantity])
760               
[6072]761                    # min and max are multipled by "1+increase_axis" to get axes
762                    # that are slighty bigger than the max and mins
763                    # so the plots look good.
[5897]764
765                    increase_axis = (max-min)*0.05
[6072]766                    if min <= min_quantity_value[quantity]:
[5897]767                        if quantity == 'time': 
[6072]768                            min_quantity_value[quantity] = min
[5897]769                        else:
770                            if round(min,2) == 0.00:
[6072]771                                min_quantity_value[quantity] = -increase_axis
772#                                min_quantity_value[quantity] = -2.
773                                #min_quantity_value[quantity] = \
774                                #    -max_quantity_value[quantity]*increase_axis
[5897]775                            else:
[6072]776#                                min_quantity_value[quantity] = \
777#                                    min*(1+increase_axis)
[5897]778                                min_quantity_value[quantity]=min-increase_axis
779                   
[6072]780                    if max > max_quantity_value[quantity]: 
[5897]781                        if quantity == 'time': 
[6072]782                            max_quantity_value[quantity] = max
[5897]783                        else:
[6072]784                            max_quantity_value[quantity] = max + increase_axis
[5897]785#                            max_quantity_value[quantity]=max*(1+increase_axis)
786
787            #set the time... ???
788            if min_start_time > directory_start_time: 
789                min_start_time = directory_start_time
790            if max_start_time < directory_start_time: 
791                max_start_time = directory_start_time
792           
793            filename_quantity_value[filename]=quantity_value
794           
795        directory_quantity_value[directory]=filename_quantity_value
796   
797    #final step to unifrom axis for the graphs
798    quantities_axis={}
799   
800    for i, quantity in enumerate(quantities):
[6072]801        quantities_axis[quantity] = (float(min_start_time) \
802                                         / float(seconds_in_minutes),
803                                     (float(max_quantity_value['time']) \
804                                          + float(max_start_time)) \
805                                              / float(seconds_in_minutes),
806                                     min_quantity_value[quantity],
807                                     max_quantity_value[quantity])
808
[5897]809        if verbose and (quantity != 'time' and quantity != 'elevation'): 
[7317]810            log.critical('axis for quantity %s are x:(%s to %s)%s '
811                         'and y:(%s to %s)%s' 
812                         % (quantity, quantities_axis[quantity][0],
813                            quantities_axis[quantity][1],
814                            quantities_label['time'],
815                            quantities_axis[quantity][2],
816                            quantities_axis[quantity][3],
817                            quantities_label[quantity]))
[5897]818
819    cstr = ['b', 'r', 'g', 'c', 'm', 'y', 'k']
820
[7317]821    if verbose: log.critical('Now start to plot')
[5897]822   
823    i_max = len(directories_dic.keys())
[6072]824    legend_list_dic = {}
825    legend_list = []
[5897]826    for i, directory in enumerate(directories_dic.keys()):
[7317]827        if verbose: log.critical('Plotting in %s %s'
828                                 % (directory, new_plot_numbers))
[6072]829
830        # FIXME THIS SORT IS VERY IMPORTANT
831        # Without it the assigned plot numbers may not work correctly
832        # there must be a better way
[5897]833        list_filenames[i].sort()
834        for j, filename in enumerate(list_filenames[i]):
[7317]835            if verbose: log.critical('Starting %s' % filename)
[6072]836
[5897]837            directory_name = directories_dic[directory][0]
838            directory_start_time = directories_dic[directory][1]
839            directory_add_tide = directories_dic[directory][2]
840           
[6072]841            # create an if about the start time and tide height if don't exist
842            attribute_dic, title_index_dic = csv2dict(directory + sep
843                                                      + filename + '.csv')
[5897]844            #get data from dict in to list
845            #do maths to list by changing to array
[6145]846            t = (num.array(directory_quantity_value[directory][filename]['time'])
[6072]847                     + directory_start_time) / seconds_in_minutes
[5897]848
849            #finds the maximum elevation, used only as a test
850            # and as info in the graphs
851            max_ele=-100000
852            min_ele=100000
853            elevation = [float(x) for x in attribute_dic["elevation"]]
854           
855            min_ele, max_ele = get_min_max_values(elevation)
856           
857            if min_ele != max_ele:
[7317]858                log.critical("Note! Elevation changes in %s" % dir_filename)
[5897]859
[6072]860            # creates a dictionary with keys that is the filename and attributes
861            # are a list of lists containing 'directory_name' and 'elevation'.
862            # This is used to make the contents for the legends in the graphs,
863            # this is the name of the model and the elevation.  All in this
864            # great one liner from DG. If the key 'filename' doesn't exist it
865            # creates the entry if the entry exist it appends to the key.
[5897]866
[6072]867            legend_list_dic.setdefault(filename,[]) \
868                .append([directory_name, round(max_ele, 3)])
869
870            # creates a LIST for the legend on the last iteration of the
871            # directories which is when "legend_list_dic" has been fully
872            # populated. Creates a list of strings which is used in the legend
[5897]873            # only runs on the last iteration for all the gauges(csv) files
874            # empties the list before creating it
[6072]875
876            if i == i_max - 1:
877                legend_list = []
[5897]878   
879                for name_and_elevation in legend_list_dic[filename]:
[6072]880                    legend_list.append('%s (elevation = %sm)'\
881                                       % (name_and_elevation[0],
882                                          name_and_elevation[1]))
[5897]883           
884            #skip time and elevation so it is not plotted!
885            for k, quantity in enumerate(quantities):
886                if quantity != 'time' and quantity != 'elevation':
[6171]887                    pylab.figure(int(k*100+j))
[5897]888                    pylab.ylabel(quantities_label[quantity])
[6072]889                    pylab.plot(t,
890                               directory_quantity_value[directory]\
891                                                       [filename][quantity],
892                               c = cstr[i], linewidth=1)
[5897]893                    pylab.xlabel(quantities_label['time'])
894                    pylab.axis(quantities_axis[quantity])
895                    pylab.legend(legend_list,loc='upper right')
896                   
[6072]897                    pylab.title('%s at %s gauge'
898                                % (quantity, filename[len(base_name):]))
899
[5897]900                    if output_dir == '':
[6072]901                        figname = '%s%s%s_%s%s.png' \
902                                  % (directory, sep, filename, quantity,
903                                     extra_plot_name)
[5897]904                    else:
[6072]905                        figname = '%s%s%s_%s%s.png' \
906                                  % (output_dir, sep, filename, quantity,
907                                     extra_plot_name)
908
[7317]909                    if verbose: log.critical('saving figure here %s' % figname)
[6072]910
[5897]911                    pylab.savefig(figname)
912           
[7317]913    if verbose: log.critical('Closing all plots')
[6072]914
[5897]915    pylab.close('all')
916    del pylab
[6072]917
[7317]918    if verbose: log.critical('Finished closing plots')
[5897]919
[6072]920##
921# @brief Return min and max of an iterable.
922# @param list The iterable to return min & max of.
923# @return (min, max) of 'list'.
def get_min_max_values(list=None):
    """Return the minimum and maximum values of an iterable.

    list  the iterable to scan (must be provided and non-empty)

    Returns the tuple (min, max).

    Raises Exception if no iterable is provided.
    """

    # NOTE: parameter is named 'list' (shadowing the builtin) for backward
    # compatibility with existing keyword callers.
    # Use 'is None' (identity) rather than '== None'; previously a None
    # argument was only logged and then crashed inside min() with an
    # unhelpful TypeError - now we raise explicitly.
    if list is None:
        msg = 'List must be provided'
        log.critical(msg)
        raise Exception(msg)

    return min(list), max(list)
932
933
##
# @brief Report maximum inundation (runup) near each gauge point in a CSV file.
# @param gauge_filename CSV gauge file with 'x' and 'y' columns.
# @param sww_filename SWW file to interrogate.
# @param runup_filename Name of CSV file to report into.
# @param size Half-width of the square search region around each gauge.
# @param verbose True if this function is to be verbose.
def get_runup_data_for_locations_from_file(gauge_filename,
                                           sww_filename,
                                           runup_filename,
                                           size=10,
                                           verbose=False):
    """Report maximum inundation height near each gauge point.

    gauge_filename  CSV file with header containing columns 'x' and 'y'
    sww_filename    SWW file to search for the 'max_inundation_height'
    runup_filename  output CSV file with rows 'easting,northing,runup'
    size            half-width of the square search region around each gauge
    verbose         True if this function is to be verbose

    For each gauge position a square of side 2*size centred on the gauge is
    searched for the maximum inundation, and the location and height of that
    maximum are appended to 'runup_filename'.

    WARNING: NO TESTS!
    """

    from anuga.shallow_water.data_manager import get_all_directories_with_name,\
                                                 get_maximum_inundation_data,\
                                                 csv2dict

    # Write the header, truncating any existing output file.
    # (Renamed handle from 'file' to avoid shadowing the builtin.)
    outfile = open(runup_filename, "w")
    outfile.write("easting,northing,runup \n ")
    outfile.close()

    # read gauge csv file to dictionary
    attribute_dic, title_index_dic = csv2dict(gauge_filename)
    northing = [float(x) for x in attribute_dic["y"]]
    easting = [float(x) for x in attribute_dic["x"]]

    log.critical('Reading %s' % sww_filename)

    for i, x in enumerate(northing):
        # square search region of side 2*size centred on the gauge
        poly = [[int(easting[i]+size), int(northing[i]+size)],
                [int(easting[i]+size), int(northing[i]-size)],
                [int(easting[i]-size), int(northing[i]-size)],
                [int(easting[i]-size), int(northing[i]+size)]]

        run_up, x_y = get_maximum_inundation_data(filename=sww_filename,
                                                  polygon=poly,
                                                  verbose=False)

        # if no runup, report 0 instead of None
        # ('is None' identity test replaces the buggy '== None')
        if run_up is None: run_up = 0
        if x_y is None: x_y = [0, 0]

        if verbose:
            log.critical('maximum inundation runup near %s is %s meters'
                         % (x_y, run_up))

        # Append each result immediately so partial output survives a crash.
        outfile = open(runup_filename, "a")
        outfile.write('%s,%s,%s \n' % (x_y[0], x_y[1], run_up))
        outfile.close()
992
##
# @brief Write a time-series CSV file for each gauge location from an sww file.
# @param sww_file Path to the sww file to interpolate from.
# @param gauge_file CSV file of gauge names and locations.
# @param out_name Prefix for output file names (default 'gauge_').
# @param quantities List of quantity names to export.
# @param verbose True if this function is to be verbose.
# @param use_cache True if caching is to be used.
def sww2csv_gauges(sww_file,
                   gauge_file,
                   out_name='gauge_',
                   quantities=['stage', 'depth', 'elevation',
                               'xmomentum', 'ymomentum'],
                   verbose=False,
                   use_cache=True):
    """Interpolate quantities from an sww file at gauge locations,
    writing one CSV time series per gauge.

    Deprecated shim: the implementation now lives in the 'gauge' module;
    this wrapper simply forwards all arguments.

    Inputs:
        sww_file: path to any sww file.

        gauge_file: CSV file of gauge positions in the format
            name, easting, northing, elevation
            point1, 100.3, 50.2, 10.0
            point2, 10.3, 70.3, 78.0
        Column order may vary, but the header names (e.g. 'easting',
        'elevation') must match exactly and be all lower case.

        out_name: prefix for the output file names (default 'gauge_').

        NOTE: if the resulting CSVs are fed to csv2timeseries_graphs,
        export the quantities 'depth' and 'elevation' - 'depth' is useful
        for gauges on land and 'elevation' is used automatically in the
        plot legends.

    Outputs:
        One file per gauge location, written next to 'sww_file' and named
            <out_name><name>.csv
        e.g. gauge_point1.csv (default prefix), or
             myfile_2_point1.csv when out_name='myfile_2_'.
        Each file carries a header row.

        Note that the momentum-derived output is really speed, not
        velocity.

    Usage: sww2csv_gauges(sww_file='test1.sww',
                          quantities=['stage', 'elevation',
                                      'depth', 'bearing'],
                          gauge_file='gauge.txt')
    """
    # Import lazily so this legacy module does not pay for the gauge
    # module unless the wrapper is actually called.
    from gauge import sww2csv_gauges as _sww2csv_gauges_impl

    return _sww2csv_gauges_impl(sww_file, gauge_file, out_name,
                                quantities, verbose, use_cache)
[7685]1056   
def sww2timeseries(swwfiles,
                   gauge_filename,
                   production_dirs,
                   report=None,
                   reportname=None,
                   plot_quantity=None,
                   generate_fig=False,
                   surface=None,
                   time_min=None,
                   time_max=None,
                   time_thinning=1,                   
                   time_unit=None,
                   title_on=None,
                   use_cache=False,
                   verbose=False,
                   output_centroids=False):
    """Deprecated shim: forward all arguments to gauge.sww2timeseries.

    Kept so existing callers of this module keep working; the real
    implementation lives in the 'gauge' module.
    """
    # Lazy import keeps module load light and avoids a hard dependency
    # until the wrapper is actually invoked.
    from gauge import sww2timeseries as _sww2timeseries_impl

    return _sww2timeseries_impl(swwfiles, gauge_filename, production_dirs,
                                report, reportname, plot_quantity,
                                generate_fig, surface, time_min, time_max,
                                time_thinning, time_unit, title_on,
                                use_cache, verbose, output_centroids)
1090   
[6072]1091##
1092# @brief Get a wave height at a certain depth given wave height at another depth.
1093# @param d1 The first depth.
1094# @param d2 The second depth.
# @param h1 Wave amplitude at d1
1096# @param verbose True if this function is to be verbose.
1097# @return The wave height at d2.
def greens_law(d1, d2, h1, verbose=False):
    """Apply Green's Law to estimate wave amplitude at a second depth.

    Green's Law approximates the wave amplitude at one depth from the
    amplitude at another depth via the fourth root of the depth ratio:

        h2 = h1 * (d1/d2)**(1/4),  where d2 cannot equal 0.

    Note, wave amplitude is equal to stage.

    Inputs:

    d1, d2 - the two depths
    h1     - the wave amplitude at d1
    h2     - the derived amplitude at d2
    """

    d1 = ensure_numeric(d1)
    d2 = ensure_numeric(d2)
    h1 = ensure_numeric(h1)

    # All three inputs must be strictly positive; the checks run in a
    # fixed order so the error reports the first offending argument,
    # with the same message text as before.
    for description, value in (('first depth, d1', d1),
                               ('second depth, d2', d2),
                               ('wave amplitude, h1', h1)):
        if value <= 0.0:
            raise Exception('the %s (%f), must be strictly positive'
                            % (description, value))

    h2 = h1 * (d1 / d2) ** 0.25

    assert h2 > 0

    return h2
1138       
1139
[6072]1140##
1141# @brief Get the square-root of a value.
1142# @param s The value to get the square-root of.
1143# @return The square-root of 's'.
def square_root(s):
    """Return the square root of s (thin wrapper around math.sqrt)."""
    result = sqrt(s)
    return result
[6072]1146
1147
Note: See TracBrowser for help on using the repository browser.