source: trunk/anuga_core/source/anuga/abstract_2d_finite_volumes/util.py @ 7772

Last change on this file since 7772 was 7772, checked in by hudson, 13 years ago

Fixed a bunch of failing tests - only 3 still failing.

File size: 41.3 KB
Line 
1"""This module contains various auxiliary function used by pyvolution.
2
3It is also a clearing house for functions that may later earn a module
4of their own.
5"""
6
7import anuga.geometry.polygon
8import sys
9import os
10
11from os import remove, mkdir, access, F_OK, R_OK, W_OK, sep,getcwd
12from os.path import exists, basename, split,join
13from warnings import warn
14from shutil import copy
15
16from anuga.utilities.numerical_tools import ensure_numeric, angle, NAN
17from anuga.file.csv_file import load_csv_as_dict
18
19from math import sqrt, atan, degrees
20
21# FIXME (Ole): Temporary short cuts -
22# FIXME (Ole): remove and update scripts where they are used
23from anuga.utilities.system_tools import get_revision_number
24from anuga.utilities.system_tools import store_version_info
25
26import anuga.utilities.log as log
27
28import numpy as num
29
30
def file_function(filename,
                  domain=None,
                  quantities=None,
                  interpolation_points=None,
                  time_thinning=1,
                  time_limit=None,
                  verbose=False,
                  use_cache=False,
                  boundary_polygon=None,
                  output_centroids=False):
    """Thin forwarding wrapper.

    The real implementation now lives in the file_function module;
    all arguments are passed through unchanged.
    """
    from file_function import file_function as _impl

    return _impl(filename, domain, quantities, interpolation_points,
                 time_thinning, time_limit, verbose, use_cache,
                 boundary_polygon, output_centroids)
45
46##
47# @brief Replace multiple substrings in a string.
48# @param text The string to operate on.
49# @param dictionary A dict containing replacements, key->value.
50# @return The new string.
def multiple_replace(text, dictionary):
    """Multiple replace of words in text

    text:       String to be modified
    dictionary: Mapping of words that are to be substituted

    Python Cookbook 3.14 page 88 and page 90
    http://code.activestate.com/recipes/81330/
    """

    import re

    # Build a single alternation pattern out of all dictionary keys,
    # anchoring each alternative at word boundaries so that only
    # complete words are substituted.
    escaped_words = [re.escape(word) for word in dictionary.keys()]
    pattern = re.compile(r'\b' + r'\b|\b'.join(escaped_words) + r'\b')

    # Each matched word is looked up in the dictionary to obtain its
    # replacement.
    def lookup(match):
        return dictionary[match.group(0)]

    return pattern.sub(lookup, text)
71
72
73##
74# @brief Apply arbitrary expressions to the values of a dict.
75# @param expression A string expression to apply.
76# @param dictionary The dictionary to apply the expression to.
def apply_expression_to_dictionary(expression, dictionary):
    """Apply arbitrary expression to values of dictionary

    Given an expression in terms of the keys, replace key by the
    corresponding values and evaluate.

    expression: Arbitrary, e.g. arithmetic, expression relating keys
                from dictionary.

    dictionary: Mapping between symbols in expression and objects that
                will be evaluated by expression.
                Values in dictionary must support operators given in
                expression e.g. by overloading

    Raises NameError or ValueError (with the offending expression in
    the message) if evaluation fails.

    Due to a limitation with numeric, this can not evaluate 0/0
    In general, the user can fix by adding 1e-30 to the numerator.
    SciPy core can handle this situation.
    """

    import types

    assert isinstance(expression, basestring)
    assert type(dictionary) == types.DictType

    # Convert dictionary values to textual representations suitable for eval
    D = {}
    for key in dictionary:
        D[key] = 'dictionary["%s"]' % key

    # Perform substitution of variables
    expression = multiple_replace(expression, D)

    # Evaluate and return.
    # NOTE: uses the modern raise/except-as syntax; the old
    # 'raise NameError, msg' form is Python-2-only and was a
    # syntax error under Python 3.
    try:
        return eval(expression)
    except NameError as e:
        msg = 'Expression "%s" could not be evaluated: %s' % (expression, e)
        raise NameError(msg)
    except ValueError as e:
        msg = 'Expression "%s" could not be evaluated: %s' % (expression, e)
        raise ValueError(msg)
119
120
121##
122# @brief Format a float into a string.
123# @param value Float value to format.
124# @param format The format to use (%.2f is default).
125# @return The formatted float as a string.
def get_textual_float(value, format = '%.2f'):
    """Get textual representation of floating point numbers
    and accept None as valid entry

    value:  None, a number, or a sequence of numbers
    format: formatting string - default = '%.2f'

    Returns 'None' for None, the formatted number for scalars, and a
    parenthesised, comma-separated list for sequences of length > 1.
    Raises ValueError for anything else (previously this raised a bare
    string, which is illegal in modern Python).
    """

    if value is None:
        return 'None'
    else:
        try:
            float(value)
        except (TypeError, ValueError):
            # Not a scalar - maybe this is a vector of numbers
            if len(value) > 1:
                s = '('
                for v in value:
                    s += get_textual_float(v, format) + ', '

                s = s[:-2] + ')' # Strip trailing comma and close
                return s
            else:
                raise ValueError('Illegal input to get_textual_float: %s'
                                 % str(value))
        else:
            return format % float(value)
151
152
153#################################################################################
154# OBSOLETE STUFF
155#################################################################################
156
157# @note TEMP
def angle(v1, v2):
    """Temporary Interface to new location"""

    from anuga.utilities.numerical_tools import angle as nt_angle

    warn('angle has moved from util.py.  Please use '
         '"from anuga.utilities.numerical_tools import angle"',
         DeprecationWarning)

    return nt_angle(v1, v2)
168
169   
170# @note TEMP
def anglediff(v0, v1):
    """Temporary Interface to new location"""

    from anuga.utilities.numerical_tools import anglediff as nt_anglediff

    warn('anglediff has moved from util.py.  Please use '
         '"from anuga.utilities.numerical_tools import anglediff"',
         DeprecationWarning)

    return nt_anglediff(v0, v1)
181
182# @note TEMP
def point_on_line(*args, **kwargs):
    """Temporary Interface to new location"""

    msg = 'point_on_line has moved from util.py.  '
    msg += 'Please use "from anuga.geometry.polygon import point_on_line"'
    warn(msg, DeprecationWarning)

    # Forward to the real implementation.  The previous code referenced
    # the undefined name 'utilities.geometry', which raised NameError on
    # every call; this module imports anuga.geometry.polygon at the top.
    return anuga.geometry.polygon.point_on_line(*args, **kwargs)
191
192   
193# @note TEMP
def inside_polygon(*args, **kwargs):
    """Temporary Interface to new location"""

    log.critical('inside_polygon has moved from util.py.')
    # Message corrected: the function lives in anuga.geometry.polygon,
    # not anuga.utilities.polygon.
    log.critical('Please use '
                 '"from anuga.geometry.polygon import inside_polygon"')

    # Forward to the real implementation.  The previous code used the
    # bare, undefined name 'geometry', which raised NameError.
    return anuga.geometry.polygon.inside_polygon(*args, **kwargs)
202
203   
204# @note TEMP
def outside_polygon(*args, **kwargs):
    """Temporary Interface to new location"""

    log.critical('outside_polygon has moved from util.py.')
    log.critical('Please use '
                 '"from anuga.geometry.polygon import outside_polygon"')

    # Forward to the real implementation.  The previous code referenced
    # 'geometry.geometry', which is undefined and raised NameError.
    return anuga.geometry.polygon.outside_polygon(*args, **kwargs)
213
214   
215# @note TEMP
def read_polygon(*args, **kwargs):
    """Temporary Interface to new location"""

    log.critical('read_polygon has moved from util.py.')
    log.critical('Please use '
                 '"from anuga.geometry.polygon import read_polygon"')

    # Forward to the real implementation.  The previous code referenced
    # 'geometry.geometry', which is undefined and raised NameError.
    return anuga.geometry.polygon.read_polygon(*args, **kwargs)
224
225
226# @note TEMP
def populate_polygon(*args, **kwargs):
    """Temporary Interface to new location"""

    log.critical('populate_polygon has moved from util.py.')
    # Message corrected: the function lives in anuga.geometry.polygon,
    # not anuga.utilities.polygon.
    log.critical('Please use '
                 '"from anuga.geometry.polygon import populate_polygon"')

    # Forward to the real implementation.  The previous code referenced
    # the undefined name 'utilities.geometry', which raised NameError.
    return anuga.geometry.polygon.populate_polygon(*args, **kwargs)
235
236
237#################################################################################
238# End of obsolete stuff ?
239#################################################################################
240
241# @note TEMP
def start_screen_catcher(dir_name, myid='', numprocs='', extra_info='',
                         verbose=False):
    """Temporary Interface to new location"""
    from anuga.shallow_water.data_manager import start_screen_catcher \
         as dm_start_screen_catcher

    log.critical('start_screen_catcher has moved from util.py.')
    log.critical('Please use "from anuga.shallow_water.data_manager import '
                 'start_screen_catcher"')

    # Forward the caller's arguments.  The previous code discarded them
    # and passed the hard-coded defaults (myid='', numprocs='', ...)
    # regardless of what the caller supplied.
    return dm_start_screen_catcher(dir_name, myid=myid, numprocs=numprocs,
                                   extra_info=extra_info, verbose=verbose)
254
255
256
257##
258# @brief Read gauge info from a file.
259# @param filename The name of the file to read.
260# @return A (gauges, gaugelocation, elev) tuple.
def get_gauges_from_file(filename):
    """Read gauge info from a file.

    filename: name of the gauge file to read.

    Returns whatever gauge_get_from_file returns (per the header
    comment: a (gauges, gaugelocation, elev) tuple).
    """
    # NOTE(review): gauge_get_from_file is not imported in this module's
    # visible header - confirm it is defined or imported elsewhere in
    # the file before relying on this wrapper.
    return gauge_get_from_file(filename)
263
264
265##
266# @brief Check that input quantities in quantity list are legal.
267# @param quantity Quantity list to check.
268# @note Raises an exception of list is not legal.
def check_list(quantity):
    """Check that input quantities in quantity list are possible.

    quantity: list of quantity name strings (case insensitive).
              The list is normalised to lower case in place.

    Raises Exception if any name is not one of the recognised
    quantities.
    """
    all_quantity = ['stage', 'depth', 'momentum', 'xmomentum',
                    'ymomentum', 'speed', 'bearing', 'elevation']

    # convert all quantity names to lowercase (in place, so the caller
    # sees the normalised list too)
    for i, name in enumerate(quantity):
        quantity[i] = name.lower()

    # check that all names in 'quantity' appear in 'all_quantity'
    p = list(set(quantity).difference(set(all_quantity)))
    if len(p) != 0:
        msg = 'Quantities %s do not exist - please try again' % p
        # raise Exception(msg) replaces the Python-2-only
        # 'raise Exception, msg' statement form
        raise Exception(msg)
286
287
288##
289# @brief Calculate velocity bearing from North.
290# @param uh ??
291# @param vh ??
292# @return The calculated bearing.
def calc_bearing(uh, vh):
    """Calculate velocity bearing from North

    uh, vh: velocity components.

    Returns NAN when both components are zero (direction is
    indeterminate), otherwise the bearing in degrees.
    """
    # FIXME (Ole): could be refactored to build directly on
    # utilities/numerical_tools angle() by converting radians to
    # degrees, moving the reference direction from [1,0] to North
    # and changing from counter-clockwise to clockwise.

    # No flow at all -> direction is undefined
    if uh == 0 and vh == 0:
        return NAN

    # angle() against the reference vector [0, -1], converted to
    # degrees, gives the bearing.
    return degrees(angle([uh, vh], [0, -1]))
309
310
311##
312# @brief Create a nested sub-directory path.
313# @param root_directory The base diretory path.
314# @param directories An iterable of sub-directory names.
315# @return The final joined directory path.
316# @note If each sub-directory doesn't exist, it will be created.
def add_directories(root_directory, directories):
    """
    Add the first sub-directory in 'directories' to root_directory.
    Then add the second sub-directory to the accumulating path and so on.

    root_directory: base directory path.
    directories:    iterable of sub-directory names.

    Return the path of the final directory; each intermediate directory
    is created if it does not already exist.

    This is handy for specifying and creating a directory where data will go.
    """
    # 'path' replaces the original local name 'dir', which shadowed the
    # builtin; explicit os.* calls replace reliance on the from-imports.
    path = root_directory
    for new_dir in directories:
        path = os.path.join(path, new_dir)
        if not os.access(path, os.F_OK):
            os.mkdir(path)
    return path
332
333
334##
335# @brief
336# @param verbose
337# @param kwargs
338# @return
339# @note TEMP
def store_parameters(verbose=False, **kwargs):
    """Temporary Interface to new location"""

    from anuga.shallow_water.data_manager \
                    import store_parameters as dm_store_parameters
    log.critical('store_parameters has moved from util.py.')
    log.critical('Please use "from anuga.shallow_water.data_manager '
                 'import store_parameters"')

    # Forward the caller's verbose flag.  The previous code passed
    # verbose=False unconditionally, silently discarding the argument.
    return dm_store_parameters(verbose=verbose, **kwargs)
350
351
352##
353# @brief Remove vertices that are not associated with any triangle.
354# @param verts An iterable (or array) of points.
355# @param triangles An iterable of 3 element tuples.
356# @param number_of_full_nodes ??
357# @return (verts, triangles) where 'verts' has been updated.
def remove_lone_verts(verts, triangles, number_of_full_nodes=None):
    """Removes vertices that are not associated with any triangles.

    verts is a list/array of points.
    triangles is a list of 3 element tuples.  Each tuple represents a triangle.
    number_of_full_nodes relate to parallelism when a mesh has an
        extra layer of ghost points.

    Returns (verts, triangles) where 'verts' has had the unreferenced
    ("lone") vertices removed and 'triangles' has been re-indexed to
    match the compacted vertex array.
    """

    verts = ensure_numeric(verts)
    triangles = ensure_numeric(triangles)
   
    N = len(verts)
   
    # initialise the array to easily find the index of the first loner
    # ie, if N=3 -> [6,5,4]
    # Entry i starts at 2*N - i (always >= N); any vertex referenced by a
    # triangle gets overwritten with its own index i (always < N), so
    # after the loop, loners[i] >= N  <=>  vertex i is unreferenced.
    loners=num.arange(2*N, N, -1)
    for t in triangles:
        for vert in t:
            loners[vert]= vert # all non-loners will have loners[i]=i

    # max(loners) is 2*N - i for the smallest unreferenced index i,
    # so this recovers the index of the first loner (== N if none).
    lone_start = 2*N - max(loners) # The index of the first loner

    if lone_start-1 == N:
        # no loners
        pass
    elif min(loners[lone_start:N]) > N:
        # All the loners are at the end of the vert array
        # so a simple truncation removes them all at once.
        verts = verts[0:lone_start]
    else:
        # change the loners list so it can be used to modify triangles
        # Remove the loners from verts
        # Could've used X=compress(less(loners,N),loners)
        # verts=num.take(verts,X,axis=0)  to Remove the loners from verts
        # but I think it would use more memory
        # In-place compaction: each kept vertex is shifted down to fill
        # the gaps left by loners, and loners[i] is updated to record the
        # new position of kept vertex i (loner entries stay >= N).
        new_i = lone_start    # point at first loner - 'shuffle down' target
        for i in range(lone_start, N):
            if loners[i] >= N:    # [i] is a loner, leave alone
                pass
            else:        # a non-loner, move down
                loners[i] = new_i
                verts[new_i] = verts[i]
                new_i += 1
        verts = verts[0:new_i]

        # Modify the triangles
        # loners now maps old vertex index -> new vertex index, so
        # choose() re-targets every triangle corner in one pass.
        triangles = num.choose(triangles,loners)
    return verts, triangles
406
407
408##
409# @brief Compute centroid values from vertex values
410# @param x Values at vertices of triangular mesh.
411# @param triangles Nx3 integer array pointing to vertex information.
412# @return [N] array of centroid values.
def get_centroid_values(x, triangles):
    """Compute centroid values from vertex values

    x: Values at vertices of triangular mesh
    triangles: Nx3 integer array pointing to vertex information
    for each of the N triangels. Elements of triangles are
    indices into x

    Returns an array of length N holding, for each triangle, the
    average of its three vertex values.
    """

    # Use the builtin float as the dtype: the num.float alias was
    # deprecated and then removed from NumPy, while float has always
    # denoted the same double-precision dtype.
    xc = num.zeros(triangles.shape[0], float)    # Space for centroid info

    for k in range(triangles.shape[0]):
        # Indices of the triangle's three vertices
        i0 = triangles[k][0]
        i1 = triangles[k][1]
        i2 = triangles[k][2]

        # Centroid value is the mean of the three vertex values
        xc[k] = (x[i0] + x[i1] + x[i2])/3

    return xc
433
434
435# @note TEMP
def make_plots_from_csv_file(directories_dic={dir:['gauge', 0, 0]},
                                output_dir='',
                                base_name='',
                                plot_numbers=['3-5'],
                                quantities=['speed','stage','momentum'],
                                assess_all_csv_files=True,
                                extra_plot_name='test'):
    """Obsolete entry point - always raises.

    This function has been replaced by csv2timeseries_graphs; calling
    it unconditionally raises an Exception directing users there.
    """
    msg = 'make_plots_from_csv_file has been replaced by csv2timeseries_graphs '
    msg += 'Please use "from anuga.abstract_2d_finite_volumes.util import ' \
           'csv2timeseries_graphs"'
    # raise Exception(msg) replaces the Python-2-only 'raise Exception, msg'
    # statement form.  The dead call to csv2timeseries_graphs that followed
    # the raise was unreachable and has been removed.
    raise Exception(msg)
456
457
458##
459# @brief Plot time series from CSV files.
460# @param directories_dic
461# @param output_dir
462# @param base_name
463# @param plot_numbers
464# @param quantities
465# @param extra_plot_name
466# @param assess_all_csv_files
467# @param create_latex
468# @param verbose
469# @note Assumes that 'elevation' is in the CSV file(s).
470def csv2timeseries_graphs(directories_dic={},
471                          output_dir='',
472                          base_name=None,
473                          plot_numbers='',
474                          quantities=['stage'],
475                          extra_plot_name='',
476                          assess_all_csv_files=True,
477                          create_latex=False,
478                          verbose=False):
479                               
480    """
481    Read in csv files that have the right header information and
482    plot time series such as Stage, Speed, etc. Will also plot several
483    time series on one plot. Filenames must follow this convention,
484    <base_name><plot_number>.csv eg gauge_timeseries3.csv
485   
486    NOTE: relies that 'elevation' is in the csv file!
487
488    Each file represents a location and within each file there are
489    time, quantity columns.
490   
491    For example:   
492    if "directories_dic" defines 4 directories and in each directories
493    there is a csv files corresponding to the right "plot_numbers",
494    this will create a plot with 4 lines one for each directory AND
495    one plot for each "quantities".  ??? FIXME: unclear.
496   
497    Usage:
498        csv2timeseries_graphs(directories_dic={'slide'+sep:['Slide',0, 0],
499                                       'fixed_wave'+sep:['Fixed Wave',0,0]},
500                            output_dir='fixed_wave'+sep,
501                            base_name='gauge_timeseries_',
502                            plot_numbers='',
503                            quantities=['stage','speed'],
504                            extra_plot_name='',
505                            assess_all_csv_files=True,                           
506                            create_latex=False,
507                            verbose=True)
508            this will create one plot for stage with both 'slide' and
509            'fixed_wave' lines on it for stage and speed for each csv
510            file with 'gauge_timeseries_' as the prefix. The graghs
511            will be in the output directory 'fixed_wave' and the graph
512            axis will be determined by assessing all the
513   
514    ANOTHER EXAMPLE
515        new_csv2timeseries_graphs(directories_dic={'slide'+sep:['Slide',0, 0],
516                                       'fixed_wave'+sep:['Fixed Wave',0,0]},
517                            output_dir='fixed_wave'+sep,
518                            base_name='gauge_timeseries_',
519                            plot_numbers=['1-3'],
520                            quantities=['stage','speed'],
521                            extra_plot_name='',
522                            assess_all_csv_files=False,                           
523                            create_latex=False,
524                            verbose=True)
525        This will plot csv files called gauge_timeseries_1.csv and
526        gauge_timeseries3.csv from both 'slide' and 'fixed_wave' directories
527        to 'fixed_wave'. There will be 4 plots created two speed and two stage
528        one for each csv file. There will be two lines on each of these plots.
529        And the axis will have been determined from only these files, had
530        assess_all_csv_files = True all csv file with 'gauges_timeseries_' prefix
531        would of been assessed.
532   
533    ANOTHER EXAMPLE   
534         csv2timeseries_graphs({'J:'+sep+'anuga_validation'+sep:['new',20,-.1],
535                                   'J:'+sep+'conical_island'+sep:['test',0,0]},
536                                   output_dir='',
537                                   plot_numbers=['1','3'],
538                                   quantities=['stage','depth','bearing'],
539                                   base_name='gauge_b',
540                                   assess_all_csv_files=True,
541                                  verbose=True)   
542       
543            This will produce one plot for each quantity (therefore 3) in the
544            current directory, each plot will have 2 lines on them. The first
545            plot named 'new' will have the time offseted by 20secs and the stage
546            height adjusted by -0.1m
547       
548    Inputs:
549        directories_dic: dictionary of directory with values (plot
550                         legend name for directory), (start time of
551                         the time series) and the (value to add to
552                         stage if needed). For example
553                         {dir1:['Anuga_ons',5000, 0],
554                          dir2:['b_emoth',5000,1.5],
555                          dir3:['b_ons',5000,1.5]}
556                         Having multiple directories defined will plot them on
557                         one plot, therefore there will be 3 lines on each of
558                         these plot. If you only want one line per plot call
559                         csv2timeseries_graph separately for each directory,
560                         eg only have one directory in the 'directories_dic' in
561                         each call.
562                         
563        output_dir: directory for the plot outputs. Only important to define when
564                    you have more than one directory in your directories_dic, if
565                    you have not defined it and you have multiple directories in
566                    'directories_dic' there will be plots in each directory,
567                    however only one directory will contain the complete
568                    plot/graphs.
569       
570        base_name: Is used a couple of times.
571                   1) to find the csv files to be plotted if there is no
572                      'plot_numbers' then csv files with 'base_name' are plotted
573                   2) in the title of the plots, the length of base_name is
574                      removed from the front of the filename to be used in the
575                      title.
576                   This could be changed if needed.
577                   Note is ignored if assess_all_csv_files=True
578       
579        plot_numbers: a String list of numbers to plot. For example
580                      [0-4,10,15-17] will read and attempt to plot
581                      the follow 0,1,2,3,4,10,15,16,17
582                      NOTE: if no plot numbers this will create one plot per
583                            quantity, per gauge
584
585        quantities: Will get available quantities from the header in the csv
586                    file.  Quantities must be one of these.
587                    NOTE: ALL QUANTITY NAMES MUST BE lower case!
588                   
589        extra_plot_name: A string that is appended to the end of the
590                         output filename.
591                   
592        assess_all_csv_files: if true it will read ALL csv file with
593                             "base_name", regardless of 'plot_numbers'
594                              and determine a uniform set of axes for
595                              Stage, Speed and Momentum. IF FALSE it
596                              will only read the csv file within the
597                             'plot_numbers'
598                             
599        create_latex: NOT IMPLEMENTED YET!! sorry Jane....
600       
601    OUTPUTS: saves the plots to
602              <output_dir><base_name><plot_number><extra_plot_name>.png
603    """
604
605    try: 
606        import pylab
607    except ImportError:
608        msg='csv2timeseries_graphs needs pylab to be installed correctly'
609        raise msg
610            #ANUGA don't need pylab to work so the system doesn't
611            #rely on pylab being installed
612        return
613
614    from os import sep
615    from anuga.utilities.file_utils import get_all_files_with_extension
616
617    seconds_in_hour = 3600
618    seconds_in_minutes = 60
619   
620    quantities_label={}
621#    quantities_label['time'] = 'time (hours)'
622    quantities_label['time'] = 'time (minutes)'
623    quantities_label['stage'] = 'wave height (m)'
624    quantities_label['speed'] = 'speed (m/s)'
625    quantities_label['momentum'] = 'momentum (m^2/sec)'
626    quantities_label['depth'] = 'water depth (m)'
627    quantities_label['xmomentum'] = 'momentum (m^2/sec)'
628    quantities_label['ymomentum'] = 'momentum (m^2/sec)'
629    quantities_label['bearing'] = 'degrees (o)'
630    quantities_label['elevation'] = 'elevation (m)'
631   
632    if extra_plot_name != '':
633        extra_plot_name = '_' + extra_plot_name
634
635    new_plot_numbers=[]
636    #change plot_numbers to list, eg ['0-4','10']
637    #to ['0','1','2','3','4','10']
638    for i, num_string in enumerate(plot_numbers):
639        if '-' in num_string: 
640            start = int(num_string[:num_string.rfind('-')])
641            end = int(num_string[num_string.rfind('-') + 1:]) + 1
642            for x in range(start, end):
643                new_plot_numbers.append(str(x))
644        else:
645            new_plot_numbers.append(num_string)
646
647    #finds all the files that fit the specs provided and return a list of them
648    #so to help find a uniform max and min for the plots...
649    list_filenames=[]
650    all_csv_filenames=[]
651    if verbose: log.critical('Determining files to access for axes ranges.')
652   
653    for i,directory in enumerate(directories_dic.keys()):
654        all_csv_filenames.append(get_all_files_with_extension(directory,
655                                                              base_name, '.csv'))
656
657        filenames=[]
658        if plot_numbers == '': 
659            list_filenames.append(get_all_files_with_extension(directory,
660                                                               base_name,'.csv'))
661        else:
662            for number in new_plot_numbers:
663                filenames.append(base_name + number)
664            list_filenames.append(filenames)
665
666    #use all the files to get the values for the plot axis
667    max_start_time= -1000.
668    min_start_time = 100000 
669   
670    if verbose: log.critical('Determining uniform axes')
671
672    #this entire loop is to determine the min and max range for the
673    #axes of the plots
674
675#    quantities.insert(0,'elevation')
676    quantities.insert(0,'time')
677
678    directory_quantity_value={}
679#    quantity_value={}
680    min_quantity_value={}
681    max_quantity_value={}
682
683    for i, directory in enumerate(directories_dic.keys()):
684        filename_quantity_value = {}
685        if assess_all_csv_files == False:
686            which_csv_to_assess = list_filenames[i]
687        else:
688            #gets list of filenames for directory "i"
689            which_csv_to_assess = all_csv_filenames[i]
690       
691        for j, filename in enumerate(which_csv_to_assess):
692            quantity_value = {}
693
694            dir_filename = join(directory,filename)
695            attribute_dic, title_index_dic = load_csv_as_dict(dir_filename + '.csv')
696            directory_start_time = directories_dic[directory][1]
697            directory_add_tide = directories_dic[directory][2]
698
699            if verbose: log.critical('reading: %s.csv' % dir_filename)
700
701            #add time to get values
702            for k, quantity in enumerate(quantities):
703                quantity_value[quantity] = [float(x) for
704                                                x in attribute_dic[quantity]]
705
706                #add tide to stage if provided
707                if quantity == 'stage':
708                     quantity_value[quantity] = num.array(quantity_value[quantity],
709                                                          num.float) + directory_add_tide
710
711                #condition to find max and mins for all the plots
712                # populate the list with something when i=0 and j=0 and
713                # then compare to the other values to determine abs max and min
714                if i==0 and j==0:
715                    min_quantity_value[quantity], \
716                        max_quantity_value[quantity] = \
717                            get_min_max_values(quantity_value[quantity])
718
719                    if quantity != 'time':
720                        min_quantity_value[quantity] = \
721                            min_quantity_value[quantity] *1.1
722                        max_quantity_value[quantity] = \
723                            max_quantity_value[quantity] *1.1
724                else:
725                    min, max = get_min_max_values(quantity_value[quantity])
726               
727                    # min and max are multipled by "1+increase_axis" to get axes
728                    # that are slighty bigger than the max and mins
729                    # so the plots look good.
730
731                    increase_axis = (max-min)*0.05
732                    if min <= min_quantity_value[quantity]:
733                        if quantity == 'time': 
734                            min_quantity_value[quantity] = min
735                        else:
736                            if round(min,2) == 0.00:
737                                min_quantity_value[quantity] = -increase_axis
738#                                min_quantity_value[quantity] = -2.
739                                #min_quantity_value[quantity] = \
740                                #    -max_quantity_value[quantity]*increase_axis
741                            else:
742#                                min_quantity_value[quantity] = \
743#                                    min*(1+increase_axis)
744                                min_quantity_value[quantity]=min-increase_axis
745                   
746                    if max > max_quantity_value[quantity]: 
747                        if quantity == 'time': 
748                            max_quantity_value[quantity] = max
749                        else:
750                            max_quantity_value[quantity] = max + increase_axis
751#                            max_quantity_value[quantity]=max*(1+increase_axis)
752
753            #set the time... ???
754            if min_start_time > directory_start_time: 
755                min_start_time = directory_start_time
756            if max_start_time < directory_start_time: 
757                max_start_time = directory_start_time
758           
759            filename_quantity_value[filename]=quantity_value
760           
761        directory_quantity_value[directory]=filename_quantity_value
762   
763    #final step to unifrom axis for the graphs
764    quantities_axis={}
765   
766    for i, quantity in enumerate(quantities):
767        quantities_axis[quantity] = (float(min_start_time) \
768                                         / float(seconds_in_minutes),
769                                     (float(max_quantity_value['time']) \
770                                          + float(max_start_time)) \
771                                              / float(seconds_in_minutes),
772                                     min_quantity_value[quantity],
773                                     max_quantity_value[quantity])
774
775        if verbose and (quantity != 'time' and quantity != 'elevation'): 
776            log.critical('axis for quantity %s are x:(%s to %s)%s '
777                         'and y:(%s to %s)%s' 
778                         % (quantity, quantities_axis[quantity][0],
779                            quantities_axis[quantity][1],
780                            quantities_label['time'],
781                            quantities_axis[quantity][2],
782                            quantities_axis[quantity][3],
783                            quantities_label[quantity]))
784
785    cstr = ['b', 'r', 'g', 'c', 'm', 'y', 'k']
786
787    if verbose: log.critical('Now start to plot')
788   
789    i_max = len(directories_dic.keys())
790    legend_list_dic = {}
791    legend_list = []
792    for i, directory in enumerate(directories_dic.keys()):
793        if verbose: log.critical('Plotting in %s %s'
794                                 % (directory, new_plot_numbers))
795
796        # FIXME THIS SORT IS VERY IMPORTANT
797        # Without it the assigned plot numbers may not work correctly
798        # there must be a better way
799        list_filenames[i].sort()
800        for j, filename in enumerate(list_filenames[i]):
801            if verbose: log.critical('Starting %s' % filename)
802
803            directory_name = directories_dic[directory][0]
804            directory_start_time = directories_dic[directory][1]
805            directory_add_tide = directories_dic[directory][2]
806           
807            # create an if about the start time and tide height if don't exist
808            attribute_dic, title_index_dic = load_csv_as_dict(directory + sep
809                                                      + filename + '.csv')
810            #get data from dict in to list
811            #do maths to list by changing to array
812            t = (num.array(directory_quantity_value[directory][filename]['time'])
813                     + directory_start_time) / seconds_in_minutes
814
815            #finds the maximum elevation, used only as a test
816            # and as info in the graphs
817            max_ele=-100000
818            min_ele=100000
819            elevation = [float(x) for x in attribute_dic["elevation"]]
820           
821            min_ele, max_ele = get_min_max_values(elevation)
822           
823            if min_ele != max_ele:
824                log.critical("Note! Elevation changes in %s" % dir_filename)
825
826            # creates a dictionary with keys that is the filename and attributes
827            # are a list of lists containing 'directory_name' and 'elevation'.
828            # This is used to make the contents for the legends in the graphs,
829            # this is the name of the model and the elevation.  All in this
830            # great one liner from DG. If the key 'filename' doesn't exist it
831            # creates the entry if the entry exist it appends to the key.
832
833            legend_list_dic.setdefault(filename,[]) \
834                .append([directory_name, round(max_ele, 3)])
835
836            # creates a LIST for the legend on the last iteration of the
837            # directories which is when "legend_list_dic" has been fully
838            # populated. Creates a list of strings which is used in the legend
839            # only runs on the last iteration for all the gauges(csv) files
840            # empties the list before creating it
841
842            if i == i_max - 1:
843                legend_list = []
844   
845                for name_and_elevation in legend_list_dic[filename]:
846                    legend_list.append('%s (elevation = %sm)'\
847                                       % (name_and_elevation[0],
848                                          name_and_elevation[1]))
849           
850            #skip time and elevation so it is not plotted!
851            for k, quantity in enumerate(quantities):
852                if quantity != 'time' and quantity != 'elevation':
853                    pylab.figure(int(k*100+j))
854                    pylab.ylabel(quantities_label[quantity])
855                    pylab.plot(t,
856                               directory_quantity_value[directory]\
857                                                       [filename][quantity],
858                               c = cstr[i], linewidth=1)
859                    pylab.xlabel(quantities_label['time'])
860                    pylab.axis(quantities_axis[quantity])
861                    pylab.legend(legend_list,loc='upper right')
862                   
863                    pylab.title('%s at %s gauge'
864                                % (quantity, filename[len(base_name):]))
865
866                    if output_dir == '':
867                        figname = '%s%s%s_%s%s.png' \
868                                  % (directory, sep, filename, quantity,
869                                     extra_plot_name)
870                    else:
871                        figname = '%s%s%s_%s%s.png' \
872                                  % (output_dir, sep, filename, quantity,
873                                     extra_plot_name)
874
875                    if verbose: log.critical('saving figure here %s' % figname)
876
877                    pylab.savefig(figname)
878           
879    if verbose: log.critical('Closing all plots')
880
881    pylab.close('all')
882    del pylab
883
884    if verbose: log.critical('Finished closing plots')
885
##
# @brief Return min and max of an iterable.
# @param list The iterable to return min & max of.
# @return (min, max) of 'list'.
# @raise ValueError if no iterable is provided.
def get_min_max_values(list=None):
    """
    Returns the min and max of the list it was provided.

    Note: the parameter is named 'list' (shadowing the builtin) and is
    kept as-is for backward compatibility with existing callers.
    """

    # Previously this only logged and then crashed inside min() with a
    # confusing TypeError; now fail fast with an explicit exception.
    if list is None:
        msg = 'List must be provided'
        log.critical(msg)
        raise ValueError(msg)

    return min(list), max(list)
898
899
##
# @brief Get runup around each point listed in a CSV file.
# @param gauge_filename gauge file name (CSV with header containing x,y).
# @param sww_filename SWW file name.
# @param runup_filename Name of CSV file to report results into.
# @param size Half-width (same units as easting/northing) of the square
#             search region centred on each gauge point.
# @param verbose True if this function is to be verbose.
def get_runup_data_for_locations_from_file(gauge_filename,
                                           sww_filename,
                                           runup_filename,
                                           size=10,
                                           verbose=False):
    """this will read a csv file with the header x,y. Then look in a square
    'size'x2 around this position for the 'max_inundation_height' in the
    'sww_filename' and report the findings in the 'runup_filename'.

    WARNING: NO TESTS!
    """

    from anuga.shallow_water.data_manager import \
        get_maximum_inundation_data

    #read gauge csv file to dictionary
    attribute_dic, title_index_dic = load_csv_as_dict(gauge_filename)
    northing = [float(x) for x in attribute_dic["y"]]
    easting = [float(x) for x in attribute_dic["x"]]

    log.critical('Reading %s' % sww_filename)

    # Open the output file once instead of re-opening it in append mode
    # for every gauge point; try/finally guarantees it is closed.
    outfile = open(runup_filename, "w")
    try:
        outfile.write("easting,northing,runup \n ")

        for i, x in enumerate(northing):
            # square search region centred on the gauge location
            poly = [[int(easting[i]+size), int(northing[i]+size)],
                    [int(easting[i]+size), int(northing[i]-size)],
                    [int(easting[i]-size), int(northing[i]-size)],
                    [int(easting[i]-size), int(northing[i]+size)]]

            run_up, x_y = get_maximum_inundation_data(filename=sww_filename,
                                                      polygon=poly,
                                                      verbose=False)

            #if no runup will return 0 instead of NONE
            if run_up is None: run_up = 0
            if x_y is None: x_y = [0, 0]

            if verbose:
                log.critical('maximum inundation runup near %s is %s meters'
                             % (x_y, run_up))

            #writes to file
            outfile.write('%s,%s,%s \n' % (x_y[0], x_y[1], run_up))
    finally:
        outfile.close()
957
##
# @brief Interpolate quantities at gauge locations from an SWW file to CSV.
# @param sww_file Path to any sww file.
# @param gauge_file CSV file of gauge locations (name, easting, northing, elevation).
# @param out_name Prefix for output file names (default 'gauge_').
# @param quantities Quantities to export (default: stage, depth, elevation,
#                   xmomentum, ymomentum).
# @param verbose True if this function is to be verbose.
# @param use_cache True if caching is to be used.
def sww2csv_gauges(sww_file,
                   gauge_file,
                   out_name='gauge_',
                   quantities=None,
                   verbose=False,
                   use_cache=True):
    """

    Inputs:
        NOTE: if using csv2timeseries_graphs after creating csv file,
        it is essential to export quantities 'depth' and 'elevation'.
        'depth' is good to analyse gauges on land and elevation is used
        automatically by csv2timeseries_graphs in the legend.

        sww_file: path to any sww file

        gauge_file: Assumes that it follows this format
            name, easting, northing, elevation
            point1, 100.3, 50.2, 10.0
            point2, 10.3, 70.3, 78.0

        NOTE: order of column can change but names eg 'easting', 'elevation'
        must be the same! ALL lowercaps!

        out_name: prefix for output file name (default is 'gauge_')

    Outputs:
        one file for each gauge/point location in the points file. They
        will be named with this format in the same directory as the 'sww_file'
            <out_name><name>.csv
        eg gauge_point1.csv if <out_name> not supplied
           myfile_2_point1.csv if <out_name> ='myfile_2_'

        They will all have a header

    Usage: sww2csv_gauges(sww_file='test1.sww',
                          quantities = ['stage', 'elevation','depth','bearing'],
                          gauge_file='gauge.txt')

    Interpolate the quantities at a given set of locations, given
    an sww file.
    The results are written to a csv file.

    In the future let points be a points file.
    And the user choose the quantities.

    This is currently quite specific.
    If it needs to be more general, change things.

    This is really returning speed, not velocity.
    """
    from gauge import sww2csv_gauges as sww2csv

    # Build the default quantity list per call; a mutable default argument
    # would be shared across calls and could be mutated downstream.
    if quantities is None:
        quantities = ['stage', 'depth', 'elevation',
                      'xmomentum', 'ymomentum']

    return sww2csv(sww_file, gauge_file, out_name, quantities, verbose, use_cache)
1021   
##
# @brief Backward-compatibility wrapper for gauge.sww2timeseries.
def sww2timeseries(swwfiles,
                   gauge_filename,
                   production_dirs,
                   report=None,
                   reportname=None,
                   plot_quantity=None,
                   generate_fig=False,
                   surface=None,
                   time_min=None,
                   time_max=None,
                   time_thinning=1,
                   time_unit=None,
                   title_on=None,
                   use_cache=False,
                   verbose=False,
                   output_centroids=False):
    """Thin wrapper kept for backward compatibility.

    The implementation now lives in gauge.sww2timeseries; all arguments
    are forwarded there unchanged.
    """
    from gauge import sww2timeseries as _impl

    return _impl(swwfiles, gauge_filename, production_dirs, report,
                 reportname, plot_quantity, generate_fig, surface,
                 time_min, time_max, time_thinning, time_unit,
                 title_on, use_cache, verbose, output_centroids)
1055   
1056##
1057# @brief Get a wave height at a certain depth given wave height at another depth.
1058# @param d1 The first depth.
1059# @param d2 The second depth.
1060# @param h1 Wave ampitude at d1
1061# @param verbose True if this function is to be verbose.
1062# @return The wave height at d2.
1063def greens_law(d1, d2, h1, verbose=False):
1064    """Green's Law
1065
1066    Green's Law allows an approximation of wave amplitude at
1067    a given depth based on the fourh root of the ratio of two depths
1068    and the amplitude at another given depth.
1069
1070    Note, wave amplitude is equal to stage.
1071   
1072    Inputs:
1073
1074    d1, d2 - the two depths
1075    h1     - the wave amplitude at d1
1076    h2     - the derived amplitude at d2
1077
1078    h2 = h1 (d1/d2)^(1/4), where d2 cannot equal 0.
1079   
1080    """
1081
1082    d1 = ensure_numeric(d1)
1083    d2 = ensure_numeric(d2)
1084    h1 = ensure_numeric(h1)
1085
1086    if d1 <= 0.0:
1087        msg = 'the first depth, d1 (%f), must be strictly positive' % (d1)
1088        raise Exception(msg)
1089
1090    if d2 <= 0.0:
1091        msg = 'the second depth, d2 (%f), must be strictly positive' % (d2)
1092        raise Exception(msg)
1093   
1094    if h1 <= 0.0:
1095        msg = 'the wave amplitude, h1 (%f), must be strictly positive' % (h1)
1096        raise Exception(msg)
1097   
1098    h2 = h1*(d1/d2)**0.25
1099
1100    assert h2 > 0
1101   
1102    return h2
1103       
1104
1105##
1106# @brief Get the square-root of a value.
1107# @param s The value to get the square-root of.
1108# @return The square-root of 's'.
1109def square_root(s):
1110    return sqrt(s)
1111
1112
Note: See TracBrowser for help on using the repository browser.