source: anuga_core/source/anuga/abstract_2d_finite_volumes/util.py @ 7695

Last change on this file since 7695 was 7695, checked in by hudson, 15 years ago

calc_bearing handles cases where the speed is zero, and outputs NaN.

File size: 42.6 KB
Line 
"""This module contains various auxiliary functions used by pyvolution.
2
3It is also a clearing house for functions that may later earn a module
4of their own.
5"""
6
7import anuga.utilities.polygon
8import sys
9import os
10
11from os import remove, mkdir, access, F_OK, R_OK, W_OK, sep,getcwd
12from os.path import exists, basename, split,join
13from warnings import warn
14from shutil import copy
15
16from anuga.utilities.numerical_tools import ensure_numeric, angle, NAN
17
18from math import sqrt, atan, degrees
19
20# FIXME (Ole): Temporary short cuts -
21# FIXME (Ole): remove and update scripts where they are used
22from anuga.utilities.system_tools import get_revision_number
23from anuga.utilities.system_tools import store_version_info
24
25import anuga.utilities.log as log
26
27import numpy as num
28
29
def file_function(filename,
                  domain=None,
                  quantities=None,
                  interpolation_points=None,
                  time_thinning=1,
                  time_limit=None,
                  verbose=False,
                  use_cache=False,
                  boundary_polygon=None,
                  output_centroids=False):
    """Thin compatibility wrapper delegating to the relocated implementation.

    All arguments are passed through unchanged; see
    anuga.abstract_2d_finite_volumes.file_function for the full contract.
    """
    from file_function import file_function as _impl

    return _impl(filename, domain, quantities, interpolation_points,
                 time_thinning, time_limit, verbose, use_cache,
                 boundary_polygon, output_centroids)
44
##
# @brief Replace multiple substrings in a string.
# @param text The string to operate on.
# @param dictionary A dict containing replacements, key->value.
# @return The new string.
def multiple_replace(text, dictionary):
    """Multiple replace of words in text

    text:       String to be modified
    dictionary: Mapping of words that are to be substituted

    Python Cookbook 3.14 page 88 and page 90
    http://code.activestate.com/recipes/81330/
    """

    import re

    # An empty dictionary would otherwise build the degenerate pattern
    # r'\b\b', which matches the empty string at every word boundary and
    # makes the substitution callable raise KeyError('').
    if not dictionary:
        return text

    # Create a regular expression from all of the dictionary keys,
    # matching only entire words
    regex = re.compile(r'\b' +
                       r'\b|\b'.join(map(re.escape, dictionary.keys())) +
                       r'\b')

    # For each match, look up the corresponding value in the dictionary
    return regex.sub(lambda match: dictionary[match.group(0)], text)
70
71
##
# @brief Apply arbitrary expressions to the values of a dict.
# @param expression A string expression to apply.
# @param dictionary The dictionary to apply the expression to.
# @return The evaluated result.
def apply_expression_to_dictionary(expression, dictionary):
    """Apply arbitrary expression to values of dictionary

    Given an expression in terms of the keys, replace key by the
    corresponding values and evaluate.

    expression: Arbitrary, e.g. arithmetric, expression relating keys
                from dictionary.

    dictionary: Mapping between symbols in expression and objects that
                will be evaluated by expression.
                Values in dictionary must support operators given in
                expression e.g. by overloading

    Due to a limitation with numeric, this can not evaluate 0/0
    In general, the user can fix by adding 1e-30 to the numerator.
    SciPy core can handle this situation.
    """

    # isinstance checks instead of the former type()-equality tests
    assert isinstance(expression, str), 'expression must be a string'
    assert isinstance(dictionary, dict), 'dictionary must be a dict'

    # Convert dictionary values to textual representations suitable for eval
    D = {}
    for key in dictionary:
        D[key] = 'dictionary["%s"]' % key

    # Perform substitution of variables
    expression = multiple_replace(expression, D)

    # Evaluate and return.
    # NOTE(review): eval of a caller-supplied string - never feed this
    # function untrusted input.
    try:
        return eval(expression)
    except NameError as e:
        msg = 'Expression "%s" could not be evaluated: %s' % (expression, e)
        raise NameError(msg)
    except ValueError as e:
        msg = 'Expression "%s" could not be evaluated: %s' % (expression, e)
        raise ValueError(msg)
118
119
##
# @brief Format a float into a string.
# @param value Float value (or sequence of floats, or None) to format.
# @param format The format to use (%.2f is default).
# @return The formatted float as a string.
# @note Raises ValueError if value is neither None, a number, nor a
#       sequence of more than one number.
def get_textual_float(value, format='%.2f'):
    """Get textual representation of floating point numbers
    and accept None as valid entry

    format is a string - default = '%.2f'
    """

    if value is None:
        return 'None'

    try:
        float(value)
    except (TypeError, ValueError):
        # Not a scalar - maybe this is a vector of numbers
        if len(value) > 1:
            s = '('
            for v in value:
                s += get_textual_float(v, format) + ', '

            s = s[:-2] + ')'    # Strip trailing comma and close
            return s
        else:
            # The original raised a string exception here, which has been
            # illegal since Python 2.6 - raise a real exception instead.
            raise ValueError('Illegal input to get_textual_float: %s'
                             % str(value))
    else:
        return format % float(value)
150
151
152#################################################################################
153# OBSOLETE STUFF
154#################################################################################
155
# @note TEMP
def angle(v1, v2):
    """Temporary Interface to new location"""

    import anuga.utilities.numerical_tools as NT

    warn('angle has moved from util.py.  '
         'Please use "from anuga.utilities.numerical_tools import angle"',
         DeprecationWarning)

    return NT.angle(v1, v2)
167
168   
# @note TEMP
def anglediff(v0, v1):
    """Temporary Interface to new location"""

    import anuga.utilities.numerical_tools as NT

    warn('anglediff has moved from util.py.  '
         'Please use "from anuga.utilities.numerical_tools import anglediff"',
         DeprecationWarning)

    return NT.anglediff(v0, v1)
180
# @note TEMP
def point_on_line(*args, **kwargs):
    """Temporary Interface to new location"""

    msg = 'point_on_line has moved from util.py.  '
    msg += 'Please use "from anuga.utilities.polygon import point_on_line"'
    warn(msg, DeprecationWarning)

    # 'utilities' alone is not a bound name in this module (NameError);
    # the top-of-file import binds 'anuga', so qualify fully.
    return anuga.utilities.polygon.point_on_line(*args, **kwargs)
190
191   
# @note TEMP
def inside_polygon(*args, **kwargs):
    """Temporary Interface to new location"""

    log.critical('inside_polygon has moved from util.py.')
    log.critical('Please use '
                 '"from anuga.utilities.polygon import inside_polygon"')

    # 'utilities' alone is not a bound name in this module (NameError);
    # the top-of-file import binds 'anuga', so qualify fully.
    return anuga.utilities.polygon.inside_polygon(*args, **kwargs)
201
202   
# @note TEMP
def outside_polygon(*args, **kwargs):
    """Temporary Interface to new location"""

    log.critical('outside_polygon has moved from util.py.')
    log.critical('Please use '
                 '"from anuga.utilities.polygon import outside_polygon"')

    # 'utilities' alone is not a bound name in this module (NameError);
    # the top-of-file import binds 'anuga', so qualify fully.
    return anuga.utilities.polygon.outside_polygon(*args, **kwargs)
212
213   
# @note TEMP
def read_polygon(*args, **kwargs):
    """Temporary Interface to new location"""

    log.critical('read_polygon has moved from util.py.')
    log.critical('Please use '
                 '"from anuga.utilities.polygon import read_polygon"')

    # 'utilities' alone is not a bound name in this module (NameError);
    # the top-of-file import binds 'anuga', so qualify fully.
    return anuga.utilities.polygon.read_polygon(*args, **kwargs)
223
224
# @note TEMP
def populate_polygon(*args, **kwargs):
    """Temporary Interface to new location"""

    log.critical('populate_polygon has moved from util.py.')
    log.critical('Please use '
                 '"from anuga.utilities.polygon import populate_polygon"')

    # 'utilities' alone is not a bound name in this module (NameError);
    # the top-of-file import binds 'anuga', so qualify fully.
    return anuga.utilities.polygon.populate_polygon(*args, **kwargs)
234
235
236#################################################################################
237# End of obsolete stuff ?
238#################################################################################
239
# @note TEMP
def start_screen_catcher(dir_name, myid='', numprocs='', extra_info='',
                         verbose=False):
    """Temporary Interface to new location"""
    from anuga.shallow_water.data_manager import start_screen_catcher \
         as dm_start_screen_catcher

    log.critical('start_screen_catcher has moved from util.py.')
    log.critical('Please use "from anuga.shallow_water.data_manager import '
                 'start_screen_catcher"')

    # Forward the caller's actual arguments; previously the keyword
    # defaults were hard-coded here, silently discarding any myid,
    # numprocs, extra_info and verbose values passed in.
    return dm_start_screen_catcher(dir_name, myid=myid, numprocs=numprocs,
                                   extra_info=extra_info, verbose=verbose)
253
254
255
##
# @brief Read gauge info from a file.
# @param filename The name of the file to read.
# @return A (gauges, gaugelocation, elev) tuple.
def get_gauges_from_file(filename):
    """Deprecated alias - delegates to gauge_get_from_file."""
    result = gauge_get_from_file(filename)
    return result
262
263
##
# @brief Check that input quantities in quantity list are legal.
# @param quantity Quantity list to check; lower-cased in place.
# @note Raises an exception if the list is not legal.
def check_list(quantity):
    """ Check that input quantities in quantity list are possible
    """

    all_quantity = ['stage', 'depth', 'momentum', 'xmomentum',
                    'ymomentum', 'speed', 'bearing', 'elevation']

    # convert all quantity names to lowercase, in place, since callers
    # may rely on the normalised list
    quantity[:] = [name.lower() for name in quantity]

    # check that all names in 'quantity' appear in 'all_quantity'
    p = list(set(quantity).difference(set(all_quantity)))
    if len(p) != 0:
        msg = 'Quantities %s do not exist - please try again' % p
        raise Exception(msg)
285
286
##
# @brief Calculate velocity bearing from North.
# @param uh x component of the velocity vector.
# @param vh y component of the velocity vector.
# @return The calculated bearing in degrees, or NaN for zero velocity.
def calc_bearing(uh, vh):
    """ Calculate velocity bearing from North
    """
    # FIXME (Ole): I reckon we should refactor this one to use
    #              the function angle() in utilities/numerical_tools
    #
    #              It will be a simple matter of
    # * converting from radians to degrees
    # * moving the reference direction from [1,0] to North
    # * changing from counter clockwise to clockwise.

    # A zero velocity vector has no direction - the bearing is
    # indeterminate, so report NaN.
    if uh == 0 and vh == 0:
        return NAN

    # Measure the velocity vector against the reference direction [0, -1]
    # and convert the result from radians to degrees.
    reference = [0, -1]
    return degrees(angle([uh, vh], reference))
308
309
# FIXME (DSG): Add unit test, make general, not just 2 files,
# but any number of files.
##
# @brief Copy two code files (deprecated shim).
# @param dir_name Destination directory.
# @param filename1 First file to copy.
# @param filename2 Second file to copy.
# @return Whatever the relocated implementation returns.
# @note TEMP
def copy_code_files(dir_name, filename1, filename2):
    """Temporary Interface to new location"""

    from anuga.shallow_water.data_manager import \
                    copy_code_files as dm_copy_code_files

    log.critical('copy_code_files has moved from util.py.')
    log.critical('Please use "from anuga.shallow_water.data_manager import '
                 'copy_code_files"')

    return dm_copy_code_files(dir_name, filename1, filename2)
329
330
##
# @brief Create a nested sub-directory path.
# @param root_directory The base directory path.
# @param directories An iterable of sub-directory names.
# @return The final joined directory path.
# @note If each sub-directory doesn't exist, it will be created.
def add_directories(root_directory, directories):
    """
    Add the first sub-directory in 'directories' to root_directory.
    Then add the second sub-directory to the accumulating path and so on.

    Return the path of the final directory.

    This is handy for specifying and creating a directory where data will go.
    """
    path = root_directory    # renamed from 'dir' to avoid shadowing the builtin
    for new_dir in directories:
        path = os.path.join(path, new_dir)
        # create each level as we descend, if it isn't already there
        if not os.path.exists(path):
            os.mkdir(path)
    return path
352
353
##
# @brief Read data from a file (deprecated shim).
# @param filename Name of the file to read.
# @param separator_value Field separator used in the file (default ',').
# @return Whatever the relocated implementation returns.
# @note TEMP
def get_data_from_file(filename, separator_value=','):
    """Temporary Interface to new location"""
    from anuga.shallow_water.data_manager import \
                        get_data_from_file as dm_get_data_from_file
    log.critical('get_data_from_file has moved from util.py')
    log.critical('Please use "from anuga.shallow_water.data_manager import '
                 'get_data_from_file"')

    # Forward the caller's separator; previously ',' was hard-coded,
    # silently ignoring any separator_value argument.
    return dm_get_data_from_file(filename, separator_value=separator_value)
369
370
##
# @brief Store runtime parameters (deprecated shim).
# @param verbose True if this function is to be verbose.
# @param kwargs Keyword arguments forwarded to the relocated implementation.
# @return Whatever the relocated implementation returns.
# @note TEMP
def store_parameters(verbose=False, **kwargs):
    """Temporary Interface to new location"""

    from anuga.shallow_water.data_manager \
                    import store_parameters as dm_store_parameters
    log.critical('store_parameters has moved from util.py.')
    log.critical('Please use "from anuga.shallow_water.data_manager '
                 'import store_parameters"')

    # Forward the caller's verbose flag; previously False was hard-coded,
    # silently ignoring the verbose argument.
    return dm_store_parameters(verbose=verbose, **kwargs)
387
388
##
# @brief Remove vertices that are not associated with any triangle.
# @param verts An iterable (or array) of points.
# @param triangles An iterable of 3 element tuples (vertex indices).
# @param number_of_full_nodes ??  NOTE(review): currently unused by this
#        function's body - presumably reserved for parallel meshes; confirm.
# @return (verts, triangles) where 'verts' has been updated.
def remove_lone_verts(verts, triangles, number_of_full_nodes=None):
    """Removes vertices that are not associated with any triangles.

    verts is a list/array of points.
    triangles is a list of 3 element tuples.  Each tuple represents a triangle.
    number_of_full_nodes relate to parallelism when a mesh has an
        extra layer of ghost points.
    """

    verts = ensure_numeric(verts)
    triangles = ensure_numeric(triangles)

    N = len(verts)

    # initialise the array to easily find the index of the first loner
    # ie, if N=3 -> [6,5,4]
    # Entries that stay > N after the loop below are loners.
    loners=num.arange(2*N, N, -1)
    for t in triangles:
        for vert in t:
            loners[vert]= vert # all non-loners will have loners[i]=i

    # max(loners) is 2*N - lone_start by construction of the initial
    # descending ramp, so this recovers the index of the first loner.
    lone_start = 2*N - max(loners) # The index of the first loner

    if lone_start-1 == N:
        # no loners
        pass
    elif min(loners[lone_start:N]) > N:
        # All the loners are at the end of the vert array
        verts = verts[0:lone_start]
    else:
        # change the loners list so it can be used to modify triangles
        # Remove the loners from verts
        # Could've used X=compress(less(loners,N),loners)
        # verts=num.take(verts,X,axis=0)  to Remove the loners from verts
        # but I think it would use more memory
        new_i = lone_start    # point at first loner - 'shuffle down' target
        for i in range(lone_start, N):
            if loners[i] >= N:    # [i] is a loner, leave alone
                pass
            else:        # a non-loner, move down
                # record the vertex's new (shuffled-down) index and move
                # its coordinates into place
                loners[i] = new_i
                verts[new_i] = verts[i]
                new_i += 1
        verts = verts[0:new_i]

        # Modify the triangles: loners[] now maps each old vertex index
        # to its new index, so re-index every triangle through it.
        triangles = num.choose(triangles,loners)
    return verts, triangles
443
444
##
# @brief Compute centroid values from vertex values
# @param x Values at vertices of triangular mesh.
# @param triangles Nx3 integer array pointing to vertex information.
# @return [N] array of centroid values.
def get_centroid_values(x, triangles):
    """Compute centroid values from vertex values

    x: Values at vertices of triangular mesh
    triangles: Nx3 integer array pointing to vertex information
    for each of the N triangles. Elements of triangles are
    indices into x
    """

    # Use the builtin float as dtype; the num.float alias has been
    # removed from recent numpy releases.
    xc = num.zeros(triangles.shape[0], float)    # Space for centroid info

    for k in range(triangles.shape[0]):
        # Indices of the triangle's three vertices
        i0, i1, i2 = triangles[k]

        # Centroid value is the average of the three vertex values
        xc[k] = (x[i0] + x[i1] + x[i2]) / 3

    return xc
470
471
# @note TEMP
# @brief Deprecated entry point - always raises, pointing callers at
#        csv2timeseries_graphs.  The signature is kept intact for
#        backward compatibility with existing call sites.
def make_plots_from_csv_file(directories_dic={dir:['gauge', 0, 0]},
                                output_dir='',
                                base_name='',
                                plot_numbers=['3-5'],
                                quantities=['speed','stage','momentum'],
                                assess_all_csv_files=True,
                                extra_plot_name='test'):
    """Deprecated: raises an Exception directing the caller to
    csv2timeseries_graphs.
    """

    msg = 'make_plots_from_csv_file has been replaced by csv2timeseries_graphs '
    msg += 'Please use "from anuga.abstract_2d_finite_volumes.util import ' \
           'csv2timeseries_graphs"'
    # Modern raise syntax; the call that followed the raise was
    # unreachable and has been removed.
    raise Exception(msg)
493
494
495##
496# @brief Plot time series from CSV files.
497# @param directories_dic
498# @param output_dir
499# @param base_name
500# @param plot_numbers
501# @param quantities
502# @param extra_plot_name
503# @param assess_all_csv_files
504# @param create_latex
505# @param verbose
506# @note Assumes that 'elevation' is in the CSV file(s).
507def csv2timeseries_graphs(directories_dic={},
508                          output_dir='',
509                          base_name=None,
510                          plot_numbers='',
511                          quantities=['stage'],
512                          extra_plot_name='',
513                          assess_all_csv_files=True,
514                          create_latex=False,
515                          verbose=False):
516                               
517    """
518    Read in csv files that have the right header information and
519    plot time series such as Stage, Speed, etc. Will also plot several
520    time series on one plot. Filenames must follow this convention,
521    <base_name><plot_number>.csv eg gauge_timeseries3.csv
522   
523    NOTE: relies that 'elevation' is in the csv file!
524
525    Each file represents a location and within each file there are
526    time, quantity columns.
527   
528    For example:   
529    if "directories_dic" defines 4 directories and in each directories
530    there is a csv files corresponding to the right "plot_numbers",
531    this will create a plot with 4 lines one for each directory AND
532    one plot for each "quantities".  ??? FIXME: unclear.
533   
534    Usage:
535        csv2timeseries_graphs(directories_dic={'slide'+sep:['Slide',0, 0],
536                                       'fixed_wave'+sep:['Fixed Wave',0,0]},
537                            output_dir='fixed_wave'+sep,
538                            base_name='gauge_timeseries_',
539                            plot_numbers='',
540                            quantities=['stage','speed'],
541                            extra_plot_name='',
542                            assess_all_csv_files=True,                           
543                            create_latex=False,
544                            verbose=True)
545            this will create one plot for stage with both 'slide' and
546            'fixed_wave' lines on it for stage and speed for each csv
547            file with 'gauge_timeseries_' as the prefix. The graghs
548            will be in the output directory 'fixed_wave' and the graph
549            axis will be determined by assessing all the
550   
551    ANOTHER EXAMPLE
552        new_csv2timeseries_graphs(directories_dic={'slide'+sep:['Slide',0, 0],
553                                       'fixed_wave'+sep:['Fixed Wave',0,0]},
554                            output_dir='fixed_wave'+sep,
555                            base_name='gauge_timeseries_',
556                            plot_numbers=['1-3'],
557                            quantities=['stage','speed'],
558                            extra_plot_name='',
559                            assess_all_csv_files=False,                           
560                            create_latex=False,
561                            verbose=True)
562        This will plot csv files called gauge_timeseries_1.csv and
563        gauge_timeseries3.csv from both 'slide' and 'fixed_wave' directories
564        to 'fixed_wave'. There will be 4 plots created two speed and two stage
565        one for each csv file. There will be two lines on each of these plots.
566        And the axis will have been determined from only these files, had
567        assess_all_csv_files = True all csv file with 'gauges_timeseries_' prefix
568        would of been assessed.
569   
570    ANOTHER EXAMPLE   
571         csv2timeseries_graphs({'J:'+sep+'anuga_validation'+sep:['new',20,-.1],
572                                   'J:'+sep+'conical_island'+sep:['test',0,0]},
573                                   output_dir='',
574                                   plot_numbers=['1','3'],
575                                   quantities=['stage','depth','bearing'],
576                                   base_name='gauge_b',
577                                   assess_all_csv_files=True,
578                                  verbose=True)   
579       
580            This will produce one plot for each quantity (therefore 3) in the
581            current directory, each plot will have 2 lines on them. The first
582            plot named 'new' will have the time offseted by 20secs and the stage
583            height adjusted by -0.1m
584       
585    Inputs:
586        directories_dic: dictionary of directory with values (plot
587                         legend name for directory), (start time of
588                         the time series) and the (value to add to
589                         stage if needed). For example
590                         {dir1:['Anuga_ons',5000, 0],
591                          dir2:['b_emoth',5000,1.5],
592                          dir3:['b_ons',5000,1.5]}
593                         Having multiple directories defined will plot them on
594                         one plot, therefore there will be 3 lines on each of
595                         these plot. If you only want one line per plot call
596                         csv2timeseries_graph separately for each directory,
597                         eg only have one directory in the 'directories_dic' in
598                         each call.
599                         
600        output_dir: directory for the plot outputs. Only important to define when
601                    you have more than one directory in your directories_dic, if
602                    you have not defined it and you have multiple directories in
603                    'directories_dic' there will be plots in each directory,
604                    however only one directory will contain the complete
605                    plot/graphs.
606       
607        base_name: Is used a couple of times.
608                   1) to find the csv files to be plotted if there is no
609                      'plot_numbers' then csv files with 'base_name' are plotted
610                   2) in the title of the plots, the length of base_name is
611                      removed from the front of the filename to be used in the
612                      title.
613                   This could be changed if needed.
614                   Note is ignored if assess_all_csv_files=True
615       
616        plot_numbers: a String list of numbers to plot. For example
617                      [0-4,10,15-17] will read and attempt to plot
618                      the follow 0,1,2,3,4,10,15,16,17
619                      NOTE: if no plot numbers this will create one plot per
620                            quantity, per gauge
621
622        quantities: Will get available quantities from the header in the csv
623                    file.  Quantities must be one of these.
624                    NOTE: ALL QUANTITY NAMES MUST BE lower case!
625                   
626        extra_plot_name: A string that is appended to the end of the
627                         output filename.
628                   
629        assess_all_csv_files: if true it will read ALL csv file with
630                             "base_name", regardless of 'plot_numbers'
631                              and determine a uniform set of axes for
632                              Stage, Speed and Momentum. IF FALSE it
633                              will only read the csv file within the
634                             'plot_numbers'
635                             
636        create_latex: NOT IMPLEMENTED YET!! sorry Jane....
637       
638    OUTPUTS: saves the plots to
639              <output_dir><base_name><plot_number><extra_plot_name>.png
640    """
641
642    try: 
643        import pylab
644    except ImportError:
645        msg='csv2timeseries_graphs needs pylab to be installed correctly'
646        raise msg
647            #ANUGA don't need pylab to work so the system doesn't
648            #rely on pylab being installed
649        return
650
651    from os import sep
652    from anuga.shallow_water.data_manager import \
653                               get_all_files_with_extension, csv2dict
654
655    seconds_in_hour = 3600
656    seconds_in_minutes = 60
657   
658    quantities_label={}
659#    quantities_label['time'] = 'time (hours)'
660    quantities_label['time'] = 'time (minutes)'
661    quantities_label['stage'] = 'wave height (m)'
662    quantities_label['speed'] = 'speed (m/s)'
663    quantities_label['momentum'] = 'momentum (m^2/sec)'
664    quantities_label['depth'] = 'water depth (m)'
665    quantities_label['xmomentum'] = 'momentum (m^2/sec)'
666    quantities_label['ymomentum'] = 'momentum (m^2/sec)'
667    quantities_label['bearing'] = 'degrees (o)'
668    quantities_label['elevation'] = 'elevation (m)'
669   
670    if extra_plot_name != '':
671        extra_plot_name = '_' + extra_plot_name
672
673    new_plot_numbers=[]
674    #change plot_numbers to list, eg ['0-4','10']
675    #to ['0','1','2','3','4','10']
676    for i, num_string in enumerate(plot_numbers):
677        if '-' in num_string: 
678            start = int(num_string[:num_string.rfind('-')])
679            end = int(num_string[num_string.rfind('-') + 1:]) + 1
680            for x in range(start, end):
681                new_plot_numbers.append(str(x))
682        else:
683            new_plot_numbers.append(num_string)
684
685    #finds all the files that fit the specs provided and return a list of them
686    #so to help find a uniform max and min for the plots...
687    list_filenames=[]
688    all_csv_filenames=[]
689    if verbose: log.critical('Determining files to access for axes ranges.')
690   
691    for i,directory in enumerate(directories_dic.keys()):
692        all_csv_filenames.append(get_all_files_with_extension(directory,
693                                                              base_name, '.csv'))
694
695        filenames=[]
696        if plot_numbers == '': 
697            list_filenames.append(get_all_files_with_extension(directory,
698                                                               base_name,'.csv'))
699        else:
700            for number in new_plot_numbers:
701                filenames.append(base_name + number)
702            list_filenames.append(filenames)
703
704    #use all the files to get the values for the plot axis
705    max_start_time= -1000.
706    min_start_time = 100000 
707   
708    if verbose: log.critical('Determining uniform axes')
709
710    #this entire loop is to determine the min and max range for the
711    #axes of the plots
712
713#    quantities.insert(0,'elevation')
714    quantities.insert(0,'time')
715
716    directory_quantity_value={}
717#    quantity_value={}
718    min_quantity_value={}
719    max_quantity_value={}
720
721    for i, directory in enumerate(directories_dic.keys()):
722        filename_quantity_value = {}
723        if assess_all_csv_files == False:
724            which_csv_to_assess = list_filenames[i]
725        else:
726            #gets list of filenames for directory "i"
727            which_csv_to_assess = all_csv_filenames[i]
728       
729        for j, filename in enumerate(which_csv_to_assess):
730            quantity_value = {}
731
732            dir_filename = join(directory,filename)
733            attribute_dic, title_index_dic = csv2dict(dir_filename + '.csv')
734            directory_start_time = directories_dic[directory][1]
735            directory_add_tide = directories_dic[directory][2]
736
737            if verbose: log.critical('reading: %s.csv' % dir_filename)
738
739            #add time to get values
740            for k, quantity in enumerate(quantities):
741                quantity_value[quantity] = [float(x) for
742                                                x in attribute_dic[quantity]]
743
744                #add tide to stage if provided
745                if quantity == 'stage':
746                     quantity_value[quantity] = num.array(quantity_value[quantity],
747                                                          num.float) + directory_add_tide
748
749                #condition to find max and mins for all the plots
750                # populate the list with something when i=0 and j=0 and
751                # then compare to the other values to determine abs max and min
752                if i==0 and j==0:
753                    min_quantity_value[quantity], \
754                        max_quantity_value[quantity] = \
755                            get_min_max_values(quantity_value[quantity])
756
757                    if quantity != 'time':
758                        min_quantity_value[quantity] = \
759                            min_quantity_value[quantity] *1.1
760                        max_quantity_value[quantity] = \
761                            max_quantity_value[quantity] *1.1
762                else:
763                    min, max = get_min_max_values(quantity_value[quantity])
764               
765                    # min and max are multipled by "1+increase_axis" to get axes
766                    # that are slighty bigger than the max and mins
767                    # so the plots look good.
768
769                    increase_axis = (max-min)*0.05
770                    if min <= min_quantity_value[quantity]:
771                        if quantity == 'time': 
772                            min_quantity_value[quantity] = min
773                        else:
774                            if round(min,2) == 0.00:
775                                min_quantity_value[quantity] = -increase_axis
776#                                min_quantity_value[quantity] = -2.
777                                #min_quantity_value[quantity] = \
778                                #    -max_quantity_value[quantity]*increase_axis
779                            else:
780#                                min_quantity_value[quantity] = \
781#                                    min*(1+increase_axis)
782                                min_quantity_value[quantity]=min-increase_axis
783                   
784                    if max > max_quantity_value[quantity]: 
785                        if quantity == 'time': 
786                            max_quantity_value[quantity] = max
787                        else:
788                            max_quantity_value[quantity] = max + increase_axis
789#                            max_quantity_value[quantity]=max*(1+increase_axis)
790
791            #set the time... ???
792            if min_start_time > directory_start_time: 
793                min_start_time = directory_start_time
794            if max_start_time < directory_start_time: 
795                max_start_time = directory_start_time
796           
797            filename_quantity_value[filename]=quantity_value
798           
799        directory_quantity_value[directory]=filename_quantity_value
800   
801    #final step to unifrom axis for the graphs
802    quantities_axis={}
803   
804    for i, quantity in enumerate(quantities):
805        quantities_axis[quantity] = (float(min_start_time) \
806                                         / float(seconds_in_minutes),
807                                     (float(max_quantity_value['time']) \
808                                          + float(max_start_time)) \
809                                              / float(seconds_in_minutes),
810                                     min_quantity_value[quantity],
811                                     max_quantity_value[quantity])
812
813        if verbose and (quantity != 'time' and quantity != 'elevation'): 
814            log.critical('axis for quantity %s are x:(%s to %s)%s '
815                         'and y:(%s to %s)%s' 
816                         % (quantity, quantities_axis[quantity][0],
817                            quantities_axis[quantity][1],
818                            quantities_label['time'],
819                            quantities_axis[quantity][2],
820                            quantities_axis[quantity][3],
821                            quantities_label[quantity]))
822
823    cstr = ['b', 'r', 'g', 'c', 'm', 'y', 'k']
824
825    if verbose: log.critical('Now start to plot')
826   
827    i_max = len(directories_dic.keys())
828    legend_list_dic = {}
829    legend_list = []
830    for i, directory in enumerate(directories_dic.keys()):
831        if verbose: log.critical('Plotting in %s %s'
832                                 % (directory, new_plot_numbers))
833
834        # FIXME THIS SORT IS VERY IMPORTANT
835        # Without it the assigned plot numbers may not work correctly
836        # there must be a better way
837        list_filenames[i].sort()
838        for j, filename in enumerate(list_filenames[i]):
839            if verbose: log.critical('Starting %s' % filename)
840
841            directory_name = directories_dic[directory][0]
842            directory_start_time = directories_dic[directory][1]
843            directory_add_tide = directories_dic[directory][2]
844           
845            # create an if about the start time and tide height if don't exist
846            attribute_dic, title_index_dic = csv2dict(directory + sep
847                                                      + filename + '.csv')
848            #get data from dict in to list
849            #do maths to list by changing to array
850            t = (num.array(directory_quantity_value[directory][filename]['time'])
851                     + directory_start_time) / seconds_in_minutes
852
853            #finds the maximum elevation, used only as a test
854            # and as info in the graphs
855            max_ele=-100000
856            min_ele=100000
857            elevation = [float(x) for x in attribute_dic["elevation"]]
858           
859            min_ele, max_ele = get_min_max_values(elevation)
860           
861            if min_ele != max_ele:
862                log.critical("Note! Elevation changes in %s" % dir_filename)
863
864            # creates a dictionary with keys that is the filename and attributes
865            # are a list of lists containing 'directory_name' and 'elevation'.
866            # This is used to make the contents for the legends in the graphs,
867            # this is the name of the model and the elevation.  All in this
868            # great one liner from DG. If the key 'filename' doesn't exist it
869            # creates the entry if the entry exist it appends to the key.
870
871            legend_list_dic.setdefault(filename,[]) \
872                .append([directory_name, round(max_ele, 3)])
873
874            # creates a LIST for the legend on the last iteration of the
875            # directories which is when "legend_list_dic" has been fully
876            # populated. Creates a list of strings which is used in the legend
877            # only runs on the last iteration for all the gauges(csv) files
878            # empties the list before creating it
879
880            if i == i_max - 1:
881                legend_list = []
882   
883                for name_and_elevation in legend_list_dic[filename]:
884                    legend_list.append('%s (elevation = %sm)'\
885                                       % (name_and_elevation[0],
886                                          name_and_elevation[1]))
887           
888            #skip time and elevation so it is not plotted!
889            for k, quantity in enumerate(quantities):
890                if quantity != 'time' and quantity != 'elevation':
891                    pylab.figure(int(k*100+j))
892                    pylab.ylabel(quantities_label[quantity])
893                    pylab.plot(t,
894                               directory_quantity_value[directory]\
895                                                       [filename][quantity],
896                               c = cstr[i], linewidth=1)
897                    pylab.xlabel(quantities_label['time'])
898                    pylab.axis(quantities_axis[quantity])
899                    pylab.legend(legend_list,loc='upper right')
900                   
901                    pylab.title('%s at %s gauge'
902                                % (quantity, filename[len(base_name):]))
903
904                    if output_dir == '':
905                        figname = '%s%s%s_%s%s.png' \
906                                  % (directory, sep, filename, quantity,
907                                     extra_plot_name)
908                    else:
909                        figname = '%s%s%s_%s%s.png' \
910                                  % (output_dir, sep, filename, quantity,
911                                     extra_plot_name)
912
913                    if verbose: log.critical('saving figure here %s' % figname)
914
915                    pylab.savefig(figname)
916           
917    if verbose: log.critical('Closing all plots')
918
919    pylab.close('all')
920    del pylab
921
922    if verbose: log.critical('Finished closing plots')
923
924##
925# @brief Return min and max of an iterable.
926# @param list The iterable to return min & max of.
927# @return (min, max) of 'list'.
def get_min_max_values(list=None):
    """Return the minimum and maximum values of an iterable.

    list  the iterable to scan (must not be None or empty)

    Returns the tuple (min, max).
    Raises ValueError if no iterable is provided.
    """

    # NOTE: the parameter keeps its historical name 'list' (shadowing the
    # builtin) so existing keyword callers are unaffected.
    if list is None:
        # Previously this only logged a message and then crashed inside
        # min(None) with an obscure TypeError; fail clearly instead.
        raise ValueError('List must be provided')

    return min(list), max(list)
936
937
938##
939# @brief Get runup around a point in a CSV file.
940# @param gauge_filename gauge file name.
941# @param sww_filename SWW file name.
942# @param runup_filename Name of file to report into.
# @param size Half side-length of the square search area around each gauge.
# @param verbose True if this function is to be verbose.
def get_runup_data_for_locations_from_file(gauge_filename,
                                           sww_filename,
                                           runup_filename,
                                           size=10,
                                           verbose=False):
    """Read a csv file with the header x,y, then look in a square of side
    2*'size' around each position for the 'max_inundation_height' in the
    'sww_filename' and report the findings in the 'runup_filename'.

    gauge_filename  csv file of gauge positions, columns 'x' and 'y'
    sww_filename    sww file to interrogate for inundation data
    runup_filename  output csv file (easting, northing, runup)
    size            half side-length of the square search area
    verbose         True if this function is to be verbose

    WARNING: NO TESTS!
    """

    from anuga.shallow_water.data_manager import get_maximum_inundation_data,\
                                                 csv2dict

    # Read gauge csv file into a dictionary of column lists.
    attribute_dic, title_index_dic = csv2dict(gauge_filename)
    northing = [float(x) for x in attribute_dic["y"]]
    easting = [float(x) for x in attribute_dic["x"]]

    log.critical('Reading %s' % sww_filename)

    # Open the output file once (the original re-opened it in append mode
    # for every gauge) and let the context manager guarantee closure.
    with open(runup_filename, "w") as outfile:
        outfile.write("easting,northing,runup \n ")

        for east, north in zip(easting, northing):
            # Axis-aligned square of side 2*size centred on the gauge.
            poly = [[int(east+size), int(north+size)],
                    [int(east+size), int(north-size)],
                    [int(east-size), int(north-size)],
                    [int(east-size), int(north+size)]]

            run_up, x_y = get_maximum_inundation_data(filename=sww_filename,
                                                      polygon=poly,
                                                      verbose=False)

            # If there is no runup, report 0 instead of None.
            if run_up is None: run_up = 0
            if x_y is None: x_y = [0, 0]

            if verbose:
                log.critical('maximum inundation runup near %s is %s meters'
                             % (x_y, run_up))

            outfile.write('%s,%s,%s \n' % (x_y[0], x_y[1], run_up))
996
997##
998# @brief ??
999# @param  ??
1000# @param gauge_file ??
1001# @param out_name ??
1002# @param quantities ??
1003# @param verbose ??
1004# @param use_cache ??
def sww2csv_gauges(sww_file,
                   gauge_file,
                   out_name='gauge_',
                   quantities=None,
                   verbose=False,
                   use_cache=True):
    """Interpolate quantities at gauge locations from an sww file and
    write one csv time series file per gauge.

    Inputs:
        NOTE: if using csv2timeseries_graphs after creating csv file,
        it is essential to export quantities 'depth' and 'elevation'.
        'depth' is good to analyse gauges on land and elevation is used
        automatically by csv2timeseries_graphs in the legend.

        sww_file: path to any sww file

        gauge_file: Assumes that it follows this format
            name, easting, northing, elevation
            point1, 100.3, 50.2, 10.0
            point2, 10.3, 70.3, 78.0

        NOTE: order of column can change but names eg 'easting', 'elevation'
        must be the same! ALL lowercase!

        out_name: prefix for output file name (default is 'gauge_')

        quantities: list of quantity names to export; defaults to
            ['stage', 'depth', 'elevation', 'xmomentum', 'ymomentum']

    Outputs:
        one file for each gauge/point location in the points file. They
        will be named with this format in the same directory as the 'sww_file'
            <out_name><name>.csv
        eg gauge_point1.csv if <out_name> not supplied
           myfile_2_point1.csv if <out_name> ='myfile_2_'

        They will all have a header

    Usage: sww2csv_gauges(sww_file='test1.sww',
                          quantities = ['stage', 'elevation','depth','bearing'],
                          gauge_file='gauge.txt')

    Interpolate the quantities at a given set of locations, given
    an sww file.
    The results are written to a csv file.

    This is really returning speed, not velocity.
    """
    from gauge import sww2csv_gauges as sww2csv

    # Use a None sentinel instead of a mutable default list, which would be
    # shared (and mutable) across all calls of this function.
    if quantities is None:
        quantities = ['stage', 'depth', 'elevation', 'xmomentum', 'ymomentum']

    return sww2csv(sww_file, gauge_file, out_name, quantities, verbose, use_cache)
1060   
def sww2timeseries(swwfiles,
                   gauge_filename,
                   production_dirs,
                   report=None,
                   reportname=None,
                   plot_quantity=None,
                   generate_fig=False,
                   surface=None,
                   time_min=None,
                   time_max=None,
                   time_thinning=1,                   
                   time_unit=None,
                   title_on=None,
                   use_cache=False,
                   verbose=False,
                   output_centroids=False):
    """Thin backward-compatibility wrapper.

    The implementation lives in the gauge module; all arguments are
    forwarded unchanged.  See gauge.sww2timeseries for documentation.
    """
    from gauge import sww2timeseries as _impl
    return _impl(swwfiles, gauge_filename, production_dirs, report,
                 reportname, plot_quantity, generate_fig, surface,
                 time_min, time_max, time_thinning, time_unit,
                 title_on, use_cache, verbose, output_centroids)
1094   
1095##
1096# @brief Get a wave height at a certain depth given wave height at another depth.
1097# @param d1 The first depth.
1098# @param d2 The second depth.
1099# @param h1 Wave ampitude at d1
1100# @param verbose True if this function is to be verbose.
1101# @return The wave height at d2.
def greens_law(d1, d2, h1, verbose=False):
    """Green's Law

    Green's Law allows an approximation of wave amplitude at
    a given depth based on the fourth root of the ratio of two depths
    and the amplitude at another given depth.

    Note, wave amplitude is equal to stage.

    Inputs:

    d1, d2 - the two depths (must be strictly positive)
    h1     - the wave amplitude at d1 (must be strictly positive)
    h2     - the derived amplitude at d2

    h2 = h1 (d1/d2)^(1/4), where d2 cannot equal 0.

    Raises ValueError if d1, d2 or h1 is not strictly positive.
    """

    d1 = ensure_numeric(d1)
    d2 = ensure_numeric(d2)
    h1 = ensure_numeric(h1)

    # Raise ValueError (a subclass of Exception) so callers can catch the
    # specific input error while existing 'except Exception' code still works.
    if d1 <= 0.0:
        msg = 'the first depth, d1 (%f), must be strictly positive' % (d1)
        raise ValueError(msg)

    if d2 <= 0.0:
        msg = 'the second depth, d2 (%f), must be strictly positive' % (d2)
        raise ValueError(msg)

    if h1 <= 0.0:
        msg = 'the wave amplitude, h1 (%f), must be strictly positive' % (h1)
        raise ValueError(msg)

    h2 = h1*(d1/d2)**0.25

    # Sanity check: strictly positive inputs guarantee a positive amplitude.
    assert h2 > 0

    return h2
1142       
1143
1144##
1145# @brief Get the square-root of a value.
1146# @param s The value to get the square-root of.
1147# @return The square-root of 's'.
def square_root(s):
    """Return the square root of 's'.

    Thin convenience wrapper around math.sqrt.
    """
    return sqrt(s)
1150
1151
Note: See TracBrowser for help on using the repository browser.