1 | """This module contains various auxiliary function used by pyvolution. |
---|
2 | |
---|
3 | It is also a clearing house for functions that may later earn a module |
---|
4 | of their own. |
---|
5 | """ |
---|
6 | |
---|
7 | import anuga.utilities.polygon |
---|
8 | import sys |
---|
9 | import os |
---|
10 | |
---|
11 | from os import remove, mkdir, access, F_OK, R_OK, W_OK, sep,mkdir |
---|
12 | from os.path import exists, basename, split,join |
---|
13 | from warnings import warn |
---|
14 | from shutil import copy |
---|
15 | |
---|
16 | from anuga.utilities.numerical_tools import ensure_numeric |
---|
17 | from Numeric import arange, choose, zeros, Float, array |
---|
18 | |
---|
19 | from anuga.geospatial_data.geospatial_data import ensure_absolute |
---|
20 | from math import sqrt, atan, degrees |
---|
21 | |
---|
22 | |
---|
23 | |
---|
24 | # FIXME (Ole): Temporary short cuts - remove and update scripts where they are used |
---|
25 | from anuga.utilities.system_tools import get_revision_number |
---|
26 | from anuga.utilities.system_tools import store_version_info |
---|
27 | |
---|
28 | |
---|
29 | def file_function(filename, |
---|
30 | domain=None, |
---|
31 | quantities=None, |
---|
32 | interpolation_points=None, |
---|
33 | time_thinning=1, |
---|
34 | verbose=False, |
---|
35 | use_cache=False): |
---|
36 | """Read time history of spatial data from NetCDF file and return |
---|
37 | a callable object. |
---|
38 | |
---|
39 | Input variables: |
---|
40 | |
---|
41 | filename - Name of sww or tms file |
---|
42 | |
---|
43 | If the file has extension 'sww' then it is assumed to be spatio-temporal |
---|
44 | or temporal and the callable object will have the form f(t,x,y) or f(t) |
---|
45 | depending on whether the file contains spatial data |
---|
46 | |
---|
47 | If the file has extension 'tms' then it is assumed to be temporal only |
---|
48 | and the callable object will have the form f(t) |
---|
49 | |
---|
50 | Either form will return interpolated values based on the input file |
---|
51 | using the underlying interpolation_function. |
---|
52 | |
---|
53 | domain - Associated domain object |
---|
54 | If domain is specified, model time (domain.starttime) |
---|
55 | will be checked and possibly modified. |
---|
56 | |
---|
57 | All times are assumed to be in UTC |
---|
58 | |
---|
59 | All spatial information is assumed to be in absolute UTM coordinates. |
---|
60 | |
---|
61 | quantities - the name of the quantity to be interpolated or a |
---|
62 | list of quantity names. The resulting function will return |
---|
63 | a tuple of values - one for each quantity |
---|
64 | If quantities are None, domain's conserved quantities |
---|
65 | are used. |
---|
66 | |
---|
67 | interpolation_points - list of absolute UTM coordinates for points (N x 2) |
---|
68 | or geospatial object or points file name at which values are sought |
---|
69 | |
---|
70 | use_cache: True means that caching of intermediate result of |
---|
71 | Interpolation_function is attempted |
---|
72 | |
---|
73 | |
---|
74 | See Interpolation function for further documentation |
---|
75 | """ |
---|
76 | |
---|
77 | |
---|
78 | # FIXME (OLE): Should check origin of domain against that of file |
---|
79 | # In fact, this is where origin should be converted to that of domain |
---|
80 | # Also, check that file covers domain fully. |
---|
81 | |
---|
82 | # Take into account: |
---|
83 | # - domain's georef |
---|
84 | # - sww file's georef |
---|
85 | # - interpolation points as absolute UTM coordinates |
---|
86 | |
---|
87 | |
---|
88 | |
---|
89 | |
---|
90 | # Use domain's conserved_quantity names as defaults |
---|
91 | if domain is not None: |
---|
92 | if quantities is None: |
---|
93 | quantities = domain.conserved_quantities |
---|
94 | |
---|
95 | domain_starttime = domain.get_starttime() |
---|
96 | else: |
---|
97 | domain_starttime = None |
---|
98 | |
---|
99 | |
---|
100 | # Build arguments and keyword arguments for use with caching or apply. |
---|
101 | args = (filename,) |
---|
102 | |
---|
103 | |
---|
104 | # FIXME (Ole): Caching this function will not work well |
---|
105 | # if domain is passed in as instances change hash code. |
---|
106 | # Instead we pass in those attributes that are needed (and return them |
---|
107 | # if modified) |
---|
108 | kwargs = {'quantities': quantities, |
---|
109 | 'interpolation_points': interpolation_points, |
---|
110 | 'domain_starttime': domain_starttime, |
---|
111 | 'time_thinning': time_thinning, |
---|
112 | 'verbose': verbose} |
---|
113 | |
---|
114 | |
---|
115 | # Call underlying engine with or without caching |
---|
116 | if use_cache is True: |
---|
117 | try: |
---|
118 | from caching import cache |
---|
119 | except: |
---|
120 | msg = 'Caching was requested, but caching module'+\ |
---|
121 | 'could not be imported' |
---|
122 | raise msg |
---|
123 | |
---|
124 | f, starttime = cache(_file_function, |
---|
125 | args, kwargs, |
---|
126 | dependencies=[filename], |
---|
127 | compression=False, |
---|
128 | verbose=verbose) |
---|
129 | |
---|
130 | else: |
---|
131 | f, starttime = apply(_file_function, |
---|
132 | args, kwargs) |
---|
133 | |
---|
134 | |
---|
135 | #FIXME (Ole): Pass cache arguments, such as compression, in some sort of |
---|
136 | #structure |
---|
137 | |
---|
138 | f.starttime = starttime |
---|
139 | f.filename = filename |
---|
140 | |
---|
141 | if domain is not None: |
---|
142 | #Update domain.startime if it is *earlier* than starttime from file |
---|
143 | if starttime > domain.starttime: |
---|
144 | msg = 'WARNING: Start time as specified in domain (%f)'\ |
---|
145 | %domain.starttime |
---|
146 | msg += ' is earlier than the starttime of file %s (%f).'\ |
---|
147 | %(filename, starttime) |
---|
148 | msg += ' Modifying domain starttime accordingly.' |
---|
149 | |
---|
150 | if verbose: print msg |
---|
151 | domain.set_starttime(starttime) #Modifying model time |
---|
152 | if verbose: print 'Domain starttime is now set to %f'\ |
---|
153 | %domain.starttime |
---|
154 | |
---|
155 | return f |
---|
156 | |
---|
157 | |
---|
158 | |
---|
159 | def _file_function(filename, |
---|
160 | quantities=None, |
---|
161 | interpolation_points=None, |
---|
162 | domain_starttime=None, |
---|
163 | time_thinning=1, |
---|
164 | verbose=False): |
---|
165 | """Internal function |
---|
166 | |
---|
167 | See file_function for documentatiton |
---|
168 | """ |
---|
169 | |
---|
170 | |
---|
171 | assert type(filename) == type(''),\ |
---|
172 | 'First argument to File_function must be a string' |
---|
173 | |
---|
174 | try: |
---|
175 | fid = open(filename) |
---|
176 | except Exception, e: |
---|
177 | msg = 'File "%s" could not be opened: Error="%s"'\ |
---|
178 | %(filename, e) |
---|
179 | raise msg |
---|
180 | |
---|
181 | line = fid.readline() |
---|
182 | fid.close() |
---|
183 | |
---|
184 | |
---|
185 | if line[:3] == 'CDF': |
---|
186 | return get_netcdf_file_function(filename, |
---|
187 | quantities, |
---|
188 | interpolation_points, |
---|
189 | domain_starttime, |
---|
190 | time_thinning=time_thinning, |
---|
191 | verbose=verbose) |
---|
192 | else: |
---|
193 | raise 'Must be a NetCDF File' |
---|
194 | |
---|
195 | |
---|
196 | |
---|
197 | def get_netcdf_file_function(filename, |
---|
198 | quantity_names=None, |
---|
199 | interpolation_points=None, |
---|
200 | domain_starttime=None, |
---|
201 | time_thinning=1, |
---|
202 | verbose=False): |
---|
203 | """Read time history of spatial data from NetCDF sww file and |
---|
204 | return a callable object f(t,x,y) |
---|
205 | which will return interpolated values based on the input file. |
---|
206 | |
---|
207 | Model time (domain_starttime) |
---|
208 | will be checked, possibly modified and returned |
---|
209 | |
---|
210 | All times are assumed to be in UTC |
---|
211 | |
---|
212 | See Interpolation function for further documetation |
---|
213 | |
---|
214 | """ |
---|
215 | |
---|
216 | |
---|
217 | # FIXME: Check that model origin is the same as file's origin |
---|
218 | # (both in UTM coordinates) |
---|
219 | # If not - modify those from file to match domain |
---|
220 | # (origin should be passed in) |
---|
221 | # Take this code from e.g. dem2pts in data_manager.py |
---|
222 | # FIXME: Use geo_reference to read and write xllcorner... |
---|
223 | |
---|
224 | |
---|
225 | import time, calendar, types |
---|
226 | from anuga.config import time_format |
---|
227 | from Scientific.IO.NetCDF import NetCDFFile |
---|
228 | from Numeric import array, zeros, Float, alltrue, concatenate, reshape |
---|
229 | |
---|
230 | # Open NetCDF file |
---|
231 | if verbose: print 'Reading', filename |
---|
232 | fid = NetCDFFile(filename, 'r') |
---|
233 | |
---|
234 | if type(quantity_names) == types.StringType: |
---|
235 | quantity_names = [quantity_names] |
---|
236 | |
---|
237 | if quantity_names is None or len(quantity_names) < 1: |
---|
238 | # If no quantities are specified get quantities from file |
---|
239 | # x, y, time are assumed as the independent variables so |
---|
240 | # they are excluded as potentiol quantities |
---|
241 | quantity_names = [] |
---|
242 | for name in fid.variables.keys(): |
---|
243 | if name not in ['x', 'y', 'time']: |
---|
244 | quantity_names.append(name) |
---|
245 | |
---|
246 | if len(quantity_names) < 1: |
---|
247 | msg = 'ERROR: At least one independent value must be specified' |
---|
248 | raise msg |
---|
249 | |
---|
250 | |
---|
251 | if interpolation_points is not None: |
---|
252 | interpolation_points = ensure_absolute(interpolation_points) |
---|
253 | msg = 'Points must by N x 2. I got %d' %interpolation_points.shape[1] |
---|
254 | assert interpolation_points.shape[1] == 2, msg |
---|
255 | |
---|
256 | |
---|
257 | # Now assert that requested quantitites (and the independent ones) |
---|
258 | # are present in file |
---|
259 | missing = [] |
---|
260 | for quantity in ['time'] + quantity_names: |
---|
261 | if not fid.variables.has_key(quantity): |
---|
262 | missing.append(quantity) |
---|
263 | |
---|
264 | if len(missing) > 0: |
---|
265 | msg = 'Quantities %s could not be found in file %s'\ |
---|
266 | %(str(missing), filename) |
---|
267 | fid.close() |
---|
268 | raise Exception, msg |
---|
269 | |
---|
270 | # Decide whether this data has a spatial dimension |
---|
271 | spatial = True |
---|
272 | for quantity in ['x', 'y']: |
---|
273 | if not fid.variables.has_key(quantity): |
---|
274 | spatial = False |
---|
275 | |
---|
276 | if filename[-3:] == 'tms' and spatial is True: |
---|
277 | msg = 'Files of type tms must not contain spatial information' |
---|
278 | raise msg |
---|
279 | |
---|
280 | if filename[-3:] == 'sww' and spatial is False: |
---|
281 | msg = 'Files of type sww must contain spatial information' |
---|
282 | raise msg |
---|
283 | |
---|
284 | # Get first timestep |
---|
285 | try: |
---|
286 | starttime = fid.starttime[0] |
---|
287 | except ValueError: |
---|
288 | msg = 'Could not read starttime from file %s' %filename |
---|
289 | raise msg |
---|
290 | |
---|
291 | # Get variables |
---|
292 | # if verbose: print 'Get variables' |
---|
293 | time = fid.variables['time'][:] |
---|
294 | |
---|
295 | # Get time independent stuff |
---|
296 | if spatial: |
---|
297 | # Get origin |
---|
298 | xllcorner = fid.xllcorner[0] |
---|
299 | yllcorner = fid.yllcorner[0] |
---|
300 | zone = fid.zone[0] |
---|
301 | |
---|
302 | x = fid.variables['x'][:] |
---|
303 | y = fid.variables['y'][:] |
---|
304 | triangles = fid.variables['volumes'][:] |
---|
305 | |
---|
306 | x = reshape(x, (len(x),1)) |
---|
307 | y = reshape(y, (len(y),1)) |
---|
308 | vertex_coordinates = concatenate((x,y), axis=1) #m x 2 array |
---|
309 | |
---|
310 | if interpolation_points is not None: |
---|
311 | # Adjust for georef |
---|
312 | interpolation_points[:,0] -= xllcorner |
---|
313 | interpolation_points[:,1] -= yllcorner |
---|
314 | |
---|
315 | |
---|
316 | |
---|
317 | |
---|
318 | if domain_starttime is not None: |
---|
319 | |
---|
320 | # If domain_startime is *later* than starttime, |
---|
321 | # move time back - relative to domain's time |
---|
322 | if domain_starttime > starttime: |
---|
323 | time = time - domain_starttime + starttime |
---|
324 | |
---|
325 | # FIXME Use method in geo to reconcile |
---|
326 | # if spatial: |
---|
327 | # assert domain.geo_reference.xllcorner == xllcorner |
---|
328 | # assert domain.geo_reference.yllcorner == yllcorner |
---|
329 | # assert domain.geo_reference.zone == zone |
---|
330 | |
---|
331 | if verbose: |
---|
332 | print 'File_function data obtained from: %s' %filename |
---|
333 | print ' References:' |
---|
334 | #print ' Datum ....' #FIXME |
---|
335 | if spatial: |
---|
336 | print ' Lower left corner: [%f, %f]'\ |
---|
337 | %(xllcorner, yllcorner) |
---|
338 | print ' Start time: %f' %starttime |
---|
339 | |
---|
340 | |
---|
341 | # Produce values for desired data points at |
---|
342 | # each timestep for each quantity |
---|
343 | quantities = {} |
---|
344 | for i, name in enumerate(quantity_names): |
---|
345 | quantities[name] = fid.variables[name][:] |
---|
346 | fid.close() |
---|
347 | |
---|
348 | |
---|
349 | from anuga.fit_interpolate.interpolate import Interpolation_function |
---|
350 | |
---|
351 | if not spatial: |
---|
352 | vertex_coordinates = triangles = interpolation_points = None |
---|
353 | |
---|
354 | |
---|
355 | # Return Interpolation_function instance as well as |
---|
356 | # starttime for use to possible modify that of domain |
---|
357 | return Interpolation_function(time, |
---|
358 | quantities, |
---|
359 | quantity_names, |
---|
360 | vertex_coordinates, |
---|
361 | triangles, |
---|
362 | interpolation_points, |
---|
363 | time_thinning=time_thinning, |
---|
364 | verbose=verbose), starttime |
---|
365 | |
---|
366 | # NOTE (Ole): Caching Interpolation function is too slow as |
---|
367 | # the very long parameters need to be hashed. |
---|
368 | |
---|
369 | |
---|
370 | |
---|
371 | |
---|
372 | |
---|
373 | |
---|
def multiple_replace(text, dictionary):
    """Replace every whole-word occurrence of each dictionary key in text
    with the corresponding value.

    text: String to be modified
    dictionary: Mapping of words that are to be substituted

    Returns the modified string.

    Python Cookbook 3.14 page 88 and page 90
    """

    import re

    # An empty mapping would otherwise compile to r'\b\b', which matches
    # the empty string at word boundaries and then fails the key lookup
    # with a KeyError.  Nothing to substitute, so return text unchanged.
    if not dictionary:
        return text

    #Create a regular expression from all of the dictionary keys
    #matching only entire words
    regex = re.compile(r'\b'+ \
                       r'\b|\b'.join(map(re.escape, dictionary.keys()))+ \
                       r'\b' )

    #For each match, lookup the corresponding value in the dictionary
    return regex.sub(lambda match: dictionary[match.group(0)], text)
---|
393 | |
---|
394 | |
---|
395 | |
---|
396 | |
---|
397 | def apply_expression_to_dictionary(expression, dictionary):#dictionary): |
---|
398 | """Apply arbitrary expression to values of dictionary |
---|
399 | |
---|
400 | Given an expression in terms of the keys, replace key by the |
---|
401 | corresponding values and evaluate. |
---|
402 | |
---|
403 | |
---|
404 | expression: Arbitrary, e.g. arithmetric, expression relating keys |
---|
405 | from dictionary. |
---|
406 | |
---|
407 | dictionary: Mapping between symbols in expression and objects that |
---|
408 | will be evaluated by expression. |
---|
409 | Values in dictionary must support operators given in |
---|
410 | expression e.g. by overloading |
---|
411 | |
---|
412 | due to a limitation with Numeric, this can not evaluate 0/0 |
---|
413 | In general, the user can fix by adding 1e-30 to the numerator. |
---|
414 | SciPy core can handle this situation. |
---|
415 | """ |
---|
416 | |
---|
417 | import types |
---|
418 | import re |
---|
419 | |
---|
420 | assert isinstance(expression, basestring) |
---|
421 | assert type(dictionary) == types.DictType |
---|
422 | |
---|
423 | #Convert dictionary values to textual representations suitable for eval |
---|
424 | D = {} |
---|
425 | for key in dictionary: |
---|
426 | D[key] = 'dictionary["%s"]' %key |
---|
427 | |
---|
428 | #Perform substitution of variables |
---|
429 | expression = multiple_replace(expression, D) |
---|
430 | |
---|
431 | #Evaluate and return |
---|
432 | try: |
---|
433 | return eval(expression) |
---|
434 | except NameError, e: |
---|
435 | msg = 'Expression "%s" could not be evaluated: %s' %(expression, e) |
---|
436 | raise NameError, msg |
---|
437 | except ValueError, e: |
---|
438 | msg = 'Expression "%s" could not be evaluated: %s' %(expression, e) |
---|
439 | raise ValueError, msg |
---|
440 | |
---|
441 | |
---|
def get_textual_float(value, format = '%.2f'):
    """Get textual representation of floating point numbers
    and accept None as valid entry.

    value:  None, a number, or a sequence of numbers
    format: format string for one number - default = '%.2f'

    None is rendered as 'None'; a sequence of more than one element is
    rendered recursively as '(v1, v2, ...)'.

    Raises ValueError for input that is neither.
    """

    if value is None:
        return 'None'

    try:
        return format %float(value)
    except (TypeError, ValueError):
        # Narrowed from a bare except: only failures of float() itself
        # mean 'not a scalar'.  Maybe this is a vector.
        if len(value) > 1:
            s = '('
            for v in value:
                s += get_textual_float(v, format) + ', '

            s = s[:-2] + ')' # Strip trailing comma and close
            return s
        else:
            # Was a (broken) string raise with tuple syntax in the original
            raise ValueError('Illegal input to get_textual_float: %s'
                             % str(value))
---|
467 | |
---|
468 | |
---|
469 | |
---|
470 | #################################### |
---|
471 | ####OBSOLETE STUFF |
---|
472 | |
---|
473 | |
---|
def angle(v1, v2):
    """Deprecated shim - angle now lives in anuga.utilities.numerical_tools."""

    import anuga.utilities.numerical_tools as NT

    warn('angle has moved from util.py. '
         'Please use "from anuga.utilities.numerical_tools import angle"',
         DeprecationWarning)

    return NT.angle(v1, v2)
---|
484 | |
---|
def anglediff(v0, v1):
    """Deprecated shim - anglediff now lives in anuga.utilities.numerical_tools."""

    import anuga.utilities.numerical_tools as NT

    warn('anglediff has moved from util.py. '
         'Please use "from anuga.utilities.numerical_tools import anglediff"',
         DeprecationWarning)

    return NT.anglediff(v0, v1)
---|
495 | |
---|
496 | |
---|
def mean(x):
    """Deprecated shim - mean now lives in anuga.utilities.numerical_tools."""

    import anuga.utilities.numerical_tools as NT

    warn('mean has moved from util.py. '
         'Please use "from anuga.utilities.numerical_tools import mean"',
         DeprecationWarning)

    return NT.mean(x)
---|
507 | |
---|
def point_on_line(*args, **kwargs):
    """Temporary Interface to new location.

    Delegates to anuga.utilities.polygon.point_on_line.
    """

    msg = 'point_on_line has moved from util.py. '
    msg += 'Please use "from anuga.utilities.polygon import point_on_line"'
    warn(msg, DeprecationWarning)

    # Bug fix: the module is imported as anuga.utilities.polygon, so the
    # bare name 'utilities' was a NameError here
    return anuga.utilities.polygon.point_on_line(*args, **kwargs)
---|
516 | |
---|
517 | def inside_polygon(*args, **kwargs): |
---|
518 | """Temporary Interface to new location""" |
---|
519 | |
---|
520 | print 'inside_polygon has moved from util.py. ', |
---|
521 | print 'Please use "from anuga.utilities.polygon import inside_polygon"' |
---|
522 | |
---|
523 | return utilities.polygon.inside_polygon(*args, **kwargs) |
---|
524 | |
---|
525 | def outside_polygon(*args, **kwargs): |
---|
526 | """Temporary Interface to new location""" |
---|
527 | |
---|
528 | print 'outside_polygon has moved from util.py. ', |
---|
529 | print 'Please use "from anuga.utilities.polygon import outside_polygon"' |
---|
530 | |
---|
531 | return utilities.polygon.outside_polygon(*args, **kwargs) |
---|
532 | |
---|
533 | |
---|
534 | def separate_points_by_polygon(*args, **kwargs): |
---|
535 | """Temporary Interface to new location""" |
---|
536 | |
---|
537 | print 'separate_points_by_polygon has moved from util.py. ', |
---|
538 | print 'Please use "from anuga.utilities.polygon import separate_points_by_polygon"' |
---|
539 | |
---|
540 | return utilities.polygon.separate_points_by_polygon(*args, **kwargs) |
---|
541 | |
---|
542 | |
---|
543 | |
---|
544 | def read_polygon(*args, **kwargs): |
---|
545 | """Temporary Interface to new location""" |
---|
546 | |
---|
547 | print 'read_polygon has moved from util.py. ', |
---|
548 | print 'Please use "from anuga.utilities.polygon import read_polygon"' |
---|
549 | |
---|
550 | return utilities.polygon.read_polygon(*args, **kwargs) |
---|
551 | |
---|
552 | |
---|
553 | def populate_polygon(*args, **kwargs): |
---|
554 | """Temporary Interface to new location""" |
---|
555 | |
---|
556 | print 'populate_polygon has moved from util.py. ', |
---|
557 | print 'Please use "from anuga.utilities.polygon import populate_polygon"' |
---|
558 | |
---|
559 | return utilities.polygon.populate_polygon(*args, **kwargs) |
---|
560 | |
---|
561 | ##################### end of obsolete stuff ? ############ |
---|
562 | |
---|
563 | def start_screen_catcher(dir_name, myid='', numprocs='', extra_info='', |
---|
564 | print_to_screen=False, verbose=False): |
---|
565 | """Temporary Interface to new location""" |
---|
566 | from anuga.shallow_water.data_manager import start_screen_catcher as dm_start_screen_catcher |
---|
567 | |
---|
568 | print 'start_screen_catcher has moved from util.py. ', |
---|
569 | print 'Please use "from anuga.shallow_water.data_manager import start_screen_catcher"' |
---|
570 | |
---|
571 | return dm_start_screen_catcher(dir_name, myid='', numprocs='', extra_info='', |
---|
572 | print_to_screen=False, verbose=False) |
---|
573 | |
---|
574 | |
---|
575 | def sww2timeseries(swwfiles, |
---|
576 | gauge_filename, |
---|
577 | production_dirs, |
---|
578 | report = None, |
---|
579 | reportname = None, |
---|
580 | plot_quantity = None, |
---|
581 | generate_fig = False, |
---|
582 | surface = None, |
---|
583 | time_min = None, |
---|
584 | time_max = None, |
---|
585 | time_thinning = 1, |
---|
586 | time_unit = None, |
---|
587 | title_on = None, |
---|
588 | use_cache = False, |
---|
589 | verbose = False): |
---|
590 | |
---|
591 | """ Read sww file and plot the time series for the |
---|
592 | prescribed quantities at defined gauge locations and |
---|
593 | prescribed time range. |
---|
594 | |
---|
595 | Input variables: |
---|
596 | |
---|
597 | swwfiles - dictionary of sww files with label_ids (used in |
---|
598 | generating latex output. It will be part of |
---|
599 | the directory name of file_loc (typically the timestamp). |
---|
600 | Helps to differentiate latex files for different simulations |
---|
601 | for a particular scenario. |
---|
602 | - assume that all conserved quantities have been stored |
---|
603 | - assume each sww file has been simulated with same timestep |
---|
604 | |
---|
605 | gauge_filename - name of file containing gauge data |
---|
606 | - easting, northing, name , elevation? |
---|
607 | - OR (this is not yet done) |
---|
608 | - structure which can be converted to a Numeric array, |
---|
609 | such as a geospatial data object |
---|
610 | |
---|
611 | production_dirs - A list of list, example {20061101_121212: '1 in 10000', |
---|
612 | 'boundaries': 'urs boundary'} |
---|
613 | this will use the second part as the label and the first part |
---|
614 | as the ? |
---|
615 | #FIXME: Is it a list or a dictionary |
---|
616 | # This is probably obsolete by now |
---|
617 | |
---|
618 | report - if True, then write figures to report_figures directory in |
---|
619 | relevant production directory |
---|
620 | - if False, figures are already stored with sww file |
---|
621 | - default to False |
---|
622 | |
---|
623 | reportname - name for report if wishing to generate report |
---|
624 | |
---|
625 | plot_quantity - list containing quantities to plot, they must |
---|
626 | be the name of an existing quantity or one of |
---|
627 | the following possibilities |
---|
628 | - possibilities: |
---|
629 | - stage; 'stage' |
---|
630 | - depth; 'depth' |
---|
631 | - speed; calculated as absolute momentum |
---|
632 | (pointwise) divided by depth; 'speed' |
---|
633 | - bearing; calculated as the angle of the momentum |
---|
634 | vector (xmomentum, ymomentum) from the North; 'bearing' |
---|
635 | - absolute momentum; calculated as |
---|
636 | sqrt(xmomentum^2 + ymomentum^2); 'momentum' |
---|
637 | - x momentum; 'xmomentum' |
---|
638 | - y momentum; 'ymomentum' |
---|
639 | - default will be ['stage', 'speed', 'bearing'] |
---|
640 | |
---|
641 | generate_fig - if True, generate figures as well as csv files of quantities |
---|
642 | - if False, csv files created only |
---|
643 | |
---|
644 | surface - if True, then generate solution surface with 3d plot |
---|
645 | and save to current working directory |
---|
646 | - default = False |
---|
647 | |
---|
648 | time_min - beginning of user defined time range for plotting purposes |
---|
649 | - default will be first available time found in swwfile |
---|
650 | |
---|
651 | time_max - end of user defined time range for plotting purposes |
---|
652 | - default will be last available time found in swwfile |
---|
653 | |
---|
654 | title_on - if True, export standard graphics with title |
---|
655 | - if False, export standard graphics without title |
---|
656 | |
---|
657 | |
---|
658 | |
---|
659 | Output: |
---|
660 | |
---|
661 | - time series data stored in .csv for later use if required. |
---|
662 | Name = gauges_timeseries followed by gauge name |
---|
663 | - latex file will be generated in same directory as where script is |
---|
664 | run (usually production scenario directory. |
---|
665 | Name = latexoutputlabel_id.tex |
---|
666 | |
---|
667 | Other important information: |
---|
668 | |
---|
669 | It is assumed that the used has stored all the conserved quantities |
---|
670 | and elevation during the scenario run, i.e. |
---|
671 | ['stage', 'elevation', 'xmomentum', 'ymomentum'] |
---|
672 | If this has not occurred then sww2timeseries will not work. |
---|
673 | |
---|
674 | |
---|
675 | Usage example |
---|
676 | texname = sww2timeseries({project.boundary_name + '.sww': ''}, |
---|
677 | project.polygons_dir + sep + 'boundary_extent.csv', |
---|
678 | project.anuga_dir, |
---|
679 | report = False, |
---|
680 | plot_quantity = ['stage', 'speed', 'bearing'], |
---|
681 | time_min = None, |
---|
682 | time_max = None, |
---|
683 | title_on = True, |
---|
684 | verbose = True) |
---|
685 | |
---|
686 | """ |
---|
687 | |
---|
688 | msg = 'NOTE: A new function is available to create csv files from sww files called' |
---|
689 | msg += 'sww2csv_gauges in anuga.abstract_2d_finite_volumes.util' |
---|
690 | msg += 'PLUS another new function to create graphs from csv files called' |
---|
691 | msg += 'csv2timeseries_graphs in anuga.abstract_2d_finite_volumes.util' |
---|
692 | print msg |
---|
693 | |
---|
694 | k = _sww2timeseries(swwfiles, |
---|
695 | gauge_filename, |
---|
696 | production_dirs, |
---|
697 | report, |
---|
698 | reportname, |
---|
699 | plot_quantity, |
---|
700 | generate_fig, |
---|
701 | surface, |
---|
702 | time_min, |
---|
703 | time_max, |
---|
704 | time_thinning, |
---|
705 | time_unit, |
---|
706 | title_on, |
---|
707 | use_cache, |
---|
708 | verbose) |
---|
709 | |
---|
710 | return k |
---|
711 | |
---|
def _sww2timeseries(swwfiles,
                    gauge_filename,
                    production_dirs,
                    report = None,
                    reportname = None,
                    plot_quantity = None,
                    generate_fig = False,
                    surface = None,
                    time_min = None,
                    time_max = None,
                    time_thinning = 1,
                    time_unit = None,
                    title_on = None,
                    use_cache = False,
                    verbose = False):
    # Worker behind sww2timeseries: reads gauge locations, builds a
    # file_function per sww file, determines which gauges fall inside each
    # file's mesh, then delegates figure/csv generation to generate_figures.
    # See sww2timeseries for the meaning of the arguments.
    # Returns (texfile, elev_output).

    assert type(gauge_filename) == type(''),\
           'Gauge filename must be a string'

    # Check that the gauge file can be opened.
    # NOTE(review): fid is never closed (and is rebound per sww file below)
    # - looks like a file-handle leak; confirm before changing.
    # NOTE(review): 'raise msg' is a string raise - fails with TypeError on
    # Python 2.6+.
    try:
        fid = open(gauge_filename)
    except Exception, e:
        msg = 'File "%s" could not be opened: Error="%s"'\
                  %(gauge_filename, e)
        raise msg

    # Fill in defaults for the optional tri-state arguments
    if report is None:
        report = False

    if plot_quantity is None:
        plot_quantity = ['depth', 'speed']
    else:
        assert type(plot_quantity) == list,\
               'plot_quantity must be a list'
        check_list(plot_quantity)

    if surface is None:
        surface = False

    if time_unit is None:
        time_unit = 'hours'

    if title_on is None:
        title_on = True

    if verbose: print '\n Gauges obtained from: %s \n' %gauge_filename
    # gauges: (easting, northing) pairs; locations: names; elev: elevations
    gauges, locations, elev = get_gauges_from_file(gauge_filename)

    # Quantities expected to have been stored during the scenario run
    sww_quantity = ['stage', 'elevation', 'xmomentum', 'ymomentum']

    file_loc = []
    f_list = []
    label_id = []
    leg_label = []
    themaxT = 0.0
    theminT = 0.0

    for swwfile in swwfiles.keys():

        # Verify the sww file exists/is readable (handle not closed - see
        # NOTE above)
        try:
            fid = open(swwfile)
        except Exception, e:
            msg = 'File "%s" could not be opened: Error="%s"'\
                  %(swwfile, e)
            raise msg

        print 'swwfile', swwfile

        # Extract parent dir name and use as label
        path, _ = os.path.split(swwfile)
        _, label = os.path.split(path)

        #print 'label', label
        leg_label.append(label)

        # Interpolating callable over the stored quantities at the gauges
        f = file_function(swwfile,
                          quantities = sww_quantity,
                          interpolation_points = gauges,
                          time_thinning = time_thinning,
                          verbose = verbose,
                          use_cache = use_cache)

        # determine which gauges are contained in sww file
        # NOTE(review): a value > 1.0e6 in component [2] (xmomentum) is
        # presumably the out-of-mesh sentinel from the interpolation -
        # confirm against Interpolation_function
        count = 0
        gauge_index = []
        for k, g in enumerate(gauges):
            if f(0.0, point_id = k)[2] > 1.0e6:
                count += 1
                if count == 1: print 'Gauges not contained here:'
                print locations[k]
            else:
                gauge_index.append(k)

        if len(gauge_index) > 0:
            print 'Gauges contained here: \n',
        else:
            print 'No gauges contained here. \n'
        for i in range(len(gauge_index)):
             print locations[gauge_index[i]]

        # Remember the directory part of the sww path and the user label
        index = swwfile.rfind(sep)
        file_loc.append(swwfile[:index+1])
        label_id.append(swwfiles[swwfile])

        f_list.append(f)
        # Track the overall time window across all sww files.
        # NOTE(review): theminT keeps the *largest* of the per-file minima
        # (latest common start time?) while themaxT keeps the largest
        # maximum - confirm this asymmetry is intended.
        maxT = max(f.get_time())
        minT = min(f.get_time())
        if maxT > themaxT: themaxT = maxT
        if minT > theminT: theminT = minT

    # Validate/default the user supplied time range against the data
    if time_min is None:
        time_min = theminT # min(T)
    else:
        if time_min < theminT: # min(T):
            msg = 'Minimum time entered not correct - please try again'
            raise Exception, msg

    if time_max is None:
        time_max = themaxT # max(T)
    else:
        if time_max > themaxT: # max(T):
            msg = 'Maximum time entered not correct - please try again'
            raise Exception, msg

    # NOTE(review): gauge_index here refers to the *last* sww file processed
    if verbose and len(gauge_index) > 0: print 'Inputs OK - going to generate figures'

    if len(gauge_index) <> 0:
        texfile, elev_output = generate_figures(plot_quantity, file_loc, report, reportname, surface,
                                                leg_label, f_list, gauges, locations, elev, gauge_index,
                                                production_dirs, time_min, time_max, time_unit,
                                                title_on, label_id, generate_fig, verbose)
    else:
        texfile = ''
        elev_output = []

    return texfile, elev_output
---|
851 | |
---|
def get_gauges_from_file(filename):
    """Read gauge information from a CSV file.

    The file may either have a header line containing (at least) the
    column names 'easting' and 'northing' (and, if more than two columns
    are present, 'name' and 'elevation'), or be a plain two-column
    easting, northing file with no header.

    filename: name of the CSV file to read.

    Returns the tuple (gauges, gaugelocation, elev) where
      gauges:        list of [easting, northing] float pairs
      gaugelocation: list of gauge names (or 0..N-1 for headerless files)
      elev:          list of elevations (or -9999 for headerless files)

    Raises Exception if a header is present but lacks any of the required
    columns.
    """
    fid = open(filename)
    try:
        lines = fid.readlines()
    finally:
        fid.close()

    gauges = []
    gaugelocation = []
    elev = []

    # Check header information
    line11 = lines[0].split(',')

    def _clean(field):
        # Normalise a header token for comparison
        return field.strip('\n').strip('\r').strip(' ').lower()

    # A first field that does not parse as a number means the first
    # line is a header.  (The old check isinstance(line11[0], str) was
    # always true since split() returns strings, which made the
    # headerless branch below unreachable.)
    try:
        float(line11[0])
        has_header = False
    except ValueError:
        has_header = True

    if has_header:
        east_index = None
        north_index = None
        name_index = None
        elev_index = None
        for i in range(len(line11)):
            token = _clean(line11[i])
            if token == 'easting': east_index = i
            if token == 'northing': north_index = i
            if token == 'name': name_index = i
            if token == 'elevation': elev_index = i

        # Explicit None checks; the old 'east_index < len(line11)' test
        # silently passed in Python 2 when an index was None.
        if east_index is None or north_index is None:
            msg = 'WARNING: %s does not contain correct header information' %(filename)
            msg += 'The header must be: easting, northing, name, elevation'
            raise Exception(msg)

        if elev_index is None:
            raise Exception('Header in %s is missing the "elevation" column' %filename)

        if name_index is None:
            raise Exception('Header in %s is missing the "name" column' %filename)

        lines = lines[1:]  # Remove header from data
    else:
        # No header, assume that this is a simple easting, northing file

        msg = 'There was no header in file %s and the number of columns is %d' %(filename, len(line11))
        msg += '- I was assuming two columns corresponding to Easting and Northing'
        assert len(line11) == 2, msg

        east_index = 0
        north_index = 1

        N = len(lines)
        elev = [-9999]*N
        gaugelocation = list(range(N))

    # Read in gauge data
    for line in lines:
        fields = line.split(',')

        gauges.append([float(fields[east_index]), float(fields[north_index])])

        if len(fields) > 2:
            elev.append(float(fields[elev_index]))
            loc = fields[name_index]
            gaugelocation.append(loc.strip('\n'))

    return gauges, gaugelocation, elev
---|
920 | |
---|
921 | |
---|
922 | |
---|
def check_list(quantity):
    """Check that input quantities in quantity list are possible.

    quantity: list of quantity name strings.  The list is lowercased
              in place so callers subsequently see canonical names.

    Raises Exception naming any quantities that are not recognised.
    """
    all_quantity = ['stage', 'depth', 'momentum', 'xmomentum',
                    'ymomentum', 'speed', 'bearing', 'elevation']

    # Normalise to lower case in place
    for i, q in enumerate(quantity):
        quantity[i] = q.lower()

    # The builtin set type (Python 2.4+) replaces the deprecated
    # 'from sets import Set' used previously.
    p = list(set(quantity).difference(set(all_quantity)))
    if len(p) != 0:
        msg = 'Quantities %s do not exist - please try again' %p
        raise Exception(msg)

    return
---|
946 | |
---|
947 | def calc_bearing(uh, vh): |
---|
948 | """ Calculate velocity bearing from North |
---|
949 | """ |
---|
950 | #FIXME (Ole): I reckon we should refactor this one to use |
---|
951 | # the function angle() in utilities/numerical_tools |
---|
952 | # |
---|
953 | # It will be a simple matter of |
---|
954 | # * converting from radians to degrees |
---|
955 | # * moving the reference direction from [1,0] to North |
---|
956 | # * changing from counter clockwise to clocwise. |
---|
957 | |
---|
958 | |
---|
959 | angle = degrees(atan(vh/(uh+1.e-15))) |
---|
960 | if (0 < angle < 90.0): |
---|
961 | if vh > 0: |
---|
962 | bearing = 90.0 - abs(angle) |
---|
963 | if vh < 0: |
---|
964 | bearing = 270.0 - abs(angle) |
---|
965 | if (-90 < angle < 0): |
---|
966 | if vh < 0: |
---|
967 | bearing = 90.0 - abs(angle) |
---|
968 | if vh > 0: |
---|
969 | bearing = 270.0 - abs(angle) |
---|
970 | if angle == 0: bearing = 0.0 |
---|
971 | |
---|
972 | return bearing |
---|
973 | |
---|
def generate_figures(plot_quantity, file_loc, report, reportname, surface,
                     leg_label, f_list, gauges, locations, elev, gauge_index,
                     production_dirs, time_min, time_max, time_unit,
                     title_on, label_id, generate_fig, verbose):
    """ Generate figures based on required quantities and gauges for
    each sww file

    For every sww interpolation function in f_list and every gauge in
    gauge_index this routine:
      * samples stage/elevation/momentum at each timestep and writes
        per-gauge CSV time series plus max/min summary CSVs
      * optionally (generate_fig) plots each quantity with pylab and
        saves PNGs next to the sww files
      * optionally (report) writes a LaTeX fragment embedding the saved
        figures

    Returns (texfile2, elev_output) where texfile2 is the report file
    base name ('' if report is False) and elev_output is a list of
    [location, east, north, elevation] entries collected while writing
    figure captions.
    """
    # from math import sqrt, atan, degrees
    from Numeric import ones, allclose, zeros, Float, ravel
    from os import sep, altsep, getcwd, mkdir, access, F_OK, environ

    # pylab is only imported when figures are requested so the module
    # can be used on systems without matplotlib
    if generate_fig is True:
        from pylab import ion, hold, plot, axis, figure, legend, savefig, \
             xlabel, ylabel, title, close, subplot

        if surface is True:
            import pylab as p1
            import mpl3d.mplot3d as p3

    # Set up the LaTeX report file (one per run) if requested
    if report == True:
        texdir = getcwd()+sep+'report'+sep
        if access(texdir,F_OK) == 0:
            mkdir (texdir)
        if len(label_id) == 1:
            # Single scenario: fold the (separator/underscore stripped)
            # label into the report file name
            label_id1 = label_id[0].replace(sep,'')
            label_id2 = label_id1.replace('_','')
            texfile = texdir+reportname+'%s' %(label_id2)
            texfile2 = reportname+'%s' %(label_id2)
            texfilename = texfile + '.tex'
            if verbose: print '\n Latex output printed to %s \n' %texfilename
            fid = open(texfilename, 'w')
        else:
            texfile = texdir+reportname
            texfile2 = reportname
            texfilename = texfile + '.tex'
            if verbose: print '\n Latex output printed to %s \n' %texfilename
            fid = open(texfilename, 'w')
    else:
        texfile = ''
        texfile2 = ''

    # n0 is the longest time series over all sww files; all result
    # arrays are padded to this length
    p = len(f_list)
    n = []
    n0 = 0
    for i in range(len(f_list)):
        n.append(len(f_list[i].get_time()))
        if n[i] > n0: n0 = n[i]
    n0 = int(n0)
    m = len(locations)
    # Result arrays indexed (timestep, gauge, sww file)
    model_time = zeros((n0,m,p), Float)
    stages = zeros((n0,m,p), Float)
    elevations = zeros((n0,m,p), Float)
    momenta = zeros((n0,m,p), Float)
    xmom = zeros((n0,m,p), Float)
    ymom = zeros((n0,m,p), Float)
    speed = zeros((n0,m,p), Float)
    bearings = zeros((n0,m,p), Float)
    due_east = 90.0*ones((n0,1), Float)
    due_west = 270.0*ones((n0,1), Float)
    depths = zeros((n0,m,p), Float)
    eastings = zeros((n0,m,p), Float)
    # Per-gauge extrema, used later to fix common plot axes
    min_stages = []
    max_stages = []
    min_momentums = []
    max_momentums = []
    max_xmomentums = []
    max_ymomentums = []
    min_xmomentums = []
    min_ymomentums = []
    max_speeds = []
    min_speeds = []
    max_depths = []
    model_time_plot3d = zeros((n0,m), Float)
    stages_plot3d = zeros((n0,m), Float)
    eastings_plot3d = zeros((n0,m),Float)
    # NOTE(review): identity comparison 'is' on string literals relies
    # on CPython interning; 'scale' is unbound for any other time_unit
    if time_unit is 'mins': scale = 60.0
    if time_unit is 'hours': scale = 3600.0
    ##### loop over each swwfile #####
    for j, f in enumerate(f_list):
        starttime = f.starttime
        if verbose: print 'swwfile %d of %d' %(j, len(f_list))
        comparefile = file_loc[j]+sep+'gauges_maxmins'+'.csv'
        fid_compare = open(comparefile, 'w')
        file0 = file_loc[j]+'gauges_t0.csv'
        fid_0 = open(file0, 'w')
        ##### loop over each gauge #####
        for k in gauge_index:
            g = gauges[k]
            if verbose: print 'Gauge %d of %d' %(k, len(gauges))
            # Running extrema for this (gauge, file) pair
            min_stage = 10
            max_stage = 0
            max_momentum = max_xmomentum = max_ymomentum = 0
            min_momentum = min_xmomentum = min_ymomentum = 100
            max_speed = 0
            min_speed = 0
            max_depth = 0
            gaugeloc = str(locations[k])
            thisfile = file_loc[j]+sep+'gauges_time_series'+'_'\
                    +gaugeloc+'.csv'
            fid_out = open(thisfile, 'w')
            s = 'Time, Stage, Momentum, Speed, Elevation, xmom, ymom, Bearing \n'
            fid_out.write(s)
            #### generate quantities #######
            for i, t in enumerate(f.get_time()):
                if time_min <= t <= time_max:
                    # Interpolate stage, elevation and momenta at this
                    # gauge and time (four calls; one per quantity index)
                    w = f(t, point_id = k)[0]
                    z = f(t, point_id = k)[1]
                    uh = f(t, point_id = k)[2]
                    vh = f(t, point_id = k)[3]
                    depth = w-z
                    m = sqrt(uh*uh + vh*vh)  # absolute momentum
                    if depth < 0.001:
                        vel = 0.0
                    else:
                        # Regularised speed = |momentum| / depth
                        vel = m / (depth + 1.e-6/depth)
                    bearing = calc_bearing(uh, vh)
                    model_time[i,k,j] = (t + starttime)/scale #t/60.0
                    stages[i,k,j] = w
                    elevations[i,k,j] = z
                    xmom[i,k,j] = uh
                    ymom[i,k,j] = vh
                    momenta[i,k,j] = m
                    speed[i,k,j] = vel
                    bearings[i,k,j] = bearing
                    depths[i,k,j] = depth
                    thisgauge = gauges[k]
                    eastings[i,k,j] = thisgauge[0]
                    s = '%.2f, %.2f, %.2f, %.2f, %.2f, %.2f, %.2f, %.2f,\n' %(t, w, m, vel, z, uh, vh, bearing)
                    fid_out.write(s)
                    if t == 0:
                        # Initial condition snapshot for this gauge
                        s = '%.2f, %.2f, %.2f\n' %(g[0], g[1], w)
                        fid_0.write(s)
                    # tindex: last timestep index within the (hard-coded)
                    # 13920-minute window, used by the x-profile plot below
                    if t/60.0 <= 13920: tindex = i
                    if w > max_stage: max_stage = w
                    if w < min_stage: min_stage = w
                    if m > max_momentum: max_momentum = m
                    if m < min_momentum: min_momentum = m
                    if uh > max_xmomentum: max_xmomentum = uh
                    if vh > max_ymomentum: max_ymomentum = vh
                    if uh < min_xmomentum: min_xmomentum = uh
                    if vh < min_ymomentum: min_ymomentum = vh
                    if vel > max_speed: max_speed = vel
                    if vel < min_speed: min_speed = vel
                    if z > 0 and depth > max_depth: max_depth = depth


            # Summary row for this gauge (z/thisgauge hold values from
            # the last processed timestep)
            s = '%.2f, %.2f, %.2f, %.2f, %s\n' %(max_stage, min_stage, z, thisgauge[0], leg_label[j])
            fid_compare.write(s)
            max_stages.append(max_stage)
            min_stages.append(min_stage)
            max_momentums.append(max_momentum)
            max_xmomentums.append(max_xmomentum)
            max_ymomentums.append(max_ymomentum)
            min_xmomentums.append(min_xmomentum)
            min_ymomentums.append(min_ymomentum)
            min_momentums.append(min_momentum)
            max_depths.append(max_depth)
            max_speeds.append(max_speed)
            min_speeds.append(min_speed)
            #### finished generating quantities for each swwfile #####

        model_time_plot3d[:,:] = model_time[:,:,j]
        stages_plot3d[:,:] = stages[:,:,j]
        eastings_plot3d[:,] = eastings[:,:,j]

        if surface is True:
            print 'Printing surface figure'
            for i in range(2):
                fig = p1.figure(10)
                ax = p3.Axes3D(fig)
                if len(gauges) > 80:
                    ax.plot_surface(model_time[:,:,j],eastings[:,:,j],stages[:,:,j])
                else:
                    #ax.plot_wireframe(model_time[:,:,j],eastings[:,:,j],stages[:,:,j])
                    ax.plot3D(ravel(eastings[:,:,j]),ravel(model_time[:,:,j]),ravel(stages[:,:,j]))
                ax.set_xlabel('time')
                ax.set_ylabel('x')
                ax.set_zlabel('stage')
                fig.add_axes(ax)
                p1.show()
                surfacefig = 'solution_surface%s' %leg_label[j]
                p1.savefig(surfacefig)
                p1.close()

    #### finished generating quantities for all swwfiles #####

    # x profile for given time
    # NOTE(review): uses j and tindex left over from the loops above
    if surface is True:
        figure(11)
        plot(eastings[tindex,:,j],stages[tindex,:,j])
        xlabel('x')
        ylabel('stage')
        profilefig = 'solution_xprofile'
        savefig('profilefig')

    elev_output = []
    if generate_fig is True:
        # Common axis limits across all gauges/files for each quantity
        depth_axis = axis([starttime/scale, time_max/scale, -0.1, max(max_depths)*1.1])
        stage_axis = axis([starttime/scale, time_max/scale, min(min_stages), max(max_stages)*1.1])
        vel_axis = axis([starttime/scale, time_max/scale, min(min_speeds), max(max_speeds)*1.1])
        mom_axis = axis([starttime/scale, time_max/scale, min(min_momentums), max(max_momentums)*1.1])
        xmom_axis = axis([starttime/scale, time_max/scale, min(min_xmomentums), max(max_xmomentums)*1.1])
        ymom_axis = axis([starttime/scale, time_max/scale, min(min_ymomentums), max(max_ymomentums)*1.1])
        cstr = ['g', 'r', 'b', 'c', 'm', 'y', 'k']  # one line colour per sww file
        nn = len(plot_quantity)
        no_cols = 2

        if len(label_id) > 1: graphname_report = []
        pp = 1
        div = 11.
        cc = 0
        for k in gauge_index:
            g = gauges[k]
            count1 = 0
            # Multi-scenario report: one LaTeX figure per gauge
            if report == True and len(label_id) > 1:
                s = '\\begin{figure}[ht] \n \\centering \n \\begin{tabular}{cc} \n'
                fid.write(s)
            if len(label_id) > 1: graphname_report = []
            #### generate figures for each gauge ####
            for j, f in enumerate(f_list):
                ion()
                hold(True)  # overlay each file's line on the same figures
                count = 0
                where1 = 0
                where2 = 0
                word_quantity = ''
                # Single-scenario report: one LaTeX figure per (gauge, file)
                if report == True and len(label_id) == 1:
                    s = '\\begin{figure}[hbt] \n \\centering \n \\begin{tabular}{cc} \n'
                    fid.write(s)

                for which_quantity in plot_quantity:
                    count += 1
                    where1 += 1
                    figure(count, frameon = False)
                    if which_quantity == 'depth':
                        plot(model_time[0:n[j]-1,k,j], depths[0:n[j]-1,k,j], '-', c = cstr[j])
                        units = 'm'
                        axis(depth_axis)
                    if which_quantity == 'stage':
                        # Dry gauges (elevation above datum) show depth instead
                        if elevations[0,k,j] <= 0:
                            plot(model_time[0:n[j]-1,k,j], stages[0:n[j]-1,k,j], '-', c = cstr[j])
                            axis(stage_axis)
                        else:
                            plot(model_time[0:n[j]-1,k,j], depths[0:n[j]-1,k,j], '-', c = cstr[j])
                            #axis(depth_axis)
                        units = 'm'
                    if which_quantity == 'momentum':
                        plot(model_time[0:n[j]-1,k,j], momenta[0:n[j]-1,k,j], '-', c = cstr[j])
                        axis(mom_axis)
                        units = 'm^2 / sec'
                    if which_quantity == 'xmomentum':
                        plot(model_time[0:n[j]-1,k,j], xmom[0:n[j]-1,k,j], '-', c = cstr[j])
                        axis(xmom_axis)
                        units = 'm^2 / sec'
                    if which_quantity == 'ymomentum':
                        plot(model_time[0:n[j]-1,k,j], ymom[0:n[j]-1,k,j], '-', c = cstr[j])
                        axis(ymom_axis)
                        units = 'm^2 / sec'
                    if which_quantity == 'speed':
                        plot(model_time[0:n[j]-1,k,j], speed[0:n[j]-1,k,j], '-', c = cstr[j])
                        axis(vel_axis)
                        units = 'm / sec'
                    if which_quantity == 'bearing':
                        #due_east = 90.0*ones(shape(model_time[0:n[j]-1,k,j],Float))
                        #due_west = 270.0*ones(shape(model_time[0:n[j]-1,k,j],Float))
                        print 'hello', bearings
                        print 'east', due_east
                        # Bearing plus reference lines for due East/West
                        plot(model_time[0:n[j]-1,k,j], bearings[0:n[j]-1,k,j], '-',
                             model_time[0:n[j]-1,k,j], due_west[0:n[j]-1], '-.',
                             model_time[0:n[j]-1,k,j], due_east[0:n[j]-1], '-.')
                        units = 'degrees from North'
                        #ax = axis([time_min, time_max, 0.0, 360.0])
                        legend(('Bearing','West','East'))

                    if time_unit is 'mins': xlabel('time (mins)')
                    if time_unit is 'hours': xlabel('time (hours)')
                    #if which_quantity == 'stage' and elevations[0:n[j]-1,k,j] > 0:
                    #    ylabel('%s (%s)' %('depth', units))
                    #else:
                    #    ylabel('%s (%s)' %(which_quantity, units))
                    #ylabel('%s (%s)' %('wave height', units))
                    ylabel('%s (%s)' %(which_quantity, units))
                    if len(label_id) > 1: legend((leg_label),loc='upper right')

                    #gaugeloc1 = gaugeloc.replace(' ','')
                    #gaugeloc2 = gaugeloc1.replace('_','')
                    gaugeloc2 = str(locations[k]).replace(' ','')
                    graphname = '%sgauge%s_%s' %(file_loc[j],
                                                 gaugeloc2,
                                                 which_quantity)

                    if report == True and len(label_id) > 1:
                        figdir = getcwd()+sep+'report_figures'+sep
                        if access(figdir,F_OK) == 0 :
                            mkdir (figdir)
                        latex_file_loc = figdir.replace(sep,altsep)
                        graphname_latex = '%sgauge%s%s' %(latex_file_loc, gaugeloc2, which_quantity) # storing files in production directory
                        graphname_report_input = '%sgauge%s%s' %('..'+altsep+'report_figures'+altsep, gaugeloc2, which_quantity) # giving location in latex output file
                        graphname_report.append(graphname_report_input)

                        savefig(graphname_latex) # save figures in production directory for report generation

                    if report == True:

                        figdir = getcwd()+sep+'report_figures'+sep
                        if access(figdir,F_OK) == 0 :
                            mkdir (figdir)
                        latex_file_loc = figdir.replace(sep,altsep)

                        if len(label_id) == 1:
                            graphname_latex = '%sgauge%s%s%s' %(latex_file_loc, gaugeloc2, which_quantity, label_id2) # storing files in production directory
                            graphname_report = '%sgauge%s%s%s' %('..'+altsep+'report_figures'+altsep, gaugeloc2, which_quantity, label_id2) # giving location in latex output file
                            s = '\includegraphics[width=0.49\linewidth, height=50mm]{%s%s}' %(graphname_report, '.png')
                            fid.write(s)
                            # Two figures per LaTeX table row
                            if where1 % 2 == 0:
                                s = '\\\\ \n'
                                where1 = 0
                            else:
                                s = '& \n'
                            fid.write(s)
                            savefig(graphname_latex)

                    if title_on == True:
                        title('%s scenario: %s at %s gauge' %(label_id, which_quantity, gaugeloc2))
                        #title('Gauge %s (MOST elevation %.2f, ANUGA elevation %.2f)' %(gaugeloc2, elevations[10,k,0], elevations[10,k,1] ))

                    savefig(graphname) # save figures with sww file

                if report == True and len(label_id) == 1:
                    # Build the caption text listing the plotted quantities
                    for i in range(nn-1):
                        if nn > 2:
                            if plot_quantity[i] == 'stage' and elevations[0,k,j] > 0:
                                word_quantity += 'depth' + ', '
                            else:
                                word_quantity += plot_quantity[i] + ', '
                        else:
                            if plot_quantity[i] == 'stage' and elevations[0,k,j] > 0:
                                word_quantity += 'depth' + ', '
                            else:
                                word_quantity += plot_quantity[i]

                    if plot_quantity[nn-1] == 'stage' and elevations[0,k,j] > 0:
                        word_quantity += ' and ' + 'depth'
                    else:
                        word_quantity += ' and ' + plot_quantity[nn-1]
                    caption = 'Time series for %s at %s location (elevation %.2fm)' %(word_quantity, locations[k], elev[k]) #gaugeloc.replace('_',' '))
                    if elev[k] == 0.0:
                        caption = 'Time series for %s at %s location (elevation %.2fm)' %(word_quantity, locations[k], elevations[0,k,j])
                        # NOTE(review): gauges[0]/gauges[1] are gauge pairs,
                        # not this gauge's coordinates - verify intent
                        east = gauges[0]
                        north = gauges[1]
                        elev_output.append([locations[k],east,north,elevations[0,k,j]])
                    label = '%sgauge%s' %(label_id2, gaugeloc2)
                    s = '\end{tabular} \n \\caption{%s} \n \label{fig:%s} \n \end{figure} \n \n' %(caption, label)
                    fid.write(s)
                    cc += 1
                    if cc % 6 == 0: fid.write('\\clearpage \n')
                    savefig(graphname_latex)

            if report == True and len(label_id) > 1:
                # Multi-scenario report: caption + includegraphics after
                # all files have been overlaid for this gauge
                for i in range(nn-1):
                    if nn > 2:
                        if plot_quantity[i] == 'stage' and elevations[0,k,j] > 0:
                            word_quantity += 'depth' + ','
                        else:
                            word_quantity += plot_quantity[i] + ', '
                    else:
                        if plot_quantity[i] == 'stage' and elevations[0,k,j] > 0:
                            word_quantity += 'depth'
                        else:
                            word_quantity += plot_quantity[i]
                where1 = 0
                count1 += 1
                index = j*len(plot_quantity)
                for which_quantity in plot_quantity:
                    where1 += 1
                    s = '\includegraphics[width=0.49\linewidth, height=50mm]{%s%s}' %(graphname_report[index], '.png')
                    index += 1
                    fid.write(s)
                    if where1 % 2 == 0:
                        s = '\\\\ \n'
                        where1 = 0
                    else:
                        s = '& \n'
                    fid.write(s)
                word_quantity += ' and ' + plot_quantity[nn-1]
                label = 'gauge%s' %(gaugeloc2)
                caption = 'Time series for %s at %s location (elevation %.2fm)' %(word_quantity, locations[k], elev[k])
                if elev[k] == 0.0:
                    caption = 'Time series for %s at %s location (elevation %.2fm)' %(word_quantity, locations[k], elevations[0,k,j])
                thisgauge = gauges[k]
                east = thisgauge[0]
                north = thisgauge[1]
                elev_output.append([locations[k],east,north,elevations[0,k,j]])

                s = '\end{tabular} \n \\caption{%s} \n \label{fig:%s} \n \end{figure} \n \n' %(caption, label)
                fid.write(s)
                # Page break roughly every 'div' gauges
                if float((k+1)/div - pp) == 0.:
                    fid.write('\\clearpage \n')
                    pp += 1

        #### finished generating figures ###

        close('all')

    return texfile2, elev_output
---|
1379 | |
---|
1380 | # FIXME (DSG): Add unit test, make general, not just 2 files, |
---|
1381 | # but any number of files. |
---|
1382 | def copy_code_files(dir_name, filename1, filename2): |
---|
1383 | """Temporary Interface to new location""" |
---|
1384 | |
---|
1385 | from anuga.shallow_water.data_manager import \ |
---|
1386 | copy_code_files as dm_copy_code_files |
---|
1387 | print 'copy_code_files has moved from util.py. ', |
---|
1388 | print 'Please use "from anuga.shallow_water.data_manager \ |
---|
1389 | import copy_code_files"' |
---|
1390 | |
---|
1391 | return dm_copy_code_files(dir_name, filename1, filename2) |
---|
1392 | |
---|
1393 | |
---|
def add_directories(root_directory, directories):
    """
    Add the first directory in directories to root_directory.
    Then add the second
    directory to the first directory and so on.

    Return the path of the final directory.

    This is handy for specifying and creating a directory
    where data will go.
    """
    # 'path' (renamed from 'dir') avoids shadowing the builtin dir()
    path = root_directory
    for new_dir in directories:
        path = os.path.join(path, new_dir)
        if not access(path, F_OK):
            mkdir(path)
    return path
---|
1411 | |
---|
1412 | def get_data_from_file(filename,separator_value = ','): |
---|
1413 | """Temporary Interface to new location""" |
---|
1414 | from anuga.shallow_water.data_manager import \ |
---|
1415 | get_data_from_file as dm_get_data_from_file |
---|
1416 | print 'get_data_from_file has moved from util.py' |
---|
1417 | print 'Please use "from anuga.shallow_water.data_manager \ |
---|
1418 | import get_data_from_file"' |
---|
1419 | |
---|
1420 | return dm_get_data_from_file(filename,separator_value = ',') |
---|
1421 | |
---|
1422 | def store_parameters(verbose=False,**kwargs): |
---|
1423 | """Temporary Interface to new location""" |
---|
1424 | |
---|
1425 | from anuga.shallow_water.data_manager \ |
---|
1426 | import store_parameters as dm_store_parameters |
---|
1427 | print 'store_parameters has moved from util.py.' |
---|
1428 | print 'Please use "from anuga.shallow_water.data_manager \ |
---|
1429 | import store_parameters"' |
---|
1430 | |
---|
1431 | return dm_store_parameters(verbose=False,**kwargs) |
---|
1432 | |
---|
def remove_lone_verts(verts, triangles, number_of_full_nodes=None):
    """
    Removes vertices that are not associated with any triangles.

    verts is a list/array of points
    triangles is a list of 3 element tuples.
    Each tuple represents a triangle.

    number_of_full_nodes relate to parallelism when a mesh has an
    extra layer of ghost points.

    Returns (verts, triangles) with lone vertices dropped and triangle
    indices remapped to the compacted vertex array.
    """
    verts = ensure_numeric(verts)
    triangles = ensure_numeric(triangles)

    N = len(verts)

    # initialise the array to easily find the index of the first loner
    # Every entry starts > N; a vertex referenced by some triangle is
    # overwritten with its own index below, so after the loop
    # loners[i] == i marks a used vertex and loners[i] > N marks a loner.
    loners=arange(2*N, N, -1) # if N=3 [6,5,4]
    for t in triangles:
        for vert in t:
            loners[vert]= vert # all non-loners will have loners[i]=i
    #print loners

    lone_start = 2*N - max(loners) # The index of the first loner

    if lone_start-1 == N:
        # no loners
        pass
    elif min(loners[lone_start:N]) > N:
        # All the loners are at the end of the vert array
        verts = verts[0:lone_start]
    else:
        # change the loners list so it can be used to modify triangles
        # Remove the loners from verts
        # Could've used X=compress(less(loners,N),loners)
        # verts=take(verts,X) to Remove the loners from verts
        # but I think it would use more memory
        # NOTE(review): the compaction below writes from index 0 while
        # only scanning from lone_start, which overwrites vertices
        # [0, lone_start) that are still referenced - it looks like
        # new_i should start at lone_start; verify against the test
        # suite before relying on this branch.
        new_i = 0
        for i in range(lone_start, N):
            if loners[i] >= N:
                # Loner!
                pass
            else:
                loners[i] = new_i
                verts[new_i] = verts[i]
                new_i += 1
        verts = verts[0:new_i]

        # Modify the triangles
        #print "loners", loners
        #print "triangles before", triangles
        triangles = choose(triangles,loners)
        #print "triangles after", triangles
    return verts, triangles
---|
1488 | |
---|
1489 | |
---|
def get_centroid_values(x, triangles):
    """Compute centroid values from vertex values

    x: Values at vertices of triangular mesh
    triangles: Nx3 integer array pointing to vertex information
    for each of the N triangels. Elements of triangles are
    indices into x

    Returns an array with one value per triangle: the mean of the
    triangle's three vertex values.
    """
    number_of_triangles = triangles.shape[0]

    # One centroid value per triangle
    xc = zeros(number_of_triangles, Float)

    for k in range(number_of_triangles):
        # Unpack the three vertex indices and average their values
        i0, i1, i2 = triangles[k]
        xc[k] = (x[i0] + x[i1] + x[i2])/3

    return xc
---|
1512 | |
---|
1513 | def make_plots_from_csv_file(directories_dic={dir:['gauge', 0, 0]}, |
---|
1514 | output_dir='', |
---|
1515 | base_name='', |
---|
1516 | plot_numbers=['3-5'], |
---|
1517 | quantities=['Speed','Stage','Momentum'], |
---|
1518 | assess_all_csv_files=True, |
---|
1519 | extra_plot_name='test' ): |
---|
1520 | |
---|
1521 | print 'make_plots_from_csv_file has been replaced by csv2timeseries_graphs ', |
---|
1522 | print 'Please use "from anuga.shallow_water.util import csv2timeseries_graphs"' |
---|
1523 | |
---|
1524 | return csv2timeseries_graphs(directories_dic, |
---|
1525 | output_dir, |
---|
1526 | base_name, |
---|
1527 | plot_numbers, |
---|
1528 | quantities, |
---|
1529 | extra_plot_name, |
---|
1530 | assess_all_csv_files |
---|
1531 | ) |
---|
1532 | |
---|
1533 | |
---|
1534 | def OLD_csv2timeseries_graphs(directories_dic={}, |
---|
1535 | output_dir='', |
---|
1536 | base_name=None, |
---|
1537 | plot_numbers='0', |
---|
1538 | quantities=['stage'], |
---|
1539 | extra_plot_name='', |
---|
1540 | assess_all_csv_files=True, |
---|
1541 | create_latex=False, |
---|
1542 | verbose=True): |
---|
1543 | |
---|
1544 | """WARNING!! NO UNIT TESTS... could make some as to test filename..but have |
---|
1545 | spend ages on this already... |
---|
1546 | AND NOTE Create_latex is NOT implemented yet. |
---|
1547 | |
---|
1548 | |
---|
1549 | Read in csv files that have the right header information and |
---|
1550 | plot time series such as Stage, Speed, etc. Will also plot several |
---|
1551 | time series on one plot. Filenames must follow this convention, |
---|
1552 | <base_name><plot_number>.csv eg gauge_timeseries3.csv |
---|
1553 | |
---|
1554 | Each file represents a location and within each file there are |
---|
1555 | time, quantity columns. |
---|
1556 | |
---|
1557 | For example: |
---|
1558 | if "directories_dic" defines 4 directories and in each directories |
---|
1559 | there is a csv files corresponding to the right "plot_numbers", |
---|
1560 | this will create a plot with 4 lines one for each directory AND |
---|
1561 | one plot for each "quantities". |
---|
1562 | |
---|
1563 | Inputs: |
---|
1564 | directories_dic: dictionary of directory with values (plot |
---|
1565 | legend name for directory), (start time of |
---|
1566 | the time series) and the (value to add to |
---|
1567 | stage if needed). For example |
---|
1568 | {dir1:['Anuga_ons',5000, 0], |
---|
1569 | dir2:['b_emoth',5000,1.5], |
---|
1570 | dir3:['b_ons',5000,1.5]} |
---|
1571 | |
---|
1572 | output_dir: directory for the plot outputs |
---|
1573 | |
---|
1574 | base_name: common name to all the csv files to be read. if |
---|
1575 | using plot_numbers must be the maximum common string |
---|
1576 | eg. gauges0.csv, gauges1.csv and gauges2.csv |
---|
1577 | base_name must be 'gauges' not 'gau' |
---|
1578 | |
---|
1579 | plot_numbers: a String list of numbers to plot. For example |
---|
1580 | [0-4,10,15-17] will read and attempt to plot |
---|
1581 | the follow 0,1,2,3,4,10,15,16,17 |
---|
1582 | quantities: Currently limited to "stage", "speed", and |
---|
1583 | "momentum", should be changed to incorporate |
---|
1584 | any quantity read from CSV file.... |
---|
1585 | NOTE: ALL QUANTITY NAMES MUST BE lower case! |
---|
1586 | |
---|
1587 | extra_plot_name: A string that is appended to the end of the |
---|
1588 | output filename. |
---|
1589 | |
---|
1590 | assess_all_csv_files: if true it will read ALL csv file with |
---|
1591 | "base_name", regardless of 'plot_numbers' |
---|
1592 | and determine a uniform set of axes for |
---|
1593 | Stage, Speed and Momentum. IF FALSE it |
---|
1594 | will only read the csv file within the |
---|
1595 | 'plot_numbers' |
---|
1596 | |
---|
1597 | create_latex: NOT IMPLEMENTED YET!! sorry Jane.... |
---|
1598 | |
---|
1599 | OUTPUTS: None, it saves the plots to |
---|
1600 | <output_dir><base_name><plot_number><extra_plot_name>.png |
---|
1601 | |
---|
1602 | Usage: csv2timeseries_graphs({'J:'+sep+'anuga_validation'+sep:['new',20,-.1], |
---|
1603 | 'J:'+sep+'conical_island'+sep:['test',0,0]}, |
---|
1604 | output_dir='', |
---|
1605 | plot_numbers=['1','3'], |
---|
1606 | quantities=['stage','depth','bearing'], |
---|
1607 | base_name='gauge_b', |
---|
1608 | assess_all_csv_files=True, |
---|
1609 | verbose=True) |
---|
1610 | This will produce one plot for each quantity (therefore 3) in the current directory, |
---|
1611 | each plot will have 2 lines on them. The first plot named 'new' will have the time |
---|
1612 | offseted by 20secs and the stage height adjusted by -0.1m |
---|
1613 | |
---|
1614 | """ |
---|
1615 | # import pylab |
---|
1616 | try: |
---|
1617 | import pylab |
---|
1618 | except ImportError: |
---|
1619 | msg='csv2timeseries_graphs needs pylab to be installed correctly' |
---|
1620 | raise msg |
---|
1621 | #ANUGA don't need pylab to work so the system doesn't |
---|
1622 | #rely on pylab being installed |
---|
1623 | return |
---|
1624 | from os import sep |
---|
1625 | from anuga.shallow_water.data_manager import \ |
---|
1626 | get_all_files_with_extension, csv2dict |
---|
1627 | #find all the files that meet the specs |
---|
1628 | #FIXME remove all references to word that such as Stage |
---|
1629 | #(with a Capital letter) could use the header info from |
---|
1630 | #the dictionary that is returned from the csv2dict this could |
---|
1631 | #be very good and very flexable.... but this works for now! |
---|
1632 | |
---|
1633 | #FIXME MAKE NICER at the end i have started a new version... |
---|
1634 | # it almost completely works... |
---|
1635 | |
---|
1636 | seconds_in_hour = 3600 |
---|
1637 | time_label = 'time (hour)' |
---|
1638 | stage_label = 'wave height (m)' |
---|
1639 | speed_label = 'speed (m/s)' |
---|
1640 | momentum_label = 'momentum (m^2/sec)' |
---|
1641 | xmomentum_label = 'momentum (m^2/sec)' |
---|
1642 | ymomentum_label = 'momentum (m^2/sec)' |
---|
1643 | depth_label = 'water depth (m)' |
---|
1644 | bearing_label = 'degrees (o)' |
---|
1645 | |
---|
1646 | if extra_plot_name != '': |
---|
1647 | extra_plot_name='_'+extra_plot_name |
---|
1648 | |
---|
1649 | #finds all the files that fit the specs and return a list of them |
---|
1650 | #so to help find a uniform max and min for the plots... |
---|
1651 | list_filenames=[] |
---|
1652 | #print'oaeuaoe',verbose |
---|
1653 | if verbose: print 'Determining files to access for axes ranges \n' |
---|
1654 | #print 'heaoeu' |
---|
1655 | for i,directory in enumerate(directories_dic.keys()): |
---|
1656 | #print 'base',basename,directory |
---|
1657 | list_filenames.append(get_all_files_with_extension(directory, |
---|
1658 | base_name,'.csv')) |
---|
1659 | # print 'list_filenames',list_filenames |
---|
1660 | |
---|
1661 | #use all the files to get the values for the plot axis |
---|
1662 | max_xmom=min_xmom=max_ymom=min_ymom=max_st=max_sp=max_mom=min_st=min_sp=min_mom=\ |
---|
1663 | max_depth=min_depth=max_bearing=min_bearing=max_t=min_t=0. |
---|
1664 | max_start_time= 0. |
---|
1665 | min_start_time = 100000 |
---|
1666 | |
---|
1667 | print'hello' |
---|
1668 | new_plot_numbers=[] |
---|
1669 | #change plot_numbers to list, eg ['0-4','10'] |
---|
1670 | #to ['0','1','2','3','4','10'] |
---|
1671 | for i, num_string in enumerate(plot_numbers): |
---|
1672 | if '-' in num_string: |
---|
1673 | start = int(num_string[:num_string.rfind('-')]) |
---|
1674 | end = int(num_string[num_string.rfind('-')+1:])+1 |
---|
1675 | for x in range(start, end): |
---|
1676 | new_plot_numbers.append(str(x)) |
---|
1677 | else: |
---|
1678 | new_plot_numbers.append(num_string) |
---|
1679 | # print 'new_plot_numbers',new_plot_numbers |
---|
1680 | |
---|
1681 | if verbose: print 'Determining uniform axes \n' |
---|
1682 | #this entire loop is to determine the min and max range for the |
---|
1683 | #axes of the plot |
---|
1684 | for i, directory in enumerate(directories_dic.keys()): |
---|
1685 | |
---|
1686 | if assess_all_csv_files==False: |
---|
1687 | which_csv_to_assess = new_plot_numbers |
---|
1688 | else: |
---|
1689 | which_csv_to_assess = list_filenames[i] |
---|
1690 | |
---|
1691 | for j, filename in enumerate(which_csv_to_assess): |
---|
1692 | if assess_all_csv_files==False: |
---|
1693 | # dir_filename=directory+sep+base_name+filename |
---|
1694 | dir_filename=join(directory,base_name+filename) |
---|
1695 | else: |
---|
1696 | dir_filename=join(directory,filename) |
---|
1697 | print'dir_filename',dir_filename |
---|
1698 | attribute_dic, title_index_dic = csv2dict(dir_filename+ |
---|
1699 | '.csv') |
---|
1700 | |
---|
1701 | directory_start_time = directories_dic[directory][1] |
---|
1702 | directory_add_tide = directories_dic[directory][2] |
---|
1703 | |
---|
1704 | time = [float(x) for x in attribute_dic["time"]] |
---|
1705 | min_t, max_t = get_min_max_values(time,min_t,max_t) |
---|
1706 | |
---|
1707 | stage = [float(x) for x in attribute_dic["stage"]] |
---|
1708 | stage =array(stage)+directory_add_tide |
---|
1709 | min_st, max_st = get_min_max_values(stage,min_st,max_st) |
---|
1710 | |
---|
1711 | if "speed" in quantities: |
---|
1712 | speed = [float(x) for x in attribute_dic["speed"]] |
---|
1713 | min_sp, max_sp = get_min_max_values(speed,min_sp,max_sp) |
---|
1714 | |
---|
1715 | if "momentum" in quantities: |
---|
1716 | momentum = [float(x) for x in attribute_dic["momentum"]] |
---|
1717 | min_mom, max_mom = get_min_max_values(momentum, |
---|
1718 | min_mom, |
---|
1719 | max_mom) |
---|
1720 | if "xmomentum" in quantities: |
---|
1721 | xmomentum = [float(x) for x in attribute_dic["xmomentum"]] |
---|
1722 | min_xmom, max_xmom = get_min_max_values(xmomentum, |
---|
1723 | min_xmom, |
---|
1724 | max_xmom) |
---|
1725 | if "ymomentum" in quantities: |
---|
1726 | ymomentum = [float(x) for x in attribute_dic["ymomentum"]] |
---|
1727 | min_ymom, max_ymom = get_min_max_values(ymomentum, |
---|
1728 | min_ymom, |
---|
1729 | max_ymom) |
---|
1730 | if "depth" in quantities: |
---|
1731 | depth = [float(x) for x in attribute_dic["depth"]] |
---|
1732 | min_depth, max_depth = get_min_max_values(depth, |
---|
1733 | min_depth, |
---|
1734 | max_depth) |
---|
1735 | if "bearing" in quantities: |
---|
1736 | bearing = [float(x) for x in attribute_dic["bearing"]] |
---|
1737 | min_bearing, max_bearing = get_min_max_values(bearing, |
---|
1738 | min_bearing, |
---|
1739 | max_bearing) |
---|
1740 | |
---|
1741 | # print 'min_sp, max_sp',min_sp, max_sp, |
---|
1742 | print directory_start_time |
---|
1743 | if min_start_time > directory_start_time: |
---|
1744 | min_start_time = directory_start_time |
---|
1745 | if max_start_time < directory_start_time: |
---|
1746 | max_start_time = directory_start_time |
---|
1747 | # print 'start_time' , max_start_time, min_start_time |
---|
1748 | |
---|
1749 | stage_axis = (min_start_time/seconds_in_hour, |
---|
1750 | (max_t+max_start_time)/seconds_in_hour, |
---|
1751 | min_st, max_st) |
---|
1752 | speed_axis = (min_start_time/seconds_in_hour, |
---|
1753 | (max_t+max_start_time)/seconds_in_hour, |
---|
1754 | min_sp, max_sp) |
---|
1755 | xmomentum_axis = (min_start_time/seconds_in_hour, |
---|
1756 | (max_t+max_start_time)/seconds_in_hour, |
---|
1757 | min_xmom, max_xmom) |
---|
1758 | ymomentum_axis = (min_start_time/seconds_in_hour, |
---|
1759 | (max_t+max_start_time)/seconds_in_hour, |
---|
1760 | min_ymom, max_ymom) |
---|
1761 | momentum_axis = (min_start_time/seconds_in_hour, |
---|
1762 | (max_t+max_start_time)/seconds_in_hour, |
---|
1763 | min_mom, max_mom) |
---|
1764 | depth_axis = (min_start_time/seconds_in_hour, |
---|
1765 | (max_t+max_start_time)/seconds_in_hour, |
---|
1766 | min_depth, max_depth) |
---|
1767 | bearing_axis = (min_start_time/seconds_in_hour, |
---|
1768 | (max_t+max_start_time)/seconds_in_hour, |
---|
1769 | min_bearing, max_bearing) |
---|
1770 | |
---|
1771 | cstr = ['b', 'r', 'g', 'c', 'm', 'y', 'k'] |
---|
1772 | |
---|
1773 | # if verbose: print 'Now start to plot \n' |
---|
1774 | |
---|
1775 | i_max = len(directories_dic.keys()) |
---|
1776 | legend_list_dic =[] |
---|
1777 | legend_list =[] |
---|
1778 | for i, directory in enumerate(directories_dic.keys()): |
---|
1779 | if verbose: print'Plotting in %s' %(directory) |
---|
1780 | print new_plot_numbers,base_name |
---|
1781 | |
---|
1782 | if assess_all_csv_files==False: |
---|
1783 | which_csv_to_assess = new_plot_numbers |
---|
1784 | else: |
---|
1785 | which_csv_to_assess = list_filenames[i] |
---|
1786 | |
---|
1787 | for j, number in enumerate(which_csv_to_assess): |
---|
1788 | |
---|
1789 | # for j, number in enumerate(new_plot_numbers): |
---|
1790 | print 'number',number |
---|
1791 | if verbose: print'Starting %s%s'%(base_name,number) |
---|
1792 | directory_name = directories_dic[directory][0] |
---|
1793 | directory_start_time = directories_dic[directory][1] |
---|
1794 | directory_add_tide = directories_dic[directory][2] |
---|
1795 | |
---|
1796 | #create an if about the start time and tide hieght |
---|
1797 | #if they don't exist |
---|
1798 | file=directory+number |
---|
1799 | #print 'i %s,j %s, number %s, file %s' %(i,j,number,file) |
---|
1800 | attribute_dic, title_index_dic = csv2dict(file+'.csv') |
---|
1801 | #get data from dict in to list |
---|
1802 | t = [float(x) for x in attribute_dic["time"]] |
---|
1803 | |
---|
1804 | #do maths to list by changing to array |
---|
1805 | t=(array(t)+directory_start_time)/seconds_in_hour |
---|
1806 | stage = [float(x) for x in attribute_dic["stage"]] |
---|
1807 | if "speed" in quantities: |
---|
1808 | speed = [float(x) for x in attribute_dic["speed"]] |
---|
1809 | if "xmomentum" in quantities: |
---|
1810 | xmomentum = [float(x) for x in attribute_dic["xmomentum"]] |
---|
1811 | if "ymomentum" in quantities: |
---|
1812 | ymomentum = [float(x) for x in attribute_dic["ymomentum"]] |
---|
1813 | if "momentum" in quantities: |
---|
1814 | momentum = [float(x) for x in attribute_dic["momentum"]] |
---|
1815 | if "depth" in quantities: |
---|
1816 | depth = [float(x) for x in attribute_dic["depth"]] |
---|
1817 | if "bearing" in quantities: |
---|
1818 | bearing = [float(x) for x in attribute_dic["bearing"]] |
---|
1819 | |
---|
1820 | #Can add tide so plots are all on the same tide height |
---|
1821 | stage =array(stage)+directory_add_tide |
---|
1822 | |
---|
1823 | #finds the maximum elevation |
---|
1824 | max_ele=-100000 |
---|
1825 | min_ele=100000 |
---|
1826 | # legend_list=[] |
---|
1827 | if "elevation" in quantities: |
---|
1828 | elevation = [float(x) for x in attribute_dic["elevation"]] |
---|
1829 | |
---|
1830 | min_ele, max_ele = get_min_max_values(elevation, |
---|
1831 | min_ele, |
---|
1832 | max_ele) |
---|
1833 | if min_ele != max_ele: |
---|
1834 | print "Note! Elevation changes in %s" %dir_filename |
---|
1835 | # print 'min_ele, max_ele',min_ele, max_ele |
---|
1836 | |
---|
1837 | |
---|
1838 | #populates the legend_list_dic with dir_name and the elevation |
---|
1839 | if i==0: |
---|
1840 | legend_list_dic.append({directory_name:max_ele}) |
---|
1841 | else: |
---|
1842 | legend_list_dic[j][directory_name]=max_ele |
---|
1843 | |
---|
1844 | # creates a list for the legend after at "legend_dic" has been fully populated |
---|
1845 | # only runs on the last iteration for all the gauges(csv) files |
---|
1846 | # empties the list before creating it |
---|
1847 | |
---|
1848 | if i==i_max-1: |
---|
1849 | legend_list=[] |
---|
1850 | print 'legend',legend_list_dic, j |
---|
1851 | for k, l in legend_list_dic[j].iteritems(): |
---|
1852 | # if "elevation" in quantities: |
---|
1853 | # legend_list.append('%s (elevation = %sm)'%(k,l)) |
---|
1854 | # else: |
---|
1855 | legend_list.append('%s '%k) |
---|
1856 | #print k,l, legend_list_dic[j] |
---|
1857 | |
---|
1858 | if "stage" in quantities: |
---|
1859 | pylab.figure(100+j) |
---|
1860 | pylab.plot(t, stage, c = cstr[i], linewidth=1) |
---|
1861 | pylab.xlabel(time_label) |
---|
1862 | pylab.ylabel(stage_label) |
---|
1863 | pylab.axis(stage_axis) |
---|
1864 | pylab.legend(legend_list,loc='upper right') |
---|
1865 | if output_dir =='': |
---|
1866 | figname = '%sstage_%s%s.png' %(directory+sep, |
---|
1867 | base_name+number, |
---|
1868 | extra_plot_name) |
---|
1869 | else: |
---|
1870 | figname = '%sstage_%s%s.png' %(output_dir+sep, |
---|
1871 | base_name+number, |
---|
1872 | extra_plot_name) |
---|
1873 | if verbose: print 'saving figure here %s' %figname |
---|
1874 | pylab.savefig(figname) |
---|
1875 | |
---|
1876 | if "speed" in quantities: |
---|
1877 | pylab.figure(200+j) |
---|
1878 | pylab.plot(t, speed, c = cstr[i], linewidth=1) |
---|
1879 | pylab.xlabel(time_label) |
---|
1880 | pylab.ylabel(speed_label) |
---|
1881 | pylab.axis(speed_axis) |
---|
1882 | pylab.legend(legend_list,loc='upper right') |
---|
1883 | if output_dir =='': |
---|
1884 | figname = '%sspeed_%s%s.png' %(directory+sep, |
---|
1885 | base_name+number, |
---|
1886 | extra_plot_name) |
---|
1887 | else: |
---|
1888 | figname = '%sspeed_%s%s.png' %(output_dir+sep, |
---|
1889 | base_name+number, |
---|
1890 | extra_plot_name) |
---|
1891 | if verbose: print 'saving figure here %s' %figname |
---|
1892 | pylab.savefig(figname) |
---|
1893 | if "xmomentum" in quantities: |
---|
1894 | pylab.figure(300+j) |
---|
1895 | pylab.plot(t, xmomentum, c = cstr[i], linewidth=1) |
---|
1896 | pylab.xlabel(time_label) |
---|
1897 | pylab.ylabel(xmomentum_label) |
---|
1898 | pylab.axis(xmomentum_axis) |
---|
1899 | pylab.legend(legend_list,loc='upper right') |
---|
1900 | if output_dir =='': |
---|
1901 | figname = '%sxmomentum_%s%s.png' %(directory+sep, |
---|
1902 | base_name+number, |
---|
1903 | extra_plot_name) |
---|
1904 | else: |
---|
1905 | figname = '%sxmomentum_%s%s.png' %(output_dir+sep, |
---|
1906 | base_name+number, |
---|
1907 | extra_plot_name) |
---|
1908 | if verbose: print 'saving figure here %s' %figname |
---|
1909 | pylab.savefig(figname) |
---|
1910 | |
---|
1911 | if "ymomentum" in quantities: |
---|
1912 | pylab.figure(400+j) |
---|
1913 | pylab.plot(t, ymomentum, c = cstr[i], linewidth=1) |
---|
1914 | pylab.xlabel(time_label) |
---|
1915 | pylab.ylabel(ymomentum_label) |
---|
1916 | pylab.axis(ymomentum_axis) |
---|
1917 | pylab.legend(legend_list,loc='upper right') |
---|
1918 | if output_dir =='': |
---|
1919 | figname = '%symomentum_%s%s.png' %(directory+sep, |
---|
1920 | base_name+number, |
---|
1921 | extra_plot_name) |
---|
1922 | else: |
---|
1923 | figname = '%symomentum_%s%s.png' %(output_dir+sep, |
---|
1924 | base_name+number, |
---|
1925 | extra_plot_name) |
---|
1926 | if verbose: print 'saving figure here %s' %figname |
---|
1927 | pylab.savefig(figname) |
---|
1928 | |
---|
1929 | if "momentum" in quantities: |
---|
1930 | pylab.figure(500+j) |
---|
1931 | pylab.plot(t, momentum, c = cstr[i], linewidth=1) |
---|
1932 | pylab.xlabel(time_label) |
---|
1933 | pylab.ylabel(momentum_label) |
---|
1934 | pylab.axis(momentum_axis) |
---|
1935 | pylab.legend(legend_list,loc='upper right') |
---|
1936 | if output_dir =='': |
---|
1937 | figname = '%smomentum_%s%s.png' %(directory+sep, |
---|
1938 | base_name+number, |
---|
1939 | extra_plot_name) |
---|
1940 | else: |
---|
1941 | figname = '%smomentum_%s%s.png' %(output_dir+sep, |
---|
1942 | base_name+number, |
---|
1943 | extra_plot_name) |
---|
1944 | if verbose: print 'saving figure here %s' %figname |
---|
1945 | pylab.savefig(figname) |
---|
1946 | |
---|
1947 | if "bearing" in quantities: |
---|
1948 | pylab.figure(600+j) |
---|
1949 | pylab.plot(t, bearing, c = cstr[i], linewidth=1) |
---|
1950 | pylab.xlabel(time_label) |
---|
1951 | pylab.ylabel(bearing_label) |
---|
1952 | pylab.axis(bearing_axis) |
---|
1953 | pylab.legend(legend_list,loc='upper right') |
---|
1954 | if output_dir =='': |
---|
1955 | figname = '%sbearing_%s%s.png' %(output_dir+sep, |
---|
1956 | base_name+number, |
---|
1957 | extra_plot_name) |
---|
1958 | else: |
---|
1959 | figname = '%sbearing_%s%s.png' %(output_dir+sep, |
---|
1960 | base_name+number, |
---|
1961 | extra_plot_name) |
---|
1962 | if verbose: print 'saving figure here %s' %figname |
---|
1963 | pylab.savefig(figname) |
---|
1964 | |
---|
1965 | if "depth" in quantities: |
---|
1966 | pylab.figure(700+j) |
---|
1967 | pylab.plot(t, depth, c = cstr[i], linewidth=1) |
---|
1968 | pylab.xlabel(time_label) |
---|
1969 | pylab.ylabel(depth_label) |
---|
1970 | pylab.axis(depth_axis) |
---|
1971 | pylab.legend(legend_list,loc='upper right') |
---|
1972 | if output_dir == '': |
---|
1973 | figname = '%sdepth_%s%s.png' %(directory, |
---|
1974 | base_name+number, |
---|
1975 | extra_plot_name) |
---|
1976 | else: |
---|
1977 | figname = '%sdepth_%s%s.png' %(output_dir+sep, |
---|
1978 | base_name+number, |
---|
1979 | extra_plot_name) |
---|
1980 | if verbose: print 'saving figure here %s' %figname |
---|
1981 | pylab.savefig(figname) |
---|
1982 | |
---|
1983 | if verbose: print 'Closing all plots' |
---|
1984 | pylab.close('all') |
---|
1985 | del pylab |
---|
1986 | if verbose: print 'Finished closing plots' |
---|
1987 | |
---|
1988 | def csv2timeseries_graphs(directories_dic={}, |
---|
1989 | output_dir='', |
---|
1990 | base_name=None, |
---|
1991 | plot_numbers='', |
---|
1992 | quantities=['stage'], |
---|
1993 | extra_plot_name='', |
---|
1994 | assess_all_csv_files=True, |
---|
1995 | create_latex=False, |
---|
1996 | verbose=False): |
---|
1997 | |
---|
1998 | """ Read in csv files that have the right header information and |
---|
1999 | plot time series such as Stage, Speed, etc. Will also plot several |
---|
2000 | time series on one plot. Filenames must follow this convention, |
---|
2001 | <base_name><plot_number>.csv eg gauge_timeseries3.csv |
---|
2002 | |
---|
2003 | NOTE: relies that 'elevation' is in the csv file! |
---|
2004 | |
---|
2005 | Each file represents a location and within each file there are |
---|
2006 | time, quantity columns. |
---|
2007 | |
---|
2008 | For example: |
---|
2009 | if "directories_dic" defines 4 directories and in each directories |
---|
2010 | there is a csv files corresponding to the right "plot_numbers", |
---|
2011 | this will create a plot with 4 lines one for each directory AND |
---|
2012 | one plot for each "quantities". ??? FIXME: unclear. |
---|
2013 | |
---|
2014 | Usage: |
---|
2015 | csv2timeseries_graphs(directories_dic={'slide'+sep:['Slide',0, 0], |
---|
2016 | 'fixed_wave'+sep:['Fixed Wave',0,0]}, |
---|
2017 | output_dir='fixed_wave'+sep, |
---|
2018 | base_name='gauge_timeseries_', |
---|
2019 | plot_numbers='', |
---|
2020 | quantities=['stage','speed'], |
---|
2021 | extra_plot_name='', |
---|
2022 | assess_all_csv_files=True, |
---|
2023 | create_latex=False, |
---|
2024 | verbose=True) |
---|
2025 | this will create one plot for stage with both 'slide' and |
---|
2026 | 'fixed_wave' lines on it for stage and speed for each csv |
---|
2027 | file with 'gauge_timeseries_' as the prefix. The graghs |
---|
2028 | will be in the output directory 'fixed_wave' and the graph |
---|
2029 | axis will be determined by assessing all the |
---|
2030 | |
---|
2031 | ANOTHER EXAMPLE |
---|
2032 | new_csv2timeseries_graphs(directories_dic={'slide'+sep:['Slide',0, 0], |
---|
2033 | 'fixed_wave'+sep:['Fixed Wave',0,0]}, |
---|
2034 | output_dir='fixed_wave'+sep, |
---|
2035 | base_name='gauge_timeseries_', |
---|
2036 | plot_numbers=['1-3'], |
---|
2037 | quantities=['stage','speed'], |
---|
2038 | extra_plot_name='', |
---|
2039 | assess_all_csv_files=False, |
---|
2040 | create_latex=False, |
---|
2041 | verbose=True) |
---|
2042 | This will plot csv files called gauge_timeseries_1.csv and |
---|
2043 | gauge_timeseries3.csv from both 'slide' and 'fixed_wave' directories |
---|
2044 | to 'fixed_wave'. There will be 4 plots created two speed and two stage |
---|
2045 | one for each csv file. There will be two lines on each of these plots. |
---|
2046 | And the axis will have been determined from only these files, had |
---|
2047 | assess_all_csv_files = True all csv file with 'gauges_timeseries_' prefix |
---|
2048 | would of been assessed. |
---|
2049 | |
---|
2050 | ANOTHER EXAMPLE |
---|
2051 | csv2timeseries_graphs({'J:'+sep+'anuga_validation'+sep:['new',20,-.1], |
---|
2052 | 'J:'+sep+'conical_island'+sep:['test',0,0]}, |
---|
2053 | output_dir='', |
---|
2054 | plot_numbers=['1','3'], |
---|
2055 | quantities=['stage','depth','bearing'], |
---|
2056 | base_name='gauge_b', |
---|
2057 | assess_all_csv_files=True, |
---|
2058 | verbose=True) |
---|
2059 | |
---|
2060 | This will produce one plot for each quantity (therefore 3) in the current directory, |
---|
2061 | each plot will have 2 lines on them. The first plot named 'new' will have the time |
---|
2062 | offseted by 20secs and the stage height adjusted by -0.1m |
---|
2063 | |
---|
2064 | Inputs: |
---|
2065 | directories_dic: dictionary of directory with values (plot |
---|
2066 | legend name for directory), (start time of |
---|
2067 | the time series) and the (value to add to |
---|
2068 | stage if needed). For example |
---|
2069 | {dir1:['Anuga_ons',5000, 0], |
---|
2070 | dir2:['b_emoth',5000,1.5], |
---|
2071 | dir3:['b_ons',5000,1.5]} |
---|
2072 | |
---|
2073 | output_dir: directory for the plot outputs, essential to define |
---|
2074 | if you want a plot with multiple timeseries. |
---|
2075 | |
---|
2076 | base_name: Is used a couple of times. 1) to find the csv files to be plotted |
---|
2077 | if there is no 'plot_numbers' then csv files with 'base_name' are |
---|
2078 | plotted and 2) in the title of the plots, the lenght of base_name is |
---|
2079 | removed from the front of the filename to be used in the title. |
---|
2080 | This could be changed if needed. |
---|
2081 | Note is ignored if assess_all_csv_files=True |
---|
2082 | |
---|
2083 | plot_numbers: a String list of numbers to plot. For example |
---|
2084 | [0-4,10,15-17] will read and attempt to plot |
---|
2085 | the follow 0,1,2,3,4,10,15,16,17 |
---|
2086 | NOTE: if no plot numbers this will create |
---|
2087 | one plot per quantity, per gauge |
---|
2088 | quantities: Currently limited to "stage", "speed", and |
---|
2089 | "Momentum", should be changed to incorporate |
---|
2090 | any quantity read from CSV file.... |
---|
2091 | |
---|
2092 | extra_plot_name: A string that is appended to the end of the |
---|
2093 | output filename. |
---|
2094 | |
---|
2095 | assess_all_csv_files: if true it will read ALL csv file with |
---|
2096 | "base_name", regardless of 'plot_numbers' |
---|
2097 | and determine a uniform set of axes for |
---|
2098 | Stage, Speed and Momentum. IF FALSE it |
---|
2099 | will only read the csv file within the |
---|
2100 | 'plot_numbers' |
---|
2101 | |
---|
2102 | create_latex: NOT IMPLEMENTED YET!! sorry Jane.... |
---|
2103 | |
---|
2104 | OUTPUTS: None, it saves the plots to |
---|
2105 | <output_dir><base_name><plot_number><extra_plot_name>.png |
---|
2106 | |
---|
2107 | KNOWN PROBLEMS: Currently the axis for the stage/depth will be incorporate |
---|
2108 | |
---|
2109 | """ |
---|
2110 | |
---|
2111 | import pylab |
---|
2112 | from os import sep |
---|
2113 | from anuga.shallow_water.data_manager import \ |
---|
2114 | get_all_files_with_extension, csv2dict |
---|
2115 | |
---|
2116 | seconds_in_hour = 3600 |
---|
2117 | seconds_in_minutes = 60 |
---|
2118 | |
---|
2119 | quantities_label={} |
---|
2120 | # quantities_label['time'] = 'time (hours)' |
---|
2121 | quantities_label['time'] = 'time (minutes)' |
---|
2122 | quantities_label['stage'] = 'wave height (m)' |
---|
2123 | quantities_label['speed'] = 'speed (m/s)' |
---|
2124 | quantities_label['momentum'] = 'momentum (m^2/sec)' |
---|
2125 | quantities_label['depth'] = 'water depth (m)' |
---|
2126 | quantities_label['xmomentum'] = 'momentum (m^2/sec)' |
---|
2127 | quantities_label['ymomentum'] = 'momentum (m^2/sec)' |
---|
2128 | quantities_label['bearing'] = 'degrees (o)' |
---|
2129 | quantities_label['elevation'] = 'elevation (m)' |
---|
2130 | |
---|
2131 | |
---|
2132 | if extra_plot_name != '': |
---|
2133 | extra_plot_name='_'+extra_plot_name |
---|
2134 | |
---|
2135 | new_plot_numbers=[] |
---|
2136 | #change plot_numbers to list, eg ['0-4','10'] |
---|
2137 | #to ['0','1','2','3','4','10'] |
---|
2138 | for i, num_string in enumerate(plot_numbers): |
---|
2139 | if '-' in num_string: |
---|
2140 | start = int(num_string[:num_string.rfind('-')]) |
---|
2141 | end = int(num_string[num_string.rfind('-')+1:])+1 |
---|
2142 | for x in range(start, end): |
---|
2143 | new_plot_numbers.append(str(x)) |
---|
2144 | else: |
---|
2145 | new_plot_numbers.append(num_string) |
---|
2146 | |
---|
2147 | #finds all the files that fit the specs provided and return a list of them |
---|
2148 | #so to help find a uniform max and min for the plots... |
---|
2149 | list_filenames=[] |
---|
2150 | all_csv_filenames=[] |
---|
2151 | if verbose: print 'Determining files to access for axes ranges \n' |
---|
2152 | # print directories_dic.keys, base_name |
---|
2153 | |
---|
2154 | for i,directory in enumerate(directories_dic.keys()): |
---|
2155 | all_csv_filenames.append(get_all_files_with_extension(directory, |
---|
2156 | base_name,'.csv')) |
---|
2157 | |
---|
2158 | filenames=[] |
---|
2159 | if plot_numbers == '': |
---|
2160 | list_filenames.append(get_all_files_with_extension(directory, |
---|
2161 | base_name,'.csv')) |
---|
2162 | else: |
---|
2163 | for number in new_plot_numbers: |
---|
2164 | # print 'number!!!', base_name, number |
---|
2165 | filenames.append(base_name+number) |
---|
2166 | # print filenames |
---|
2167 | list_filenames.append(filenames) |
---|
2168 | |
---|
2169 | |
---|
2170 | |
---|
2171 | # print "list_filenames", list_filenames |
---|
2172 | |
---|
2173 | #use all the files to get the values for the plot axis |
---|
2174 | max_start_time= -1000. |
---|
2175 | min_start_time = 100000 |
---|
2176 | |
---|
2177 | if verbose: print 'Determining uniform axes \n' |
---|
2178 | #this entire loop is to determine the min and max range for the |
---|
2179 | #axes of the plots |
---|
2180 | |
---|
2181 | # quantities.insert(0,'elevation') |
---|
2182 | quantities.insert(0,'time') |
---|
2183 | |
---|
2184 | quantity_value={} |
---|
2185 | min_quantity_value={} |
---|
2186 | max_quantity_value={} |
---|
2187 | |
---|
2188 | |
---|
2189 | for i, directory in enumerate(directories_dic.keys()): |
---|
2190 | |
---|
2191 | if assess_all_csv_files==False: |
---|
2192 | which_csv_to_assess = list_filenames[i] |
---|
2193 | else: |
---|
2194 | #gets list of filenames for directory "i" |
---|
2195 | which_csv_to_assess = all_csv_filenames[i] |
---|
2196 | |
---|
2197 | |
---|
2198 | for j, filename in enumerate(which_csv_to_assess): |
---|
2199 | |
---|
2200 | dir_filename=join(directory,filename) |
---|
2201 | attribute_dic, title_index_dic = csv2dict(dir_filename+ |
---|
2202 | '.csv') |
---|
2203 | |
---|
2204 | directory_start_time = directories_dic[directory][1] |
---|
2205 | directory_add_tide = directories_dic[directory][2] |
---|
2206 | |
---|
2207 | # print 'keys',attribute_dic.keys() |
---|
2208 | #add time to get values |
---|
2209 | for k, quantity in enumerate(quantities): |
---|
2210 | quantity_value[quantity] = [float(x) for x in attribute_dic[quantity]] |
---|
2211 | |
---|
2212 | #add tide to stage if provided |
---|
2213 | if quantity == 'stage': |
---|
2214 | quantity_value[quantity]=array(quantity_value[quantity])+directory_add_tide |
---|
2215 | |
---|
2216 | #condition to find max and mins for all the plots |
---|
2217 | # populate the list with something when i=0 and j=0 and |
---|
2218 | # then compare to the other values to determine abs max and min |
---|
2219 | if i==0 and j==0: |
---|
2220 | |
---|
2221 | min_quantity_value[quantity], \ |
---|
2222 | max_quantity_value[quantity] = get_min_max_values(quantity_value[quantity]) |
---|
2223 | |
---|
2224 | # print '1 min,max',i,j,k,quantity, min_quantity_value[quantity],max_quantity_value[quantity],directory, filename |
---|
2225 | else: |
---|
2226 | # print 'min,max',i,j,k,quantity, min_quantity_value[quantity],max_quantity_value[quantity],directory, filename |
---|
2227 | min, max = get_min_max_values(quantity_value[quantity]) |
---|
2228 | |
---|
2229 | if min<min_quantity_value[quantity]: min_quantity_value[quantity]=min |
---|
2230 | if max>max_quantity_value[quantity]: max_quantity_value[quantity]=max |
---|
2231 | |
---|
2232 | #print 'min,maj',quantity, min_quantity_value[quantity],max_quantity_value[quantity] |
---|
2233 | |
---|
2234 | #add tide to stage if provided |
---|
2235 | # if quantity == 'stage': |
---|
2236 | # quantity_value[quantity]=array(quantity_value[quantity])+directory_add_tide |
---|
2237 | |
---|
2238 | #set the time... ??? |
---|
2239 | if min_start_time > directory_start_time: |
---|
2240 | min_start_time = directory_start_time |
---|
2241 | if max_start_time < directory_start_time: |
---|
2242 | max_start_time = directory_start_time |
---|
2243 | #print 'start_time' , max_start_time, min_start_time |
---|
2244 | |
---|
2245 | |
---|
2246 | #final step to unifrom axis for the graphs |
---|
2247 | quantities_axis={} |
---|
2248 | for i, quantity in enumerate(quantities): |
---|
2249 | quantities_axis[quantity] = (min_start_time/seconds_in_minutes, |
---|
2250 | (max_quantity_value['time']+max_start_time)\ |
---|
2251 | /seconds_in_minutes, |
---|
2252 | min_quantity_value[quantity], |
---|
2253 | max_quantity_value[quantity]) |
---|
2254 | if verbose and (quantity != 'time' and quantity != 'elevation'): |
---|
2255 | print 'axis for quantity %s are x:(%s to %s)%s and y:(%s to %s)%s' %(quantity, |
---|
2256 | quantities_axis[quantity][0], |
---|
2257 | quantities_axis[quantity][1], |
---|
2258 | quantities_label['time'], |
---|
2259 | quantities_axis[quantity][2], |
---|
2260 | quantities_axis[quantity][3], |
---|
2261 | quantities_label[quantity]) |
---|
2262 | print quantities_axis[quantity] |
---|
2263 | |
---|
2264 | cstr = ['b', 'r', 'g', 'c', 'm', 'y', 'k'] |
---|
2265 | |
---|
2266 | if verbose: print 'Now start to plot \n' |
---|
2267 | |
---|
2268 | i_max = len(directories_dic.keys()) |
---|
2269 | legend_list_dic =[] |
---|
2270 | legend_list =[] |
---|
2271 | for i, directory in enumerate(directories_dic.keys()): |
---|
2272 | if verbose: print'Plotting in %s %s' %(directory, new_plot_numbers) |
---|
2273 | # print 'LIST',list_filenames |
---|
2274 | for j, filename in enumerate(list_filenames[i]): |
---|
2275 | |
---|
2276 | if verbose: print'Starting %s' %filename |
---|
2277 | directory_name = directories_dic[directory][0] |
---|
2278 | directory_start_time = directories_dic[directory][1] |
---|
2279 | directory_add_tide = directories_dic[directory][2] |
---|
2280 | |
---|
2281 | #create an if about the start time and tide hieght |
---|
2282 | #if they don't exist |
---|
2283 | #print 'i %s,j %s, number %s, file %s' %(i,j,number,file) |
---|
2284 | attribute_dic, title_index_dic = csv2dict(directory+filename+'.csv') |
---|
2285 | #get data from dict in to list |
---|
2286 | t = [float(x) for x in attribute_dic["time"]] |
---|
2287 | |
---|
2288 | #do maths to list by changing to array |
---|
2289 | t=(array(t)+directory_start_time)/seconds_in_minutes |
---|
2290 | |
---|
2291 | #finds the maximum elevation, used only as a test |
---|
2292 | # and as info in the graphs |
---|
2293 | max_ele=-100000 |
---|
2294 | min_ele=100000 |
---|
2295 | elevation = [float(x) for x in attribute_dic["elevation"]] |
---|
2296 | |
---|
2297 | min_ele, max_ele = get_min_max_values(elevation) |
---|
2298 | |
---|
2299 | if min_ele != max_ele: |
---|
2300 | print "Note! Elevation changes in %s" %dir_filename |
---|
2301 | |
---|
2302 | #populates the legend_list_dic with dir_name and the elevation |
---|
2303 | if i==0: |
---|
2304 | legend_list_dic.append({directory_name:round(max_ele,2)}) |
---|
2305 | else: |
---|
2306 | #print j,max_ele, directory_name, legend_list_dic |
---|
2307 | legend_list_dic[j][directory_name]=round(max_ele,2) |
---|
2308 | |
---|
2309 | # creates a list for the legend after at "legend_dic" has been fully populated |
---|
2310 | # only runs on the last iteration for all the gauges(csv) files |
---|
2311 | # empties the list before creating it |
---|
2312 | if i==i_max-1: |
---|
2313 | legend_list=[] |
---|
2314 | for k, l in legend_list_dic[j].iteritems(): |
---|
2315 | legend_list.append('%s (elevation = %sm)'%(k,l)) |
---|
2316 | #print k,l, legend_list_dic[j] |
---|
2317 | |
---|
2318 | #print 'filename',filename, quantities |
---|
2319 | #remove time so it is not plotted! |
---|
2320 | for k, quantity in enumerate(quantities): |
---|
2321 | if quantity != 'time' and quantity != 'elevation': |
---|
2322 | quantity_value[quantity] = [float(x) for x in attribute_dic[quantity]] |
---|
2323 | |
---|
2324 | num=int(k*100+j) |
---|
2325 | pylab.figure(num) |
---|
2326 | # note if elevation is greater than 0m the stage plot is called 'depth' and the stage has |
---|
2327 | # the elevation minused from it. This allows the plots to make more sense see manual |
---|
2328 | # section 4.7 |
---|
2329 | # if quantity=='stage': |
---|
2330 | # quantity_value[quantity] =array(quantity_value[quantity])+directory_add_tide |
---|
2331 | |
---|
2332 | # if min_ele>=0: |
---|
2333 | # quantity_value['stage']=array(quantity_value['stage'])-min_ele |
---|
2334 | # pylab.ylabel(quantities_label['depth']) |
---|
2335 | # pylab.axis([quantities_axis['stage'][0], |
---|
2336 | # quantities_axis['stage'][1], |
---|
2337 | # quantities_axis['stage'][2], |
---|
2338 | # quantities_axis['stage'][3]])#-quantities_axis['elevation'][3]]) |
---|
2339 | # print "AXIS", quantities_axis['stage'] |
---|
2340 | # else: |
---|
2341 | # pylab.ylabel(quantities_label['stage']) |
---|
2342 | # else: |
---|
2343 | # pylab.ylabel(quantities_label[quantity]) |
---|
2344 | pylab.ylabel(quantities_label[quantity]) |
---|
2345 | pylab.plot(t, quantity_value[quantity], c = cstr[i], linewidth=1) |
---|
2346 | pylab.xlabel(quantities_label['time']) |
---|
2347 | pylab.axis(quantities_axis[quantity]) |
---|
2348 | pylab.legend(legend_list,loc='upper right') |
---|
2349 | pylab.title('%s at %s gauge' %(quantity,filename[len(base_name):])) |
---|
2350 | if output_dir == '': |
---|
2351 | figname = '%s%s_%s%s.png' %(directory, |
---|
2352 | quantity, |
---|
2353 | filename, |
---|
2354 | extra_plot_name) |
---|
2355 | else: |
---|
2356 | figname = '%s%s_%s%s.png' %(output_dir, |
---|
2357 | quantity, |
---|
2358 | filename, |
---|
2359 | extra_plot_name) |
---|
2360 | if verbose: print 'saving figure here %s' %figname |
---|
2361 | pylab.savefig(figname) |
---|
2362 | |
---|
2363 | if verbose: print 'Closing all plots' |
---|
2364 | pylab.close('all') |
---|
2365 | del pylab |
---|
2366 | if verbose: print 'Finished closing plots' |
---|
2367 | |
---|
2368 | |
---|
def old_get_min_max_values(list=None,min1=100000,max1=-100000):
    """Return the min and max of the given list, clipped against the
    supplied running bounds.

    The returned minimum is the smaller of min(list) and min1; the
    returned maximum is the larger of max(list) and max1.  This lets the
    function be used to accumulate bounds across several lists.

    NOTE: the default bounds (+/-100000) may need to change depending on
    the range of values in your list.

    Raises ValueError if no list is provided (the old behaviour printed
    a message and then crashed with a TypeError anyway).
    """
    if list is None:
        raise ValueError('List must be provided')

    # Compute each extreme once instead of twice
    list_min = min(list)
    list_max = max(list)

    if list_max > max1:
        max1 = list_max
    if list_min < min1:
        min1 = list_min

    return min1, max1
---|
2381 | |
---|
def get_min_max_values(list=None):
    """Return (min, max) of the given list.

    Raises ValueError if no list is provided (the old behaviour printed
    a message and then crashed with a TypeError anyway).
    """
    if list is None:
        raise ValueError('List must be provided')

    return min(list), max(list)
---|
2389 | |
---|
2390 | |
---|
2391 | |
---|
2392 | def get_runup_data_for_locations_from_file(gauge_filename, |
---|
2393 | sww_filename, |
---|
2394 | runup_filename, |
---|
2395 | size=10, |
---|
2396 | verbose=False): |
---|
2397 | |
---|
2398 | '''this will read a csv file with the header x,y. Then look in a square 'size'x2 |
---|
2399 | around this position for the 'max_inundaiton_height' in the 'sww_filename' and |
---|
2400 | report the findings in the 'runup_filename |
---|
2401 | |
---|
2402 | WARNING: NO TESTS! |
---|
2403 | ''' |
---|
2404 | |
---|
2405 | from anuga.shallow_water.data_manager import get_all_directories_with_name,\ |
---|
2406 | get_maximum_inundation_data,\ |
---|
2407 | csv2dict |
---|
2408 | |
---|
2409 | file = open(runup_filename,"w") |
---|
2410 | file.write("easting,northing,runup \n ") |
---|
2411 | file.close() |
---|
2412 | |
---|
2413 | #read gauge csv file to dictionary |
---|
2414 | attribute_dic, title_index_dic = csv2dict(gauge_filename) |
---|
2415 | northing = [float(x) for x in attribute_dic["y"]] |
---|
2416 | easting = [float(x) for x in attribute_dic["x"]] |
---|
2417 | |
---|
2418 | print 'Reading %s' %sww_filename |
---|
2419 | |
---|
2420 | runup_locations=[] |
---|
2421 | for i, x in enumerate(northing): |
---|
2422 | # print 'easting,northing',i,easting[i],northing[i] |
---|
2423 | poly = [[int(easting[i]+size),int(northing[i]+size)], |
---|
2424 | [int(easting[i]+size),int(northing[i]-size)], |
---|
2425 | [int(easting[i]-size),int(northing[i]-size)], |
---|
2426 | [int(easting[i]-size),int(northing[i]+size)]] |
---|
2427 | |
---|
2428 | run_up, x_y = get_maximum_inundation_data(filename=sww_filename, |
---|
2429 | polygon=poly, |
---|
2430 | verbose=False) |
---|
2431 | #if no runup will return 0 instead of NONE |
---|
2432 | if run_up==None: run_up=0 |
---|
2433 | if x_y==None: x_y=[0,0] |
---|
2434 | |
---|
2435 | if verbose:print 'maximum inundation runup near %s is %s meters' %(x_y,run_up) |
---|
2436 | |
---|
2437 | #writes to file |
---|
2438 | file = open(runup_filename,"a") |
---|
2439 | temp = '%s,%s,%s \n' %(x_y[0], x_y[1], run_up) |
---|
2440 | file.write(temp) |
---|
2441 | file.close() |
---|
2442 | |
---|
2443 | def sww2csv_gauges(sww_file, |
---|
2444 | gauge_file, |
---|
2445 | quantities = ['stage','depth', 'elevation', 'xmomentum', 'ymomentum'], |
---|
2446 | verbose=False, |
---|
2447 | use_cache = True): |
---|
2448 | """ |
---|
2449 | |
---|
2450 | Inputs: |
---|
2451 | |
---|
2452 | NOTE: if using csv2timeseries_graphs after creating csv file, it is essential to |
---|
2453 | export quantities 'depth' and 'elevation'. 'depth' is good to analyse gauges on |
---|
2454 | land and elevation is used automatically by csv2timeseries_graphs in the legend. |
---|
2455 | |
---|
2456 | sww_file: path to any sww file |
---|
2457 | |
---|
2458 | points_file: Assumes that it follows this format |
---|
2459 | name, easting, northing, elevation |
---|
2460 | point1, 100.3, 50.2, 10.0 |
---|
2461 | point2, 10.3, 70.3, 78.0 |
---|
2462 | |
---|
2463 | NOTE: order of column can change but names eg 'easting', elevation' |
---|
2464 | must be the same! |
---|
2465 | |
---|
2466 | Outputs: |
---|
2467 | one file for each gauge/point location in the points file. They |
---|
2468 | will be named with this format |
---|
2469 | gauge_<name>.csv eg gauge_point1.csv |
---|
2470 | |
---|
2471 | They will all have a header |
---|
2472 | |
---|
2473 | Usage: gauges_sww2csv(sww_file='test1.sww', |
---|
2474 | quantities = ['stage', 'elevation','depth','bearing'], |
---|
2475 | gauge_file='gauge.txt') |
---|
2476 | |
---|
2477 | Interpolate the quantities at a given set of locations, given |
---|
2478 | an sww file. |
---|
2479 | The results are written to a csv file. |
---|
2480 | |
---|
2481 | In the future let points be a points file. |
---|
2482 | And the user choose the quantities. |
---|
2483 | |
---|
2484 | This is currently quite specific. |
---|
2485 | If it need to be more general, chagne things. |
---|
2486 | |
---|
2487 | This is really returning speed, not velocity. |
---|
2488 | |
---|
2489 | |
---|
2490 | """ |
---|
2491 | from csv import reader,writer |
---|
2492 | from anuga.utilities.numerical_tools import ensure_numeric, mean, NAN |
---|
2493 | from Numeric import array, resize,shape,Float,zeros,take,argsort,argmin |
---|
2494 | import string |
---|
2495 | from anuga.shallow_water.data_manager import get_all_swwfiles |
---|
2496 | # quantities = ['stage', 'elevation', 'xmomentum', 'ymomentum'] |
---|
2497 | #print "points",points |
---|
2498 | |
---|
2499 | assert type(gauge_file) == type(''),\ |
---|
2500 | 'Gauge filename must be a string' |
---|
2501 | |
---|
2502 | try: |
---|
2503 | # fid = open(gauge_file) |
---|
2504 | point_reader = reader(file(gauge_file)) |
---|
2505 | |
---|
2506 | except Exception, e: |
---|
2507 | msg = 'File "%s" could not be opened: Error="%s"'\ |
---|
2508 | %(gauge_file, e) |
---|
2509 | raise msg |
---|
2510 | if verbose: print '\n Gauges obtained from: %s \n' %gauge_file |
---|
2511 | |
---|
2512 | |
---|
2513 | point_reader = reader(file(gauge_file)) |
---|
2514 | points = [] |
---|
2515 | point_name = [] |
---|
2516 | |
---|
2517 | #read point info from file |
---|
2518 | for i,row in enumerate(point_reader): |
---|
2519 | # print 'i',i,'row',row |
---|
2520 | #read header and determine the column numbers to read correcty. |
---|
2521 | if i==0: |
---|
2522 | for j,value in enumerate(row): |
---|
2523 | # print 'j',j,value, row |
---|
2524 | if value.strip()=='easting':easting=j |
---|
2525 | if value.strip()=='northing':northing=j |
---|
2526 | if value.strip()=='name':name=j |
---|
2527 | if value.strip()=='elevation':elevation=j |
---|
2528 | else: |
---|
2529 | # print i,'easting',easting,'northing',northing, row[easting] |
---|
2530 | points.append([float(row[easting]),float(row[northing])]) |
---|
2531 | point_name.append(row[name]) |
---|
2532 | |
---|
2533 | #convert to array for file_function |
---|
2534 | points_array = array(points,Float) |
---|
2535 | |
---|
2536 | points_array = ensure_absolute(points_array) |
---|
2537 | |
---|
2538 | dir_name, base = os.path.split(sww_file) |
---|
2539 | # print 'dirname',dir_name, base |
---|
2540 | if access(sww_file,R_OK): |
---|
2541 | if verbose: print 'File %s exists' %(sww_file) |
---|
2542 | else: |
---|
2543 | msg = 'File "%s" could not be opened: Error="%s"'\ |
---|
2544 | %(sww_file, e) |
---|
2545 | raise msg |
---|
2546 | |
---|
2547 | sww_files = get_all_swwfiles(look_in_dir=dir_name, |
---|
2548 | base_name=base, |
---|
2549 | verbose=verbose) |
---|
2550 | |
---|
2551 | #to make all the quantities lower case for file_function |
---|
2552 | quantities = [quantity.lower() for quantity in quantities] |
---|
2553 | |
---|
2554 | # what is quantities are needed from sww file to calculate output quantities |
---|
2555 | # also |
---|
2556 | |
---|
2557 | core_quantities = ['stage', 'elevation', 'xmomentum', 'ymomentum'] |
---|
2558 | |
---|
2559 | for sww_file in sww_files: |
---|
2560 | |
---|
2561 | # print 'sww_file',sww_file |
---|
2562 | sww_file = join(dir_name, sww_file+'.sww') |
---|
2563 | # print 'sww_file',sww_file, core_quantities |
---|
2564 | |
---|
2565 | callable_sww = file_function(sww_file, |
---|
2566 | quantities=core_quantities, |
---|
2567 | interpolation_points=points_array, |
---|
2568 | verbose=verbose, |
---|
2569 | use_cache=use_cache) |
---|
2570 | |
---|
2571 | gauge_file='gauge_' |
---|
2572 | |
---|
2573 | heading = [quantity for quantity in quantities] |
---|
2574 | heading.insert(0,'time') |
---|
2575 | |
---|
2576 | # print heading, quantities |
---|
2577 | |
---|
2578 | #create a list of csv writers for all the points and write header |
---|
2579 | points_writer = [] |
---|
2580 | for i,point in enumerate(points): |
---|
2581 | points_writer.append(writer(file(dir_name+sep+'gauge_'+point_name[i]+'.csv', "wb"))) |
---|
2582 | points_writer[i].writerow(heading) |
---|
2583 | |
---|
2584 | for time in callable_sww.get_time(): |
---|
2585 | # points_list = [] |
---|
2586 | |
---|
2587 | for point_i, point in enumerate(points_array): |
---|
2588 | points_list = [time] |
---|
2589 | # print'time',time,'point_i',point_i,point, points_array |
---|
2590 | point_quantities = callable_sww(time,point_i) |
---|
2591 | # print "quantities", point_quantities |
---|
2592 | |
---|
2593 | # for i, quantity in enumerate(quantities): |
---|
2594 | # points_list.append(quantities) |
---|
2595 | for quantity in quantities: |
---|
2596 | if quantity==NAN: |
---|
2597 | if verbose: print 'quantity does not exist in' %callable_sww.get_name |
---|
2598 | else: |
---|
2599 | if quantity == 'stage': |
---|
2600 | points_list.append(point_quantities[0]) |
---|
2601 | |
---|
2602 | if quantity == 'elevation': |
---|
2603 | points_list.append(point_quantities[1]) |
---|
2604 | |
---|
2605 | if quantity == 'xmomentum': |
---|
2606 | points_list.append(point_quantities[2]) |
---|
2607 | |
---|
2608 | if quantity == 'ymomentum': |
---|
2609 | points_list.append(point_quantities[3]) |
---|
2610 | |
---|
2611 | if quantity == 'depth': |
---|
2612 | points_list.append(point_quantities[0] - point_quantities[1]) |
---|
2613 | |
---|
2614 | |
---|
2615 | if quantity == 'momentum': |
---|
2616 | momentum = sqrt(point_quantities[2]**2 +\ |
---|
2617 | point_quantities[3]**2) |
---|
2618 | points_list.append(momentum) |
---|
2619 | |
---|
2620 | if quantity == 'speed': |
---|
2621 | #if depth is less than 0.001 then speed = 0.0 |
---|
2622 | if point_quantities[0] - point_quantities[1] < 0.001: |
---|
2623 | vel = 0.0 |
---|
2624 | else: |
---|
2625 | momentum = sqrt(point_quantities[2]**2 +\ |
---|
2626 | point_quantities[3]**2) |
---|
2627 | # vel = momentum/depth |
---|
2628 | vel = momentum/(point_quantities[0] - point_quantities[1]) |
---|
2629 | # vel = momentum/(depth + 1.e-6/depth) |
---|
2630 | |
---|
2631 | points_list.append(vel) |
---|
2632 | |
---|
2633 | if quantity == 'bearing': |
---|
2634 | points_list.append(calc_bearing(point_quantities[2], |
---|
2635 | point_quantities[3])) |
---|
2636 | |
---|
2637 | #print 'list',points_list |
---|
2638 | |
---|
2639 | points_writer[point_i].writerow(points_list) |
---|
2640 | |
---|
2641 | |
---|
def greens_law(d1,d2,h1,verbose=False):
    """Approximate the wave amplitude at one depth from another using
    Green's Law.

    Green's Law relates the amplitudes at two depths through the fourth
    root of the ratio of the depths:

        h2 = h1 (d1/d2)^(1/4), where d2 cannot equal 0.

    Note, wave amplitude is equal to stage.

    Inputs:

        d1, d2 - the two depths
        h1 - the wave amplitude at d1

    Returns:

        h2 - the derived amplitude at d2
    """

    d1 = ensure_numeric(d1)
    d2 = ensure_numeric(d2)
    h1 = ensure_numeric(h1)

    # All three inputs must be strictly positive for the law to apply
    if d1 <= 0.0:
        raise Exception('the first depth, d1 (%f), must be strictly positive' %(d1))
    if d2 <= 0.0:
        raise Exception('the second depth, d2 (%f), must be strictly positive' %(d2))
    if h1 <= 0.0:
        raise Exception('the wave amplitude, h1 (%f), must be strictly positive' %(h1))

    amplification = (d1/d2)**0.25
    h2 = h1*amplification

    assert h2 > 0

    return h2
---|
2682 | |
---|