source: trunk/anuga_core/source/anuga/file/sww.py @ 7770

Last change on this file since 7770 was 7770, checked in by hudson, 13 years ago

Broke up data_manager into more modules - some failing tests.

File size: 50.2 KB
Line 
1""" Classes to read an SWW file.
2"""
3
from Scientific.IO.NetCDF import NetCDFFile
import numpy as num

import anuga.utilities.log as log
from anuga.anuga_exceptions import DataFileNotOpenError
from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a
from anuga.config import netcdf_float, netcdf_float32, netcdf_int
from anuga.config import max_float
from anuga.config import minimum_storable_height as \
        default_minimum_storable_height
from anuga.coordinate_transforms.geo_reference import Geo_reference
from anuga.coordinate_transforms.geo_reference import \
        ensure_geo_reference
from anuga.utilities.numerical_tools import ensure_numeric

from file_utils import create_filename
17
18##
19# @brief Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats used for output such as
    visualisation files or checkpoints.
    """

    def __init__(self, domain, extension, mode=netcdf_mode_w):
        """Set up state shared by all data-format wrappers.

        domain     the model domain whose data will be stored
        extension  file extension of the underlying file (e.g. 'sww')
        mode       mode of the underlying file: 'r', 'w' or 'a'
        """
        msg = "Mode %s must be either:\n" % mode + \
              "   'w' (write)\n" + \
              "   'r' (read)\n" + \
              "   'a' (append)"
        assert mode[0] in ['r', 'w', 'a'], msg

        # Derive output filename from the domain's data directory and name
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        self.domain = domain
        self.timestep = 0

        # Exclude ghosts in case this is a parallel domain
        self.number_of_nodes = domain.number_of_full_nodes
        self.number_of_volumes = domain.number_of_full_triangles

        # FIXME: Should we have a general set_precision function?
49
50
51##
52# @brief Class for handling checkpoints data
53# @note This is not operational at the moment
class CPT_file(Data_format):
    """Interface to native NetCDF format (.cpt) to be
    used for checkpointing (one day)

    NOTE: this class is not operational at the moment.
    """

    def __init__(self, domain, mode=netcdf_mode_w):
        """Create/open the checkpoint file and define its structure.

        domain  the domain to checkpoint
        mode    mode of the underlying data file (default: write)
        """
        from Scientific.IO.NetCDF import NetCDFFile

        self.precision = netcdf_float # Use full precision

        # NOTE(review): extension 'sww' looks like a copy/paste left-over
        # for a .cpt checkpoint file - confirm before changing, as it
        # determines the output filename.
        Data_format.__init__(self, domain, 'sww', mode)

        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)
        if mode[0] == 'w':
            # Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # Dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            # Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) # extensible

            # Variable definitions

            # Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            # 'z' holds the bed elevation written by store_checkpoint.
            # It was previously never created, making store_checkpoint fail.
            fid.createVariable('z', self.precision, ('number_of_points',))

            fid.createVariable('volumes', netcdf_int, ('number_of_volumes',
                                                       'number_of_vertices'))

            fid.createVariable('time', self.precision, ('number_of_timesteps',))

            # Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        fid.close()

    def store_checkpoint(self):
        """Write x,y,z coordinates of triangles and their connectivity.

        z is taken from the 'elevation' quantity (the bed elevation).
        """

        from Scientific.IO.NetCDF import NetCDFFile

        domain = self.domain

        # Get NetCDF
        fid = NetCDFFile(self.filename, netcdf_mode_a)

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        # Was missing: 'z' was referenced below as an undefined name
        z = fid.variables['z']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True, precision=self.precision)

        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        volumes[:] = V

        fid.close()

    def store_timestep(self, name):
        """Store time and the named quantity to file.

        name  name of the quantity to store; a variable of that name
              must exist in the file (created in __init__)
        """

        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        # Open for append, retrying in case another process (e.g. a
        # viewer) temporarily has the file open for reading.
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, netcdf_mode_a)
            except IOError:
                # This could happen if someone was reading the file.
                # In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened' \
                      ' - trying again' % self.filename
                log.critical(msg)
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' % self.filename
            # Call syntax (rather than 'raise E, msg') so this also
            # parses under Python 3
            raise DataFileNotOpenError(msg)

        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        # Write into the variable matching the requested quantity.
        # (Previously this always wrote into 'stage' regardless of name.)
        target = fid.variables[name]
        i = len(time)

        # Store time
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False, precision=self.precision)

        target[i,:] = A.astype(self.precision)

        # Flush and close
        fid.sync()
        fid.close()
191
192
class SWW_file(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """

    def __init__(self, domain,
                 mode=netcdf_mode_w, max_size=2000000000, recursion=False):
        """Set up the writer; if mode starts with 'w', write the SWW
        header and static file structure.

        domain     the domain whose quantities will be stored
        mode       mode of the underlying data file
        max_size   max file size in bytes before a new file is started
        recursion  internal file-splitting counter - leave as False
        """
        from Scientific.IO.NetCDF import NetCDFFile

        self.precision = netcdf_float32 # Use single precision for quantities
        self.recursion = recursion
        self.mode = mode
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size # File size max is 2Gig
        else:
            self.max_size = max_size
        if hasattr(domain, 'minimum_storable_height'):
            self.minimum_storable_height = domain.minimum_storable_height
        else:
            # Fall back to the configured default (imported at module
            # level from anuga.config; was previously an undefined name)
            self.minimum_storable_height = default_minimum_storable_height

        # Call parent constructor
        Data_format.__init__(self, domain, 'sww', mode)

        # Get static and dynamic quantities from domain
        static_quantities = []
        dynamic_quantities = []

        for q in domain.quantities_to_be_stored:
            flag = domain.quantities_to_be_stored[q]

            msg = 'Quantity %s is requested to be stored ' % q
            msg += 'but it does not exist in domain.quantities'
            assert q in domain.quantities, msg

            # flag == 1: store once; flag == 2: store every timestep
            assert flag in [1,2]
            if flag == 1: static_quantities.append(q)
            if flag == 2: dynamic_quantities.append(q)

        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)
        if mode[0] == 'w':
            description = 'Output from anuga.abstract_2d_finite_volumes ' \
                          'suitable for plotting'

            self.writer = Write_sww(static_quantities, dynamic_quantities)
            self.writer.store_header(fid,
                                     domain.starttime,
                                     self.number_of_volumes,
                                     self.domain.number_of_full_nodes,
                                     description=description,
                                     smoothing=domain.smooth,
                                     order=domain.default_order,
                                     sww_precision=self.precision)

            # Extra optional information
            if hasattr(domain, 'texture'):
                fid.texture = domain.texture

            if domain.quantities_to_be_monitored is not None:
                fid.createDimension('singleton', 1)
                fid.createDimension('two', 2)

                poly = domain.monitor_polygon
                if poly is not None:
                    N = len(poly)
                    fid.createDimension('polygon_length', N)
                    fid.createVariable('extrema.polygon',
                                       self.precision,
                                       ('polygon_length', 'two'))
                    fid.variables['extrema.polygon'][:] = poly

                interval = domain.monitor_time_interval
                if interval is not None:
                    fid.createVariable('extrema.time_interval',
                                       self.precision,
                                       ('two',))
                    fid.variables['extrema.time_interval'][:] = interval

                # Per-quantity extrema variables (min/max value, their
                # locations and the times at which they occurred)
                for q in domain.quantities_to_be_monitored:
                    fid.createVariable(q + '.extrema', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.min_location', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.max_location', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.min_time', self.precision,
                                       ('singleton',))
                    fid.createVariable(q + '.max_time', self.precision,
                                       ('singleton',))

        fid.close()

    def store_connectivity(self):
        """Store information about nodes, triangles and static quantities

        Writes x,y coordinates of triangles and their connectivity.

        Store also any quantity that has been identified as static.
        """

        # FIXME: Change name to reflect the fact that this function
        # stores both connectivity (triangulation) and static quantities

        from Scientific.IO.NetCDF import NetCDFFile

        domain = self.domain

        # append to the NetCDF file
        fid = NetCDFFile(self.filename, netcdf_mode_a)

        # Get X, Y from one (any) of the quantities
        Q = domain.quantities.values()[0]
        X,Y,_,V = Q.get_vertex_values(xy=True, precision=self.precision)

        # store the connectivity data
        points = num.concatenate((X[:,num.newaxis],Y[:,num.newaxis]), axis=1)
        self.writer.store_triangulation(fid,
                                        points,
                                        V.astype(num.float32),
                                        points_georeference=\
                                            domain.geo_reference)

        # Get names of static quantities
        static_quantities = {}
        for name in self.writer.static_quantities:
            Q = domain.quantities[name]
            A, _ = Q.get_vertex_values(xy=False,
                                       precision=self.precision)
            static_quantities[name] = A

        # Store static quantities
        self.writer.store_static_quantities(fid, **static_quantities)

        fid.close()

    def store_timestep(self):
        """Store time and time dependent quantities
        """

        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep
        from os import stat

        # Open existing file for append, retrying in case another
        # process (e.g. a viewer) temporarily has it open.
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                # Open existing file
                fid = NetCDFFile(self.filename, netcdf_mode_a)
            except IOError:
                # This could happen if someone was reading the file.
                # In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened' \
                      % self.filename
                msg += ' - trying step %s again' % self.domain.time
                log.critical(msg)
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' % self.filename
            # Call syntax (rather than 'raise E, msg') so this also
            # parses under Python 3
            raise DataFileNotOpenError(msg)

        # Check to see if the file is already too big:
        time = fid.variables['time']
        # Estimate per-step growth from the average size of steps so far
        i = len(time) + 1
        file_size = stat(self.filename)[6]
        file_size_increase = file_size / i
        if file_size + file_size_increase > self.max_size * 2**self.recursion:
            # In order to get the file name and start time correct,
            # I change the domain.filename and domain.starttime.
            # This is the only way to do this without changing
            # other modules (I think).

            # Write a filename addon that won't break the anuga viewers
            # (10.sww is bad)
            filename_ext = '_time_%s' % self.domain.time
            filename_ext = filename_ext.replace('.', '_')

            # Remember the old filename, then give domain a
            # name with the extension
            old_domain_filename = self.domain.get_name()
            if not self.recursion:
                self.domain.set_name(old_domain_filename + filename_ext)

            # Temporarily change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.get_time()

            # Build a new data_structure.
            next_data_structure = SWW_file(self.domain, mode=self.mode,
                                           max_size=self.max_size,
                                           recursion=self.recursion+1)
            if not self.recursion:
                log.critical('    file_size = %s' % file_size)
                log.critical('    saving file to %s'
                             % next_data_structure.filename)

            # Set up the new data_structure
            self.domain.writer = next_data_structure

            # Store connectivity and first timestep
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep()
            fid.sync()
            fid.close()

            # Restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.set_name(old_domain_filename)
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            i = len(time)

            if 'stage' in self.writer.dynamic_quantities:
                # Select only those values for stage,
                # xmomentum and ymomentum (if stored) where
                # depth exceeds minimum_storable_height
                #
                # In this branch it is assumed that elevation
                # is also available as a quantity

                Q = domain.quantities['stage']
                w, _ = Q.get_vertex_values(xy=False)

                Q = domain.quantities['elevation']
                z, _ = Q.get_vertex_values(xy=False)

                storable_indices = (w-z >= self.minimum_storable_height)
            else:
                # Very unlikely branch
                storable_indices = None # This means take all

            # Now store dynamic quantities
            dynamic_quantities = {}
            for name in self.writer.dynamic_quantities:
                Q = domain.quantities[name]
                A, _ = Q.get_vertex_values(xy=False,
                                           precision=self.precision)

                if storable_indices is not None:
                    if name == 'stage':
                        # Where depth is below threshold, store the bed
                        # elevation instead of the stage
                        A = num.choose(storable_indices, (z, A))

                    if name in ['xmomentum', 'ymomentum']:
                        # Get xmomentum where depth exceeds
                        # minimum_storable_height

                        # Define a zero vector of same size and type as A
                        # for use with momenta
                        null = num.zeros(num.size(A), A.dtype.char)
                        A = num.choose(storable_indices, (null, A))

                dynamic_quantities[name] = A

            # Store dynamic quantities
            self.writer.store_quantities(fid,
                                         time=self.domain.time,
                                         sww_precision=self.precision,
                                         **dynamic_quantities)

            # Update extrema if requested
            domain = self.domain
            if domain.quantities_to_be_monitored is not None:
                for q, info in domain.quantities_to_be_monitored.items():
                    if info['min'] is not None:
                        fid.variables[q + '.extrema'][0] = info['min']
                        fid.variables[q + '.min_location'][:] = \
                                        info['min_location']
                        fid.variables[q + '.min_time'][0] = info['min_time']

                    if info['max'] is not None:
                        fid.variables[q + '.extrema'][1] = info['max']
                        fid.variables[q + '.max_location'][:] = \
                                        info['max_location']
                        fid.variables[q + '.max_time'][0] = info['max_time']

            # Flush and close
            fid.sync()
            fid.close()
507
508
509##
510# @brief Class to open an sww file so that domain can be populated with quantity values
class Read_sww:
    """Open an sww file so that a domain can be populated with
    quantity values.
    """

    def __init__(self, source):
        """The source parameter is assumed to be a NetCDF sww file.
        """

        self.source = source

        self.frame_number = 0

        fin = NetCDFFile(self.source, 'r')

        # Plain float/int are used instead of the removed num.float/num.int
        # aliases; behaviour is identical.
        self.time = num.array(fin.variables['time'], float)
        self.last_frame_number = self.time.shape[0] - 1

        self.frames = num.arange(self.last_frame_number+1)

        fin.close()

        self.read_mesh()

        self.quantities = {}

        self.read_quantities()

    def read_mesh(self):
        """Read the triangulation (x, y, volumes) and cache the mesh extent."""
        fin = NetCDFFile(self.source, 'r')

        self.vertices = num.array(fin.variables['volumes'], int)

        self.x = x = num.array(fin.variables['x'], float)
        self.y = y = num.array(fin.variables['y'], float)

        assert len(self.x) == len(self.y)

        self.xmin = num.min(x)
        self.xmax = num.max(x)
        self.ymin = num.min(y)
        self.ymax = num.max(y)

        fin.close()

    def read_quantities(self, frame_number=0):
        """Read all quantities at the given frame into self.quantities.

        Each quantity is reshaped to (number_of_triangles, 3) - one value
        per triangle vertex. Returns the quantities dictionary.
        """
        assert 0 <= frame_number <= self.last_frame_number

        self.frame_number = frame_number

        # Number of triangles; use integer division so the result stays
        # an int under Python 3's true division as well
        M = len(self.x) // 3

        fin = NetCDFFile(self.source, 'r')

        # Skip the mesh/time variables and the '_range' bookkeeping entries
        excluded = ('x', 'y', 'time', 'volumes')
        names = [n for n in fin.variables.keys()
                 if n not in excluded and '_range' not in n]
        for q in names:
            if len(fin.variables[q].shape) == 1: # Not a time-varying quantity
                self.quantities[q] = \
                    num.ravel(num.array(fin.variables[q], float)).reshape(M,3)
            else: # Time-varying, get the current timestep data
                self.quantities[q] = \
                    num.array(fin.variables[q][self.frame_number],
                              float).reshape(M,3)
        fin.close()
        return self.quantities

    def get_bounds(self):
        """Return the mesh extent as [xmin, xmax, ymin, ymax]."""
        return [self.xmin, self.xmax, self.ymin, self.ymax]

    def get_last_frame_number(self):
        """Return the index of the last frame stored in the file."""
        return self.last_frame_number

    def get_time(self):
        """Return the time value of the current frame."""
        return self.time[self.frame_number]
584
585
586# @brief A class to write an SWW file.
587class Write_sww:
588    from anuga.shallow_water.shallow_water_domain import Domain
589
590    RANGE = '_range'
591    EXTREMA = ':extrema'
592
593    ##
594    # brief Instantiate the SWW writer class.
595    def __init__(self, static_quantities, dynamic_quantities):
596        """Initialise Write_sww with two list af quantity names:
597       
598        static_quantities (e.g. elevation or friction):
599            Stored once at the beginning of the simulation in a 1D array
600            of length number_of_points   
601        dynamic_quantities (e.g stage):
602            Stored every timestep in a 2D array with
603            dimensions number_of_points X number_of_timesteps       
604       
605        """
606        self.static_quantities = static_quantities   
607        self.dynamic_quantities = dynamic_quantities
608
609
610    ##
611    # @brief Store a header in the SWW file.
612    # @param outfile Open handle to the file that will be written.
613    # @param times A list of time slices *or* a start time.
614    # @param number_of_volumes The number of triangles.
615    # @param number_of_points The number of points.
616    # @param description The internal file description string.
617    # @param smoothing True if smoothing is to be used.
618    # @param order
619    # @param sww_precision Data type of the quantity written (netcdf constant)
620    # @param verbose True if this function is to be verbose.
621    # @note If 'times' is a list, the info will be made relative.
622    def store_header(self,
623                     outfile,
624                     times,
625                     number_of_volumes,
626                     number_of_points,
627                     description='Generated by ANUGA',
628                     smoothing=True,
629                     order=1,
630                     sww_precision=netcdf_float32,
631                     verbose=False):
632        """Write an SWW file header.
633
634        outfile - the open file that will be written
635        times - A list of the time slice times OR a start time
636        Note, if a list is given the info will be made relative.
637        number_of_volumes - the number of triangles
638        """
639
640        from anuga.abstract_2d_finite_volumes.util \
641            import get_revision_number
642
643        outfile.institution = 'Geoscience Australia'
644        outfile.description = description
645
646        # For sww compatibility
647        if smoothing is True:
648            # Smoothing to be depreciated
649            outfile.smoothing = 'Yes'
650            outfile.vertices_are_stored_uniquely = 'False'
651        else:
652            # Smoothing to be depreciated
653            outfile.smoothing = 'No'
654            outfile.vertices_are_stored_uniquely = 'True'
655        outfile.order = order
656
657        try:
658            revision_number = get_revision_number()
659        except:
660            # This will be triggered if the system cannot get the SVN
661            # revision number.
662            revision_number = None
663        # Allow None to be stored as a string
664        outfile.revision_number = str(revision_number)
665
666        # This is being used to seperate one number from a list.
667        # what it is actually doing is sorting lists from numeric arrays.
668        if isinstance(times, (list, num.ndarray)):
669            number_of_times = len(times)
670            times = ensure_numeric(times)
671            if number_of_times == 0:
672                starttime = 0
673            else:
674                starttime = times[0]
675                times = times - starttime  #Store relative times
676        else:
677            number_of_times = 0
678            starttime = times
679
680
681        outfile.starttime = starttime
682
683        # dimension definitions
684        outfile.createDimension('number_of_volumes', number_of_volumes)
685        outfile.createDimension('number_of_vertices', 3)
686        outfile.createDimension('numbers_in_range', 2)
687
688        if smoothing is True:
689            outfile.createDimension('number_of_points', number_of_points)
690            # FIXME(Ole): This will cause sww files for parallel domains to
691            # have ghost nodes stored (but not used by triangles).
692            # To clean this up, we have to change get_vertex_values and
693            # friends in quantity.py (but I can't be bothered right now)
694        else:
695            outfile.createDimension('number_of_points', 3*number_of_volumes)
696
697        outfile.createDimension('number_of_timesteps', number_of_times)
698
699        # variable definitions
700        outfile.createVariable('x', sww_precision, ('number_of_points',))
701        outfile.createVariable('y', sww_precision, ('number_of_points',))
702
703        outfile.createVariable('volumes', netcdf_int, ('number_of_volumes',
704                                                       'number_of_vertices'))
705
706        # Doing sww_precision instead of Float gives cast errors.
707        outfile.createVariable('time', netcdf_float,
708                               ('number_of_timesteps',))
709
710                               
711        for q in self.static_quantities:
712           
713            outfile.createVariable(q, sww_precision,
714                                   ('number_of_points',))
715           
716            outfile.createVariable(q + Write_sww.RANGE, sww_precision,
717                                   ('numbers_in_range',))
718                                   
719            # Initialise ranges with small and large sentinels.
720            # If this was in pure Python we could have used None sensibly
721            outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min
722            outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max
723
724        #if 'elevation' in self.static_quantities:   
725        #    # FIXME: Backwards compat - get rid of z once old view has retired
726        #    outfile.createVariable('z', sww_precision,
727        #                           ('number_of_points',))
728                               
729        for q in self.dynamic_quantities:
730            outfile.createVariable(q, sww_precision, ('number_of_timesteps',
731                                                      'number_of_points'))
732            outfile.createVariable(q + Write_sww.RANGE, sww_precision,
733                                   ('numbers_in_range',))
734
735            # Initialise ranges with small and large sentinels.
736            # If this was in pure Python we could have used None sensibly
737            outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min
738            outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max
739
740        if isinstance(times, (list, num.ndarray)):
741            outfile.variables['time'][:] = times    # Store time relative
742
743        if verbose:
744            log.critical('------------------------------------------------')
745            log.critical('Statistics:')
746            log.critical('    t in [%f, %f], len(t) == %d'
747                         % (num.min(times), num.max(times), len(times.flat)))
748
749    ##
750    # @brief Store triangulation data in the underlying file.
751    # @param outfile Open handle to underlying file.
752    # @param points_utm List or array of points in UTM.
753    # @param volumes
754    # @param zone
755    # @param new_origin georeference that the points can be set to.
756    # @param points_georeference The georeference of the points_utm.
757    # @param verbose True if this function is to be verbose.
758    def store_triangulation(self,
759                            outfile,
760                            points_utm,
761                            volumes,
762                            zone=None, 
763                            new_origin=None,
764                            points_georeference=None, 
765                            verbose=False):
766        """
767        new_origin - qa georeference that the points can be set to. (Maybe
768        do this before calling this function.)
769
770        points_utm - currently a list or array of the points in UTM.
771        points_georeference - the georeference of the points_utm
772
773        How about passing new_origin and current_origin.
774        If you get both, do a convertion from the old to the new.
775
776        If you only get new_origin, the points are absolute,
777        convert to relative
778
779        if you only get the current_origin the points are relative, store
780        as relative.
781
782        if you get no georefs create a new georef based on the minimums of
783        points_utm.  (Another option would be to default to absolute)
784
785        Yes, and this is done in another part of the code.
786        Probably geospatial.
787
788        If you don't supply either geo_refs, then supply a zone. If not
789        the default zone will be used.
790
791        precon:
792            header has been called.
793        """
794
795        number_of_points = len(points_utm)
796        volumes = num.array(volumes)
797        points_utm = num.array(points_utm)
798
799        # Given the two geo_refs and the points, do the stuff
800        # described in the method header
801        # if this is needed else where, pull out as a function
802        points_georeference = ensure_geo_reference(points_georeference)
803        new_origin = ensure_geo_reference(new_origin)
804        if new_origin is None and points_georeference is not None:
805            points = points_utm
806            geo_ref = points_georeference
807        else:
808            if new_origin is None:
809                new_origin = Geo_reference(zone, min(points_utm[:,0]),
810                                                 min(points_utm[:,1]))
811            points = new_origin.change_points_geo_ref(points_utm,
812                                                      points_georeference)
813            geo_ref = new_origin
814
815        # At this stage I need a georef and points
816        # the points are relative to the georef
817        geo_ref.write_NetCDF(outfile)
818
819        # This will put the geo ref in the middle
820        #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
821
822        x =  points[:,0]
823        y =  points[:,1]
824
825        if verbose:
826            log.critical('------------------------------------------------')
827            log.critical('More Statistics:')
828            log.critical('  Extent (/lon):')
829            log.critical('    x in [%f, %f], len(lat) == %d'
830                         % (min(x), max(x), len(x)))
831            log.critical('    y in [%f, %f], len(lon) == %d'
832                         % (min(y), max(y), len(y)))
833            #log.critical('    z in [%f, %f], len(z) == %d'
834            #             % (min(elevation), max(elevation), len(elevation)))
835            log.critical('geo_ref: %s' % str(geo_ref))
836            log.critical('------------------------------------------------')
837
838        outfile.variables['x'][:] = points[:,0] #- geo_ref.get_xllcorner()
839        outfile.variables['y'][:] = points[:,1] #- geo_ref.get_yllcorner()
840        outfile.variables['volumes'][:] = volumes.astype(num.int32) #On Opteron 64
841
842
843
844    # @brief Write the static quantity data to the underlying file.
845    # @param outfile Handle to open underlying file.
846    # @param sww_precision Format of quantity data to write (default Float32).
847    # @param verbose True if this function is to be verbose.
848    # @param **quant
849    def store_static_quantities(self, 
850                                outfile, 
851                                sww_precision=num.float32,
852                                verbose=False, 
853                                **quant):
854        """
855        Write the static quantity info.
856
857        **quant is extra keyword arguments passed in. These must be
858          the numpy arrays to be stored in the sww file at each timestep.
859
860        The argument sww_precision allows for storing as either
861        * single precision (default): num.float32
862        * double precision: num.float64 or num.float
863
864        Precondition:
865            store_triangulation and
866            store_header have been called.
867        """
868
869        # The dictionary quant must contain numpy arrays for each name.
870        # These will typically be quantities from Domain such as friction
871        #
872        # Arrays not listed in static_quantitiues will be ignored, silently.
873        #
874        # This method will also write the ranges for each quantity,
875        # e.g. stage_range, xmomentum_range and ymomentum_range
876        for q in self.static_quantities:
877            if not quant.has_key(q):
878                msg = 'Values for quantity %s was not specified in ' % q
879                msg += 'store_quantities so they cannot be stored.'
880                raise NewQuantity, msg
881            else:
882                q_values = ensure_numeric(quant[q])
883               
884                x = q_values.astype(sww_precision)
885                outfile.variables[q][:] = x
886       
887                # This populates the _range values
888                outfile.variables[q + Write_sww.RANGE][0] = num.min(x)
889                outfile.variables[q + Write_sww.RANGE][1] = num.max(x)
890                   
891        # FIXME: Hack for backwards compatibility with old viewer
892        #if 'elevation' in self.static_quantities:
893        #    outfile.variables['z'][:] = outfile.variables['elevation'][:]
894
895                   
896                   
897       
898       
899    ##
900    # @brief Write the quantity data to the underlying file.
901    # @param outfile Handle to open underlying file.
902    # @param sww_precision Format of quantity data to write (default Float32).
903    # @param slice_index
904    # @param time
905    # @param verbose True if this function is to be verbose.
906    # @param **quant
907    def store_quantities(self, 
908                         outfile, 
909                         sww_precision=num.float32,
910                         slice_index=None,
911                         time=None,
912                         verbose=False, 
913                         **quant):
914        """
915        Write the quantity info at each timestep.
916
917        **quant is extra keyword arguments passed in. These must be
918          the numpy arrays to be stored in the sww file at each timestep.
919
920        if the time array is already been built, use the slice_index
921        to specify the index.
922
923        Otherwise, use time to increase the time dimension
924
925        Maybe make this general, but the viewer assumes these quantities,
926        so maybe we don't want it general - unless the viewer is general
927       
928        The argument sww_precision allows for storing as either
929        * single precision (default): num.float32
930        * double precision: num.float64 or num.float
931
932        Precondition:
933            store_triangulation and
934            store_header have been called.
935        """
936
937        if time is not None:
938            file_time = outfile.variables['time']
939            slice_index = len(file_time)
940            file_time[slice_index] = time
941        else:
942            slice_index = int(slice_index) # Has to be cast in case it was numpy.int   
943
944        # Write the named dynamic quantities
945        # The dictionary quant must contain numpy arrays for each name.
946        # These will typically be the conserved quantities from Domain
947        # (Typically stage,  xmomentum, ymomentum).
948        #
949        # Arrays not listed in dynamic_quantitiues will be ignored, silently.
950        #
951        # This method will also write the ranges for each quantity,
952        # e.g. stage_range, xmomentum_range and ymomentum_range
953        for q in self.dynamic_quantities:
954            if not quant.has_key(q):
955                msg = 'Values for quantity %s was not specified in ' % q
956                msg += 'store_quantities so they cannot be stored.'
957                raise NewQuantity, msg
958            else:
959                q_values = ensure_numeric(quant[q])
960               
961                x = q_values.astype(sww_precision)
962                outfile.variables[q][slice_index] = x
963                   
964       
965                # This updates the _range values
966                q_range = outfile.variables[q + Write_sww.RANGE][:]
967                q_values_min = num.min(q_values)
968                if q_values_min < q_range[0]:
969                    outfile.variables[q + Write_sww.RANGE][0] = q_values_min
970                q_values_max = num.max(q_values)
971                if q_values_max > q_range[1]:
972                    outfile.variables[q + Write_sww.RANGE][1] = q_values_max
973
974    ##
975    # @brief Print the quantities in the underlying file.
976    # @param outfile UNUSED.
977    def verbose_quantities(self, outfile):
978        log.critical('------------------------------------------------')
979        log.critical('More Statistics:')
980        for q in self.dynamic_quantities:
981            log.critical(%s in [%f, %f]'
982                         % (q, outfile.variables[q+Write_sww.RANGE][0],
983                            outfile.variables[q+Write_sww.RANGE][1]))
984        log.critical('------------------------------------------------')
985
986
987
988
989##
990# @brief Get the extents of a NetCDF data file.
991# @param file_name The path to the SWW file.
992# @return A list of x, y, z and stage limits (min, max).
993def extent_sww(file_name):
994    """Read in an sww file.
995
996    Input:
997    file_name - the sww file
998
999    Output:
1000    A list: [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
1001    """
1002
1003    from Scientific.IO.NetCDF import NetCDFFile
1004
1005    #Get NetCDF
1006    fid = NetCDFFile(file_name, netcdf_mode_r)
1007
1008    # Get the variables
1009    x = fid.variables['x'][:]
1010    y = fid.variables['y'][:]
1011    stage = fid.variables['stage'][:]
1012
1013    fid.close()
1014
1015    return [min(x), max(x), min(y), max(y), num.min(stage), num.max(stage)]
1016
1017
1018##
1019# @brief
1020# @param filename
1021# @param boundary
1022# @param t
1023# @param fail_if_NaN
1024# @param NaN_filler
1025# @param verbose
1026# @param very_verbose
1027# @return
1028def load_sww_as_domain(filename, boundary=None, t=None,
1029               fail_if_NaN=True, NaN_filler=0,
1030               verbose=False, very_verbose=False):
1031    """
1032    Usage: domain = load_sww_as_domain('file.sww',
1033                        t=time (default = last time in file))
1034
1035    Boundary is not recommended if domain.smooth is not selected, as it
1036    uses unique coordinates, but not unique boundaries. This means that
1037    the boundary file will not be compatable with the coordinates, and will
1038    give a different final boundary, or crash.
1039    """
1040   
1041    from Scientific.IO.NetCDF import NetCDFFile
1042    from shallow_water import Domain
1043
1044    # initialise NaN.
1045    NaN = 9.969209968386869e+036
1046
1047    if verbose: log.critical('Reading from %s' % filename)
1048
1049    fid = NetCDFFile(filename, netcdf_mode_r)    # Open existing file for read
1050    time = fid.variables['time']       # Timesteps
1051    if t is None:
1052        t = time[-1]
1053    time_interp = get_time_interp(time,t)
1054
1055    # Get the variables as numeric arrays
1056    x = fid.variables['x'][:]                   # x-coordinates of vertices
1057    y = fid.variables['y'][:]                   # y-coordinates of vertices
1058    elevation = fid.variables['elevation']      # Elevation
1059    stage = fid.variables['stage']              # Water level
1060    xmomentum = fid.variables['xmomentum']      # Momentum in the x-direction
1061    ymomentum = fid.variables['ymomentum']      # Momentum in the y-direction
1062
1063    starttime = fid.starttime[0]
1064    volumes = fid.variables['volumes'][:]       # Connectivity
1065    coordinates = num.transpose(num.asarray([x.tolist(), y.tolist()]))
1066    # FIXME (Ole): Something like this might be better:
1067    #                 concatenate((x, y), axis=1)
1068    # or              concatenate((x[:,num.newaxis], x[:,num.newaxis]), axis=1)
1069
1070    conserved_quantities = []
1071    interpolated_quantities = {}
1072    other_quantities = []
1073
1074    # get geo_reference
1075    try:                             # sww files don't have to have a geo_ref
1076        geo_reference = Geo_reference(NetCDFObject=fid)
1077    except: # AttributeError, e:
1078        geo_reference = None
1079
1080    if verbose: log.critical('    getting quantities')
1081
1082    for quantity in fid.variables.keys():
1083        dimensions = fid.variables[quantity].dimensions
1084        if 'number_of_timesteps' in dimensions:
1085            conserved_quantities.append(quantity)
1086            interpolated_quantities[quantity] = \
1087                  interpolated_quantity(fid.variables[quantity][:], time_interp)
1088        else:
1089            other_quantities.append(quantity)
1090
1091    other_quantities.remove('x')
1092    other_quantities.remove('y')
1093    #other_quantities.remove('z')
1094    other_quantities.remove('volumes')
1095    try:
1096        other_quantities.remove('stage_range')
1097        other_quantities.remove('xmomentum_range')
1098        other_quantities.remove('ymomentum_range')
1099        other_quantities.remove('elevation_range')
1100    except:
1101        pass
1102
1103    conserved_quantities.remove('time')
1104
1105    if verbose: log.critical('    building domain')
1106
1107    #    From domain.Domain:
1108    #    domain = Domain(coordinates, volumes,\
1109    #                    conserved_quantities = conserved_quantities,\
1110    #                    other_quantities = other_quantities,zone=zone,\
1111    #                    xllcorner=xllcorner, yllcorner=yllcorner)
1112
1113    # From shallow_water.Domain:
1114    coordinates = coordinates.tolist()
1115    volumes = volumes.tolist()
1116    # FIXME:should this be in mesh? (peter row)
1117    if fid.smoothing == 'Yes':
1118        unique = False
1119    else:
1120        unique = True
1121    if unique:
1122        coordinates, volumes, boundary = weed(coordinates, volumes,boundary)
1123
1124     
1125   
1126    try:
1127        domain = Domain(coordinates, volumes, boundary)
1128    except AssertionError, e:
1129        fid.close()
1130        msg = 'Domain could not be created: %s. ' \
1131              'Perhaps use "fail_if_NaN=False and NaN_filler = ..."' % e
1132        raise DataDomainError, msg
1133
1134    if not boundary is None:
1135        domain.boundary = boundary
1136
1137    domain.geo_reference = geo_reference
1138
1139    domain.starttime = float(starttime) + float(t)
1140    domain.time = 0.0
1141
1142    for quantity in other_quantities:
1143        try:
1144            NaN = fid.variables[quantity].missing_value
1145        except:
1146            pass                       # quantity has no missing_value number
1147        X = fid.variables[quantity][:]
1148        if very_verbose:
1149            log.critical('       %s' % str(quantity))
1150            log.critical('        NaN = %s' % str(NaN))
1151            log.critical('        max(X)')
1152            log.critical('       %s' % str(max(X)))
1153            log.critical('        max(X)==NaN')
1154            log.critical('       %s' % str(max(X)==NaN))
1155            log.critical('')
1156        if max(X) == NaN or min(X) == NaN:
1157            if fail_if_NaN:
1158                msg = 'quantity "%s" contains no_data entry' % quantity
1159                raise DataMissingValuesError, msg
1160            else:
1161                data = (X != NaN)
1162                X = (X*data) + (data==0)*NaN_filler
1163        if unique:
1164            X = num.resize(X, (len(X)/3, 3))
1165        domain.set_quantity(quantity, X)
1166    #
1167    for quantity in conserved_quantities:
1168        try:
1169            NaN = fid.variables[quantity].missing_value
1170        except:
1171            pass                       # quantity has no missing_value number
1172        X = interpolated_quantities[quantity]
1173        if very_verbose:
1174            log.critical('       %s' % str(quantity))
1175            log.critical('        NaN = %s' % str(NaN))
1176            log.critical('        max(X)')
1177            log.critical('       %s' % str(max(X)))
1178            log.critical('        max(X)==NaN')
1179            log.critical('       %s' % str(max(X)==NaN))
1180            log.critical('')
1181        if max(X) == NaN or min(X) == NaN:
1182            if fail_if_NaN:
1183                msg = 'quantity "%s" contains no_data entry' % quantity
1184                raise DataMissingValuesError, msg
1185            else:
1186                data = (X != NaN)
1187                X = (X*data) + (data==0)*NaN_filler
1188        if unique:
1189            X = num.resize(X, (X.shape[0]/3, 3))
1190        domain.set_quantity(quantity, X)
1191
1192    fid.close()
1193
1194    return domain
1195
1196
1197##
1198# @brief Get mesh and quantity data from an SWW file.
1199# @param filename Path to data file to read.
1200# @param quantities UNUSED!
1201# @param verbose True if this function is to be verbose.
1202# @return (mesh, quantities, time) where mesh is the mesh data, quantities is
1203#         a dictionary of {name: value}, and time is the time vector.
1204# @note Quantities extracted: 'elevation', 'stage', 'xmomentum' and 'ymomentum'
1205def get_mesh_and_quantities_from_file(filename,
1206                                      quantities=None,
1207                                      verbose=False):
1208    """Get and rebuild mesh structure and associated quantities from sww file
1209
1210    Input:
1211        filename - Name os sww file
1212        quantities - Names of quantities to load
1213
1214    Output:
1215        mesh - instance of class Interpolate
1216               (including mesh and interpolation functionality)
1217        quantities - arrays with quantity values at each mesh node
1218        time - vector of stored timesteps
1219
1220    This function is used by e.g.:
1221        get_interpolated_quantities_at_polyline_midpoints
1222    """
1223
1224    # FIXME (Ole): Maybe refactor filefunction using this more fundamental code.
1225
1226    import types
1227    from Scientific.IO.NetCDF import NetCDFFile
1228    from shallow_water import Domain
1229    from anuga.abstract_2d_finite_volumes.neighbour_mesh import Mesh
1230
1231    if verbose: log.critical('Reading from %s' % filename)
1232
1233    fid = NetCDFFile(filename, netcdf_mode_r)    # Open existing file for read
1234    time = fid.variables['time'][:]    # Time vector
1235    time += fid.starttime[0]
1236
1237    # Get the variables as numeric arrays
1238    x = fid.variables['x'][:]                   # x-coordinates of nodes
1239    y = fid.variables['y'][:]                   # y-coordinates of nodes
1240    elevation = fid.variables['elevation'][:]   # Elevation
1241    stage = fid.variables['stage'][:]           # Water level
1242    xmomentum = fid.variables['xmomentum'][:]   # Momentum in the x-direction
1243    ymomentum = fid.variables['ymomentum'][:]   # Momentum in the y-direction
1244
1245    # Mesh (nodes (Mx2), triangles (Nx3))
1246    nodes = num.concatenate((x[:,num.newaxis], y[:,num.newaxis]), axis=1)
1247    triangles = fid.variables['volumes'][:]
1248
1249    # Get geo_reference
1250    try:
1251        geo_reference = Geo_reference(NetCDFObject=fid)
1252    except: #AttributeError, e:
1253        # Sww files don't have to have a geo_ref
1254        geo_reference = None
1255
1256    if verbose: log.critical('    building mesh from sww file %s' % filename)
1257
1258    boundary = None
1259
1260    #FIXME (Peter Row): Should this be in mesh?
1261    if fid.smoothing != 'Yes':
1262        nodes = nodes.tolist()
1263        triangles = triangles.tolist()
1264        nodes, triangles, boundary = weed(nodes, triangles, boundary)
1265
1266    try:
1267        mesh = Mesh(nodes, triangles, boundary, geo_reference=geo_reference)
1268    except AssertionError, e:
1269        fid.close()
1270        msg = 'Domain could not be created: %s. "' % e
1271        raise DataDomainError, msg
1272
1273    quantities = {}
1274    quantities['elevation'] = elevation
1275    quantities['stage'] = stage
1276    quantities['xmomentum'] = xmomentum
1277    quantities['ymomentum'] = ymomentum
1278
1279    fid.close()
1280
1281    return mesh, quantities, time
1282
1283
1284
1285##
1286# @brief
1287# @parm time
1288# @param t
1289# @return An (index, ration) tuple.
1290def get_time_interp(time, t=None):
1291    #Finds the ratio and index for time interpolation.
1292    #It is borrowed from previous abstract_2d_finite_volumes code.
1293    if t is None:
1294        t=time[-1]
1295        index = -1
1296        ratio = 0.
1297    else:
1298        T = time
1299        tau = t
1300        index=0
1301        msg = 'Time interval derived from file %s [%s:%s]' \
1302              % ('FIXMEfilename', T[0], T[-1])
1303        msg += ' does not match model time: %s' % tau
1304        if tau < time[0]: raise DataTimeError, msg
1305        if tau > time[-1]: raise DataTimeError, msg
1306        while tau > time[index]: index += 1
1307        while tau < time[index]: index -= 1
1308        if tau == time[index]:
1309            #Protect against case where tau == time[-1] (last time)
1310            # - also works in general when tau == time[i]
1311            ratio = 0
1312        else:
1313            #t is now between index and index+1
1314            ratio = (tau - time[index])/(time[index+1] - time[index])
1315
1316    return (index, ratio)
1317
1318
1319
1320##
1321# @brief Interpolate a quantity wrt time.
1322# @param saved_quantity The quantity to interpolate.
1323# @param time_interp (index, ratio)
1324# @return A vector of interpolated values.
1325def interpolated_quantity(saved_quantity, time_interp):
1326    '''Given an index and ratio, interpolate quantity with respect to time.'''
1327
1328    index, ratio = time_interp
1329
1330    Q = saved_quantity
1331
1332    if ratio > 0:
1333        q = (1-ratio)*Q[index] + ratio*Q[index+1]
1334    else:
1335        q = Q[index]
1336
1337    #Return vector of interpolated values
1338    return q
1339
1340
1341##
1342# @brief
1343# @param coordinates
1344# @param volumes
1345# @param boundary
1346def weed(coordinates, volumes, boundary=None):
1347    if isinstance(coordinates, num.ndarray):
1348        coordinates = coordinates.tolist()
1349    if isinstance(volumes, num.ndarray):
1350        volumes = volumes.tolist()
1351
1352    unique = False
1353    point_dict = {}
1354    same_point = {}
1355    for i in range(len(coordinates)):
1356        point = tuple(coordinates[i])
1357        if point_dict.has_key(point):
1358            unique = True
1359            same_point[i] = point
1360            #to change all point i references to point j
1361        else:
1362            point_dict[point] = i
1363            same_point[i] = point
1364
1365    coordinates = []
1366    i = 0
1367    for point in point_dict.keys():
1368        point = tuple(point)
1369        coordinates.append(list(point))
1370        point_dict[point] = i
1371        i += 1
1372
1373    for volume in volumes:
1374        for i in range(len(volume)):
1375            index = volume[i]
1376            if index > -1:
1377                volume[i] = point_dict[same_point[index]]
1378
1379    new_boundary = {}
1380    if not boundary is None:
1381        for segment in boundary.keys():
1382            point0 = point_dict[same_point[segment[0]]]
1383            point1 = point_dict[same_point[segment[1]]]
1384            label = boundary[segment]
1385            #FIXME should the bounday attributes be concaterated
1386            #('exterior, pond') or replaced ('pond')(peter row)
1387
1388            if new_boundary.has_key((point0, point1)):
1389                new_boundary[(point0,point1)] = new_boundary[(point0, point1)]
1390
1391            elif new_boundary.has_key((point1, point0)):
1392                new_boundary[(point1,point0)] = new_boundary[(point1, point0)]
1393            else: new_boundary[(point0, point1)] = label
1394
1395        boundary = new_boundary
1396
1397    return coordinates, volumes, boundary
1398
1399
1400
Note: See TracBrowser for help on using the repository browser.