source: trunk/anuga_core/source/anuga/file/sww.py @ 7780

Last change: r7780, checked in by hudson ("Almost all failing tests fixed.")

1""" Classes to read an SWW file.
2"""
3
4from anuga.coordinate_transforms.geo_reference import Geo_reference
5from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a
6from anuga.config import netcdf_float, netcdf_float32, netcdf_int
7from anuga.config import max_float
from anuga.config import minimum_storable_height as \
     default_minimum_storable_height   # used by SWW_file.__init__ below
8from anuga.utilities.numerical_tools import ensure_numeric
9import anuga.utilities.log as log
10from Scientific.IO.NetCDF import NetCDFFile
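# NOTE: the exception classes raised further down (DataFileNotOpenError,
# NewQuantity, DataDomainError, DataMissingValuesError, DataTimeError) are
# not imported in this listing; they must come from ANUGA's exceptions
# module (location not shown here).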
11
12from anuga.coordinate_transforms.geo_reference import \
13        ensure_geo_reference
14
15from file_utils import create_filename
16import numpy as num
17
18##
19# @brief Generic class for storing output to e.g. visualisation or checkpointing
20class Data_format:
21    """Generic interface to data formats
22    """
23
24    ##
25    # @brief Instantiate this instance.
26    # @param domain
27    # @param extension
28    # @param mode The mode of the underlying file.
29    def __init__(self, domain, extension, mode=netcdf_mode_w):
30        assert mode[0] in ['r', 'w', 'a'], \
31               "Mode %s must be either:\n" % mode + \
32               "   'w' (write)\n" + \
33               "   'r' (read)\n" + \
34               "   'a' (append)"
35
36        #Create filename
37        self.filename = create_filename(domain.get_datadir(),
38                                        domain.get_name(), extension)
39
40        self.timestep = 0
41        self.domain = domain
42
43        # Exclude ghosts in case this is a parallel domain
44        self.number_of_nodes = domain.number_of_full_nodes
45        self.number_of_volumes = domain.number_of_full_triangles
46        #self.number_of_volumes = len(domain)
47
48        #FIXME: Should we have a general set_precision function?
49
50
51##
52# @brief Class for handling checkpoints data
53# @note This is not operational at the moment
54class CPT_file(Data_format):
55    """Interface to native NetCDF format (.cpt) to be
56    used for checkpointing (one day)
57    """
58
59    ##
60    # @brief Initialize this instantiation.
61    # @param domain The domain whose data will be stored.
62    # @param mode Mode of underlying data file (default WRITE).
63    def __init__(self, domain, mode=netcdf_mode_w):
64        from Scientific.IO.NetCDF import NetCDFFile
65
66        self.precision = netcdf_float #Use full precision
67
68        Data_format.__init__(self, domain, 'sww', mode)  # FIXME: class docstring says '.cpt'
69
70        # NetCDF file definition
71        fid = NetCDFFile(self.filename, mode)
72        if mode[0] == 'w':
73            # Create new file
74            fid.institution = 'Geoscience Australia'
75            fid.description = 'Checkpoint data'
76            #fid.smooth = domain.smooth
77            fid.order = domain.default_order
78
79            # Dimension definitions
80            fid.createDimension('number_of_volumes', self.number_of_volumes)
81            fid.createDimension('number_of_vertices', 3)
82
83            # Store info at all vertices (no smoothing)
84            fid.createDimension('number_of_points', 3*self.number_of_volumes)
85            fid.createDimension('number_of_timesteps', None) #extensible
86
87            # Variable definitions
88
89            # Mesh
90            fid.createVariable('x', self.precision, ('number_of_points',))
91            fid.createVariable('y', self.precision, ('number_of_points',))
92
93
94            fid.createVariable('volumes', netcdf_int, ('number_of_volumes',
95                                                       'number_of_vertices'))
96
97            fid.createVariable('time', self.precision, ('number_of_timesteps',))
98
99            #Allocate space for all quantities
100            for name in domain.quantities.keys():
101                fid.createVariable(name, self.precision,
102                                   ('number_of_timesteps',
103                                    'number_of_points'))
104
105        fid.close()
106
107    ##
108    # @brief Store connectivity data to underlying data file.
109    def store_checkpoint(self):
110        """Write x,y coordinates of triangles.
111        Write connectivity (
112        constituting
113        the bed elevation.
114        """
115
116        from Scientific.IO.NetCDF import NetCDFFile
117
118        domain = self.domain
119
120        #Get NetCDF
121        fid = NetCDFFile(self.filename, netcdf_mode_a)
122
123        # Get the variables
124        x = fid.variables['x']
125        y = fid.variables['y']
126
127        volumes = fid.variables['volumes']
128
129        # Get X, Y and bed elevation Z
130        Q = domain.quantities['elevation']
131        X,Y,Z,V = Q.get_vertex_values(xy=True, precision=self.precision)
132
133        x[:] = X.astype(self.precision)
134        y[:] = Y.astype(self.precision)
135        #z[:] = Z.astype(self.precision)  # disabled: no 'z' variable is created in this file (would raise NameError)
136
137        volumes[:] = V
138
139        fid.close()
140
141    ##
142    # @brief Store time and named quantities to underlying data file.
143    # @param name
144    def store_timestep(self, name):
145        """Store time and named quantity to file
146        """
147
148        from Scientific.IO.NetCDF import NetCDFFile
149        from time import sleep
150
151        #Get NetCDF
152        retries = 0
153        file_open = False
154        while not file_open and retries < 10:
155            try:
156                fid = NetCDFFile(self.filename, netcdf_mode_a)
157            except IOError:
158                #This could happen if someone was reading the file.
159                #In that case, wait a while and try again
160                msg = 'Warning (store_timestep): File %s could not be opened' \
161                      ' - trying again' % self.filename
162                log.critical(msg)
163                retries += 1
164                sleep(1)
165            else:
166                file_open = True
167
168        if not file_open:
169            msg = 'File %s could not be opened for append' % self.filename
170            raise DataFileNotOpenError, msg
171
172        domain = self.domain
173
174        # Get the variables
175        time = fid.variables['time']
176        target = fid.variables[name]  # variable for the named quantity (was hard-wired to 'stage')
177        i = len(time)
178
179        # Store time
180        time[i] = self.domain.time
181
182        # Get vertex values of the named quantity
183        Q = domain.quantities[name]
184        A,V = Q.get_vertex_values(xy=False, precision=self.precision)
185
186        target[i,:] = A.astype(self.precision)
187
188        #Flush and close
189        fid.sync()
190        fid.close()
191
192
193class SWW_file(Data_format):
194    """Interface to native NetCDF format (.sww) for storing model output
195
196    There are two kinds of data
197
198    1: Constant data: Vertex coordinates and field values. Stored once
199    2: Variable data: Conserved quantities. Stored once per timestep.
200
201    All data is assumed to reside at vertex locations.
202    """
203
204    ##
205    # @brief Instantiate this instance.
206    # @param domain The domain whose data will be stored.
207    # @param mode Mode of the underlying data file.
208    # @param max_size Maximum file size (bytes) before output rolls over to a new file.
209    # @param recursion Used internally when the output is split across files.
210    # @note Prepare the underlying data file if mode starts with 'w'.
211    def __init__(self, domain, 
212                 mode=netcdf_mode_w, max_size=2000000000, recursion=False):
213        from Scientific.IO.NetCDF import NetCDFFile
214
215        self.precision = netcdf_float32 # Use single precision for quantities
216        self.recursion = recursion
217        self.mode = mode
218        if hasattr(domain, 'max_size'):
219            self.max_size = domain.max_size # File size max is 2Gig
220        else:
221            self.max_size = max_size
222        if hasattr(domain, 'minimum_storable_height'):
223            self.minimum_storable_height = domain.minimum_storable_height
224        else:
225            self.minimum_storable_height = default_minimum_storable_height
226
227        # Call parent constructor
228        Data_format.__init__(self, domain, 'sww', mode)
229
230        # Get static and dynamic quantities from domain
231        static_quantities = []
232        dynamic_quantities = []
233       
234        for q in domain.quantities_to_be_stored:
235            flag = domain.quantities_to_be_stored[q]
236       
237            msg = 'Quantity %s is requested to be stored ' % q
238            msg += 'but it does not exist in domain.quantities'
239            assert q in domain.quantities, msg
240       
241            assert flag in [1,2]
242            if flag == 1: static_quantities.append(q)
243            if flag == 2: dynamic_quantities.append(q)               
244                       
245       
246        # NetCDF file definition
247        fid = NetCDFFile(self.filename, mode)
248        if mode[0] == 'w':
249            description = 'Output from anuga.abstract_2d_finite_volumes ' \
250                          'suitable for plotting'
251                         
252            self.writer = Write_sww(static_quantities, dynamic_quantities)
253            self.writer.store_header(fid,
254                                     domain.starttime,
255                                     self.number_of_volumes,
256                                     self.domain.number_of_full_nodes,
257                                     description=description,
258                                     smoothing=domain.smooth,
259                                     order=domain.default_order,
260                                     sww_precision=self.precision)
261
262            # Extra optional information
263            if hasattr(domain, 'texture'):
264                fid.texture = domain.texture
265
266            if domain.quantities_to_be_monitored is not None:
267                fid.createDimension('singleton', 1)
268                fid.createDimension('two', 2)
269
270                poly = domain.monitor_polygon
271                if poly is not None:
272                    N = len(poly)
273                    fid.createDimension('polygon_length', N)
274                    fid.createVariable('extrema.polygon',
275                                       self.precision,
276                                       ('polygon_length', 'two'))
277                    fid.variables['extrema.polygon'][:] = poly
278
279                interval = domain.monitor_time_interval
280                if interval is not None:
281                    fid.createVariable('extrema.time_interval',
282                                       self.precision,
283                                       ('two',))
284                    fid.variables['extrema.time_interval'][:] = interval
285
286                for q in domain.quantities_to_be_monitored:
287                    fid.createVariable(q + '.extrema', self.precision,
288                                       ('numbers_in_range',))
289                    fid.createVariable(q + '.min_location', self.precision,
290                                       ('numbers_in_range',))
291                    fid.createVariable(q + '.max_location', self.precision,
292                                       ('numbers_in_range',))
293                    fid.createVariable(q + '.min_time', self.precision,
294                                       ('singleton',))
295                    fid.createVariable(q + '.max_time', self.precision,
296                                       ('singleton',))
297
298        fid.close()
299
300    ##
301    # @brief Store connectivity data into the underlying data file.
302    def store_connectivity(self):
303        """Store information about nodes, triangles and static quantities
304
305        Writes x,y coordinates of triangles and their connectivity.
306       
307        Store also any quantity that has been identified as static.
308        """
309
310        # FIXME: Change name to reflect the fact that this function
311        # stores both connectivity (triangulation) and static quantities
312       
313        from Scientific.IO.NetCDF import NetCDFFile
314
315        domain = self.domain
316
317        # append to the NetCDF file
318        fid = NetCDFFile(self.filename, netcdf_mode_a)
319
320        # Get X, Y from one (any) of the quantities
321        Q = domain.quantities.values()[0]
322        X,Y,_,V = Q.get_vertex_values(xy=True, precision=self.precision)
323
324        # store the connectivity data
325        points = num.concatenate((X[:,num.newaxis],Y[:,num.newaxis]), axis=1)
326        self.writer.store_triangulation(fid,
327                                        points,
328                                        V.astype(num.float32),
329                                        points_georeference=\
330                                            domain.geo_reference)
331
332
333        # Get names of static quantities
334        static_quantities = {}
335        for name in self.writer.static_quantities:
336            Q = domain.quantities[name]
337            A, _ = Q.get_vertex_values(xy=False, 
338                                       precision=self.precision)
339            static_quantities[name] = A
340       
341        # Store static quantities       
342        self.writer.store_static_quantities(fid, **static_quantities)
343                                           
344        fid.close()
345
346    ##
347    # @brief Store time and time dependent quantities
348    # to the underlying data file.
349    def store_timestep(self):
350        """Store time and time dependent quantities
351        """
352
353        from Scientific.IO.NetCDF import NetCDFFile
354        import types
355        from time import sleep
356        from os import stat
357
358        # Get NetCDF
359        retries = 0
360        file_open = False
361        while not file_open and retries < 10:
362            try:
363                # Open existing file
364                fid = NetCDFFile(self.filename, netcdf_mode_a)
365            except IOError:
366                # This could happen if someone was reading the file.
367                # In that case, wait a while and try again
368                msg = 'Warning (store_timestep): File %s could not be opened' \
369                      % self.filename
370                msg += ' - trying step %s again' % self.domain.time
371                log.critical(msg)
372                retries += 1
373                sleep(1)
374            else:
375                file_open = True
376
377        if not file_open:
378            msg = 'File %s could not be opened for append' % self.filename
379            raise DataFileNotOpenError, msg
380
381        # Check to see if the file is already too big:
382        time = fid.variables['time']
383        i = len(time) + 1
384        file_size = stat(self.filename)[6]
385        file_size_increase = file_size / i
386        if file_size + file_size_increase > self.max_size * 2**self.recursion:
387            # In order to get the file name and start time correct,
388            # I change the domain.filename and domain.starttime.
389            # This is the only way to do this without changing
390            # other modules (I think).
391
392            # Write a filename addon that won't break the anuga viewers
393            # (10.sww is bad)
394            filename_ext = '_time_%s' % self.domain.time
395            filename_ext = filename_ext.replace('.', '_')
396
397            # Remember the old filename, then give domain a
398            # name with the extension
399            old_domain_filename = self.domain.get_name()
400            if not self.recursion:
401                self.domain.set_name(old_domain_filename + filename_ext)
402
403            # Temporarily change the domain starttime to the current time
404            old_domain_starttime = self.domain.starttime
405            self.domain.starttime = self.domain.get_time()
406
407            # Build a new data_structure.
408            next_data_structure = SWW_file(self.domain, mode=self.mode,
409                                           max_size=self.max_size,
410                                           recursion=self.recursion+1)
411            if not self.recursion:
412                log.critical('    file_size = %s' % file_size)
413                log.critical('    saving file to %s'
414                             % next_data_structure.filename) 
415
416            # Set up the new data_structure
417            self.domain.writer = next_data_structure
418
419            # Store connectivity and first timestep
420            next_data_structure.store_connectivity()
421            next_data_structure.store_timestep()
422            fid.sync()
423            fid.close()
424
425            # Restore the old starttime and filename
426            self.domain.starttime = old_domain_starttime
427            self.domain.set_name(old_domain_filename)
428        else:
429            self.recursion = False
430            domain = self.domain
431
432            # Get the variables
433            time = fid.variables['time']
434            i = len(time)
435             
436            if 'stage' in self.writer.dynamic_quantities:           
437                # Select only those values for stage,
438                # xmomentum and ymomentum (if stored) where
439                # depth exceeds minimum_storable_height
440                #
441                # In this branch it is assumed that elevation
442                # is also available as a quantity           
443           
444                Q = domain.quantities['stage']
445                w, _ = Q.get_vertex_values(xy=False)
446               
447                Q = domain.quantities['elevation']
448                z, _ = Q.get_vertex_values(xy=False)               
449               
450                storable_indices = (w-z >= self.minimum_storable_height)
451            else:
452                # Very unlikely branch
453                storable_indices = None # This means take all
454           
455           
456            # Now store dynamic quantities
457            dynamic_quantities = {}
458            for name in self.writer.dynamic_quantities:
459                netcdf_array = fid.variables[name]
460               
461                Q = domain.quantities[name]
462                A, _ = Q.get_vertex_values(xy=False,
463                                           precision=self.precision)
464
465                if storable_indices is not None:
466                    if name == 'stage':
467                        A = num.choose(storable_indices, (z, A))
468
469                    if name in ['xmomentum', 'ymomentum']:
470                        # Get xmomentum where depth exceeds
471                        # minimum_storable_height
472                       
473                        # Define a zero vector of same size and type as A
474                        # for use with momenta
475                        null = num.zeros(num.size(A), A.dtype.char)
476                        A = num.choose(storable_indices, (null, A))
477               
478                dynamic_quantities[name] = A
479               
480                                       
481            # Store dynamic quantities
482            self.writer.store_quantities(fid,
483                                         time=self.domain.time,
484                                         sww_precision=self.precision,
485                                         **dynamic_quantities)
486
487
488            # Update extrema if requested
489            domain = self.domain
490            if domain.quantities_to_be_monitored is not None:
491                for q, info in domain.quantities_to_be_monitored.items():
492                    if info['min'] is not None:
493                        fid.variables[q + '.extrema'][0] = info['min']
494                        fid.variables[q + '.min_location'][:] = \
495                                        info['min_location']
496                        fid.variables[q + '.min_time'][0] = info['min_time']
497
498                    if info['max'] is not None:
499                        fid.variables[q + '.extrema'][1] = info['max']
500                        fid.variables[q + '.max_location'][:] = \
501                                        info['max_location']
502                        fid.variables[q + '.max_time'][0] = info['max_time']
503
504            # Flush and close
505            fid.sync()
506            fid.close()
507
508
509##
510# @brief Class to open an sww file so that domain can be populated with quantity values
511class Read_sww:
512
513    def __init__(self, source):
514        """The source parameter is assumed to be a NetCDF sww file.
515        """
516
517        self.source = source
518
519        self.frame_number = 0
520
521        fin = NetCDFFile(self.source, 'r')
522
523        self.time = num.array(fin.variables['time'], num.float)
524        self.last_frame_number = self.time.shape[0] - 1
525
526        self.frames = num.arange(self.last_frame_number+1)
527
528        fin.close()
529       
530        self.read_mesh()
531
532        self.quantities = {}
533
534        self.read_quantities()
535
536
537    def read_mesh(self):
538        fin = NetCDFFile(self.source, 'r')
539
540        self.vertices = num.array(fin.variables['volumes'], num.int)
541       
542        self.x = x = num.array(fin.variables['x'], num.float)
543        self.y = y = num.array(fin.variables['y'], num.float)
544
545        assert len(self.x) == len(self.y)
546       
547        self.xmin = num.min(x)
548        self.xmax = num.max(x)
549        self.ymin = num.min(y)
550        self.ymax = num.max(y)
551
552
553
554        fin.close()
555       
556    def read_quantities(self, frame_number=0):
557
558        assert frame_number >= 0 and frame_number <= self.last_frame_number
559
560        self.frame_number = frame_number
561
562        M = len(self.x)/3
563       
564        fin = NetCDFFile(self.source, 'r')
565       
566        for q in filter(lambda n:n != 'x' and n != 'y' and n != 'time' and n != 'volumes' and \
567                        '_range' not in n, \
568                        fin.variables.keys()):
569            if len(fin.variables[q].shape) == 1: # Not a time-varying quantity
570                self.quantities[q] = num.ravel(num.array(fin.variables[q], num.float)).reshape(M,3)
571            else: # Time-varying, get the current timestep data
572                self.quantities[q] = num.array(fin.variables[q][self.frame_number], num.float).reshape(M,3)
573        fin.close()
574        return self.quantities
575
576    def get_bounds(self):
577        return [self.xmin, self.xmax, self.ymin, self.ymax]
578
579    def get_last_frame_number(self):
580        return self.last_frame_number
581
582    def get_time(self):
583        return self.time[self.frame_number]
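    # Usage sketch (the file name 'runup.sww' is illustrative only):
    #
    #     reader = Read_sww('runup.sww')
    #     print reader.get_bounds()              # [xmin, xmax, ymin, ymax]
    #     print reader.get_last_frame_number()
    #     quantities = reader.read_quantities(frame_number=3)
    #     stage = quantities['stage']            # (M, 3) array of vertex values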
584
585
586# @brief A class to write an SWW file.
587class Write_sww:
588   
589    RANGE = '_range'
590    EXTREMA = ':extrema'
591
592    ##
593    # @brief Instantiate the SWW writer class.
594    def __init__(self, static_quantities, dynamic_quantities):
595        """Initialise Write_sww with two list af quantity names:
596       
597        static_quantities (e.g. elevation or friction):
598            Stored once at the beginning of the simulation in a 1D array
599            of length number_of_points   
600        dynamic_quantities (e.g stage):
601            Stored every timestep in a 2D array with
602            dimensions number_of_points X number_of_timesteps       
603       
604        """
605        self.static_quantities = static_quantities   
606        self.dynamic_quantities = dynamic_quantities
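        # Example (sketch): the standard shallow-water set-up would be
        #
        #     writer = Write_sww(['elevation'],
        #                        ['stage', 'xmomentum', 'ymomentum'])
        #
        # after which store_header, store_triangulation and store_quantities
        # are called on an open NetCDF file (see SWW_file above).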
607
608
609    ##
610    # @brief Store a header in the SWW file.
611    # @param outfile Open handle to the file that will be written.
612    # @param times A list of time slices *or* a start time.
613    # @param number_of_volumes The number of triangles.
614    # @param number_of_points The number of points.
615    # @param description The internal file description string.
616    # @param smoothing True if smoothing is to be used.
617    # @param order
618    # @param sww_precision Data type of the quantity written (netcdf constant)
619    # @param verbose True if this function is to be verbose.
620    # @note If 'times' is a list, the info will be made relative.
621    def store_header(self,
622                     outfile,
623                     times,
624                     number_of_volumes,
625                     number_of_points,
626                     description='Generated by ANUGA',
627                     smoothing=True,
628                     order=1,
629                     sww_precision=netcdf_float32,
630                     verbose=False):
631        """Write an SWW file header.
632
633        outfile - the open file that will be written
634        times - A list of the time slice times OR a start time
635        Note, if a list is given the info will be made relative.
636        number_of_volumes - the number of triangles
637        """
638
639        from anuga.abstract_2d_finite_volumes.util \
640            import get_revision_number
641
642        outfile.institution = 'Geoscience Australia'
643        outfile.description = description
644
645        # For sww compatibility
646        if smoothing is True:
647            # Smoothing to be deprecated
648            outfile.smoothing = 'Yes'
649            outfile.vertices_are_stored_uniquely = 'False'
650        else:
651            # Smoothing to be deprecated
652            outfile.smoothing = 'No'
653            outfile.vertices_are_stored_uniquely = 'True'
654        outfile.order = order
655
656        try:
657            revision_number = get_revision_number()
658        except:
659            # This will be triggered if the system cannot get the SVN
660            # revision number.
661            revision_number = None
662        # Allow None to be stored as a string
663        outfile.revision_number = str(revision_number)
664
665        # This is used to separate a single number from a list, i.e.
666        # to distinguish lists/arrays of times from a scalar start time.
667        if isinstance(times, (list, num.ndarray)):
668            number_of_times = len(times)
669            times = ensure_numeric(times)
670            if number_of_times == 0:
671                starttime = 0
672            else:
673                starttime = times[0]
674                times = times - starttime  #Store relative times
675        else:
676            number_of_times = 0
677            starttime = times
678
679
680        outfile.starttime = starttime
681
682        # dimension definitions
683        outfile.createDimension('number_of_volumes', number_of_volumes)
684        outfile.createDimension('number_of_vertices', 3)
685        outfile.createDimension('numbers_in_range', 2)
686
687        if smoothing is True:
688            outfile.createDimension('number_of_points', number_of_points)
689            # FIXME(Ole): This will cause sww files for parallel domains to
690            # have ghost nodes stored (but not used by triangles).
691            # To clean this up, we have to change get_vertex_values and
692            # friends in quantity.py (but I can't be bothered right now)
693        else:
694            outfile.createDimension('number_of_points', 3*number_of_volumes)
695
696        outfile.createDimension('number_of_timesteps', number_of_times)
697
698        # variable definitions
699        outfile.createVariable('x', sww_precision, ('number_of_points',))
700        outfile.createVariable('y', sww_precision, ('number_of_points',))
701
702        outfile.createVariable('volumes', netcdf_int, ('number_of_volumes',
703                                                       'number_of_vertices'))
704
705        # Doing sww_precision instead of Float gives cast errors.
706        outfile.createVariable('time', netcdf_float,
707                               ('number_of_timesteps',))
708
709                               
710        for q in self.static_quantities:
711           
712            outfile.createVariable(q, sww_precision,
713                                   ('number_of_points',))
714           
715            outfile.createVariable(q + Write_sww.RANGE, sww_precision,
716                                   ('numbers_in_range',))
717                                   
718            # Initialise ranges with small and large sentinels.
719            # If this was in pure Python we could have used None sensibly
720            outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min
721            outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max
722
723        #if 'elevation' in self.static_quantities:   
724        #    # FIXME: Backwards compat - get rid of z once old view has retired
725        #    outfile.createVariable('z', sww_precision,
726        #                           ('number_of_points',))
727                               
728        for q in self.dynamic_quantities:
729            outfile.createVariable(q, sww_precision, ('number_of_timesteps',
730                                                      'number_of_points'))
731            outfile.createVariable(q + Write_sww.RANGE, sww_precision,
732                                   ('numbers_in_range',))
733
734            # Initialise ranges with small and large sentinels.
735            # If this was in pure Python we could have used None sensibly
736            outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min
737            outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max
738
739        if isinstance(times, (list, num.ndarray)):
740            outfile.variables['time'][:] = times    # Store time relative
741
742        if verbose:
743            log.critical('------------------------------------------------')
744            log.critical('Statistics:')
745            log.critical('    t in [%f, %f], len(t) == %d'
746                         % (num.min(times), num.max(times), len(times.flat)))
747
748    ##
749    # @brief Store triangulation data in the underlying file.
750    # @param outfile Open handle to underlying file.
751    # @param points_utm List or array of points in UTM.
752    # @param volumes
753    # @param zone
754    # @param new_origin georeference that the points can be set to.
755    # @param points_georeference The georeference of the points_utm.
756    # @param verbose True if this function is to be verbose.
757    def store_triangulation(self,
758                            outfile,
759                            points_utm,
760                            volumes,
761                            zone=None, 
762                            new_origin=None,
763                            points_georeference=None, 
764                            verbose=False):
765        """
766        new_origin - a georeference that the points can be set to. (Maybe
767        do this before calling this function.)
768
769        points_utm - currently a list or array of the points in UTM.
770        points_georeference - the georeference of the points_utm
771
772        If you pass both new_origin and the current origin
773        (points_georeference), a conversion from the old georeference
774        to the new one is performed.
775
776        If you only pass new_origin, the points are absolute and are
777        converted to relative.
778
779        If you only pass the current origin, the points are relative and
780        are stored as relative.
781
782        If you pass no georefs, a new georef is created based on the
783        minima of points_utm (another option would be to default to
784        absolute); this is done in another part of the code, probably
785        geospatial.
786
787        If you don't supply either georef, supply a zone; otherwise
788        the default zone will be used.
789
790        Precondition:
791            store_header has been called.
792        """
793
794        number_of_points = len(points_utm)
795        volumes = num.array(volumes)
796        points_utm = num.array(points_utm)
797
798        # Given the two geo_refs and the points, do the stuff
799        # described in the method header
800        # if this is needed else where, pull out as a function
801        points_georeference = ensure_geo_reference(points_georeference)
802        new_origin = ensure_geo_reference(new_origin)
803        if new_origin is None and points_georeference is not None:
804            points = points_utm
805            geo_ref = points_georeference
806        else:
807            if new_origin is None:
808                new_origin = Geo_reference(zone, min(points_utm[:,0]),
809                                                 min(points_utm[:,1]))
810            points = new_origin.change_points_geo_ref(points_utm,
811                                                      points_georeference)
812            geo_ref = new_origin
813
814        # At this stage I need a georef and points
815        # the points are relative to the georef
816        geo_ref.write_NetCDF(outfile)
817
818        # This will put the geo ref in the middle
819        #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
820
821        x =  points[:,0]
822        y =  points[:,1]
823
824        if verbose:
825            log.critical('------------------------------------------------')
826            log.critical('More Statistics:')
827            log.critical('  Extent (x/y):')
828            log.critical('    x in [%f, %f], len(x) == %d'
829                         % (min(x), max(x), len(x)))
830            log.critical('    y in [%f, %f], len(y) == %d'
831                         % (min(y), max(y), len(y)))
832            #log.critical('    z in [%f, %f], len(z) == %d'
833            #             % (min(elevation), max(elevation), len(elevation)))
834            log.critical('geo_ref: %s' % str(geo_ref))
835            log.critical('------------------------------------------------')
836
837        outfile.variables['x'][:] = points[:,0] #- geo_ref.get_xllcorner()
838        outfile.variables['y'][:] = points[:,1] #- geo_ref.get_yllcorner()
839        outfile.variables['volumes'][:] = volumes.astype(num.int32) #On Opteron 64
840
841
842
843    # @brief Write the static quantity data to the underlying file.
844    # @param outfile Handle to open underlying file.
845    # @param sww_precision Format of quantity data to write (default Float32).
846    # @param verbose True if this function is to be verbose.
847    # @param **quant
848    def store_static_quantities(self, 
849                                outfile, 
850                                sww_precision=num.float32,
851                                verbose=False, 
852                                **quant):
853        """
854        Write the static quantity info.
855
856        **quant is extra keyword arguments passed in. These must be
857          the numpy arrays to be stored in the sww file at each timestep.
858
859        The argument sww_precision allows for storing as either
860        * single precision (default): num.float32
861        * double precision: num.float64 or num.float
862
863        Precondition:
864            store_triangulation and
865            store_header have been called.
866        """
867
868        # The dictionary quant must contain numpy arrays for each name.
869        # These will typically be quantities from Domain such as friction
870        #
871        # Arrays not listed in static_quantities will be ignored, silently.
872        #
873        # This method will also write the ranges for each quantity,
874        # e.g. stage_range, xmomentum_range and ymomentum_range
875        for q in self.static_quantities:
876            if not quant.has_key(q):
877                msg = 'Values for quantity %s were not specified in ' % q
878                msg += 'store_static_quantities so they cannot be stored.'
879                raise NewQuantity, msg
880            else:
881                q_values = ensure_numeric(quant[q])
882               
883                x = q_values.astype(sww_precision)
884                outfile.variables[q][:] = x
885       
886                # This populates the _range values
887                outfile.variables[q + Write_sww.RANGE][0] = num.min(x)
888                outfile.variables[q + Write_sww.RANGE][1] = num.max(x)
889                   
890        # FIXME: Hack for backwards compatibility with old viewer
891        #if 'elevation' in self.static_quantities:
892        #    outfile.variables['z'][:] = outfile.variables['elevation'][:]
893
894                   
895                   
896       
897       
898    ##
899    # @brief Write the quantity data to the underlying file.
900    # @param outfile Handle to open underlying file.
901    # @param sww_precision Format of quantity data to write (default Float32).
902    # @param slice_index
903    # @param time
904    # @param verbose True if this function is to be verbose.
905    # @param **quant
906    def store_quantities(self, 
907                         outfile, 
908                         sww_precision=num.float32,
909                         slice_index=None,
910                         time=None,
911                         verbose=False, 
912                         **quant):
913        """
914        Write the quantity info at each timestep.
915
916        **quant is extra keyword arguments passed in. These must be
917          the numpy arrays to be stored in the sww file at each timestep.
918
919        If the time array has already been built, use slice_index
920        to specify the index.
921
922        Otherwise, pass time to extend the time dimension.
923
924        Maybe make this general, but the viewer assumes these quantities,
925        so maybe we don't want it general - unless the viewer is general
926       
927        The argument sww_precision allows for storing as either
928        * single precision (default): num.float32
929        * double precision: num.float64 or num.float
930
931        Precondition:
932            store_triangulation and
933            store_header have been called.
934        """
935
936        if time is not None:
937            file_time = outfile.variables['time']
938            slice_index = len(file_time)
939            file_time[slice_index] = time
940        else:
941            slice_index = int(slice_index) # Has to be cast in case it was numpy.int   
942
943        # Write the named dynamic quantities
944        # The dictionary quant must contain numpy arrays for each name.
945        # These will typically be the conserved quantities from Domain
946        # (Typically stage,  xmomentum, ymomentum).
947        #
948        # Arrays not listed in dynamic_quantities will be ignored, silently.
949        #
950        # This method will also write the ranges for each quantity,
951        # e.g. stage_range, xmomentum_range and ymomentum_range
952        for q in self.dynamic_quantities:
953            if not quant.has_key(q):
954                msg = 'Values for quantity %s were not specified in ' % q
955                msg += 'store_quantities so they cannot be stored.'
956                raise NewQuantity, msg
957            else:
958                q_values = ensure_numeric(quant[q])
959               
960                x = q_values.astype(sww_precision)
961                outfile.variables[q][slice_index] = x
962                   
963       
964                # This updates the _range values
965                q_range = outfile.variables[q + Write_sww.RANGE][:]
966                q_values_min = num.min(q_values)
967                if q_values_min < q_range[0]:
968                    outfile.variables[q + Write_sww.RANGE][0] = q_values_min
969                q_values_max = num.max(q_values)
970                if q_values_max > q_range[1]:
971                    outfile.variables[q + Write_sww.RANGE][1] = q_values_max
972
973    ##
974    # @brief Print the quantities in the underlying file.
975    # @param outfile UNUSED.
976    def verbose_quantities(self, outfile):
977        log.critical('------------------------------------------------')
978        log.critical('More Statistics:')
979        for q in self.dynamic_quantities:
980            log.critical('%s in [%f, %f]'
981                         % (q, outfile.variables[q+Write_sww.RANGE][0],
982                            outfile.variables[q+Write_sww.RANGE][1]))
983        log.critical('------------------------------------------------')
984
985
986
987
988##
989# @brief Get the extents of a NetCDF data file.
990# @param file_name The path to the SWW file.
991# @return A list of x, y and stage limits (min, max).
992def extent_sww(file_name):
993    """Read in an sww file.
994
995    Input:
996    file_name - the sww file
997
998    Output:
999    A list: [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
1000    """
1001
1002    from Scientific.IO.NetCDF import NetCDFFile
1003
1004    #Get NetCDF
1005    fid = NetCDFFile(file_name, netcdf_mode_r)
1006
1007    # Get the variables
1008    x = fid.variables['x'][:]
1009    y = fid.variables['y'][:]
1010    stage = fid.variables['stage'][:]
1011
1012    fid.close()
1013
1014    return [min(x), max(x), min(y), max(y), num.min(stage), num.max(stage)]
1015
1016
1017##
1018# @brief Load an sww file and reconstruct a shallow water Domain from it.
1019# @param filename Path of the sww file to read.
1020# @param boundary Optional boundary dictionary to attach to the domain.
1021# @param t Time at which quantities are taken (default is last time in file).
1022# @param fail_if_NaN If True, raise an error when a quantity contains no_data entries.
1023# @param NaN_filler Value used to replace no_data entries when fail_if_NaN is False.
1024# @param verbose True if this function is to be verbose.
1025# @param very_verbose True for even more detailed logging.
1026# @return A Domain object populated with the quantities from the file.
1027def load_sww_as_domain(filename, boundary=None, t=None,
1028               fail_if_NaN=True, NaN_filler=0,
1029               verbose=False, very_verbose=False):
1030    """
1031    Usage: domain = load_sww_as_domain('file.sww',
1032                        t=time (default = last time in file))
1033
1034    Boundary is not recommended if domain.smooth is not selected, as it
1035    uses unique coordinates, but not unique boundaries. This means that
1036    the boundary file will not be compatible with the coordinates, and will
1037    give a different final boundary, or crash.
1038    """
1039   
1040    from Scientific.IO.NetCDF import NetCDFFile
1041    from anuga.shallow_water.shallow_water_domain import Domain
1042
1043    # initialise NaN.
1044    NaN = 9.969209968386869e+036
1045
1046    if verbose: log.critical('Reading from %s' % filename)
1047
1048    fid = NetCDFFile(filename, netcdf_mode_r)    # Open existing file for read
1049    time = fid.variables['time']       # Timesteps
1050    if t is None:
1051        t = time[-1]
1052    time_interp = get_time_interp(time,t)
1053
1054    # Get the variables as numeric arrays
1055    x = fid.variables['x'][:]                   # x-coordinates of vertices
1056    y = fid.variables['y'][:]                   # y-coordinates of vertices
1057    elevation = fid.variables['elevation']      # Elevation
1058    stage = fid.variables['stage']              # Water level
1059    xmomentum = fid.variables['xmomentum']      # Momentum in the x-direction
1060    ymomentum = fid.variables['ymomentum']      # Momentum in the y-direction
1061
1062    starttime = fid.starttime[0]
1063    volumes = fid.variables['volumes'][:]       # Connectivity
1064    coordinates = num.transpose(num.asarray([x.tolist(), y.tolist()]))
1065    # FIXME (Ole): Something like this might be better:
1066    #                 concatenate((x, y), axis=1)
1067    # or              concatenate((x[:,num.newaxis], x[:,num.newaxis]), axis=1)
1068
1069    conserved_quantities = []
1070    interpolated_quantities = {}
1071    other_quantities = []
1072
1073    # get geo_reference
1074    try:                             # sww files don't have to have a geo_ref
1075        geo_reference = Geo_reference(NetCDFObject=fid)
1076    except: # AttributeError, e:
1077        geo_reference = None
1078
1079    if verbose: log.critical('    getting quantities')
1080
1081    for quantity in fid.variables.keys():
1082        dimensions = fid.variables[quantity].dimensions
1083        if 'number_of_timesteps' in dimensions:
1084            conserved_quantities.append(quantity)
1085            interpolated_quantities[quantity] = \
1086                  interpolated_quantity(fid.variables[quantity][:], time_interp)
1087        else:
1088            other_quantities.append(quantity)
1089
1090    other_quantities.remove('x')
1091    other_quantities.remove('y')
1092    #other_quantities.remove('z')
1093    other_quantities.remove('volumes')
1094    try:
1095        other_quantities.remove('stage_range')
1096        other_quantities.remove('xmomentum_range')
1097        other_quantities.remove('ymomentum_range')
1098        other_quantities.remove('elevation_range')
1099    except:
1100        pass
1101
1102    conserved_quantities.remove('time')
1103
1104    if verbose: log.critical('    building domain')
1105
1106    #    From domain.Domain:
1107    #    domain = Domain(coordinates, volumes,\
1108    #                    conserved_quantities = conserved_quantities,\
1109    #                    other_quantities = other_quantities,zone=zone,\
1110    #                    xllcorner=xllcorner, yllcorner=yllcorner)
1111
1112    # From shallow_water.Domain:
1113    coordinates = coordinates.tolist()
1114    volumes = volumes.tolist()
1115    # FIXME:should this be in mesh? (peter row)
1116    if fid.smoothing == 'Yes':
1117        unique = False
1118    else:
1119        unique = True
1120    if unique:
1121        coordinates, volumes, boundary = weed(coordinates, volumes,boundary)
1122
1123     
1124   
1125    try:
1126        domain = Domain(coordinates, volumes, boundary)
1127    except AssertionError, e:
1128        fid.close()
1129        msg = 'Domain could not be created: %s. ' \
1130              'Perhaps use "fail_if_NaN=False and NaN_filler = ..."' % e
1131        raise DataDomainError, msg
1132
1133    if not boundary is None:
1134        domain.boundary = boundary
1135
1136    domain.geo_reference = geo_reference
1137
1138    domain.starttime = float(starttime) + float(t)
1139    domain.time = 0.0
1140
1141    for quantity in other_quantities:
1142        try:
1143            NaN = fid.variables[quantity].missing_value
1144        except:
1145            pass                       # quantity has no missing_value number
1146        X = fid.variables[quantity][:]
1147        if very_verbose:
1148            log.critical('       %s' % str(quantity))
1149            log.critical('        NaN = %s' % str(NaN))
1150            log.critical('        max(X)')
1151            log.critical('       %s' % str(max(X)))
1152            log.critical('        max(X)==NaN')
1153            log.critical('       %s' % str(max(X)==NaN))
1154            log.critical('')
1155        if max(X) == NaN or min(X) == NaN:
1156            if fail_if_NaN:
1157                msg = 'quantity "%s" contains no_data entry' % quantity
1158                raise DataMissingValuesError, msg
1159            else:
1160                data = (X != NaN)
1161                X = (X*data) + (data==0)*NaN_filler
1162        if unique:
1163            X = num.resize(X, (len(X)/3, 3))
1164        domain.set_quantity(quantity, X)
1165    #
1166    for quantity in conserved_quantities:
1167        try:
1168            NaN = fid.variables[quantity].missing_value
1169        except:
1170            pass                       # quantity has no missing_value number
1171        X = interpolated_quantities[quantity]
1172        if very_verbose:
1173            log.critical('       %s' % str(quantity))
1174            log.critical('        NaN = %s' % str(NaN))
1175            log.critical('        max(X)')
1176            log.critical('       %s' % str(max(X)))
1177            log.critical('        max(X)==NaN')
1178            log.critical('       %s' % str(max(X)==NaN))
1179            log.critical('')
1180        if max(X) == NaN or min(X) == NaN:
1181            if fail_if_NaN:
1182                msg = 'quantity "%s" contains no_data entry' % quantity
1183                raise DataMissingValuesError, msg
1184            else:
1185                data = (X != NaN)
1186                X = (X*data) + (data==0)*NaN_filler
1187        if unique:
1188            X = num.resize(X, (X.shape[0]/3, 3))
1189        domain.set_quantity(quantity, X)
1190
1191    fid.close()
1192
1193    return domain
1194
1195
1196##
1197# @brief Get mesh and quantity data from an SWW file.
1198# @param filename Path to data file to read.
1199# @param quantities UNUSED!
1200# @param verbose True if this function is to be verbose.
1201# @return (mesh, quantities, time) where mesh is the mesh data, quantities is
1202#         a dictionary of {name: value}, and time is the time vector.
1203# @note Quantities extracted: 'elevation', 'stage', 'xmomentum' and 'ymomentum'
1204def get_mesh_and_quantities_from_file(filename,
1205                                      quantities=None,
1206                                      verbose=False):
1207    """Get and rebuild mesh structure and associated quantities from sww file
1208
1209    Input:
1210        filename - Name of sww file
1211        quantities - Names of quantities to load
1212
1213    Output:
1214        mesh - instance of class Interpolate
1215               (including mesh and interpolation functionality)
1216        quantities - arrays with quantity values at each mesh node
1217        time - vector of stored timesteps
1218
1219    This function is used by e.g.:
1220        get_interpolated_quantities_at_polyline_midpoints
1221    """
1222
1223    # FIXME (Ole): Maybe refactor filefunction using this more fundamental code.
1224
1225    import types
1226    from Scientific.IO.NetCDF import NetCDFFile
1227    from anuga.abstract_2d_finite_volumes.neighbour_mesh import Mesh
1228
1229    if verbose: log.critical('Reading from %s' % filename)
1230
1231    fid = NetCDFFile(filename, netcdf_mode_r)    # Open existing file for read
1232    time = fid.variables['time'][:]    # Time vector
1233    time += fid.starttime[0]
1234
1235    # Get the variables as numeric arrays
1236    x = fid.variables['x'][:]                   # x-coordinates of nodes
1237    y = fid.variables['y'][:]                   # y-coordinates of nodes
1238    elevation = fid.variables['elevation'][:]   # Elevation
1239    stage = fid.variables['stage'][:]           # Water level
1240    xmomentum = fid.variables['xmomentum'][:]   # Momentum in the x-direction
1241    ymomentum = fid.variables['ymomentum'][:]   # Momentum in the y-direction
1242
1243    # Mesh (nodes (Mx2), triangles (Nx3))
1244    nodes = num.concatenate((x[:,num.newaxis], y[:,num.newaxis]), axis=1)
1245    triangles = fid.variables['volumes'][:]
1246
1247    # Get geo_reference
1248    try:
1249        geo_reference = Geo_reference(NetCDFObject=fid)
1250    except: #AttributeError, e:
1251        # Sww files don't have to have a geo_ref
1252        geo_reference = None
1253
1254    if verbose: log.critical('    building mesh from sww file %s' % filename)
1255
1256    boundary = None
1257
1258    #FIXME (Peter Row): Should this be in mesh?
1259    if fid.smoothing != 'Yes':
1260        nodes = nodes.tolist()
1261        triangles = triangles.tolist()
1262        nodes, triangles, boundary = weed(nodes, triangles, boundary)
1263
1264    try:
1265        mesh = Mesh(nodes, triangles, boundary, geo_reference=geo_reference)
1266    except AssertionError, e:
1267        fid.close()
1268        msg = 'Domain could not be created: %s. "' % e
1269        raise DataDomainError, msg
1270
1271    quantities = {}
1272    quantities['elevation'] = elevation
1273    quantities['stage'] = stage
1274    quantities['xmomentum'] = xmomentum
1275    quantities['ymomentum'] = ymomentum
1276
1277    fid.close()
1278
1279    return mesh, quantities, time
1280
1281
1282
1283##
1284# @brief Find the index and ratio for interpolating to time t.
1285# @param time Vector of stored times.
1286# @param t Time at which to interpolate (default is the last stored time).
1287# @return An (index, ratio) tuple.
1288def get_time_interp(time, t=None):
1289    #Finds the ratio and index for time interpolation.
1290    #It is borrowed from previous abstract_2d_finite_volumes code.
1291    if t is None:
1292        t=time[-1]
1293        index = -1
1294        ratio = 0.
1295    else:
1296        T = time
1297        tau = t
1298        index=0
1299        msg = 'Time interval derived from file %s [%s:%s]' \
1300              % ('FIXMEfilename', T[0], T[-1])
1301        msg += ' does not match model time: %s' % tau
1302        if tau < time[0]: raise DataTimeError, msg
1303        if tau > time[-1]: raise DataTimeError, msg
1304        while tau > time[index]: index += 1
1305        while tau < time[index]: index -= 1
1306        if tau == time[index]:
1307            #Protect against case where tau == time[-1] (last time)
1308            # - also works in general when tau == time[i]
1309            ratio = 0
1310        else:
1311            #t is now between index and index+1
1312            ratio = (tau - time[index])/(time[index+1] - time[index])
1313
1314    return (index, ratio)
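# Worked example (a sketch): with time = [0., 10., 20.] and t = 15.,
# get_time_interp returns (1, 0.5), i.e. halfway between time[1] and time[2].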
1315
1316
1317
1318##
1319# @brief Interpolate a quantity wrt time.
1320# @param saved_quantity The quantity to interpolate.
1321# @param time_interp (index, ratio)
1322# @return A vector of interpolated values.
1323def interpolated_quantity(saved_quantity, time_interp):
1324    '''Given an index and ratio, interpolate quantity with respect to time.'''
1325
1326    index, ratio = time_interp
1327
1328    Q = saved_quantity
1329
1330    if ratio > 0:
1331        q = (1-ratio)*Q[index] + ratio*Q[index+1]
1332    else:
1333        q = Q[index]
1334
1335    #Return vector of interpolated values
1336    return q
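# Worked example (a sketch): continuing the example above, with Q[1] = 2.0,
# Q[2] = 4.0 and time_interp = (1, 0.5), interpolated_quantity returns 3.0.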
1337
1338
1339##
1340# @brief Remove duplicate coordinates and remap volumes (and boundary) accordingly.
1341# @param coordinates List or array of (x, y) points, possibly containing duplicates.
1342# @param volumes List of triangles given as indices into coordinates.
1343# @param boundary Optional dictionary of boundary tags keyed by (point0, point1) segments.
1344def weed(coordinates, volumes, boundary=None):
1345    if isinstance(coordinates, num.ndarray):
1346        coordinates = coordinates.tolist()
1347    if isinstance(volumes, num.ndarray):
1348        volumes = volumes.tolist()
1349
1350    unique = False
1351    point_dict = {}
1352    same_point = {}
1353    for i in range(len(coordinates)):
1354        point = tuple(coordinates[i])
1355        if point_dict.has_key(point):
1356            unique = True
1357            same_point[i] = point
1358            #to change all point i references to point j
1359        else:
1360            point_dict[point] = i
1361            same_point[i] = point
1362
1363    coordinates = []
1364    i = 0
1365    for point in point_dict.keys():
1366        point = tuple(point)
1367        coordinates.append(list(point))
1368        point_dict[point] = i
1369        i += 1
1370
1371    for volume in volumes:
1372        for i in range(len(volume)):
1373            index = volume[i]
1374            if index > -1:
1375                volume[i] = point_dict[same_point[index]]
1376
1377    new_boundary = {}
1378    if not boundary is None:
1379        for segment in boundary.keys():
1380            point0 = point_dict[same_point[segment[0]]]
1381            point1 = point_dict[same_point[segment[1]]]
1382            label = boundary[segment]
1383            #FIXME should the boundary attributes be concatenated
1384            #('exterior, pond') or replaced ('pond')? (peter row)
1385
1386            if new_boundary.has_key((point0, point1)):
1387                new_boundary[(point0,point1)] = new_boundary[(point0, point1)]
1388
1389            elif new_boundary.has_key((point1, point0)):
1390                new_boundary[(point1,point0)] = new_boundary[(point1, point0)]
1391            else: new_boundary[(point0, point1)] = label
1392
1393        boundary = new_boundary
1394
1395    return coordinates, volumes, boundary
1396
1397
1398