source: anuga_core/source/anuga/shallow_water/sww_file.py @ 7735

Last change on this file since 7735 was 7735, checked in by hudson, 14 years ago

Split up some of the huge modules in shallow_water, fixed most of the unit test dependencies.

File size: 36.7 KB
1""" Classes to read an SWW file.
2"""
3
4from anuga.coordinate_transforms.geo_reference import Geo_reference
5from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a
6from anuga.config import netcdf_float, netcdf_float32, netcdf_int
7from anuga.config import max_float
8from anuga.utilities.numerical_tools import ensure_numeric
9from Scientific.IO.NetCDF import NetCDFFile
10
11from anuga.coordinate_transforms.geo_reference import \
12        ensure_geo_reference
13
14from file_utils import create_filename
15import numpy as num
16
##
# @brief Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """

    ##
    # @brief Instantiate this instance.
    # @param domain
    # @param extension
    # @param mode The mode of the underlying file.
    def __init__(self, domain, extension, mode=netcdf_mode_w):
        assert mode[0] in ['r', 'w', 'a'], \
               "Mode %s must be either:\n" % mode + \
               "   'w' (write)\n" + \
               "   'r' (read)\n" + \
               "   'a' (append)"

        #Create filename
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        self.timestep = 0
        self.domain = domain

        # Exclude ghosts in case this is a parallel domain
        self.number_of_nodes = domain.number_of_full_nodes
        self.number_of_volumes = domain.number_of_full_triangles
        #self.number_of_volumes = len(domain)

        #FIXME: Should we have a general set_precision function?


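# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): Data_format only
# relies on a small part of the domain interface.  The stub below lists
# exactly the attributes and methods used by Data_format.__init__ above;
# the name _ExampleDomainStub is hypothetical.
# ---------------------------------------------------------------------------
class _ExampleDomainStub(object):
    """Minimal duck-typed stand-in for a domain, for illustration only."""

    number_of_full_nodes = 4          # becomes self.number_of_nodes
    number_of_full_triangles = 2      # becomes self.number_of_volumes

    def get_datadir(self):
        return '.'                    # directory passed to create_filename

    def get_name(self):
        return 'example_run'          # base name passed to create_filename

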
##
# @brief Class for handling checkpoint data
# @note This is not operational at the moment
class CPT_file(Data_format):
    """Interface to native NetCDF format (.cpt) to be
    used for checkpointing (one day)
    """

    ##
    # @brief Instantiate this instance.
    # @param domain The domain to be checkpointed.
    # @param mode Mode of underlying data file (default WRITE).
    def __init__(self, domain, mode=netcdf_mode_w):
        from Scientific.IO.NetCDF import NetCDFFile

        self.precision = netcdf_float #Use full precision

        Data_format.__init__(self, domain, 'sww', mode)

        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)
        if mode[0] == 'w':
            # Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # Dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            # Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # Variable definitions

            # Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', netcdf_int, ('number_of_volumes',
                                                       'number_of_vertices'))

            fid.createVariable('time', self.precision, ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        fid.close()

    ##
    # @brief Store connectivity data to underlying data file.
    def store_checkpoint(self):
        """Write x,y coordinates of triangles, their connectivity and
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, netcdf_mode_a)

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']    # bed elevation

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True, precision=self.precision)

        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        volumes[:] = V

        fid.close()

    ##
    # @brief Store time and named quantities to underlying data file.
    # @param name
    def store_timestep(self, name):
        """Store time and named quantity to file
        """

        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, netcdf_mode_a)
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened' \
                      ' - trying again' % self.filename
                log.critical(msg)
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' % self.filename
            raise DataFileNotOpenError(msg)

        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        stage = fid.variables['stage']
        i = len(time)

        #Store stage
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False, precision=self.precision)

        stage[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()

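# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): both
# CPT_file.store_timestep above and SWW_file.store_timestep below open the
# output file for append inside a retry loop, because another process (e.g.
# a viewer) may temporarily hold the file.  The helper below shows that
# pattern on its own; the name _example_open_for_append is hypothetical.
# ---------------------------------------------------------------------------
def _example_open_for_append(filename, retries=10, delay=1):
    """Open a NetCDF file for append, retrying while it appears locked."""

    from time import sleep

    for attempt in range(retries):
        try:
            return NetCDFFile(filename, netcdf_mode_a)
        except IOError:
            # Someone else may be reading the file; wait and try again
            sleep(delay)

    raise IOError('File %s could not be opened for append' % filename)
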

class SWW_file(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data:

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """

    ##
    # @brief Instantiate this instance.
    # @param domain The domain whose output is to be stored.
    # @param mode Mode of the underlying data file.
    # @param max_size Maximum file size in bytes before output is continued in a new file.
    # @param recursion Used internally when output is split over several files.
    # @note Prepare the underlying data file if mode starts with 'w'.
    def __init__(self, domain,
                 mode=netcdf_mode_w, max_size=2000000000, recursion=False):
        from Scientific.IO.NetCDF import NetCDFFile

        self.precision = netcdf_float32 # Use single precision for quantities
        self.recursion = recursion
        self.mode = mode
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size # File size max is 2Gig
        else:
            self.max_size = max_size
        if hasattr(domain, 'minimum_storable_height'):
            self.minimum_storable_height = domain.minimum_storable_height
        else:
            self.minimum_storable_height = default_minimum_storable_height

        # Call parent constructor
        Data_format.__init__(self, domain, 'sww', mode)

        # Get static and dynamic quantities from domain
        static_quantities = []
        dynamic_quantities = []

        for q in domain.quantities_to_be_stored:
            flag = domain.quantities_to_be_stored[q]

            msg = 'Quantity %s is requested to be stored ' % q
            msg += 'but it does not exist in domain.quantities'
            assert q in domain.quantities, msg

            assert flag in [1,2]
            if flag == 1: static_quantities.append(q)
            if flag == 2: dynamic_quantities.append(q)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)
        if mode[0] == 'w':
            description = 'Output from anuga.abstract_2d_finite_volumes ' \
                          'suitable for plotting'

            self.writer = Write_sww(static_quantities, dynamic_quantities)
            self.writer.store_header(fid,
                                     domain.starttime,
                                     self.number_of_volumes,
                                     self.domain.number_of_full_nodes,
                                     description=description,
                                     smoothing=domain.smooth,
                                     order=domain.default_order,
                                     sww_precision=self.precision)

            # Extra optional information
            if hasattr(domain, 'texture'):
                fid.texture = domain.texture

            if domain.quantities_to_be_monitored is not None:
                fid.createDimension('singleton', 1)
                fid.createDimension('two', 2)

                poly = domain.monitor_polygon
                if poly is not None:
                    N = len(poly)
                    fid.createDimension('polygon_length', N)
                    fid.createVariable('extrema.polygon',
                                       self.precision,
                                       ('polygon_length', 'two'))
                    fid.variables['extrema.polygon'][:] = poly

                interval = domain.monitor_time_interval
                if interval is not None:
                    fid.createVariable('extrema.time_interval',
                                       self.precision,
                                       ('two',))
                    fid.variables['extrema.time_interval'][:] = interval

                for q in domain.quantities_to_be_monitored:
                    fid.createVariable(q + '.extrema', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.min_location', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.max_location', self.precision,
                                       ('numbers_in_range',))
                    fid.createVariable(q + '.min_time', self.precision,
                                       ('singleton',))
                    fid.createVariable(q + '.max_time', self.precision,
                                       ('singleton',))

        fid.close()

    ##
    # @brief Store connectivity data into the underlying data file.
    def store_connectivity(self):
        """Store information about nodes, triangles and static quantities

        Writes x,y coordinates of triangles and their connectivity.

        Also store any quantity that has been identified as static.
        """

        # FIXME: Change name to reflect the fact that this function
        # stores both connectivity (triangulation) and static quantities

        from Scientific.IO.NetCDF import NetCDFFile

        domain = self.domain

        # append to the NetCDF file
        fid = NetCDFFile(self.filename, netcdf_mode_a)

        # Get X, Y from one (any) of the quantities
        Q = domain.quantities.values()[0]
        X,Y,_,V = Q.get_vertex_values(xy=True, precision=self.precision)

        # store the connectivity data
        points = num.concatenate((X[:,num.newaxis],Y[:,num.newaxis]), axis=1)
        self.writer.store_triangulation(fid,
                                        points,
                                        V.astype(num.float32),
                                        points_georeference=\
                                            domain.geo_reference)


        # Get values of static quantities
        static_quantities = {}
        for name in self.writer.static_quantities:
            Q = domain.quantities[name]
            A, _ = Q.get_vertex_values(xy=False,
                                       precision=self.precision)
            static_quantities[name] = A

        # Store static quantities
        self.writer.store_static_quantities(fid, **static_quantities)

        fid.close()

    ##
    # @brief Store time and time dependent quantities
    # to the underlying data file.
    def store_timestep(self):
        """Store time and time dependent quantities
        """

        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        # Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                # Open existing file
                fid = NetCDFFile(self.filename, netcdf_mode_a)
            except IOError:
                # This could happen if someone was reading the file.
                # In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened' \
                      % self.filename
                msg += ' - trying step %s again' % self.domain.time
                log.critical(msg)
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' % self.filename
            raise DataFileNotOpenError(msg)

        # Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time) + 1
        file_size = stat(self.filename)[6]
        file_size_increase = file_size / i
        if file_size + file_size_increase > self.max_size * 2**self.recursion:
            # In order to get the file name and start time correct,
            # I change the domain.filename and domain.starttime.
            # This is the only way to do this without changing
            # other modules (I think).

            # Write a filename addon that won't break the anuga viewers
            # (10.sww is bad)
            filename_ext = '_time_%s' % self.domain.time
            filename_ext = filename_ext.replace('.', '_')

            # Remember the old filename, then give domain a
            # name with the extension
            old_domain_filename = self.domain.get_name()
            if not self.recursion:
                self.domain.set_name(old_domain_filename + filename_ext)

            # Temporarily change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.get_time()

            # Build a new data_structure.
            next_data_structure = SWW_file(self.domain, mode=self.mode,
                                           max_size=self.max_size,
                                           recursion=self.recursion+1)
            if not self.recursion:
                log.critical('    file_size = %s' % file_size)
                log.critical('    saving file to %s'
                             % next_data_structure.filename)

            # Set up the new data_structure
            self.domain.writer = next_data_structure

            # Store connectivity and first timestep
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep()
            fid.sync()
            fid.close()

            # Restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.set_name(old_domain_filename)
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            i = len(time)

            if 'stage' in self.writer.dynamic_quantities:
                # Select only those values for stage,
                # xmomentum and ymomentum (if stored) where
                # depth exceeds minimum_storable_height
                #
                # In this branch it is assumed that elevation
                # is also available as a quantity

                Q = domain.quantities['stage']
                w, _ = Q.get_vertex_values(xy=False)

                Q = domain.quantities['elevation']
                z, _ = Q.get_vertex_values(xy=False)

                storable_indices = (w-z >= self.minimum_storable_height)
            else:
                # Very unlikely branch
                storable_indices = None # This means take all


            # Now store dynamic quantities
            dynamic_quantities = {}
            for name in self.writer.dynamic_quantities:
                netcdf_array = fid.variables[name]

                Q = domain.quantities[name]
                A, _ = Q.get_vertex_values(xy=False,
                                           precision=self.precision)

                if storable_indices is not None:
                    if name == 'stage':
                        A = num.choose(storable_indices, (z, A))

                    if name in ['xmomentum', 'ymomentum']:
                        # Zero the momentum where depth is below
                        # minimum_storable_height

                        # Define a zero vector of same size and type as A
                        # for use with momenta
                        null = num.zeros(num.size(A), A.dtype.char)
                        A = num.choose(storable_indices, (null, A))

                dynamic_quantities[name] = A


            # Store dynamic quantities
            self.writer.store_quantities(fid,
                                         time=self.domain.time,
                                         sww_precision=self.precision,
                                         **dynamic_quantities)


            # Update extrema if requested
            domain = self.domain
            if domain.quantities_to_be_monitored is not None:
                for q, info in domain.quantities_to_be_monitored.items():
                    if info['min'] is not None:
                        fid.variables[q + '.extrema'][0] = info['min']
                        fid.variables[q + '.min_location'][:] = \
                                        info['min_location']
                        fid.variables[q + '.min_time'][0] = info['min_time']

                    if info['max'] is not None:
                        fid.variables[q + '.extrema'][1] = info['max']
                        fid.variables[q + '.max_location'][:] = \
                                        info['max_location']
                        fid.variables[q + '.max_time'][0] = info['max_time']

            # Flush and close
            fid.sync()
            fid.close()

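# ---------------------------------------------------------------------------
# Illustrative sketches (not part of the original module).
#
# 1. Typical order of SWW_file calls: the writer is created once, the mesh
#    and static quantities are stored once, and the dynamic quantities are
#    stored after every yieldstep.  In a normal ANUGA run the domain sets
#    this up itself during evolve; the function below (hypothetical name,
#    hypothetical domain argument) only shows the order of the calls
#    defined above.
# ---------------------------------------------------------------------------
def _example_sww_output_loop(domain, finaltime=10.0, yieldstep=1.0):
    sww = SWW_file(domain, mode=netcdf_mode_w)

    # Mesh, connectivity and static quantities: written once
    sww.store_connectivity()

    # Dynamic (conserved) quantities: appended once per yieldstep
    for t in domain.evolve(yieldstep=yieldstep, finaltime=finaltime):
        sww.store_timestep()


# ---------------------------------------------------------------------------
# 2. The dry-cell masking used in store_timestep: where the depth w - z is
#    below minimum_storable_height, stage is replaced by the bed elevation
#    and momenta by zero before being written.  A small self-contained
#    demonstration of the same num.choose idiom:
# ---------------------------------------------------------------------------
def _example_dry_cell_masking(minimum_storable_height=1.0e-5):
    w = num.array([1.0, 0.5, 0.7])        # stage
    z = num.array([0.0, 0.5, 0.2])        # elevation
    xmom = num.array([2.0, 3.0, 4.0])     # x momentum

    storable = (w - z) >= minimum_storable_height

    stored_stage = num.choose(storable, (z, w))
    stored_xmom = num.choose(storable, (num.zeros(len(xmom)), xmom))

    # The middle cell is dry: its stage collapses to z and its momentum to 0
    return stored_stage, stored_xmom
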

##
# @brief Class to open an sww file so that domain can be populated with quantity values
class Read_sww:

    def __init__(self, source):
        """The source parameter is assumed to be a NetCDF sww file.
        """

        self.source = source

        self.frame_number = 0

        fin = NetCDFFile(self.source, 'r')

        self.time = num.array(fin.variables['time'], num.float)
        self.last_frame_number = self.time.shape[0] - 1

        self.frames = num.arange(self.last_frame_number+1)

        fin.close()

        self.read_mesh()

        self.quantities = {}

        self.read_quantities()


    def read_mesh(self):
        fin = NetCDFFile(self.source, 'r')

        self.vertices = num.array(fin.variables['volumes'], num.int)

        self.x = x = num.array(fin.variables['x'], num.float)
        self.y = y = num.array(fin.variables['y'], num.float)

        assert len(self.x) == len(self.y)

        self.xmin = num.min(x)
        self.xmax = num.max(x)
        self.ymin = num.min(y)
        self.ymax = num.max(y)

        fin.close()

    def read_quantities(self, frame_number=0):

        assert frame_number >= 0 and frame_number <= self.last_frame_number

        self.frame_number = frame_number

        M = len(self.x)/3

        fin = NetCDFFile(self.source, 'r')

        for q in [n for n in fin.variables.keys()
                  if n not in ('x', 'y', 'time', 'volumes')
                  and '_range' not in n]:
            if len(fin.variables[q].shape) == 1: # Not a time-varying quantity
                self.quantities[q] = num.ravel(num.array(fin.variables[q], num.float)).reshape(M,3)
            else: # Time-varying, get the current timestep data
                self.quantities[q] = num.array(fin.variables[q][self.frame_number], num.float).reshape(M,3)
        fin.close()
        return self.quantities

    def get_bounds(self):
        return [self.xmin, self.xmax, self.ymin, self.ymax]

    def get_last_frame_number(self):
        return self.last_frame_number

    def get_time(self):
        return self.time[self.frame_number]

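# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): stepping through an
# existing sww file with Read_sww.  The filename is hypothetical and 'stage'
# is assumed to be one of the stored dynamic quantities; only methods of the
# class above are used.
# ---------------------------------------------------------------------------
def _example_read_sww(filename='example.sww'):
    reader = Read_sww(filename)

    print 'Extent [xmin, xmax, ymin, ymax]:', reader.get_bounds()

    # Visit every stored frame and report the maximum stage
    for frame in range(reader.get_last_frame_number() + 1):
        quantities = reader.read_quantities(frame_number=frame)
        stage = quantities['stage']    # one (M, 3) array per quantity
        print 't = %f, max stage = %f' % (reader.get_time(), num.max(stage))
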

##
# @brief A class to write an SWW file.
class Write_sww:
    from anuga.shallow_water.shallow_water_domain import Domain

    RANGE = '_range'
    EXTREMA = ':extrema'

    ##
    # @brief Instantiate the SWW writer class.
    def __init__(self, static_quantities, dynamic_quantities):
        """Initialise Write_sww with two lists of quantity names:

        static_quantities (e.g. elevation or friction):
            Stored once at the beginning of the simulation in a 1D array
            of length number_of_points
        dynamic_quantities (e.g. stage):
            Stored every timestep in a 2D array with
            dimensions number_of_timesteps x number_of_points

        """
        self.static_quantities = static_quantities
        self.dynamic_quantities = dynamic_quantities


    ##
    # @brief Store a header in the SWW file.
    # @param outfile Open handle to the file that will be written.
    # @param times A list of time slices *or* a start time.
    # @param number_of_volumes The number of triangles.
    # @param number_of_points The number of points.
    # @param description The internal file description string.
    # @param smoothing True if smoothing is to be used.
    # @param order
    # @param sww_precision Data type of the quantity written (netcdf constant)
    # @param verbose True if this function is to be verbose.
    # @note If 'times' is a list, the info will be made relative.
    def store_header(self,
                     outfile,
                     times,
                     number_of_volumes,
                     number_of_points,
                     description='Generated by ANUGA',
                     smoothing=True,
                     order=1,
                     sww_precision=netcdf_float32,
                     verbose=False):
        """Write an SWW file header.

        outfile - the open file that will be written
        times - A list of the time slice times OR a start time
        Note, if a list is given the info will be made relative.
        number_of_volumes - the number of triangles
        """

        outfile.institution = 'Geoscience Australia'
        outfile.description = description

        # For sww compatibility
        if smoothing is True:
            # Smoothing to be deprecated
            outfile.smoothing = 'Yes'
            outfile.vertices_are_stored_uniquely = 'False'
        else:
            # Smoothing to be deprecated
            outfile.smoothing = 'No'
            outfile.vertices_are_stored_uniquely = 'True'
        outfile.order = order

        try:
            revision_number = get_revision_number()
        except:
            revision_number = None
        # Allow None to be stored as a string
        outfile.revision_number = str(revision_number)

        # Distinguish between a single start time (a number) and a
        # list/array of time slices.
        if isinstance(times, (list, num.ndarray)):
            number_of_times = len(times)
            times = ensure_numeric(times)
            if number_of_times == 0:
                starttime = 0
            else:
                starttime = times[0]
                times = times - starttime  #Store relative times
        else:
            number_of_times = 0
            starttime = times


        outfile.starttime = starttime

        # dimension definitions
        outfile.createDimension('number_of_volumes', number_of_volumes)
        outfile.createDimension('number_of_vertices', 3)
        outfile.createDimension('numbers_in_range', 2)

        if smoothing is True:
            outfile.createDimension('number_of_points', number_of_points)
            # FIXME(Ole): This will cause sww files for parallel domains to
            # have ghost nodes stored (but not used by triangles).
            # To clean this up, we have to change get_vertex_values and
            # friends in quantity.py (but I can't be bothered right now)
        else:
            outfile.createDimension('number_of_points', 3*number_of_volumes)

        outfile.createDimension('number_of_timesteps', number_of_times)

        # variable definitions
        outfile.createVariable('x', sww_precision, ('number_of_points',))
        outfile.createVariable('y', sww_precision, ('number_of_points',))

        outfile.createVariable('volumes', netcdf_int, ('number_of_volumes',
                                                       'number_of_vertices'))

        # Doing sww_precision instead of Float gives cast errors.
        outfile.createVariable('time', netcdf_float,
                               ('number_of_timesteps',))


        for q in self.static_quantities:

            outfile.createVariable(q, sww_precision,
                                   ('number_of_points',))

            outfile.createVariable(q + Write_sww.RANGE, sww_precision,
                                   ('numbers_in_range',))

            # Initialise ranges with small and large sentinels.
            # If this was in pure Python we could have used None sensibly
            outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min
            outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max

        #if 'elevation' in self.static_quantities:
        #    # FIXME: Backwards compat - get rid of z once old view has retired
        #    outfile.createVariable('z', sww_precision,
        #                           ('number_of_points',))

        for q in self.dynamic_quantities:
            outfile.createVariable(q, sww_precision, ('number_of_timesteps',
                                                      'number_of_points'))
            outfile.createVariable(q + Write_sww.RANGE, sww_precision,
                                   ('numbers_in_range',))

            # Initialise ranges with small and large sentinels.
            # If this was in pure Python we could have used None sensibly
            outfile.variables[q+Write_sww.RANGE][0] = max_float  # Min
            outfile.variables[q+Write_sww.RANGE][1] = -max_float # Max

        if isinstance(times, (list, num.ndarray)):
            outfile.variables['time'][:] = times    # Store time relative

        if verbose:
            log.critical('------------------------------------------------')
            log.critical('Statistics:')
            log.critical('    t in [%f, %f], len(t) == %d'
                         % (num.min(times), num.max(times), len(times.flat)))

    ##
    # @brief Store triangulation data in the underlying file.
    # @param outfile Open handle to underlying file.
    # @param points_utm List or array of points in UTM.
    # @param volumes
    # @param zone
    # @param new_origin georeference that the points can be set to.
    # @param points_georeference The georeference of the points_utm.
    # @param verbose True if this function is to be verbose.
    def store_triangulation(self,
                            outfile,
                            points_utm,
                            volumes,
                            zone=None,
                            new_origin=None,
                            points_georeference=None,
                            verbose=False):
        """
        new_origin - a georeference that the points can be set to. (Maybe
        do this before calling this function.)

        points_utm - currently a list or array of the points in UTM.
        points_georeference - the georeference of the points_utm

        How about passing new_origin and current_origin.
        If you get both, do a conversion from the old to the new.

        If you only get new_origin, the points are absolute,
        convert to relative

        If you only get the current_origin, the points are relative; store
        as relative.

        If you get no georefs, create a new georef based on the minimums of
        points_utm.  (Another option would be to default to absolute)

        Yes, and this is done in another part of the code.
        Probably geospatial.

        If you don't supply either geo_ref, then supply a zone. If not,
        the default zone will be used.

        Precondition:
            store_header has been called.
        """

        number_of_points = len(points_utm)
        volumes = num.array(volumes)
        points_utm = num.array(points_utm)

        # Given the two geo_refs and the points, do the stuff
        # described in the method header
        # If this is needed elsewhere, pull out as a function
        points_georeference = ensure_geo_reference(points_georeference)
        new_origin = ensure_geo_reference(new_origin)
        if new_origin is None and points_georeference is not None:
            points = points_utm
            geo_ref = points_georeference
        else:
            if new_origin is None:
                new_origin = Geo_reference(zone, min(points_utm[:,0]),
                                                 min(points_utm[:,1]))
            points = new_origin.change_points_geo_ref(points_utm,
                                                      points_georeference)
            geo_ref = new_origin

        # At this stage I need a georef and points
        # the points are relative to the georef
        geo_ref.write_NetCDF(outfile)

        # This will put the geo ref in the middle
        #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)

        x =  points[:,0]
        y =  points[:,1]

        if verbose:
            log.critical('------------------------------------------------')
            log.critical('More Statistics:')
            log.critical('  Extent:')
            log.critical('    x in [%f, %f], len(x) == %d'
                         % (min(x), max(x), len(x)))
            log.critical('    y in [%f, %f], len(y) == %d'
                         % (min(y), max(y), len(y)))
            #log.critical('    z in [%f, %f], len(z) == %d'
            #             % (min(elevation), max(elevation), len(elevation)))
            log.critical('geo_ref: %s' % str(geo_ref))
            log.critical('------------------------------------------------')

        outfile.variables['x'][:] = points[:,0] #- geo_ref.get_xllcorner()
        outfile.variables['y'][:] = points[:,1] #- geo_ref.get_yllcorner()
        outfile.variables['volumes'][:] = volumes.astype(num.int32) #On Opteron 64



    ##
    # @brief Write the static quantity data to the underlying file.
    # @param outfile Handle to open underlying file.
    # @param sww_precision Format of quantity data to write (default Float32).
    # @param verbose True if this function is to be verbose.
    # @param **quant
    def store_static_quantities(self,
                                outfile,
                                sww_precision=num.float32,
                                verbose=False,
                                **quant):
        """
        Write the static quantity info.

        **quant is extra keyword arguments passed in. These must be
          the numpy arrays to be stored in the sww file.

        The argument sww_precision allows for storing as either
        * single precision (default): num.float32
        * double precision: num.float64 or num.float

        Precondition:
            store_triangulation and
            store_header have been called.
        """

        # The dictionary quant must contain numpy arrays for each name.
        # These will typically be quantities from Domain such as friction
        #
        # Arrays not listed in static_quantities will be ignored silently.
        #
        # This method will also write the ranges for each quantity,
        # e.g. elevation_range and friction_range
        for q in self.static_quantities:
            if q not in quant:
                msg = 'Values for quantity %s were not specified in ' % q
                msg += 'store_static_quantities so they cannot be stored.'
                raise NewQuantity(msg)
            else:
                q_values = ensure_numeric(quant[q])

                x = q_values.astype(sww_precision)
                outfile.variables[q][:] = x

                # This populates the _range values
                outfile.variables[q + Write_sww.RANGE][0] = num.min(x)
                outfile.variables[q + Write_sww.RANGE][1] = num.max(x)

        # FIXME: Hack for backwards compatibility with old viewer
        #if 'elevation' in self.static_quantities:
        #    outfile.variables['z'][:] = outfile.variables['elevation'][:]


    ##
    # @brief Write the quantity data to the underlying file.
    # @param outfile Handle to open underlying file.
    # @param sww_precision Format of quantity data to write (default Float32).
    # @param slice_index
    # @param time
    # @param verbose True if this function is to be verbose.
    # @param **quant
    def store_quantities(self,
                         outfile,
                         sww_precision=num.float32,
                         slice_index=None,
                         time=None,
                         verbose=False,
                         **quant):
        """
        Write the quantity info at each timestep.

        **quant is extra keyword arguments passed in. These must be
          the numpy arrays to be stored in the sww file at each timestep.

        If the time array has already been built, use slice_index
        to specify the index.

        Otherwise, use time to extend the time dimension.

        Maybe make this general, but the viewer assumes these quantities,
        so maybe we don't want it general - unless the viewer is general

        The argument sww_precision allows for storing as either
        * single precision (default): num.float32
        * double precision: num.float64 or num.float

        Precondition:
            store_triangulation and
            store_header have been called.
        """

        if time is not None:
            file_time = outfile.variables['time']
            slice_index = len(file_time)
            file_time[slice_index] = time
        else:
            slice_index = int(slice_index) # Has to be cast in case it was numpy.int

        # Write the named dynamic quantities
        # The dictionary quant must contain numpy arrays for each name.
        # These will typically be the conserved quantities from Domain
        # (typically stage, xmomentum, ymomentum).
        #
        # Arrays not listed in dynamic_quantities will be ignored silently.
        #
        # This method will also write the ranges for each quantity,
        # e.g. stage_range, xmomentum_range and ymomentum_range
        for q in self.dynamic_quantities:
            if q not in quant:
                msg = 'Values for quantity %s were not specified in ' % q
                msg += 'store_quantities so they cannot be stored.'
                raise NewQuantity(msg)
            else:
                q_values = ensure_numeric(quant[q])

                x = q_values.astype(sww_precision)
                outfile.variables[q][slice_index] = x

                # This updates the _range values
                q_range = outfile.variables[q + Write_sww.RANGE][:]
                q_values_min = num.min(q_values)
                if q_values_min < q_range[0]:
                    outfile.variables[q + Write_sww.RANGE][0] = q_values_min
                q_values_max = num.max(q_values)
                if q_values_max > q_range[1]:
                    outfile.variables[q + Write_sww.RANGE][1] = q_values_max

    ##
    # @brief Print the range of each dynamic quantity in the underlying file.
    # @param outfile Open handle to the file whose quantity ranges are printed.
    def verbose_quantities(self, outfile):
        log.critical('------------------------------------------------')
        log.critical('More Statistics:')
        for q in self.dynamic_quantities:
            log.critical('%s in [%f, %f]'
                         % (q, outfile.variables[q+Write_sww.RANGE][0],
                            outfile.variables[q+Write_sww.RANGE][1]))
        log.critical('------------------------------------------------')

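
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): writing a complete,
# minimal sww file directly with Write_sww, in the order required by the
# preconditions above (store_header, then store_triangulation, then the
# static and dynamic quantities).  The mesh is two triangles on the unit
# square; the function name, the filename and the UTM zone value 56 are all
# hypothetical example values.
# ---------------------------------------------------------------------------
def _example_write_minimal_sww(filename='example_out.sww'):
    # Two triangles covering the unit square, stored without smoothing,
    # so there is one point per triangle vertex (6 points in total).
    points = num.array([[0.0, 0.0], [1.0, 0.0], [0.0, 1.0],
                        [1.0, 0.0], [1.0, 1.0], [0.0, 1.0]])
    volumes = num.array([[0, 1, 2], [3, 4, 5]])

    writer = Write_sww(static_quantities=['elevation'],
                       dynamic_quantities=['stage'])

    fid = NetCDFFile(filename, netcdf_mode_w)

    # Header: start time 0.0, 2 triangles, 6 points, no smoothing
    writer.store_header(fid, 0.0, len(volumes), len(points), smoothing=False)

    # Mesh and georeference (no georefs supplied, so a zone is given)
    writer.store_triangulation(fid, points, volumes, zone=56)

    # Static quantities are written once ...
    writer.store_static_quantities(fid, elevation=num.zeros(len(points)))

    # ... and dynamic quantities once per timestep
    for t in [0.0, 1.0, 2.0]:
        writer.store_quantities(fid, time=t,
                                stage=num.ones(len(points)) * t)

    fid.close()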