source: anuga_core/source/anuga/shallow_water/data_manager.py @ 3930

[2852]1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topograhies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdinis ascii geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
[3535]33TSH:          Triangular meshes (e.g. created from anuga.pmesh)
[2852]34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
41NC -> SWW: Conversion of MOST bundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
[3560]47TSH + Boundary SWW -> SWW: Simluation using abstract_2d_finite_volumes
[2852]48
49"""

import exceptions
class TitleValueError(exceptions.Exception): pass
class DataMissingValuesError(exceptions.Exception): pass
class DataFileNotOpenError(exceptions.Exception): pass
class DataTimeError(exceptions.Exception): pass
class DataDomainError(exceptions.Exception): pass



import csv
import os
from struct import unpack
import array as p_array

from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
     searchsorted, zeros, allclose, around, reshape, transpose
from Scientific.IO.NetCDF import NetCDFFile


from anuga.coordinate_transforms.geo_reference import Geo_reference
from anuga.geospatial_data.geospatial_data import Geospatial_data
from anuga.config import minimum_storable_height as default_minimum_storable_height
from anuga.utilities.numerical_tools import ensure_numeric
from anuga.caching.caching import myhash
from anuga.utilities.anuga_exceptions import ANUGAError

def make_filename(s):
    """Transform argument string into a suitable filename
    """

    s = s.strip()
    s = s.replace(' ', '_')
    s = s.replace('(', '')
    s = s.replace(')', '')
    s = s.replace('__', '_')

    return s


def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       check_dir(path, verbose)

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os, sys
    import os.path

    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
        unix = 0
    else:
        unix = 1


    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            os.mkdir(path)

            # Change access rights if possible
            #
            if unix:
                exitcode = os.system('chmod 775 ' + path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print 'MESSAGE: Directory', path, 'created.'

        except:
            print 'WARNING: Directory', path, 'could not be created.'
            if unix:
                path = '/tmp/'
            else:
                path = 'C:'

            print 'Using directory %s instead' %path

    return path

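# A minimal usage sketch for check_dir (illustrative; the directory name is
# hypothetical). The returned path includes a trailing separator, so a
# filename can be appended directly:
#
#     datadir = check_dir('~/anuga_output', verbose=True)
#     fid = open(datadir + 'run1.sww', 'wb')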


def del_dir(path):
    """Recursively delete directory path and all its contents
    """

    import os

    if os.path.isdir(path):
        for file in os.listdir(path):
            X = os.path.join(path, file)


            if os.path.isdir(X) and not os.path.islink(X):
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except:
                    print "Could not remove file %s" %X

        os.rmdir(path)



def create_filename(datadir, filename, format, size=None, time=None):

    import os
    #from anuga.config import data_dir

    FN = check_dir(datadir) + filename

    if size is not None:
        FN += '_size%d' %size

    if time is not None:
        FN += '_time%.2f' %time

    FN += '.' + format
    return FN

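# The naming convention produced by create_filename (illustrative values):
#
#     create_filename('.', 'bedslope', 'sww', size=4, time=2.0)
#
# returns './bedslope_size4_time2.00.sww'.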

def get_files(datadir, filename, format, size):
    """Get all files (names) with given name, size and format
    """

    import glob

    import os
    #from anuga.config import data_dir

    dir = check_dir(datadir)

    pattern = dir + filename + '_size%d*.%s' %(size, format)
    return glob.glob(pattern)



#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        assert mode in ['r', 'w', 'a'], \
               "Mode %s must be either 'w' (write), 'r' (read) or 'a' (append)" %mode

        #Create filename
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        #print 'F', self.filename
        self.timestep = 0
        self.domain = domain



        # Exclude ghosts in case this is a parallel domain
        self.number_of_nodes = domain.number_of_full_nodes
        self.number_of_volumes = domain.number_of_full_triangles
        #self.number_of_volumes = len(domain)




        #FIXME: Should we have a general set_precision function?


#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data:

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',
                 max_size = 2000000000,
                 recursion = False):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision for quantities
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)

        if hasattr(domain, 'minimum_storable_height'):
            self.minimum_storable_height = domain.minimum_storable_height
        else:
            self.minimum_storable_height = default_minimum_storable_height

        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from anuga.abstract_2d_finite_volumes suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            if domain.smooth is True:
                fid.createDimension('number_of_points', len(domain.vertexlist))
                #fid.createDimension('number_of_points', self.number_of_nodes)

                # FIXME(Ole): This will cause sww files for parallel domains to
                # have ghost nodes stored (but not used by triangles).
                # To clean this up, we have to change get_vertex_values and
                # friends in quantity.py (but I can't be bothered right now)
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', Float,  # Always use full precision lest two timesteps
                                               # close to each other appear as the same step
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for NetCDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision=self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        z[:] = Z.astype(self.precision)

        #FIXME: Backwards compatibility
        z = fid.variables['z']
        z[:] = Z.astype(self.precision)
        ################################

        volumes[:] = V.astype(volumes.typecode())

        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        from Numeric import choose

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase = file_size/i
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #In order to get the file name and start time correct,
            #I change the domain.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #Write a filename addon that won't break Swollen's reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.get_name()
            if not self.recursion:
                self.domain.set_name(old_domain_filename+filename_ext)


            #change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #build a new data_structure.
            next_data_structure =\
                Data_format_sww(self.domain, mode=self.mode,
                                max_size = self.max_size,
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.set_name(old_domain_filename)
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)

                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    #print z[:]
                    #print A-z[:]
                    A = choose(A-z[:] >= self.minimum_storable_height,
                               (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
            #eval( name + '[i,:] = A.astype(self.precision)' )
            #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()


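# A minimal sketch (illustrative) of how this writer is typically driven.
# get_dataobject is defined further down in this file; the evolve loop is
# schematic only:
#
#     writer = get_dataobject(domain, mode='w')   # creates <name>.sww
#     writer.store_connectivity()                 # mesh + bed elevation, once
#     while domain.time < finaltime:
#         # ...evolve the model one timestep...
#         writer.store_timestep(['stage', 'xmomentum', 'ymomentum'])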

#Class for handling checkpoint data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt)
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        Data_format.__init__(self, domain, 'cpt', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write x,y coordinates of triangles and their connectivity.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision=self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store time and named quantity to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying again'
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        var = fid.variables[name]
        i = len(time)

        #Store time
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                                  precision = self.precision)

        var[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()





#Function for storing xya output
#FIXME Not done yet for this version
#This is obsolete. Use geospatial_data instead
class Data_format_xya(Data_format):
    """Interface to ASCII xya format for storing vertex data
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME - This is the old xya format
    def store_all(self):
        """Specialisation of store_all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        if domain.smooth is True:
            number_of_points = len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes is what is assumed by the xya format

        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momentums) to file
        """
        pass


#### NEED national exposure database

LAT_TITLE = 'LATITUDE'
LONG_TITLE = 'LONGITUDE'
X_TITLE = 'x'
Y_TITLE = 'y'
class Exposure_csv:
    def __init__(self, file_name, latitude_title=LAT_TITLE,
                 longitude_title=LONG_TITLE, is_x_y_locations=None,
                 x_title=X_TITLE, y_title=Y_TITLE,
                 refine_polygon=None):
        """
        This class is for handling the exposure csv file.
        It reads the file in and converts the lats and longs to a geospatial
        data object.
        Use the methods to read and write columns.

        The format of the csv files it reads is:
           The first row is a title row.
           Commas are the delimiters.
           Each column is a 'set' of data.

        Feel free to use/expand it to read other csv files.


        It is not for adding and deleting rows.

        Can geospatial handle string attributes? It's not made for them.
        Currently it can't load and save string att's.

        So just use geospatial to hold the x, y and georef? Bad, since
        different att's are in different structures. Not so bad, the info
        to write if the .csv file is saved is in attribute_dic

        The location info is in the geospatial attribute.


        """
        self._file_name = file_name
        self._geospatial = None

        # self._attribute_dic is a dictionary.
        #The keys are the column titles.
        #The values are lists of column data

        # self._title_index_dic is a dictionary.
        #The keys are the column titles.
        #The values are the index positions of file columns.
        self._attribute_dic, self._title_index_dic = \
            self._load_exposure_csv(self._file_name)
        try:
            lats = self._attribute_dic[latitude_title]
            longs = self._attribute_dic[longitude_title]

        except KeyError:
            # maybe a warning..
            #Let's see if this works..
            if is_x_y_locations is not False:
                is_x_y_locations = True
            pass
        else:
            self._geospatial = Geospatial_data(latitudes = lats,
                                               longitudes = longs)

        if is_x_y_locations is True:
            if self._geospatial is not None:
                pass #FIXME throw an error
            try:
                xs = self._attribute_dic[x_title]
                ys = self._attribute_dic[y_title]
                points = [[float(i),float(j)] for i,j in map(None,xs,ys)]
            except KeyError:
                # maybe a warning..
                msg = "Could not find location information."
                raise TitleValueError, msg
            else:
                self._geospatial = Geospatial_data(data_points=points)

        # create a list of points that are in the refining_polygon
        # described by a list of indexes representing the points

    def __cmp__(self, other):
        #print "self._attribute_dic",self._attribute_dic
        #print "other._attribute_dic",other._attribute_dic
        #print "self._title_index_dic", self._title_index_dic
        #print "other._title_index_dic", other._title_index_dic

        #check that other is an instance of this class
        if isinstance(self, type(other)):
            result = cmp(self._attribute_dic, other._attribute_dic)
            if result != 0:
                return result
            # The order of the columns is important. Therefore..
            result = cmp(self._title_index_dic, other._title_index_dic)
            if result != 0:
                return result
            for self_ls, other_ls in map(None, self._attribute_dic,
                                         other._attribute_dic):
                result = cmp(self._attribute_dic[self_ls],
                             other._attribute_dic[other_ls])
                if result != 0:
                    return result
            return 0
        else:
            return 1

    def _load_exposure_csv(self, file_name):
        """
        Load in the csv as a dic, title as key and column info as value.
        Also, create a dic, title as key and column index as value.
        """
        attribute_dic = {}
        title_index_dic = {}
        titles_stripped = [] # list of titles

        reader = csv.reader(file(file_name))

        # Read in and manipulate the title info
        titles = reader.next()
        for i, title in enumerate(titles):
            titles_stripped.append(title.strip())
            title_index_dic[title.strip()] = i
        title_count = len(titles_stripped)
        #print "title_index_dic",title_index_dic


        #create a dic of column values, indexed by column title
        for line in reader:
            if len(line) != title_count:
                raise IOError #FIXME make this nicer
            for i, value in enumerate(line):
                attribute_dic.setdefault(titles_stripped[i],[]).append(value)

        return attribute_dic, title_index_dic

    def get_column(self, column_name, use_refind_polygon=False):
        """
        Given a column name return a list of the column values.

        Note, the type of the values will be String!
        Do this to change a list of strings to a list of floats:
        time = [float(x) for x in time]

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        if not self._attribute_dic.has_key(column_name):
            msg = 'There is no column called %s!' %column_name
            raise TitleValueError, msg
        return self._attribute_dic[column_name]


    def get_value(self, value_column_name,
                  known_column_name,
                  known_values,
                  use_refind_polygon=False):
        """
        Do linear interpolation on the known_column_name column, using the
        known_values, to return a value of the value_column_name column.
        """
        pass


    def get_location(self, use_refind_polygon=False):
        """
        Return a geospatial object which describes the
        locations of the location file.

        Note, if there is no location info, this returns None.

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        return self._geospatial

    def set_column(self, column_name, column_values, overwrite=False):
        """
        Add a column to the 'end' (with the right most column being the end)
        of the csv file.

        Set overwrite to True if you want to overwrite a column.

        Note, in column_name white space is removed and case is not checked.
        Precondition:
        The column_name and column_values cannot have commas in them.
        """

        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        if len(column_values) != value_row_count:
            msg = 'The number of column values must equal the number of rows.'
            raise DataMissingValuesError, msg

        if self._attribute_dic.has_key(column_name):
            if not overwrite:
                msg = 'Column name %s already in use!' %column_name
                raise TitleValueError, msg
        else:
            # New title.  Add it to the title index.
            self._title_index_dic[column_name] = len(self._title_index_dic)
        self._attribute_dic[column_name] = column_values
        #print "self._title_index_dic[column_name]",self._title_index_dic

    def save(self, file_name=None):
        """
        Save the exposure csv file
        """
        if file_name is None:
            file_name = self._file_name

        fd = open(file_name, 'wb')
        writer = csv.writer(fd)

        #Write the title to a csv file
        line = [None]*len(self._title_index_dic)
        for title in self._title_index_dic.iterkeys():
            line[self._title_index_dic[title]] = title
        writer.writerow(line)

        # Write the values to a csv file
        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        for row_i in range(value_row_count):
            line = [None]*len(self._title_index_dic)
            for title in self._title_index_dic.iterkeys():
                line[self._title_index_dic[title]] = \
                     self._attribute_dic[title][row_i]
            writer.writerow(line)

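# A usage sketch (illustrative; the filename and column titles are
# hypothetical):
#
#     exposure = Exposure_csv('houses.csv')
#     lats = exposure.get_column('LATITUDE')       # values are strings
#     depths = ['0.0']*len(lats)
#     exposure.set_column('MAX INUNDATION DEPTH', depths)
#     exposure.save('houses_with_depth.csv')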

#Auxiliary
def write_obj(filename, x, y, z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected.

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """
    #print 'Writing obj to %s' % filename

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N == 3  #Assuming three vertices per element

    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j], y[i,j], z[i,j]))

    for i in range(M):
        base = i*N
        outfile.write("f %d %d %d\n" % (base+1, base+2, base+3))

    outfile.close()

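# Example (illustrative): a single triangle written as an obj file.
# Each row of x, y and z holds the three vertices of one element.
#
#     from Numeric import array, Float
#     x = array([[0.0, 1.0, 0.0]], Float)
#     y = array([[0.0, 0.0, 1.0]], Float)
#     z = array([[0.0, 0.0, 0.0]], Float)
#     write_obj('single_triangle', x, y, z)  # writes single_triangle.obj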

#########################################################
#Conversion routines
########################################################

def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN, xx, yy, zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN, xx, yy, zz)

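# Example (illustrative): convert an sww file whose name follows the
# create_filename convention, e.g. './run_size100.sww' holding 100 triangles:
#
#     sww2obj('run', 100)  # writes run_size100.obj plus one obj per timestep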

def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?
    """

    import glob, os
    from anuga.config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    for i, line in enumerate(lines):
        tokens = line.split()
        values = map(float, tokens)

        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry', x, y, z)


    #Now read all the data files with variable information,
    #combine with x,y info and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print 'Processing %s' % filename

        lines = open(data_dir+os.sep+filename, 'r').readlines()
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise DataTimeError, 'Hmmmm'



        for i, line in enumerate(lines):
            tokens = line.split()
            values = map(float, tokens)

            for j in range(3):
                z[i,j] = values[j]

        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t, x, y, z)


def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()

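# Example (illustrative): thin out an sww file, keeping every fifth
# timestep. Filenames are hypothetical.
#
#     filter_netcdf('run.sww', 'run_coarse.sww', first=0, step=5)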

#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of the Data_format class matching domain.format
    """

    cls = eval('Data_format_%s' %domain.format)
    return cls(domain, mode)

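# The eval-based dispatch above resolves e.g. Data_format_sww when
# domain.format == 'sww'. An equivalent eval-free sketch (illustrative):
#
#     _writers = {'sww': Data_format_sww,
#                 'cpt': Data_format_cpt,
#                 'xya': Data_format_xya}
#     writer = _writers[domain.format](domain, mode)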
#FIXME move into geospatial. There should only be one method that
#reads xya, and writes pts.
def xya2pts(basename_in, basename_out=None, verbose=False,
            #easting_min=None, easting_max=None,
            #northing_min=None, northing_max=None,
            stride = 1,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed

    If z_func is specified, it will be applied to the third field
    """

    import os
    #from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, arrayrange, concatenate

    root, ext = os.path.splitext(basename_in)

    if ext == '': ext = '.xya'

    #Get ASCII file
    infile = open(root + ext, 'r')  #Open existing xya file for read

    if verbose: print 'Reading xya points from %s' %(root + ext)

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):

        if i % stride != 0: continue

        fields = line.split()

        try:
            assert len(fields) == 3
        except:
            print 'WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line)

        try:
            x = float( fields[0] )
            y = float( fields[1] )
            z = float( fields[2] )
        except:
            continue

        points.append( [x, y] )

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    write_ptsfile(ptsname, points, attribute, attribute_name)

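# Example (illustrative): convert 'bathymetry.xya' to 'bathymetry.pts',
# keeping every second line and turning depths into elevations via z_func:
#
#     xya2pts('bathymetry', stride=2, z_func=lambda z: -z)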

######Obsoleted by export_points in load_mesh
def write_ptsfile(ptsname, points, attribute, attribute_name = None,
                  zone=None, xllcorner=None, yllcorner=None):
    """Write points and associated attribute to pts (NetCDF) format
    """

    print 'WARNING: write_ptsfile is obsolete. Use export_points from load_mesh.loadASCII instead.'

    from Numeric import Float

    if attribute_name is None:
        attribute_name = 'attribute'


    from Scientific.IO.NetCDF import NetCDFFile

    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')


    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and '\
                          'portable storage of spatial point data'


    #Georeferencing
    from anuga.coordinate_transforms.geo_reference import Geo_reference
    if zone is None:
        assert xllcorner is None, 'xllcorner must be None'
        assert yllcorner is None, 'yllcorner must be None'
        Geo_reference().write_NetCDF(outfile)
    else:
        Geo_reference(zone, xllcorner, yllcorner).write_NetCDF(outfile)



    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable(attribute_name, Float, ('number_of_points',))

    # Get handles to the variables
    nc_points = outfile.variables['points']
    nc_attribute = outfile.variables[attribute_name]

    #Store data
    nc_points[:, :] = points
    nc_attribute[:] = attribute

    outfile.close()


def dem2pts(basename_in, basename_out=None,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None,
            use_cache=False, verbose=False):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """



    kwargs = {'basename_out': basename_out,
              'easting_min': easting_min,
              'easting_max': easting_max,
              'northing_min': northing_min,
              'northing_max': northing_max,
              'verbose': verbose}

    if use_cache is True:
        from caching import cache
        result = cache(_dem2pts, basename_in, kwargs,
                       dependencies = [basename_in + '.dem'],
                       verbose = verbose)

    else:
        result = apply(_dem2pts, [basename_in], kwargs)

    return result

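# Example (illustrative): convert 'topo.dem' to 'topo.pts', clipped to a
# hypothetical bounding box, caching the (slow) conversion:
#
#     dem2pts('topo', easting_min=722000, easting_max=724000,
#             northing_min=5893000, northing_max=5895000,
#             use_cache=True, verbose=True)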

def _dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Internal function. See public function dem2pts for details.
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape, sum

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                      'of spatial point data'
    #assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
    totalnopoints = nrows*ncols

    # calculating number of NODATA_values for each row in clipped region
    #FIXME: use array operations to do faster
    nn = 0
    k = 0
    i1_0 = 0
    j1_0 = 0
    thisj = 0
    thisi = 0
    for i in range(nrows):
        y = (nrows-i-1)*cellsize + yllcorner
        for j in range(ncols):
            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                thisj = j
                thisi = i
                if dem_elevation_r[i,j] == NODATA_value: nn += 1

                if k == 0:
                    i1_0 = i
                    j1_0 = j
                k += 1

    index1 = j1_0
    index2 = thisj

    # dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))

    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
    nopoints = clippednopoints-nn

    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1,j1_0:thisj+1]

    if verbose:
        print 'There are %d values in the elevation' %totalnopoints
        print 'There are %d values in the clipped elevation' %clippednopoints
        print 'There are %d NODATA_values in the clipped elevation' %nn

    outfile.createDimension('number_of_points', nopoints)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    lenv = index2-index1+1
    #Store data
    global_index = 0
    for i in range(i1_0, thisi+1, 1):
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        lower_index = global_index

        v = dem_elevation_r[i,index1:index2+1]
        no_NODATA = sum(v == NODATA_value)
        if no_NODATA > 0:
            newcols = lenv - no_NODATA #ncols_in_bounding_box - no_NODATA
        else:
            newcols = lenv #ncols_in_bounding_box

        telev = zeros(newcols, Float)
        tpoints = zeros((newcols, 2), Float)

        local_index = 0

        y = (nrows-i-1)*cellsize + yllcorner
        for j in range(j1_0, index2+1, 1):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max and \
               dem_elevation_r[i,j] != NODATA_value:
                tpoints[local_index, :] = [x-easting_min, y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        if upper_index == lower_index + newcols:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nopoints, 'index not equal to number of points'

    infile.close()
    outfile.close()



def _read_hecras_cross_sections(lines):
    """Return block of surface lines for each cross section
    Starts with SURFACE LINE,
    Ends with END CROSS-SECTION
    """

    points = []

    reading_surface = False
    for i, line in enumerate(lines):

        if len(line.strip()) == 0:    #Ignore blanks
            continue

        if lines[i].strip().startswith('SURFACE LINE'):
            reading_surface = True
            continue

        if lines[i].strip().startswith('END') and reading_surface:
            yield points
            reading_surface = False
            points = []

        if reading_surface:
            fields = line.strip().split(',')
            easting = float(fields[0])
            northing = float(fields[1])
            elevation = float(fields[2])
            points.append([easting, northing, elevation])

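# Note: this is a generator; each yield is the list of
# [easting, northing, elevation] points for one cross section.
# Illustrative use (the filename is hypothetical):
#
#     lines = open('export.sdf', 'r').readlines()
#     for section in _read_hecras_cross_sections(lines):
#         print '%d points in section' %len(section)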
1602
1603
1604def hecras_cross_sections2pts(basename_in,
1605                              basename_out=None,
1606                              verbose=False):
1607    """Read HEC-RAS Elevation datal from the following ASCII format (.sdf)
1608
1609    Example:
1610
1611
1612# RAS export file created on Mon 15Aug2005 11:42
1613# by HEC-RAS Version 3.1.1
1614
1615BEGIN HEADER:
1616  UNITS: METRIC
1617  DTM TYPE: TIN
1618  DTM: v:\1\cit\perth_topo\river_tin
1619  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
1620  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
1621  MAP PROJECTION: UTM
1622  PROJECTION ZONE: 50
1623  DATUM: AGD66
1624  VERTICAL DATUM:
1625  NUMBER OF REACHES:  19
1626  NUMBER OF CROSS-SECTIONS:  14206
1627END HEADER:
1628
1629
1630Only the SURFACE LINE data of the following form will be utilised
1631
1632  CROSS-SECTION:
1633    STREAM ID:Southern-Wungong
1634    REACH ID:Southern-Wungong
1635    STATION:19040.*
1636    CUT LINE:
1637      405548.671603161 , 6438142.7594925
1638      405734.536092045 , 6438326.10404912
1639      405745.130459356 , 6438331.48627354
1640      405813.89633823 , 6438368.6272789
1641    SURFACE LINE:
1642     405548.67,   6438142.76,   35.37
1643     405552.24,   6438146.28,   35.41
1644     405554.78,   6438148.78,   35.44
1645     405555.80,   6438149.79,   35.44
1646     405559.37,   6438153.31,   35.45
1647     405560.88,   6438154.81,   35.44
1648     405562.93,   6438156.83,   35.42
1649     405566.50,   6438160.35,   35.38
1650     405566.99,   6438160.83,   35.37
1651     ...
1652   END CROSS-SECTION
1653
1654    Convert to NetCDF pts format which is
1655
1656    points:  (Nx2) Float array
1657    elevation: N Float array
1658    """
1659
1660    #FIXME: Can this be written feasibly using write_pts?
1661
1662    import os
1663    from Scientific.IO.NetCDF import NetCDFFile
1664    from Numeric import Float, zeros, reshape
1665
1666    root = basename_in
1667
1668    #Get ASCII file
1669    infile = open(root + '.sdf', 'r')  #Open SDF file for read
1670
1671    if verbose: print 'Reading DEM from %s' %(root + '.sdf')
1672
1673    lines = infile.readlines()
1674    infile.close()
1675
1676    if verbose: print 'Converting to pts format'
1677
1678    i = 0
1679    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
1680        i += 1
1681
1682    assert lines[i].strip().upper() == 'BEGIN HEADER:'
1683    i += 1
1684
1685    assert lines[i].strip().upper().startswith('UNITS:')
1686    units = lines[i].strip().split()[1]
1687    i += 1
1688
1689    assert lines[i].strip().upper().startswith('DTM TYPE:')
1690    i += 1
1691
1692    assert lines[i].strip().upper().startswith('DTM:')
1693    i += 1
1694
1695    assert lines[i].strip().upper().startswith('STREAM')
1696    i += 1
1697
1698    assert lines[i].strip().upper().startswith('CROSS')
1699    i += 1
1700
1701    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
1702    projection = lines[i].strip().split(':')[1]
1703    i += 1
1704
1705    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
1706    zone = int(lines[i].strip().split(':')[1])
1707    i += 1
1708
1709    assert lines[i].strip().upper().startswith('DATUM:')
1710    datum = lines[i].strip().split(':')[1]
1711    i += 1
1712
1713    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
1714    i += 1
1715
1716    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
1717    i += 1
1718
1719    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
1720    number_of_cross_sections = int(lines[i].strip().split(':')[1])
1721    i += 1
1722
1723
1724    #Now read all points
1725    points = []
1726    elevation = []
1727    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
1728        for k, entry in enumerate(entries):
1729            points.append(entry[:2])
1730            elevation.append(entry[2])
1731
1732
1733    msg = 'Actual number of cross sections == %d, reported as %d'\
1734          %(j+1, number_of_cross_sections)
1735    assert j+1 == number_of_cross_sections, msg
1736
1737    #Get output file
1738    if basename_out == None:
1739        ptsname = root + '.pts'
1740    else:
1741        ptsname = basename_out + '.pts'
1742
1743    #FIXME (DSG-ON): use loadASCII export_points_file
1744    if verbose: print 'Store to NetCDF file %s' %ptsname
1745    # NetCDF file definition
1746    outfile = NetCDFFile(ptsname, 'w')
1747
1748    #Create new file
1749    outfile.institution = 'Geoscience Australia'
1750    outfile.description = 'NetCDF pts format for compact and portable ' +\
1751                          'storage of spatial point data derived from HEC-RAS'
1752
1753    #Georeferencing
1754    outfile.zone = zone
1755    outfile.xllcorner = 0.0
1756    outfile.yllcorner = 0.0
1757    outfile.false_easting = 500000    #FIXME: Use defaults from e.g. config.py
1758    outfile.false_northing = 1000000
1759
1760    outfile.projection = projection
1761    outfile.datum = datum
1762    outfile.units = units
1763
1764
1765    outfile.createDimension('number_of_points', len(points))
1766    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1767
1768    # variable definitions
1769    outfile.createVariable('points', Float, ('number_of_points',
1770                                             'number_of_dimensions'))
1771    outfile.createVariable('elevation', Float, ('number_of_points',))
1772
1773    # Get handles to the variables
1774    outfile.variables['points'][:] = points
1775    outfile.variables['elevation'][:] = elevation
1776
1777    outfile.close()
1778
1779
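# Example usage (a sketch; 'riverbed' is a hypothetical basename for which
# riverbed.sdf exists):
#
#     hecras_cross_sections2pts('riverbed',
#                               basename_out='riverbed_elevation',
#                               verbose=True)
#
# This creates riverbed_elevation.pts in the NetCDF pts format.
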
1780
1781def sww2dem(basename_in, basename_out = None,
1782            quantity = None,
1783            timestep = None,
1784            reduction = None,
1785            cellsize = 10,
1786            NODATA_value = -9999,
1787            easting_min = None,
1788            easting_max = None,
1789            northing_min = None,
1790            northing_max = None,
1791            verbose = False,
1792            origin = None,
1793            datum = 'WGS84',
1794            format = 'ers'):
1795
1796    """Read SWW file and convert to Digitial Elevation model format (.asc or .ers)
1797
1798    Example (ASC):
1799
1800    ncols         3121
1801    nrows         1800
1802    xllcorner     722000
1803    yllcorner     5893000
1804    cellsize      25
1805    NODATA_value  -9999
1806    138.3698 137.4194 136.5062 135.5558 ..........
1807
1808    For the 'asc' format, also write an accompanying basename_out + '.prj'
1809    file used to fix the UTM zone, datum, false northings and eastings.
1810
1811    The prj format is assumed to be as follows:
1812
1813    Projection    UTM
1814    Zone          56
1815    Datum         WGS84
1816    Zunits        NO
1817    Units         METERS
1818    Spheroid      WGS84
1819    Xshift        0.0000000000
1820    Yshift        10000000.0000000000
1821    Parameters
1822
1823
1824    The parameter quantity must be the name of an existing quantity or
1825    an expression involving existing quantities. The default is
1826    'elevation'.
1827
1828    if timestep (an index) is given, output quantity at that timestep
1829
1830    if reduction is given use that to reduce quantity over all timesteps.
1831
1832    datum
1833
1834    format can be either 'asc' or 'ers'
1835    """
1836
1837    import sys
1838    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
1839    from Numeric import array2string
1840
[3514]1841    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
[3560]1842    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
[2852]1843
1844    msg = 'Format must be either asc or ers'
1845    assert format.lower() in ['asc', 'ers'], msg
1846
1847
1848    false_easting = 500000
1849    false_northing = 10000000
1850
1851    if quantity is None:
1852        quantity = 'elevation'
1853
1854    if reduction is None:
1855        reduction = max
1856
1857    if basename_out is None:
1858        basename_out = basename_in + '_%s' %quantity
1859
1860    swwfile = basename_in + '.sww'
1861    demfile = basename_out + '.' + format
1862    # Note the use of a .ers extension is optional (write_ermapper_grid will
1863    # deal with either option)
1864
1865    #Read sww file
1866    if verbose: print 'Reading from %s' %swwfile
1867    from Scientific.IO.NetCDF import NetCDFFile
1868    fid = NetCDFFile(swwfile)
1869
1870    #Get extent and reference
1871    x = fid.variables['x'][:]
1872    y = fid.variables['y'][:]
1873    volumes = fid.variables['volumes'][:]
1874
1875    number_of_timesteps = fid.dimensions['number_of_timesteps']
1876    number_of_points = fid.dimensions['number_of_points']
1877    if origin is None:
1878
1879        #Get geo_reference
1880        #sww files don't have to have a geo_ref
1881        try:
1882            geo_reference = Geo_reference(NetCDFObject=fid)
1883        except AttributeError, e:
1884            geo_reference = Geo_reference() #Default georef object
1885
1886        xllcorner = geo_reference.get_xllcorner()
1887        yllcorner = geo_reference.get_yllcorner()
1888        zone = geo_reference.get_zone()
1889    else:
1890        zone = origin[0]
1891        xllcorner = origin[1]
1892        yllcorner = origin[2]
1893
1894
1895
1896    #FIXME: Refactor using code from file_function.statistics
1897    #Something like print swwstats(swwname)
1898    if verbose:
1899        x = fid.variables['x'][:]
1900        y = fid.variables['y'][:]
1901        times = fid.variables['time'][:]
1902        print '------------------------------------------------'
1903        print 'Statistics of SWW file:'
1904        print '  Name: %s' %swwfile
1905        print '  Reference:'
1906        print '    Lower left corner: [%f, %f]'\
1907              %(xllcorner, yllcorner)
1908        print '    Start time: %f' %fid.starttime[0]
1909        print '  Extent:'
1910        print '    x [m] in [%f, %f], len(x) == %d'\
1911              %(min(x.flat), max(x.flat), len(x.flat))
1912        print '    y [m] in [%f, %f], len(y) == %d'\
1913              %(min(y.flat), max(y.flat), len(y.flat))
1914        print '    t [s] in [%f, %f], len(t) == %d'\
1915              %(min(times), max(times), len(times))
1916        print '  Quantities [SI units]:'
1917        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
1918            q = fid.variables[name][:].flat
1919            print '    %s in [%f, %f]' %(name, min(q), max(q))
1920
1921
1922
1923
1924
[2891]1925    # Get quantity and reduce if applicable
[2852]1926    if verbose: print 'Processing quantity %s' %quantity
1927
[2891]1928    # Turn NetCDF objects into Numeric arrays
[2852]1929    quantity_dict = {}
1930    for name in fid.variables.keys():
1931        quantity_dict[name] = fid.variables[name][:]
1932
1933
[2891]1934
1935    # Convert quantity expression to quantities found in sww file   
[2852]1936    q = apply_expression_to_dictionary(quantity, quantity_dict)
1937
1938
1939
1940    if len(q.shape) == 2:
1941        #q has a time component and needs to be reduced along
1942        #the temporal dimension
1943        if verbose: print 'Reducing quantity %s' %quantity
1944        q_reduced = zeros( number_of_points, Float )
1945
1946        for k in range(number_of_points):
1947            q_reduced[k] = reduction( q[:,k] )
1948
1949        q = q_reduced
1950
1951    #Post condition: Now q has dimension: number_of_points
1952    assert len(q.shape) == 1
1953    assert q.shape[0] == number_of_points
1954
1955
1956    if verbose:
1957        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
1958
1959
1960    #Create grid and update xll/yll corner and x,y
1961
1962    #Relative extent
1963    if easting_min is None:
1964        xmin = min(x)
1965    else:
1966        xmin = easting_min - xllcorner
1967
1968    if easting_max is None:
1969        xmax = max(x)
1970    else:
1971        xmax = easting_max - xllcorner
1972
1973    if northing_min is None:
1974        ymin = min(y)
1975    else:
1976        ymin = northing_min - yllcorner
1977
1978    if northing_max is None:
1979        ymax = max(y)
1980    else:
1981        ymax = northing_max - yllcorner
1982
1983
1984
1985    if verbose: print 'Creating grid'
1986    ncols = int((xmax-xmin)/cellsize)+1
1987    nrows = int((ymax-ymin)/cellsize)+1
1988
1989
1990    #New absolute reference and coordinates
1991    newxllcorner = xmin+xllcorner
1992    newyllcorner = ymin+yllcorner
1993
1994    x = x+xllcorner-newxllcorner
1995    y = y+yllcorner-newyllcorner
1996
1997    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
1998    assert len(vertex_points.shape) == 2
1999
2000
2001
2002    grid_points = zeros ( (ncols*nrows, 2), Float )
2003
2004
2005    for i in xrange(nrows):
2006        if format.lower() == 'asc':
2007            yg = i*cellsize
2008        else:
2009            #this will flip the order of the y values for ers
2010            yg = (nrows-i)*cellsize
2011
2012        for j in xrange(ncols):
2013            xg = j*cellsize
2014            k = i*ncols + j
2015
2016            grid_points[k,0] = xg
2017            grid_points[k,1] = yg
2018
2019    #Interpolate
2020    #from least_squares import Interpolation
[3514]2021    from anuga.fit_interpolate.interpolate import Interpolate
[2852]2022
2023
2024    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2025
2026
2027
2028    #Interpolate using quantity values
2029    if verbose: print 'Interpolating'
2030    grid_values = interp.interpolate(q, grid_points).flat
2031
2032
2033    if verbose:
2034        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
2035                                                      max(grid_values))
2036
2037    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2038    P = interp.mesh.get_boundary_polygon()
2039    outside_indices = outside_polygon(grid_points, P, closed=True)
2040
2041    for i in outside_indices:
2042        grid_values[i] = NODATA_value
2043
2044
2045
2046
2047    if format.lower() == 'ers':
2048        # setup ERS header information
2049        grid_values = reshape(grid_values,(nrows, ncols))
2050        header = {}
2051        header['datum'] = '"' + datum + '"'
2052        # FIXME The use of hardwired UTM and zone number needs to be made optional
2053        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
2054        header['projection'] = '"UTM-' + str(zone) + '"'
2055        header['coordinatetype'] = 'EN'
2056        if header['coordinatetype'] == 'LL':
2057            header['longitude'] = str(newxllcorner)
2058            header['latitude'] = str(newyllcorner)
2059        elif header['coordinatetype'] == 'EN':
2060            header['eastings'] = str(newxllcorner)
2061            header['northings'] = str(newyllcorner)
2062        header['nullcellvalue'] = str(NODATA_value)
2063        header['xdimension'] = str(cellsize)
2064        header['ydimension'] = str(cellsize)
2065        header['value'] = '"' + quantity + '"'
2066        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
2067
2068
2069        #Write
2070        if verbose: print 'Writing %s' %demfile
2071        import ermapper_grids
2072        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
2073
2074        fid.close()
2075    else:
2076        #Write to Ascii format
2077
2078        #Write prj file
2079        prjfile = basename_out + '.prj'
2080
2081        if verbose: print 'Writing %s' %prjfile
2082        prjid = open(prjfile, 'w')
2083        prjid.write('Projection    %s\n' %'UTM')
2084        prjid.write('Zone          %d\n' %zone)
2085        prjid.write('Datum         %s\n' %datum)
2086        prjid.write('Zunits        NO\n')
2087        prjid.write('Units         METERS\n')
2088        prjid.write('Spheroid      %s\n' %datum)
2089        prjid.write('Xshift        %d\n' %false_easting)
2090        prjid.write('Yshift        %d\n' %false_northing)
2091        prjid.write('Parameters\n')
2092        prjid.close()
2093
2094
2095
2096        if verbose: print 'Writing %s' %demfile
2097
2098        ascid = open(demfile, 'w')
2099
2100        ascid.write('ncols         %d\n' %ncols)
2101        ascid.write('nrows         %d\n' %nrows)
2102        ascid.write('xllcorner     %d\n' %newxllcorner)
2103        ascid.write('yllcorner     %d\n' %newyllcorner)
2104        ascid.write('cellsize      %f\n' %cellsize)
2105        ascid.write('NODATA_value  %d\n' %NODATA_value)
2106
2107
2108        #Get bounding polygon from mesh
2109        #P = interp.mesh.get_boundary_polygon()
2110        #inside_indices = inside_polygon(grid_points, P)
2111
2112        for i in range(nrows):
2113            if verbose and i%((nrows+10)/10)==0:
2114                print 'Doing row %d of %d' %(i, nrows)
2115
2116            base_index = (nrows-i-1)*ncols
2117
2118            slice = grid_values[base_index:base_index+ncols]
2119            s = array2string(slice, max_line_width=sys.maxint)
2120            ascid.write(s[1:-1] + '\n')
2121
2122
2123            #print
2124            #for j in range(ncols):
2125            #    index = base_index+j#
2126            #    print grid_values[index],
2127            #    ascid.write('%f ' %grid_values[index])
2128            #ascid.write('\n')
2129
2130        #Close
2131        ascid.close()
2132        fid.close()
2133
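# Example usage of sww2dem (a sketch; 'runup' is a hypothetical basename
# for which runup.sww exists; the quantity expression assumes the sww file
# stores both stage and elevation):
#
#     sww2dem('runup',
#             basename_out='runup_depth',
#             quantity='stage - elevation',  # water depth
#             reduction=max,                 # maximum over all timesteps
#             cellsize=25,
#             format='asc')
#
# This writes runup_depth.asc plus an accompanying runup_depth.prj file.
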
2134#Backwards compatibility
2135def sww2asc(basename_in, basename_out = None,
2136            quantity = None,
2137            timestep = None,
2138            reduction = None,
2139            cellsize = 10,
2140            verbose = False,
2141            origin = None):
2142    print 'sww2asc will soon be obsolete - please use sww2dem'
2143    sww2dem(basename_in,
2144            basename_out = basename_out,
2145            quantity = quantity,
2146            timestep = timestep,
2147            reduction = reduction,
2148            cellsize = cellsize,
2149            verbose = verbose,
2150            origin = origin,
2151            datum = 'WGS84',
2152            format = 'asc')
2153
2154def sww2ers(basename_in, basename_out = None,
2155            quantity = None,
2156            timestep = None,
2157            reduction = None,
2158            cellsize = 10,
2159            verbose = False,
2160            origin = None,
2161            datum = 'WGS84'):
2162    print 'sww2ers will soon be obsolete - please use sww2dem'
2163    sww2dem(basename_in,
2164            basename_out = basename_out,
2165            quantity = quantity,
2166            timestep = timestep,
2167            reduction = reduction,
2168            cellsize = cellsize,
2169            verbose = verbose,
2170            origin = origin,
[2891]2171            datum = datum,
2172            format = 'ers')
[2852]2173################################# END COMPATIBILITY ##############
2174
2175
2176
[2891]2177def sww2pts(basename_in, basename_out=None,
2178            data_points=None,
2179            quantity=None,
2180            timestep=None,
2181            reduction=None,
2182            NODATA_value=-9999,
2183            verbose=False,
2184            origin=None):
2185            #datum = 'WGS84')
2186
2187
2188    """Read SWW file and convert to interpolated values at selected points
2189
2190    The parameter 'quantity' must be the name of an existing quantity or
2191    an expression involving existing quantities. The default is
2192    'elevation'.
2193
2194    if timestep (an index) is given, output quantity at that timestep
2195
2196    if reduction is given use that to reduce quantity over all timesteps.
2197
2198    data_points (Nx2 array) gives locations of points where the quantity is to be computed.
2199   
2200    """
2201
2202    import sys
2203    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
2204    from Numeric import array2string
2205
[3514]2206    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
[3560]2207    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
[2891]2208
[3514]2209    from anuga.geospatial_data.geospatial_data import Geospatial_data
[2891]2210
2211    if quantity is None:
2212        quantity = 'elevation'
2213
2214    if reduction is None:
2215        reduction = max
2216
2217    if basename_out is None:
2218        basename_out = basename_in + '_%s' %quantity
2219
2220    swwfile = basename_in + '.sww'
2221    ptsfile = basename_out + '.pts'
2222
2223    # Read sww file
2224    if verbose: print 'Reading from %s' %swwfile
2225    from Scientific.IO.NetCDF import NetCDFFile
2226    fid = NetCDFFile(swwfile)
2227
2228    # Get extent and reference
2229    x = fid.variables['x'][:]
2230    y = fid.variables['y'][:]
2231    volumes = fid.variables['volumes'][:]
2232
2233    number_of_timesteps = fid.dimensions['number_of_timesteps']
2234    number_of_points = fid.dimensions['number_of_points']
2235    if origin is None:
2236
2237        # Get geo_reference
2238        # sww files don't have to have a geo_ref
2239        try:
2240            geo_reference = Geo_reference(NetCDFObject=fid)
2241        except AttributeError, e:
2242            geo_reference = Geo_reference() #Default georef object
2243
2244        xllcorner = geo_reference.get_xllcorner()
2245        yllcorner = geo_reference.get_yllcorner()
2246        zone = geo_reference.get_zone()
2247    else:
2248        zone = origin[0]
2249        xllcorner = origin[1]
2250        yllcorner = origin[2]
2251
2252
2253
2254    # FIXME: Refactor using code from file_function.statistics
2255    # Something like print swwstats(swwname)
2256    if verbose:
2257        x = fid.variables['x'][:]
2258        y = fid.variables['y'][:]
2259        times = fid.variables['time'][:]
2260        print '------------------------------------------------'
2261        print 'Statistics of SWW file:'
2262        print '  Name: %s' %swwfile
2263        print '  Reference:'
2264        print '    Lower left corner: [%f, %f]'\
2265              %(xllcorner, yllcorner)
2266        print '    Start time: %f' %fid.starttime[0]
2267        print '  Extent:'
2268        print '    x [m] in [%f, %f], len(x) == %d'\
2269              %(min(x.flat), max(x.flat), len(x.flat))
2270        print '    y [m] in [%f, %f], len(y) == %d'\
2271              %(min(y.flat), max(y.flat), len(y.flat))
2272        print '    t [s] in [%f, %f], len(t) == %d'\
2273              %(min(times), max(times), len(times))
2274        print '  Quantities [SI units]:'
2275        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2276            q = fid.variables[name][:].flat
2277            print '    %s in [%f, %f]' %(name, min(q), max(q))
2278
2279
2280
2281    # Get quantity and reduce if applicable
2282    if verbose: print 'Processing quantity %s' %quantity
2283
2284    # Turn NetCDF objects into Numeric arrays
2285    quantity_dict = {}
2286    for name in fid.variables.keys():
2287        quantity_dict[name] = fid.variables[name][:]
2288
2289
2290
2291    # Convert quantity expression to quantities found in sww file   
2292    q = apply_expression_to_dictionary(quantity, quantity_dict)
2293
2294
2295
2296    if len(q.shape) == 2:
2297        # q has a time component and needs to be reduced along
2298        # the temporal dimension
2299        if verbose: print 'Reducing quantity %s' %quantity
2300        q_reduced = zeros( number_of_points, Float )
2301
2302        for k in range(number_of_points):
2303            q_reduced[k] = reduction( q[:,k] )
2304
2305        q = q_reduced
2306
2307    # Post condition: Now q has dimension: number_of_points
2308    assert len(q.shape) == 1
2309    assert q.shape[0] == number_of_points
2310
2311
2312    if verbose:
2313        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
2314
2315
2316    # Create grid and update xll/yll corner and x,y
2317    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2318    assert len(vertex_points.shape) == 2
2319
2320    # Interpolate
[3514]2321    from anuga.fit_interpolate.interpolate import Interpolate
[2891]2322    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2323
2324    # Interpolate using quantity values
2325    if verbose: print 'Interpolating'
2326    interpolated_values = interp.interpolate(q, data_points).flat
2327
2328
2329    if verbose:
2330        print 'Interpolated values are in [%f, %f]' %(min(interpolated_values),
2331                                                      max(interpolated_values))
2332
2333    # Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2334    P = interp.mesh.get_boundary_polygon()
2335    outside_indices = outside_polygon(data_points, P, closed=True)
2336
2337    for i in outside_indices:
2338        interpolated_values[i] = NODATA_value
2339
2340
2341    # Store results   
2342    G = Geospatial_data(data_points=data_points,
2343                        attributes=interpolated_values)
2344
2345    G.export_points_file(ptsfile, absolute = True)
2346
[2931]2347    fid.close()
[2891]2348
2349
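# Example usage (a sketch; 'runup' is a hypothetical basename for which
# runup.sww exists, and the data_points coordinates are assumed to be
# given in the same frame as the sww vertices):
#
#     points = [[1148.5, 3221.0],
#               [ 760.0, 2940.5]]
#     sww2pts('runup',
#             data_points=points,
#             quantity='elevation',
#             verbose=True)
#
# The interpolated values are written to runup_elevation.pts.
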
[2852]2350def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2351                                  use_cache = False,
2352                                  verbose = False):
2353    """Read Digitial Elevation model from the following ASCII format (.asc)
2354
2355    Example:
2356
2357    ncols         3121
2358    nrows         1800
2359    xllcorner     722000
2360    yllcorner     5893000
2361    cellsize      25
2362    NODATA_value  -9999
2363    138.3698 137.4194 136.5062 135.5558 ..........
2364
2365    Convert basename_in + '.asc' to NetCDF format (.dem)
2366    mimicking the ASCII format closely.
2367
2368
2369    An accompanying file with same basename_in but extension .prj must exist
2370    and is used to fix the UTM zone, datum, false northings and eastings.
2371
2372    The prj format is assumed to be as follows:
2373
2374    Projection    UTM
2375    Zone          56
2376    Datum         WGS84
2377    Zunits        NO
2378    Units         METERS
2379    Spheroid      WGS84
2380    Xshift        0.0000000000
2381    Yshift        10000000.0000000000
2382    Parameters
2383    """
2384
2385
2386
2387    kwargs = {'basename_out': basename_out, 'verbose': verbose}
2388
2389    if use_cache is True:
2390        from caching import cache
2391        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
2392                       dependencies = [basename_in + '.asc'],
2393                       verbose = verbose)
2394
2395    else:
2396        result = apply(_convert_dem_from_ascii2netcdf, [basename_in], kwargs)
2397
2398    return result
2399
2400
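# Example usage (a sketch; 'topography' is a hypothetical basename for
# which both topography.asc and topography.prj exist):
#
#     convert_dem_from_ascii2netcdf('topography', use_cache=True,
#                                   verbose=True)
#
# This produces topography.dem; with use_cache=True the conversion is only
# redone when topography.asc changes.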
2401
2402
2403
2404
2405def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2406                                  verbose = False):
2407    """Read Digitial Elevation model from the following ASCII format (.asc)
2408
2409    Internal function. See public function convert_dem_from_ascii2netcdf for details.
2410    """
2411
2412    import os
2413    from Scientific.IO.NetCDF import NetCDFFile
2414    from Numeric import Float, array
2415
2416    #root, ext = os.path.splitext(basename_in)
2417    root = basename_in
2418
2419    ###########################################
2420    # Read Meta data
2421    if verbose: print 'Reading METADATA from %s' %root + '.prj'
2422    metadatafile = open(root + '.prj')
2423    metalines = metadatafile.readlines()
2424    metadatafile.close()
2425
2426    L = metalines[0].strip().split()
2427    assert L[0].strip().lower() == 'projection'
2428    projection = L[1].strip()                   #TEXT
2429
2430    L = metalines[1].strip().split()
2431    assert L[0].strip().lower() == 'zone'
2432    zone = int(L[1].strip())
2433
2434    L = metalines[2].strip().split()
2435    assert L[0].strip().lower() == 'datum'
2436    datum = L[1].strip()                        #TEXT
2437
2438    L = metalines[3].strip().split()
2439    assert L[0].strip().lower() == 'zunits'     #IGNORE
2440    zunits = L[1].strip()                       #TEXT
2441
2442    L = metalines[4].strip().split()
2443    assert L[0].strip().lower() == 'units'
2444    units = L[1].strip()                        #TEXT
2445
2446    L = metalines[5].strip().split()
2447    assert L[0].strip().lower() == 'spheroid'   #IGNORE
2448    spheroid = L[1].strip()                     #TEXT
2449
2450    L = metalines[6].strip().split()
2451    assert L[0].strip().lower() == 'xshift'
2452    false_easting = float(L[1].strip())
2453
2454    L = metalines[7].strip().split()
2455    assert L[0].strip().lower() == 'yshift'
2456    false_northing = float(L[1].strip())
2457
2458    #print false_easting, false_northing, zone, datum
2459
2460
2461    ###########################################
2462    #Read DEM data
2463
2464    datafile = open(basename_in + '.asc')
2465
2466    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
2467    lines = datafile.readlines()
2468    datafile.close()
2469
2470    if verbose: print 'Got', len(lines), ' lines'
2471
2472    ncols = int(lines[0].split()[1].strip())
2473    nrows = int(lines[1].split()[1].strip())
2474    xllcorner = float(lines[2].split()[1].strip())
2475    yllcorner = float(lines[3].split()[1].strip())
2476    cellsize = float(lines[4].split()[1].strip())
2477    NODATA_value = int(lines[5].split()[1].strip())
2478
2479    assert len(lines) == nrows + 6
2480
2481
2482    ##########################################
2483
2484
2485    if basename_out == None:
2486        netcdfname = root + '.dem'
2487    else:
2488        netcdfname = basename_out + '.dem'
2489
2490    if verbose: print 'Store to NetCDF file %s' %netcdfname
2491    # NetCDF file definition
2492    fid = NetCDFFile(netcdfname, 'w')
2493
2494    #Create new file
2495    fid.institution = 'Geoscience Australia'
2496    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
2497                      'of spatial point data'
2498
2499    fid.ncols = ncols
2500    fid.nrows = nrows
2501    fid.xllcorner = xllcorner
2502    fid.yllcorner = yllcorner
2503    fid.cellsize = cellsize
2504    fid.NODATA_value = NODATA_value
2505
2506    fid.zone = zone
2507    fid.false_easting = false_easting
2508    fid.false_northing = false_northing
2509    fid.projection = projection
2510    fid.datum = datum
2511    fid.units = units
2512
2513
2514    # dimension definitions
2515    fid.createDimension('number_of_rows', nrows)
2516    fid.createDimension('number_of_columns', ncols)
2517
2518    # variable definitions
2519    fid.createVariable('elevation', Float, ('number_of_rows',
2520                                            'number_of_columns'))
2521
2522    # Get handles to the variables
2523    elevation = fid.variables['elevation']
2524
2525    #Store data
2526    n = len(lines[6:])
2527    for i, line in enumerate(lines[6:]):
2528        fields = line.split()
2529        if verbose and i%((n+10)/10)==0:
2530            print 'Processing row %d of %d' %(i, nrows)
2531
2532        elevation[i, :] = array([float(x) for x in fields])
2533
2534    fid.close()
2535
2536
2537
2538
2539
2540def ferret2sww(basename_in, basename_out = None,
2541               verbose = False,
2542               minlat = None, maxlat = None,
2543               minlon = None, maxlon = None,
2544               mint = None, maxt = None, mean_stage = 0,
2545               origin = None, zscale = 1,
2546               fail_on_NaN = True,
2547               NaN_filler = 0,
2548               elevation = None,
[3694]2549               inverted_bathymetry = True
[2852]2550               ): #FIXME: Bathymetry should be obtained
2551                                  #from MOST somehow.
2552                                  #Alternatively from elsewhere
2553                                  #or, as a last resort,
2554                                  #specified here.
2555                                  #The value of -100 will work
2556                                  #for the Wollongong tsunami
2557                                  #scenario but is very hacky
2558    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
[3560]2559    sww format native to abstract_2d_finite_volumes.
[2852]2560
2561    Specify only basename_in and read files of the form
2562    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
2563    relative height, x-velocity and y-velocity, respectively.
2564
2565    Also convert latitude and longitude to UTM. All coordinates are
2566    assumed to be given in the GDA94 datum.
2567
2568    min's and max's: If omitted - full extent is used.
2569    To include a value min may equal it, while max must exceed it.
2570    Lat and lon are assumed to be in decimal degrees
2571
2572    origin is a 3-tuple with geo referenced
2573    UTM coordinates (zone, easting, northing)
2574
2575    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2576    which means that longitude is the fastest
2577    varying dimension (row major order, so to speak)
2578
2579    ferret2sww uses grid points as vertices in a triangular grid
2580    counting vertices from lower left corner upwards, then right
2581    """
2582
2583    import os
2584    from Scientific.IO.NetCDF import NetCDFFile
2585    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2586    from Numeric import allclose, around
2587
2588    precision = Float
2589
2590    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
2591
2592    if minlat != None:
2593        assert -90 < minlat < 90 , msg
2594    if maxlat != None:
2595        assert -90 < maxlat < 90 , msg
2596    if minlon != None:
2597        assert -180 < minlon < 180 , msg
2598    if maxlon != None:
2599        assert -180 < maxlon < 180 , msg
2600
2601
2602    #Get NetCDF data
2603    if verbose: print 'Reading files %s_*.nc' %basename_in
[3694]2604    #print "basename_in + '_ha.nc'",basename_in + '_ha.nc'
[2852]2605    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2606    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2607    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2608    file_e = NetCDFFile(basename_in + '_e.nc', 'r')  #Elevation (z) (m)
2609
2610    if basename_out is None:
2611        swwname = basename_in + '.sww'
2612    else:
2613        swwname = basename_out + '.sww'
2614
2615    # Get dimensions of file_h
2616    for dimension in file_h.dimensions.keys():
2617        if dimension[:3] == 'LON':
2618            dim_h_longitude = dimension
2619        if dimension[:3] == 'LAT':
2620            dim_h_latitude = dimension
2621        if dimension[:4] == 'TIME':
2622            dim_h_time = dimension
2623
2624#    print 'long:', dim_h_longitude
2625#    print 'lats:', dim_h_latitude
2626#    print 'times:', dim_h_time
2627
2628    times = file_h.variables[dim_h_time]
2629    latitudes = file_h.variables[dim_h_latitude]
2630    longitudes = file_h.variables[dim_h_longitude]
2631   
2632    # get dimensions for file_e
2633    for dimension in file_e.dimensions.keys():
2634        if dimension[:3] == 'LON':
2635            dim_e_longitude = dimension
2636        if dimension[:3] == 'LAT':
2637            dim_e_latitude = dimension
2638
2639    # get dimensions for file_u
2640    for dimension in file_u.dimensions.keys():
2641        if dimension[:3] == 'LON':
2642            dim_u_longitude = dimension
2643        if dimension[:3] == 'LAT':
2644            dim_u_latitude = dimension
2645        if dimension[:4] == 'TIME':
2646            dim_u_time = dimension
2647           
2648    # get dimensions for file_v
2649    for dimension in file_v.dimensions.keys():
2650        if dimension[:3] == 'LON':
2651            dim_v_longitude = dimension
2652        if dimension[:3] == 'LAT':
2653            dim_v_latitude = dimension
2654        if dimension[:4] == 'TIME':
2655            dim_v_time = dimension
2656
2657
2658    #Precision used by most for lat/lon is 4 or 5 decimals
2659    e_lat = around(file_e.variables[dim_e_latitude][:], 5)
2660    e_lon = around(file_e.variables[dim_e_longitude][:], 5)
2661
2662    #Check that files are compatible
2663    assert allclose(latitudes, file_u.variables[dim_u_latitude])
2664    assert allclose(latitudes, file_v.variables[dim_v_latitude])
2665    assert allclose(latitudes, e_lat)
2666
2667    assert allclose(longitudes, file_u.variables[dim_u_longitude])
2668    assert allclose(longitudes, file_v.variables[dim_v_longitude])
2669    assert allclose(longitudes, e_lon)
2670
2671    if mint == None:
2672        jmin = 0
2673    else:
2674        jmin = searchsorted(times, mint)
2675
2676    if maxt == None:
2677        jmax=len(times)
2678    else:
2679        jmax = searchsorted(times, maxt)
2680
2681    if minlat == None:
2682        kmin=0
2683    else:
2684        kmin = searchsorted(latitudes, minlat)
2685
2686    if maxlat == None:
2687        kmax = len(latitudes)
2688    else:
2689        kmax = searchsorted(latitudes, maxlat)
2690
2691    if minlon == None:
2692        lmin=0
2693    else:
2694        lmin = searchsorted(longitudes, minlon)
2695
2696    if maxlon == None:
2697        lmax = len(longitudes)
2698    else:
2699        lmax = searchsorted(longitudes, maxlon)
2700
2701#    print' j', jmin, jmax
2702    times = times[jmin:jmax]
2703    latitudes = latitudes[kmin:kmax]
2704    longitudes = longitudes[lmin:lmax]
2705
2706
2707    if verbose: print 'cropping'
2708    zname = 'ELEVATION'
2709
2710    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2711    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2712    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2713    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2714
2715    #    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
2716    #        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
2717    #    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
2718    #        from Numeric import asarray
2719    #        elevations=elevations.tolist()
2720    #        elevations.reverse()
2721    #        elevations=asarray(elevations)
2722    #    else:
2723    #        from Numeric import asarray
2724    #        elevations=elevations.tolist()
2725    #        elevations.reverse()
2726    #        elevations=asarray(elevations)
2727    #        'print hmmm'
2728
2729
2730
2731    #Get missing values
2732    nan_ha = file_h.variables['HA'].missing_value[0]
2733    nan_ua = file_u.variables['UA'].missing_value[0]
2734    nan_va = file_v.variables['VA'].missing_value[0]
2735    if hasattr(file_e.variables[zname],'missing_value'):
2736        nan_e  = file_e.variables[zname].missing_value[0]
2737    else:
2738        nan_e = None
2739
2740    #Cleanup
2741    from Numeric import sometrue
2742
2743    missing = (amplitudes == nan_ha)
2744    if sometrue (missing):
2745        if fail_on_NaN:
2746            msg = 'NetCDFFile %s contains missing values'\
2747                  %(basename_in+'_ha.nc')
2748            raise DataMissingValuesError, msg
2749        else:
2750            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2751
2752    missing = (uspeed == nan_ua)
2753    if sometrue (missing):
2754        if fail_on_NaN:
2755            msg = 'NetCDFFile %s contains missing values'\
2756                  %(basename_in+'_ua.nc')
2757            raise DataMissingValuesError, msg
2758        else:
2759            uspeed = uspeed*(missing==0) + missing*NaN_filler
2760
2761    missing = (vspeed == nan_va)
2762    if sometrue (missing):
2763        if fail_on_NaN:
2764            msg = 'NetCDFFile %s contains missing values'\
2765                  %(basename_in+'_va.nc')
2766            raise DataMissingValuesError, msg
2767        else:
2768            vspeed = vspeed*(missing==0) + missing*NaN_filler
2769
2770
2771    missing = (elevations == nan_e)
2772    if sometrue (missing):
2773        if fail_on_NaN:
2774            msg = 'NetCDFFile %s contains missing values'\
2775                  %(basename_in+'_e.nc')
2776            raise DataMissingValuesError, msg
2777        else:
2778            elevations = elevations*(missing==0) + missing*NaN_filler
2779
2780    #######
2781
2782
2783
2784    number_of_times = times.shape[0]
2785    number_of_latitudes = latitudes.shape[0]
2786    number_of_longitudes = longitudes.shape[0]
2787
2788    assert amplitudes.shape[0] == number_of_times
2789    assert amplitudes.shape[1] == number_of_latitudes
2790    assert amplitudes.shape[2] == number_of_longitudes
2791
2792    if verbose:
2793        print '------------------------------------------------'
2794        print 'Statistics:'
2795        print '  Extent (lat/lon):'
2796        print '    lat in [%f, %f], len(lat) == %d'\
2797              %(min(latitudes.flat), max(latitudes.flat),
2798                len(latitudes.flat))
2799        print '    lon in [%f, %f], len(lon) == %d'\
2800              %(min(longitudes.flat), max(longitudes.flat),
2801                len(longitudes.flat))
2802        print '    t in [%f, %f], len(t) == %d'\
2803              %(min(times.flat), max(times.flat), len(times.flat))
2804
2805        q = amplitudes.flat
2806        name = 'Amplitudes (ha) [cm]'
2807        print '    %s in [%f, %f]' %(name, min(q), max(q))
2808
2809        q = uspeed.flat
2810        name = 'Speeds (ua) [cm/s]'
2811        print '    %s in [%f, %f]' %(name, min(q), max(q))
2812
2813        q = vspeed.flat
2814        name = 'Speeds (va) [cm/s]'
2815        print '    %s in [%f, %f]' %(name, min(q), max(q))
2816
2817        q = elevations.flat
2818        name = 'Elevations (e) [m]'
2819        print '    %s in [%f, %f]' %(name, min(q), max(q))
2820
2821
2822    #print number_of_latitudes, number_of_longitudes
2823    number_of_points = number_of_latitudes*number_of_longitudes
2824    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2825
2826
2827    file_h.close()
2828    file_u.close()
2829    file_v.close()
2830    file_e.close()
2831
2832
2833    # NetCDF file definition
2834    outfile = NetCDFFile(swwname, 'w')
2835
2836    #Create new file
2837    outfile.institution = 'Geoscience Australia'
2838    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
2839                          %(basename_in + '_ha.nc',
2840                            basename_in + '_ua.nc',
2841                            basename_in + '_va.nc',
2842                            basename_in + '_e.nc')
2843
2844
2845    #For sww compatibility
2846    outfile.smoothing = 'Yes'
2847    outfile.order = 1
2848
2849    #Start time in seconds since the epoch (midnight 1/1/1970)
2850    outfile.starttime = starttime = times[0]
2851    times = times - starttime  #Store relative times
2852
2853    # dimension definitions
2854    outfile.createDimension('number_of_volumes', number_of_volumes)
2855
2856    outfile.createDimension('number_of_vertices', 3)
2857    outfile.createDimension('number_of_points', number_of_points)
2858
2859
2860    #outfile.createDimension('number_of_timesteps', len(times))
2861    outfile.createDimension('number_of_timesteps', len(times))
2862
2863    # variable definitions
2864    outfile.createVariable('x', precision, ('number_of_points',))
2865    outfile.createVariable('y', precision, ('number_of_points',))
2866    outfile.createVariable('elevation', precision, ('number_of_points',))
2867
2868    #FIXME: Backwards compatibility
2869    outfile.createVariable('z', precision, ('number_of_points',))
2870    #################################
2871
2872    outfile.createVariable('volumes', Int, ('number_of_volumes',
2873                                            'number_of_vertices'))
2874
2875    outfile.createVariable('time', precision,
2876                           ('number_of_timesteps',))
2877
2878    outfile.createVariable('stage', precision,
2879                           ('number_of_timesteps',
2880                            'number_of_points'))
2881
2882    outfile.createVariable('xmomentum', precision,
2883                           ('number_of_timesteps',
2884                            'number_of_points'))
2885
2886    outfile.createVariable('ymomentum', precision,
2887                           ('number_of_timesteps',
2888                            'number_of_points'))
2889
2890
2891    #Store
[3514]2892    from anuga.coordinate_transforms.redfearn import redfearn
[2852]2893    x = zeros(number_of_points, Float)  #Easting
2894    y = zeros(number_of_points, Float)  #Northing
2895
2896
2897    if verbose: print 'Making triangular grid'
2898    #Check zone boundaries
2899    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2900
2901    vertices = {}
2902    i = 0
2903    for k, lat in enumerate(latitudes):       #Y direction
2904        for l, lon in enumerate(longitudes):  #X direction
2905
2906            vertices[l,k] = i
2907
2908            zone, easting, northing = redfearn(lat,lon)
2909
2910            msg = 'Zone boundary crossed at longitude = %f' %lon
2911            #assert zone == refzone, msg
2912            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2913            x[i] = easting
2914            y[i] = northing
2915            i += 1
2916
2917
2918    #Construct 2 triangles per 'rectangular' element
2919    volumes = []
2920    for l in range(number_of_longitudes-1):    #X direction
2921        for k in range(number_of_latitudes-1): #Y direction
2922            v1 = vertices[l,k+1]
2923            v2 = vertices[l,k]
2924            v3 = vertices[l+1,k+1]
2925            v4 = vertices[l+1,k]
2926
2927            volumes.append([v1,v2,v3]) #Upper element
2928            volumes.append([v4,v3,v2]) #Lower element
2929
2930    volumes = array(volumes)
2931
2932    if origin == None:
2933        zone = refzone
2934        xllcorner = min(x)
2935        yllcorner = min(y)
2936    else:
2937        zone = origin[0]
2938        xllcorner = origin[1]
2939        yllcorner = origin[2]
2940
2941
2942    outfile.xllcorner = xllcorner
2943    outfile.yllcorner = yllcorner
2944    outfile.zone = zone
2945
2946
2947    if elevation is not None:
2948        z = elevation
2949    else:
2950        if inverted_bathymetry:
2951            z = -1*elevations
2952        else:
2953            z = elevations
2954    #FIXME: z should be obtained from MOST and passed in here
2955
2956    from Numeric import resize
2957    z = resize(z,outfile.variables['z'][:].shape)
2958    outfile.variables['x'][:] = x - xllcorner
2959    outfile.variables['y'][:] = y - yllcorner
2960    outfile.variables['z'][:] = z             #FIXME HACK
2961    outfile.variables['elevation'][:] = z
2962    outfile.variables['time'][:] = times   #Store time relative
2963    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
2964
2965
2966
2967    #Time stepping
2968    stage = outfile.variables['stage']
2969    xmomentum = outfile.variables['xmomentum']
2970    ymomentum = outfile.variables['ymomentum']
2971
2972    if verbose: print 'Converting quantities'
2973    n = len(times)
2974    for j in range(n):
2975        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2976        i = 0
2977        for k in range(number_of_latitudes):      #Y direction
2978            for l in range(number_of_longitudes): #X direction
2979                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2980                stage[j,i] = w
2981                h = w - z[i]
2982                xmomentum[j,i] = uspeed[j,k,l]/100*h
2983                ymomentum[j,i] = vspeed[j,k,l]/100*h
2984                i += 1
2985
2986    #outfile.close()
2987
2988    #FIXME: Refactor using code from file_function.statistics
2989    #Something like print swwstats(swwname)
2990    if verbose:
2991        x = outfile.variables['x'][:]
2992        y = outfile.variables['y'][:]
2993        times = outfile.variables['time'][:]
2994        print '------------------------------------------------'
2995        print 'Statistics of output file:'
2996        print '  Name: %s' %swwname
2997        print '  Reference:'
2998        print '    Lower left corner: [%f, %f]'\
2999              %(xllcorner, yllcorner)
3000        print '    Start time: %f' %starttime
3001        print '  Extent:'
3002        print '    x [m] in [%f, %f], len(x) == %d'\
3003              %(min(x.flat), max(x.flat), len(x.flat))
3004        print '    y [m] in [%f, %f], len(y) == %d'\
3005              %(min(y.flat), max(y.flat), len(y.flat))
3006        print '    t [s] in [%f, %f], len(t) == %d'\
3007              %(min(times), max(times), len(times))
3008        print '  Quantities [SI units]:'
3009        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
3010            q = outfile.variables[name][:].flat
3011            print '    %s in [%f, %f]' %(name, min(q), max(q))
3012
3013
3014
3015    outfile.close()
3016
3017
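# Example usage (a sketch; 'source' is a hypothetical basename for which
# source_ha.nc, source_ua.nc, source_va.nc and source_e.nc exist):
#
#     ferret2sww('source',
#                basename_out='source_boundary',
#                minlat=-21.0, maxlat=-20.0,
#                minlon=115.0, maxlon=116.5,
#                mean_stage=0.0,
#                zscale=1,
#                verbose=True)
#
# This writes source_boundary.sww on a triangular grid with two triangles
# per lat/lon cell, suitable for use as a boundary condition.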
3018
3019
3020
3021def timefile2netcdf(filename, quantity_names = None):
3022    """Template for converting typical text files with time series to
3023    NetCDF tms file.
3024
3025
3026    The file format is assumed to be two fields separated by a comma:
3027
3028        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...
3029
3030    E.g
3031
3032      31/08/04 00:00:00, 1.328223 0 0
3033      31/08/04 00:15:00, 1.292912 0 0
3034
3035    will provide a time dependent function f(t) with three attributes
3036
3037    filename is assumed to be the rootname with extensions .txt (input) and .tms (output)
3038    """
3039
3040    import time, calendar
3041    from Numeric import array
[3514]3042    from anuga.config import time_format
3043    from anuga.utilities.numerical_tools import ensure_numeric
[2852]3044
3045
3046
3047    fid = open(filename + '.txt')
3048    line = fid.readline()
3049    fid.close()
3050
3051    fields = line.split(',')
3052    msg = 'File %s must have the format date, value0 value1 value2 ...' %filename
3053    assert len(fields) == 2, msg
3054
3055    try:
3056        starttime = calendar.timegm(time.strptime(fields[0], time_format))
3057    except ValueError:
3058        msg = 'First field in file %s must be' %filename
3059        msg += ' date-time with format %s.\n' %time_format
3060        msg += 'I got %s instead.' %fields[0]
3061        raise DataTimeError, msg
3062
3063
3064    #Split values
3065    values = []
3066    for value in fields[1].split():
3067        values.append(float(value))
3068
3069    q = ensure_numeric(values)
3070
3071    msg = 'ERROR: File must contain at least one independent value'
3072    assert len(q.shape) == 1, msg
3073
3074
3075
3076    #Read times proper
3077    from Numeric import zeros, Float, alltrue
[3514]3078    from anuga.config import time_format
[2852]3079    import time, calendar
3080
3081    fid = open(filename + '.txt')
3082    lines = fid.readlines()
3083    fid.close()
3084
3085    N = len(lines)
3086    d = len(q)
3087
3088    T = zeros(N, Float)       #Time
3089    Q = zeros((N, d), Float)  #Values
3090
3091    for i, line in enumerate(lines):
3092        fields = line.split(',')
3093        realtime = calendar.timegm(time.strptime(fields[0], time_format))
3094
3095        T[i] = realtime - starttime
3096
3097        for j, value in enumerate(fields[1].split()):
3098            Q[i, j] = float(value)
3099
3100    msg = 'File %s must list time as a monotonically ' %filename
3101    msg += 'increasing sequence'
3102    assert alltrue( T[1:] - T[:-1] > 0 ), msg
3103
3104
3105    #Create NetCDF file
3106    from Scientific.IO.NetCDF import NetCDFFile
3107
3108    fid = NetCDFFile(filename + '.tms', 'w')
3109
3110
3111    fid.institution = 'Geoscience Australia'
3112    fid.description = 'Time series'
3113
3114
3115    #Reference point
3116    #Start time in seconds since the epoch (midnight 1/1/1970)
3117    #FIXME: Use Georef
3118    fid.starttime = starttime
3119
3120
3121    # dimension definitions
3122    #fid.createDimension('number_of_volumes', self.number_of_volumes)
3123    #fid.createDimension('number_of_vertices', 3)
3124
3125
3126    fid.createDimension('number_of_timesteps', len(T))
3127
3128    fid.createVariable('time', Float, ('number_of_timesteps',))
3129
3130    fid.variables['time'][:] = T
3131
3132    for i in range(Q.shape[1]):
3133        try:
3134            name = quantity_names[i]
3135        except:
3136            name = 'Attribute%d'%i
3137
3138        fid.createVariable(name, Float, ('number_of_timesteps',))
3139        fid.variables[name][:] = Q[:,i]
3140
3141    fid.close()
3142
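# Example usage (a sketch; gauge.txt is a hypothetical file with lines of
# the form '31/08/04 00:00:00, 1.328223 0 0'):
#
#     timefile2netcdf('gauge',
#                     quantity_names=['stage', 'xmomentum', 'ymomentum'])
#
# This writes gauge.tms with one NetCDF variable per named column.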
3143
3144def extent_sww(file_name):
3145    """
3146    Read in an sww file.
3147
3148    Input;
3149    file_name - the sww file
3150
3151    Output;
3152    z - Vector of bed elevation
3153    volumes - Array.  Each row has 3 values, representing
3154    the vertices that define the volume
3155    time - Vector of the times where there is stage information
3156    stage - array with respect to time and vertices (x,y)
3157    """
3158
3159
3160    from Scientific.IO.NetCDF import NetCDFFile
3161
3162    #Check contents
3163    #Get NetCDF
3164    fid = NetCDFFile(file_name, 'r')
3165
3166    # Get the variables
3167    x = fid.variables['x'][:]
3168    y = fid.variables['y'][:]
3169    stage = fid.variables['stage'][:]
3170    #print "stage",stage
3171    #print "stage.shap",stage.shape
3172    #print "min(stage.flat), mpythonax(stage.flat)",min(stage.flat), max(stage.flat)
3173    #print "min(stage)",min(stage)
3174
3175    fid.close()
3176
3177    return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
3178
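# Example usage (a sketch; 'runup.sww' is a hypothetical file):
#
#     xmin, xmax, ymin, ymax, stage_min, stage_max = extent_sww('runup.sww')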
3179
3180def sww2domain(filename, boundary=None, t=None,
3181               fail_if_NaN=True, NaN_filler=0,
3182               verbose=False, very_verbose=False):
3183    """
3184    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))
3185
3186    Boundary is not recommended if domain.smooth is not selected, as it
3187    uses unique coordinates, but not unique boundaries. This means that
3188    the boundary file will not be compatible with the coordinates, and will
3189    give a different final boundary, or crash.
3190    """
3191    NaN=9.969209968386869e+036
3192    #initialise NaN.
3193
3194    from Scientific.IO.NetCDF import NetCDFFile
3195    from shallow_water import Domain
3196    from Numeric import asarray, transpose, resize
3197
3198    if verbose: print 'Reading from ', filename
3199    fid = NetCDFFile(filename, 'r')    #Open existing file for read
3200    time = fid.variables['time']       #Timesteps
3201    if t is None:
3202        t = time[-1]
3203    time_interp = get_time_interp(time,t)
3204
3205    # Get the variables as Numeric arrays
3206    x = fid.variables['x'][:]             #x-coordinates of vertices
3207    y = fid.variables['y'][:]             #y-coordinates of vertices
3208    elevation = fid.variables['elevation']     #Elevation
3209    stage = fid.variables['stage']     #Water level
3210    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction

    starttime = fid.starttime[0]
    volumes = fid.variables['volumes'][:] #Connectivity
    coordinates = transpose(asarray([x.tolist(), y.tolist()]))

    conserved_quantities = []
    interpolated_quantities = {}
    other_quantities = []

    # get geo_reference
    #sww files don't have to have a geo_ref
    try:
        geo_reference = Geo_reference(NetCDFObject=fid)
    except AttributeError:
        geo_reference = None

    if verbose: print '    getting quantities'
    for quantity in fid.variables.keys():
        dimensions = fid.variables[quantity].dimensions
        if 'number_of_timesteps' in dimensions:
            conserved_quantities.append(quantity)
            interpolated_quantities[quantity] =\
                  interpolated_quantity(fid.variables[quantity][:], time_interp)
        else: other_quantities.append(quantity)

    other_quantities.remove('x')
    other_quantities.remove('y')
    other_quantities.remove('z')
    other_quantities.remove('volumes')

    conserved_quantities.remove('time')

    if verbose: print '    building domain'
    #    From domain.Domain:
    #    domain = Domain(coordinates, volumes,\
    #                    conserved_quantities = conserved_quantities,\
    #                    other_quantities = other_quantities,zone=zone,\
    #                    xllcorner=xllcorner, yllcorner=yllcorner)

    #   From shallow_water.Domain:
    coordinates = coordinates.tolist()
    volumes = volumes.tolist()
    #FIXME: should this be in mesh? (peter row)
    if fid.smoothing == 'Yes': unique = False
    else: unique = True
    if unique:
        coordinates, volumes, boundary = weed(coordinates, volumes, boundary)


    try:
        domain = Domain(coordinates, volumes, boundary)
    except AssertionError, e:
        fid.close()
        msg = 'Domain could not be created: %s. ' %e +\
              'Perhaps use "fail_if_NaN=False and NaN_filler = ..."'
        raise DataDomainError, msg

    if boundary is not None:
        domain.boundary = boundary

    domain.geo_reference = geo_reference

    domain.starttime = float(starttime) + float(t)
    domain.time = 0.0

    for quantity in other_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except AttributeError:
            NaN = None #quantity has no missing_value number
        X = fid.variables[quantity][:]
        if very_verbose:
            print '       ', quantity
            print '        NaN =', NaN
            print '        max(X) =', max(X)
            print ''
        #NaN here is the file's missing_value flag, not an IEEE NaN
        if NaN is not None and ((max(X) == NaN) or (min(X) == NaN)):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry' %quantity
                raise DataMissingValuesError, msg
            else:
                data = (X != NaN)
                X = (X*data) + (data == 0)*NaN_filler
        if unique:
            X = resize(X, (len(X)/3, 3))
        domain.set_quantity(quantity, X)

    for quantity in conserved_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except AttributeError:
            NaN = None #quantity has no missing_value number
        X = interpolated_quantities[quantity]
        if very_verbose:
            print '       ', quantity
            print '        NaN =', NaN
            print '        max(X) =', max(X)
            print ''
        if NaN is not None and ((max(X) == NaN) or (min(X) == NaN)):
            if fail_if_NaN:
                msg = 'quantity "%s" contains no_data entry' %quantity
                raise DataMissingValuesError, msg
            else:
                data = (X != NaN)
                X = (X*data) + (data == 0)*NaN_filler
        if unique:
            X = resize(X, (X.shape[0]/3, 3))
        domain.set_quantity(quantity, X)

    fid.close()
    return domain

def interpolated_quantity(saved_quantity, time_interp):

    #Given an index and ratio, interpolate quantity with respect to time.
    index, ratio = time_interp
    Q = saved_quantity
    if ratio > 0:
        q = (1-ratio)*Q[index] + ratio*Q[index+1]
    else:
        q = Q[index]
    #Return vector of interpolated values
    return q

def get_time_interp(time, t=None):
    #Finds the ratio and index for time interpolation.
    #It is borrowed from previous abstract_2d_finite_volumes code.
    if t is None:
        t = time[-1]
        index = -1
        ratio = 0.
    else:
        T = time
        tau = t
        index = 0
        msg = 'Time interval derived from file %s [%s:%s]'\
              %('FIXMEfilename', T[0], T[-1])
        msg += ' does not match model time: %s' %tau
        if tau < time[0]: raise DataTimeError, msg
        if tau > time[-1]: raise DataTimeError, msg
        while tau > time[index]: index += 1
        while tau < time[index]: index -= 1
        if tau == time[index]:
            #Protect against case where tau == time[-1] (last time)
            # - also works in general when tau == time[i]
            ratio = 0
        else:
            #t is now between index and index+1
            ratio = (tau - time[index])/(time[index+1] - time[index])
    return (index, ratio)

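# A minimal sketch (not part of the original module) of how get_time_interp
# and interpolated_quantity combine to evaluate a stored quantity at an
# arbitrary model time. All values below are invented for illustration.
def _demo_time_interpolation():
    from Numeric import array, Float
    time = array([0.0, 10.0, 20.0], Float)                 #Stored time steps
    Q = array([[0.0, 0.0], [1.0, 1.0], [2.0, 2.0]], Float) #f(t, point)
    index, ratio = get_time_interp(time, t=15.0)    #index == 1, ratio == 0.5
    return interpolated_quantity(Q, (index, ratio)) #[1.5, 1.5]
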
def weed(coordinates, volumes, boundary=None):
    #Remove duplicated points, remapping volume and boundary references.
    if not isinstance(coordinates, list):
        coordinates = coordinates.tolist()
    if not isinstance(volumes, list):
        volumes = volumes.tolist()

    unique = False
    point_dict = {}
    same_point = {}
    for i in range(len(coordinates)):
        point = tuple(coordinates[i])
        if point_dict.has_key(point):
            unique = True
            same_point[i] = point
            #to change all point i references to point j
        else:
            point_dict[point] = i
            same_point[i] = point

    coordinates = []
    i = 0
    for point in point_dict.keys():
        point = tuple(point)
        coordinates.append(list(point))
        point_dict[point] = i
        i += 1


    for volume in volumes:
        for i in range(len(volume)):
            index = volume[i]
            if index > -1:
                volume[i] = point_dict[same_point[index]]

    new_boundary = {}
    if boundary is not None:
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]
            #FIXME should the boundary attributes be concatenated
            #('exterior, pond') or replaced ('pond')? (peter row)

            if new_boundary.has_key((point0, point1)):
                new_boundary[(point0, point1)] = new_boundary[(point0, point1)]#\
                                              #+','+label

            elif new_boundary.has_key((point1, point0)):
                new_boundary[(point1, point0)] = new_boundary[(point1, point0)]#\
                                              #+','+label
            else: new_boundary[(point0, point1)] = label

        boundary = new_boundary

    return coordinates, volumes, boundary

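# Hedged example (not part of the original file): weed collapses the
# duplicated vertices produced by a smoothed sww file. The two triangles
# below share an edge, so 6 stored points reduce to 4 unique ones.
def _demo_weed():
    coordinates = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0],
                   [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]]
    volumes = [[0, 1, 2], [3, 4, 5]]
    return weed(coordinates, volumes) #4 points, volumes remapped onto them

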
def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
                 verbose=False):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
    """

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, sum, reshape, equal

    root = basename_in
    inname = root + '.dem'

    #Open existing netcdf file to read
    infile = NetCDFFile(inname, 'r')
    if verbose: print 'Reading DEM from %s' %inname

    #Read metadata
    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]
    yllcorner = infile.yllcorner[0]
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]
    projection = infile.projection
    datum = infile.datum
    units = infile.units

    dem_elevation = infile.variables['elevation']

    #Get output file name
    if basename_out is None:
        outname = root + '_' + repr(cellsize_new) + '.dem'
    else:
        outname = basename_out + '.dem'

    if verbose: print 'Write decimated NetCDF file to %s' %outname

    #Determine some dimensions for decimated grid
    (nrows_stencil, ncols_stencil) = stencil.shape
    x_offset = ncols_stencil / 2
    y_offset = nrows_stencil / 2
    cellsize_ratio = int(cellsize_new / cellsize)
    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio

    #Open netcdf file for output
    outfile = NetCDFFile(outname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
                          'of spatial point data'
    #Georeferencing
    outfile.zone = zone
    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units

    outfile.cellsize = cellsize_new
    outfile.NODATA_value = NODATA_value
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.xllcorner = xllcorner + (x_offset * cellsize)
    outfile.yllcorner = yllcorner + (y_offset * cellsize)
    outfile.ncols = ncols_new
    outfile.nrows = nrows_new

    # dimension definition
    outfile.createDimension('number_of_points', nrows_new*ncols_new)

    # variable definition
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handle to the variable
    elevation = outfile.variables['elevation']

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))

    #Store data
    global_index = 0
    for i in range(nrows_new):
        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
        lower_index = global_index
        telev = zeros(ncols_new, Float)
        local_index = 0
        trow = i * cellsize_ratio

        for j in range(ncols_new):
            tcol = j * cellsize_ratio
            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]

            #if dem contains 1 or more NODATA_values set value in
            #decimated dem to NODATA_value, else compute decimated
            #value using stencil
            if sum(sum(equal(tmp, NODATA_value))) > 0:
                telev[local_index] = NODATA_value
            else:
                telev[local_index] = sum(sum(tmp * stencil))

            global_index += 1
            local_index += 1

        upper_index = global_index

        elevation[lower_index:upper_index] = telev

    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'

    infile.close()
    outfile.close()


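# Hedged usage sketch ('mydem' is a hypothetical basename referring to an
# existing mydem.dem): average a 25 m DEM onto a 75 m grid using a 3x3
# averaging stencil.
def _demo_decimate_dem():
    from Numeric import ones, Float
    stencil = ones((3, 3), Float)/9.0  #Unweighted average of each 3x3 patch
    decimate_dem('mydem', stencil, 75) #Reads mydem.dem, writes mydem_75.dem

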
def tsh2sww(filename, verbose=False): #test_tsh2sww
    """Create an sww file from a tsh/msh mesh file - a quick way to check
    that the mesh 'looks' good.
    """

    from shallow_water import Domain
    from anuga.abstract_2d_finite_volumes.pmesh2domain import pmesh_to_domain_instance
    import time, os
    #from data_manager import get_dataobject
    from os import sep, path
    from anuga.utilities.numerical_tools import mean

    if verbose: print 'Creating domain from', filename
    domain = pmesh_to_domain_instance(filename, Domain)
    if verbose: print "Number of triangles = ", len(domain)

    domain.smooth = True
    domain.format = 'sww'   #Native netcdf visualisation format
    file_path, filename = path.split(filename)
    filename, ext = path.splitext(filename)
    domain.set_name(filename)
    domain.reduction = mean
    if verbose: print "file_path", file_path
    if file_path == "": file_path = "."
    domain.set_datadir(file_path)

    if verbose:
        print "Output written to " + domain.get_datadir() + sep + \
              domain.get_name() + "." + domain.format
    sww = get_dataobject(domain)
    sww.store_connectivity()
    sww.store_timestep('stage')


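# Hedged usage sketch ('my_mesh.tsh' is a hypothetical file): write
# my_mesh.sww next to the mesh so it can be eyeballed in a viewer.
def _demo_tsh2sww():
    tsh2sww('my_mesh.tsh', verbose=True)

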
def asc_csiro2sww(bath_dir,
                  elevation_dir,
                  ucur_dir,
                  vcur_dir,
                  sww_file,
                  minlat = None, maxlat = None,
                  minlon = None, maxlon = None,
                  zscale=1,
                  mean_stage = 0,
                  fail_on_NaN = True,
                  elevation_NaN_filler = 0,
                  bath_prefix='ba',
                  elevation_prefix='el',
                  verbose=False):
    """
    Produce an sww boundary file from esri ascii data supplied by CSIRO.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum.

    assume:
    All files are in esri ascii format

    4 types of information
    bathymetry
    elevation
    u velocity
    v velocity

    Assumptions
    The metadata of all the files is the same
    Each type is in a separate directory
    One bath file with extension .000
    The time period is less than 24hrs and uniform.
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from anuga.coordinate_transforms.redfearn import redfearn

    precision = Float # So if we want to change the precision it's done here

    # go in to the bath dir and load the only file,
    bath_files = os.listdir(bath_dir)
    #print "bath_files",bath_files

    #fixme: make more general?
    bath_file = bath_files[0]
    bath_dir_file = bath_dir + os.sep + bath_file
    bath_metadata, bath_grid = _read_asc(bath_dir_file)
    #print "bath_metadata",bath_metadata
    #print "bath_grid",bath_grid

    #Use the date.time of the bath file as a basis for
    #the start time for other files
    base_start = bath_file[-12:]

    #go into the elevation dir and load the 000 file
    elevation_dir_file = elevation_dir + os.sep + elevation_prefix \
                         + base_start
    #elevation_metadata,elevation_grid = _read_asc(elevation_dir_file)
    #print "elevation_dir_file",elevation_dir_file
    #print "elevation_grid", elevation_grid

    elevation_files = os.listdir(elevation_dir)
    ucur_files = os.listdir(ucur_dir)
    vcur_files = os.listdir(vcur_dir)

    # the first elevation file should be the
    # file with the same base name as the bath data
    assert elevation_files[0] == 'el' + base_start

    #assert bath_metadata == elevation_metadata


    number_of_latitudes = bath_grid.shape[0]
    number_of_longitudes = bath_grid.shape[1]
    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2

    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
                  for x in range(number_of_longitudes)]
    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
                 for y in range(number_of_latitudes)]

    # reverse order of lat, so the first lat represents the first grid row
    latitudes.reverse()

    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes, longitudes,
                                                  minlat=minlat, maxlat=maxlat,
                                                  minlon=minlon, maxlon=maxlon)


    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
    latitudes = latitudes[kmin:kmax]
    longitudes = longitudes[lmin:lmax]
    number_of_latitudes = len(latitudes)
    number_of_longitudes = len(longitudes)
    number_of_times = len(os.listdir(elevation_dir))
    number_of_points = number_of_latitudes*number_of_longitudes
    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2

    #Work out the times
    if len(elevation_files) > 1:
        # Assume: The time period is less than 24hrs.
        time_period = (int(elevation_files[1][-3:]) - \
                       int(elevation_files[0][-3:]))*60*60
        times = [x*time_period for x in range(len(elevation_files))]
    else:
        times = [0.0]


    if verbose:
        print '------------------------------------------------'
        print 'Statistics:'
        print '  Extent (lat/lon):'
        print '    lat in [%f, %f], len(lat) == %d'\
              %(min(latitudes), max(latitudes),
                len(latitudes))
        print '    lon in [%f, %f], len(lon) == %d'\
              %(min(longitudes), max(longitudes),
                len(longitudes))
        print '    t in [%f, %f], len(t) == %d'\
              %(min(times), max(times), len(times))
    ######### WRITE THE SWW FILE #############
    # NetCDF file definition
    outfile = NetCDFFile(sww_file, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'Converted from XXX'


    #For sww compatibility
    outfile.smoothing = 'Yes'
    outfile.order = 1

    #Start time in seconds since the epoch (midnight 1/1/1970)
    outfile.starttime = starttime = times[0]


    # dimension definitions
    outfile.createDimension('number_of_volumes', number_of_volumes)

    outfile.createDimension('number_of_vertices', 3)
    outfile.createDimension('number_of_points', number_of_points)
    outfile.createDimension('number_of_timesteps', number_of_times)

    # variable definitions
    outfile.createVariable('x', precision, ('number_of_points',))
    outfile.createVariable('y', precision, ('number_of_points',))
    outfile.createVariable('elevation', precision, ('number_of_points',))

    #FIXME: Backwards compatibility
    outfile.createVariable('z', precision, ('number_of_points',))
    #################################

    outfile.createVariable('volumes', Int, ('number_of_volumes',
                                            'number_of_vertices'))

    outfile.createVariable('time', precision,
                           ('number_of_timesteps',))

    outfile.createVariable('stage', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('xmomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    outfile.createVariable('ymomentum', precision,
                           ('number_of_timesteps',
                            'number_of_points'))

    #Store
    x = zeros(number_of_points, Float)  #Easting
    y = zeros(number_of_points, Float)  #Northing

    if verbose: print 'Making triangular grid'
    #Get zone of 1st point.
    refzone, _, _ = redfearn(latitudes[0], longitudes[0])

    vertices = {}
    i = 0
    for k, lat in enumerate(latitudes):
        for l, lon in enumerate(longitudes):

            vertices[l,k] = i

            zone, easting, northing = redfearn(lat, lon)

            msg = 'Zone boundary crossed at longitude = %s' %lon
            #assert zone == refzone, msg
            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
            x[i] = easting
            y[i] = northing
            i += 1


    #Construct 2 triangles per 'rectangular' element
    volumes = []
    for l in range(number_of_longitudes-1):    #X direction
        for k in range(number_of_latitudes-1): #Y direction
            v1 = vertices[l,k+1]
            v2 = vertices[l,k]
            v3 = vertices[l+1,k+1]
            v4 = vertices[l+1,k]

            #Note, this is different to the ferret2sww code
            #since the order of the lats is reversed.
            volumes.append([v1,v3,v2]) #Upper element
            volumes.append([v4,v2,v3]) #Lower element

    volumes = array(volumes)

    geo_ref = Geo_reference(refzone, min(x), min(y))
    geo_ref.write_NetCDF(outfile)

    # This will put the geo ref in the middle
    #geo_ref = Geo_reference(refzone, (max(x)+min(x))/2.0, (max(y)+min(y))/2.)


    if verbose:
        print '------------------------------------------------'
        print 'More Statistics:'
        print '  Extent (x/y):'
        print '    x in [%f, %f], len(x) == %d'\
              %(min(x), max(x),
                len(x))
        print '    y in [%f, %f], len(y) == %d'\
              %(min(y), max(y),
                len(y))
        print 'geo_ref: ', geo_ref
    z = resize(bath_grid, outfile.variables['z'][:].shape)
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    outfile.variables['z'][:] = z
    outfile.variables['elevation'][:] = z  #FIXME HACK
    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64

    # do this to create an ok sww file.
    #outfile.variables['time'][:] = [0]   #Store time relative
    #outfile.variables['stage'] = z
    # put the next line up in the code after outfile.order = 1
    #number_of_times = 1

    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    outfile.variables['time'][:] = times   #Store time relative

    if verbose: print 'Converting quantities'
    n = number_of_times
    for j in range(number_of_times):
        # load in files
        elevation_meta, elevation_grid = \
            _read_asc(elevation_dir + os.sep + elevation_files[j])

        _, u_momentum_grid = _read_asc(ucur_dir + os.sep + ucur_files[j])
        _, v_momentum_grid = _read_asc(vcur_dir + os.sep + vcur_files[j])

        #cut matrix to desired size
        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]

        # handle missing values
        missing = (elevation_grid == elevation_meta['NODATA_value'])
        if sometrue(missing):
            if fail_on_NaN:
                msg = 'File %s contains missing values'\
                      %(elevation_files[j])
                raise DataMissingValuesError, msg
            else:
                elevation_grid = elevation_grid*(missing==0) + \
                                 missing*elevation_NaN_filler


        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
        i = 0
        for k in range(number_of_latitudes):      #Y direction
            for l in range(number_of_longitudes): #X direction
                w = zscale*elevation_grid[k,l] + mean_stage
                stage[j,i] = w
                h = w - z[i]
                xmomentum[j,i] = u_momentum_grid[k,l]*h
                ymomentum[j,i] = v_momentum_grid[k,l]*h
                i += 1
    outfile.close()

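# Hedged usage sketch (all directory and file names below are
# hypothetical): build a boundary sww from CSIRO esri ascii grids held
# in four directories, windowed to a lat/lon box.
def _demo_asc_csiro2sww():
    asc_csiro2sww('bath_dir', 'elevation_dir', 'ucur_dir', 'vcur_dir',
                  'boundary.sww',
                  minlat=-37.0, maxlat=-35.0,
                  minlon=148.0, maxlon=150.0,
                  verbose=True)
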
def _get_min_max_indexes(latitudes, longitudes,
                         minlat=None, maxlat=None,
                         minlon=None, maxlon=None):
    """
    Return min, max indexes (for slicing) of the lat and long arrays,
    covering the area specified with min/max lat/long.

    Think of the latitudes and longitudes describing a 2d surface.
    The area returned is, if possible, just big enough to cover the
    input max/min area. (This will not be possible if the max/min area
    has a section outside of the latitudes/longitudes area.)

    assume latitudes & longitudes are sorted,
    long - from low to high (west to east, eg 148 - 151)
    lat - from high to low (north to south, eg -35 - -38)
    """

    # reverse order of lat, so it's in ascending order
    latitudes.reverse()
    largest_lat_index = len(latitudes)-1
    #Cut out a smaller extent.
    if minlat is None:
        lat_min_index = 0
    else:
        lat_min_index = searchsorted(latitudes, minlat)-1
        if lat_min_index < 0:
            lat_min_index = 0


    if maxlat is None:
        lat_max_index = largest_lat_index #len(latitudes)
    else:
        lat_max_index = searchsorted(latitudes, maxlat)
        if lat_max_index > largest_lat_index:
            lat_max_index = largest_lat_index

    if minlon is None:
        lon_min_index = 0
    else:
        lon_min_index = searchsorted(longitudes, minlon)-1
        if lon_min_index < 0:
            lon_min_index = 0

    if maxlon is None:
        lon_max_index = len(longitudes)
    else:
        lon_max_index = searchsorted(longitudes, maxlon)

    #Take into account that the latitude list was reversed
    latitudes.reverse() # Python passes by reference, need to swap back
    lat_min_index, lat_max_index = largest_lat_index - lat_max_index, \
                                   largest_lat_index - lat_min_index
    lat_max_index = lat_max_index + 1 # taking into account how slicing works
    lon_max_index = lon_max_index + 1 # taking into account how slicing works

    return lat_min_index, lat_max_index, lon_min_index, lon_max_index


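# Hedged sketch (invented coordinates): slice a lat/lon grid down to a
# requested window. The returned indexes cover at least the min/max box.
def _demo_get_min_max_indexes():
    latitudes = [-34.5, -35.0, -35.5, -36.0]   #north to south
    longitudes = [148.0, 148.5, 149.0]         #west to east
    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes, longitudes,
                                                  minlat=-35.5, maxlat=-35.0,
                                                  minlon=148.5, maxlon=149.0)
    return latitudes[kmin:kmax], longitudes[lmin:lmax]

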
def _read_asc(filename, verbose=False):
    """Read esri file from the following ASCII format (.asc)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    """

    datafile = open(filename)

    if verbose: print 'Reading DEM from %s' %(filename)
    lines = datafile.readlines()
    datafile.close()

    if verbose: print 'Got', len(lines), 'lines'

    ncols = int(lines.pop(0).split()[1].strip())
    nrows = int(lines.pop(0).split()[1].strip())
    xllcorner = float(lines.pop(0).split()[1].strip())
    yllcorner = float(lines.pop(0).split()[1].strip())
    cellsize = float(lines.pop(0).split()[1].strip())
    NODATA_value = float(lines.pop(0).split()[1].strip())

    assert len(lines) == nrows

    #Store data
    grid = []

    n = len(lines)
    for i, line in enumerate(lines):
        cells = line.split()
        assert len(cells) == ncols
        grid.append(array([float(x) for x in cells]))
    grid = array(grid)

    return {'xllcorner':xllcorner,
            'yllcorner':yllcorner,
            'cellsize':cellsize,
            'NODATA_value':NODATA_value}, grid

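# Hedged usage sketch ('terrain.asc' is a hypothetical file): read an
# esri ascii grid and inspect its metadata dictionary and data array.
def _demo_read_asc():
    metadata, grid = _read_asc('terrain.asc')
    print metadata['cellsize'], metadata['NODATA_value'], grid.shape
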

# FIXME (Ole): Is this doing anything at all?
def sww2timeseries(swwfile,
                   gauge_filename,
                   gauge_data_outname,
                   quantity = None,
                   time_min = None,
                   time_max = None,
                   verbose = False):

    """Read SWW file and extract time series for prescribed quantities at
    gauge locations.

    The gauge locations are defined in gauge_filename. This file should be
    in the form: gaugename, easting, northing, and should be stored in a
    .csv or .xya file.

    Time series data at the gauges can be written to file (for example,
    Benfield requested raw data) with the default being no data written.

    The parameter quantity must be the name of an existing quantity or
    an expression involving existing quantities. The default is
    'depth'.

    The user can define a list of quantities. The possibilities are
    the conserved quantities of the shallow water wave equation and other
    quantities which can be derived from those, i.e.
    ['depth', 'xmomentum', 'ymomentum', 'momentum', 'velocity', 'bearing'].

    Momentum is the absolute momentum, sqrt(xmomentum^2 + ymomentum^2).
    Note, units of momentum are m^2/s and depth is m.

    Velocity is absolute momentum divided by depth. (Confirming correct units:
    vel = abs mom / depth = (m^2/s)/m = m/s.)

    Bearing returns the angle of the velocity vector from North.

    If time_min and time_max are given, output is plotted in that time range.
    The default is to plot the entire range of the time evident in sww file.

    The exported graphic format is 'png' and will be stored in the same
    directory as the input file.
    """

    if quantity is None: quantity = 'depth'

    # extract gauge locations from gauge file

    # extract all quantities from swwfile (f = file_function)
    # FIXME: f is never assigned, so these lines would fail if reached
    if time_min is None: time_min = min(f.get_time())
    if time_max is None: time_max = max(f.get_time())

    # loop through appropriate range of time
    # plot prescribed quantities and export data if requested

    #if gauge_data_outname is None:

    return

####  URS 2 SWW  ###

lon_name = 'LON'
lat_name = 'LAT'
time_name = 'TIME'
precision = Float # So if we want to change the precision it's done here
class Write_nc:
    """
    Write an nc file.

    Note, this should be checked to meet cdc netcdf conventions for gridded
    data. http://www.cdc.noaa.gov/cdc/conventions/cdc_netcdf_standard.shtml

    """
    def __init__(self,
                 quantity_name,
                 file_name,
                 time_step_count,
                 time_step,
                 lon,
                 lat):
        """
        time_step_count is the number of time steps.
        time_step is the time step size

        pre-condition: quantity_name must be 'HA', 'UA' or 'VA'.
        """
        self.quantity_name = quantity_name
        quantity_units = {'HA':'METERS',
                          'UA':'METERS/SECOND',
                          'VA':'METERS/SECOND'}

        #self.file_name = file_name
        self.time_step_count = time_step_count
        self.time_step = time_step

        # NetCDF file definition
        self.outfile = NetCDFFile(file_name, 'w')
        outfile = self.outfile

        #Create new file
        nc_lon_lat_header(outfile, lon, lat)

        # TIME
        outfile.createDimension(time_name, None)
        outfile.createVariable(time_name, precision, (time_name,))

        #QUANTITY
        outfile.createVariable(self.quantity_name, precision,
                               (time_name, lat_name, lon_name))
        outfile.variables[self.quantity_name].missing_value = -1.e+034
        outfile.variables[self.quantity_name].units = \
                                 quantity_units[self.quantity_name]
        outfile.variables[lon_name][:] = ensure_numeric(lon)
        outfile.variables[lat_name][:] = ensure_numeric(lat)

        #Assume no one will be wanting to read this, while we are writing
        #outfile.close()

    def store_timestep(self, quantity_slice):
        """
        quantity_slice is the data to be stored at this time step
        """

        outfile = self.outfile

        # Get the variables
        time = outfile.variables[time_name]
        quantity = outfile.variables[self.quantity_name]

        i = len(time)

        #Store time
        time[i] = i*self.time_step #self.domain.time
        quantity[i,:] = quantity_slice*100 # To convert from m to cm
                                           # And m/s to cm/sec

    def close(self):
        self.outfile.close()

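# Hedged usage sketch ('demo_ha.nc' and all values are invented): write
# two time steps of a stage ('HA') grid with Write_nc.
def _demo_write_nc():
    from Numeric import ones, Float
    lon = [148.0, 148.5, 149.0]
    lat = [-35.0, -34.5]
    nc_file = Write_nc('HA', 'demo_ha.nc',
                       time_step_count=2, time_step=60.0,
                       lon=lon, lat=lat)
    for i in range(2):
        #Each slice has shape (len(lat), len(lon))
        nc_file.store_timestep(i*ones((len(lat), len(lon)), Float))
    nc_file.close()
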
def urs2sww(basename_in='o', basename_out=None, verbose=False,
            remove_nc_files=True,
            minlat=None, maxlat=None,
            minlon=None, maxlon=None,
            mint=None, maxt=None,
            mean_stage=0,
            origin=None,
            zscale=1,
            fail_on_NaN=True,
            NaN_filler=0,
            elevation=None):
    """
    Convert URS C binary format for wave propagation to
    sww format native to abstract_2d_finite_volumes.

    Specify only basename_in and read files of the form
    basefilename-z-mux, basefilename-e-mux and basefilename-n-mux containing
    relative height, x-velocity and y-velocity, respectively.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum. The latitude and longitude
    information is for a grid.

    min's and max's: If omitted - full extent is used.
    To include a value min may equal it, while max must exceed it.
    Lat and lon are assumed to be in decimal degrees.
    NOTE: min Lon is the most easterly boundary.

    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
    which means that latitude is the fastest
    varying dimension (row major order, so to speak)

    In URS C binary the latitudes and longitudes are in ascending order.
    """

    if basename_out is None:
        basename_out = basename_in
    files_out = urs2nc(basename_in, basename_out)
    ferret2sww(basename_out,
               minlat=minlat,
               maxlat=maxlat,
               minlon=minlon,
               maxlon=maxlon,
               mint=mint,
               maxt=maxt,
               mean_stage=mean_stage,
               origin=origin,
               zscale=zscale,
               fail_on_NaN=fail_on_NaN,
               NaN_filler=NaN_filler,
               inverted_bathymetry=True,
               verbose=verbose)
    if remove_nc_files:
        for file_out in files_out:
            os.remove(file_out)

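# Hedged usage sketch ('tsunami' is a hypothetical basename): convert
# tsunami-z-mux, tsunami-e-mux and tsunami-n-mux to an sww file, keeping
# the intermediate nc files for inspection.
def _demo_urs2sww():
    urs2sww('tsunami', remove_nc_files=False,
            mean_stage=0.0, verbose=True)
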
def urs2nc(basename_in = 'o', basename_out = 'urs'):
    files_in = [basename_in+'-z-mux',
                basename_in+'-e-mux',
                basename_in+'-n-mux']
    files_out = [basename_out+'_ha.nc',
                 basename_out+'_ua.nc',
                 basename_out+'_va.nc']
    quantities = ['HA','UA','VA']

    for file_name in files_in:
        if os.access(file_name, os.F_OK) == 0:
            msg = 'File %s does not exist or is not accessible' %file_name
            raise IOError, msg

    hashed_elevation = None
    for file_in, file_out, quantity in zip(files_in,
                                           files_out,
                                           quantities):
        lonlatdep, lon, lat, depth = _binary_c2nc(file_in,
                                                  file_out,
                                                  quantity)
        if hashed_elevation is None:
            elevation_file = basename_out+'_e.nc'
            write_elevation_sww(elevation_file,
                                lon,
                                lat,
                                depth)
            hashed_elevation = myhash(lonlatdep)
        else:
            msg = "The elevation information in the mux files is inconsistent"
            assert hashed_elevation == myhash(lonlatdep), msg
    files_out.append(elevation_file)
    return files_out

def _binary_c2nc(file_in, file_out, quantity):
    """
    Convert one URS C binary mux file to an nc file.

    Return the depth info, so it can be written to a file.
    """
    columns = 3 # long, lat, depth
    #FIXME use mux_file, not f
    f = open(file_in, 'rb')

    # Number of points/stations
    (points_num,) = unpack('i', f.read(4))

    # nt, int - Number of time steps
    (time_step_count,) = unpack('i', f.read(4))

    #dt, float - time step, seconds
    (time_step,) = unpack('f', f.read(4))

    msg = "Bad data in the mux file."
    if points_num < 0:
        f.close()
        raise ANUGAError, msg
    if time_step_count < 0:
        f.close()
        raise ANUGAError, msg
    if time_step < 0:
        f.close()
        raise ANUGAError, msg

    lonlatdep = p_array.array('f')
    lonlatdep.read(f, columns * points_num)
    lonlatdep = array(lonlatdep, typecode=Float)
    lonlatdep = reshape(lonlatdep, (points_num, columns))

    lon, lat, depth = lon_lat2grid(lonlatdep)
    lon_sorted = list(lon)
    lon_sorted.sort()

    if not allclose(lon, lon_sorted):
        msg = "Longitudes in mux file are not in ascending order"
        raise IOError, msg
    lat_sorted = list(lat)
    lat_sorted.sort()

    if not allclose(lat, lat_sorted):
        msg = "Latitudes in mux file are not in ascending order"
        raise IOError, msg

    nc_file = Write_nc(quantity,
                       file_out,
                       time_step_count,
                       time_step,
                       lon,
                       lat)

    for i in range(time_step_count):
        #Read in a time slice from mux file
        hz_p_array = p_array.array('f')
        hz_p_array.read(f, points_num)
        hz_p = array(hz_p_array, typecode=Float)
        hz_p = reshape(hz_p, (len(lon), len(lat)))
        hz_p = transpose(hz_p) #mux has lat varying fastest, nc has long v.f.

        #write time slice to nc file
        nc_file.store_timestep(hz_p)

    f.close()
    nc_file.close()

    return lonlatdep, lon, lat, depth

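# Hedged sketch (not a complete mux writer): pack the three header fields
# _binary_c2nc expects at the start of a URS C binary file. 'header.mux'
# and the values are invented for illustration.
def _demo_write_mux_header():
    from struct import pack
    f = open('header.mux', 'wb')
    f.write(pack('i', 4))     # points_num: number of points/stations
    f.write(pack('i', 10))    # time_step_count: number of time steps
    f.write(pack('f', 60.0))  # time_step: step size in seconds
    f.close()
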

def write_elevation_sww(file_out, lon, lat, depth_vector):

    # NetCDF file definition
    outfile = NetCDFFile(file_out, 'w')

    #Create new file
    nc_lon_lat_header(outfile, lon, lat)

    # ELEVATION
    zname = 'ELEVATION'
    outfile.createVariable(zname, precision, (lat_name, lon_name))
    outfile.variables[zname].units = 'CENTIMETERS'
    outfile.variables[zname].missing_value = -1.e+034

    outfile.variables[lon_name][:] = ensure_numeric(lon)
    outfile.variables[lat_name][:] = ensure_numeric(lat)

    depth = reshape(depth_vector, (len(lat), len(lon)))
    outfile.variables[zname][:] = depth

    outfile.close()

def nc_lon_lat_header(outfile, lon, lat):

    outfile.institution = 'Geoscience Australia'
    outfile.description = 'Converted from URS binary C'

    # Longitude
    outfile.createDimension(lon_name, len(lon))
    outfile.createVariable(lon_name, precision, (lon_name,))
    outfile.variables[lon_name].point_spacing = 'uneven'
    outfile.variables[lon_name].units = 'degrees_east'
    outfile.variables[lon_name].assignValue(lon)


    # Latitude
    outfile.createDimension(lat_name, len(lat))
    outfile.createVariable(lat_name, precision, (lat_name,))
    outfile.variables[lat_name].point_spacing = 'uneven'
    outfile.variables[lat_name].units = 'degrees_north'
    outfile.variables[lat_name].assignValue(lat)


def lon_lat2grid(long_lat_dep):
    """
    Given a list of points that are assumed to lie on a grid,
    return the long's and lat's of the grid.
    long_lat_dep is an array where each row is a position.
    The first column is longitudes.
    The second column is latitudes.

    The latitude is the fastest varying dimension - in mux files.
    """
    LONG = 0
    LAT = 1
    QUANTITY = 2

    long_lat_dep = ensure_numeric(long_lat_dep, Float)

    num_points = long_lat_dep.shape[0]
    this_rows_long = long_lat_dep[0,LONG]

    # Count the rows sharing the first longitude,
    # which equals the number of unique latitudes
    i = 0
    while i < num_points and long_lat_dep[i,LONG] == this_rows_long:
        i += 1

    lat = long_lat_dep[:i, LAT]
    long = long_lat_dep[::i, LONG]
    lenlong = len(long)
    lenlat = len(lat)

    msg = 'Input data is not gridded'
    assert num_points % lenlat == 0, msg
    assert num_points % lenlong == 0, msg

    # Test that data is gridded
    for i in range(lenlong):
        msg = 'Data is not gridded.  It must be for this operation'
        first = i*lenlat
        last = first + lenlat

        assert allclose(long_lat_dep[first:last,LAT], lat), msg
        assert allclose(long_lat_dep[first:last,LONG], long[i]), msg


    msg = 'Out of range latitudes/longitudes'
    for l in lat: assert -90 < l < 90, msg
    for l in long: assert -180 < l < 180, msg

    # Changing quantity from lat being the fastest varying dimension to
    # long being the fastest varying dimension
    # FIXME - make this faster/do this a better way
    # use numeric transpose, after reshaping the quantity vector
    quantity = zeros(num_points, Float)

    for lat_i, _ in enumerate(lat):
        for long_i, _ in enumerate(long):
            q_index = lat_i*lenlong + long_i
            lld_index = long_i*lenlat + lat_i
            temp = long_lat_dep[lld_index, QUANTITY]
            quantity[q_index] = temp

    return long, lat, quantity

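# Hedged example (invented numbers): four points on a 2x2 grid, with
# latitude varying fastest as in mux files. The quantity column comes
# back reordered so that longitude varies fastest.
def _demo_lon_lat2grid():
    from Numeric import array, Float
    points = array([[148.0, -35.0, 1.0],
                    [148.0, -34.5, 2.0],
                    [149.0, -35.0, 3.0],
                    [149.0, -34.5, 4.0]], Float)
    longs, lats, q = lon_lat2grid(points)
    #longs == [148.0, 149.0], lats == [-35.0, -34.5], q == [1.0, 3.0, 2.0, 4.0]
    return longs, lats, q
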
####  END URS 2 SWW  ###
#-------------------------------------------------------------
if __name__ == "__main__":
    pass
