source: anuga_core/source/anuga/shallow_water/data_manager.py @ 4271

Last change on this file since 4271 was 4271, checked in by duncan, 18 years ago

working on updated urs2sww

File size: 148.1 KB
1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topograhies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdinis ascii geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from anuga.pmesh)
34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
41NC -> SWW: Conversion of MOST bundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
47TSH + Boundary SWW -> SWW: Simluation using abstract_2d_finite_volumes
48
49"""

import exceptions
class TitleValueError(exceptions.Exception): pass
class DataMissingValuesError(exceptions.Exception): pass
class DataFileNotOpenError(exceptions.Exception): pass
class DataTimeError(exceptions.Exception): pass
class DataDomainError(exceptions.Exception): pass



import csv
import os
from struct import unpack
import array as p_array
from os import sep, path

from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
     searchsorted, zeros, allclose, around, reshape, transpose, sort
from Scientific.IO.NetCDF import NetCDFFile


from anuga.coordinate_transforms.redfearn import redfearn, \
     convert_from_latlon_to_utm
from anuga.coordinate_transforms.geo_reference import Geo_reference
from anuga.geospatial_data.geospatial_data import Geospatial_data
from anuga.config import minimum_storable_height as default_minimum_storable_height
from anuga.utilities.numerical_tools import ensure_numeric, mean
from anuga.caching.caching import myhash
from anuga.utilities.anuga_exceptions import ANUGAError
from anuga.pmesh.mesh import Mesh
from anuga.shallow_water import Domain
from anuga.abstract_2d_finite_volumes.pmesh2domain import \
     pmesh_to_domain_instance

def make_filename(s):
    """Transform argument string into a suitable filename
    """

    s = s.strip()
    s = s.replace(' ', '_')
    s = s.replace('(', '')
    s = s.replace(')', '')
    s = s.replace('__', '_')

    return s

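# Illustrative sketch (editor's example):
#
#     make_filename('Model output (final) ')   # -> 'Model_output_final'

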
def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       check_dir(path, verbose)

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os, sys
    import os.path

    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
        unix = 0
    else:
        unix = 1


    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            os.mkdir(path)

            # Change access rights if possible
            #
            if unix:
                exitcode = os.system('chmod 775 ' + path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print 'MESSAGE: Directory', path, 'created.'

        except:
            print 'WARNING: Directory', path, 'could not be created.'
            if unix:
                path = '/tmp/'
            else:
                path = 'C:'

            print 'Using directory %s instead' %path

    return(path)


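# Usage sketch (editor's illustration; the directory name is hypothetical):
#
#     datadir = check_dir('~/anuga_output', verbose=True)
#     # datadir ends with os.sep; if the directory could not be created a
#     # fallback such as '/tmp/' is returned instead

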
def del_dir(path):
    """Recursively delete directory path and all its contents
    """

    import os

    if os.path.isdir(path):
        for file in os.listdir(path):
            X = os.path.join(path, file)

            if os.path.isdir(X) and not os.path.islink(X):
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except:
                    print "Could not remove file %s" %X

        os.rmdir(path)


def create_filename(datadir, filename, format, size=None, time=None):

    import os

    FN = check_dir(datadir) + filename

    if size is not None:
        FN += '_size%d' %size

    if time is not None:
        FN += '_time%.2f' %time

    FN += '.' + format
    return FN

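# Illustrative sketch (editor's example; unix path separator assumed):
#
#     create_filename('data', 'run1', 'sww', size=4, time=1.5)
#     # -> 'data/run1_size4_time1.50.sww'

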
def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format
    """

    import glob

    import os

    dir = check_dir(datadir)

    pattern = dir + os.sep + filename + '_size=%d*.%s' %(size, format)
    return glob.glob(pattern)


#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        assert mode in ['r', 'w', 'a'], \
               "Mode %s must be either 'w' (write), 'r' (read) or 'a' (append)" %mode

        #Create filename
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        self.timestep = 0
        self.domain = domain


        # Exclude ghosts in case this is a parallel domain
        self.number_of_nodes = domain.number_of_full_nodes
        self.number_of_volumes = domain.number_of_full_triangles


        #FIXME: Should we have a general set_precision function?


#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision for quantities
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)

        if hasattr(domain, 'minimum_storable_height'):
            self.minimum_storable_height = domain.minimum_storable_height
        else:
            self.minimum_storable_height = default_minimum_storable_height

        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from anuga.abstract_2d_finite_volumes suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            if domain.smooth is True:
                fid.createDimension('number_of_points', self.domain.number_of_full_nodes)

                # FIXME(Ole): This will cause sww files for parallel domains to
                # have ghost nodes stored (but not used by triangles).
                # To clean this up, we have to change get_vertex_values and
                # friends in quantity.py (but I can't be bothered right now)
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', Float,  # Always use full precision lest two timesteps
                                               # close to each other may appear as the same step
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for NetCDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision=self.precision)

        volumes[:] = V.astype(volumes.typecode())
        x[:] = X
        y[:] = Y
        z[:] = Z

        #FIXME: Backwards compatibility
        z = fid.variables['z']
        z[:] = Z
        ################################


        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        from Numeric import choose

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg


        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase = file_size/i
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #In order to get the file name and start time correct,
            #change domain.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #Write a filename addon that won't break Swollen's reader
            #(10.sww is bad)
            filename_ext = '_time_%s' %self.domain.time
            filename_ext = filename_ext.replace('.', '_')

            #Remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.get_name()
            if not self.recursion:
                self.domain.set_name(old_domain_filename+filename_ext)


            #Change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #Build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s' %file_size
                print '    saving file to %s' %next_data_structure.filename

            #Set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #Restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.set_name(old_domain_filename)
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)

                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']

                    # Clip stage to the stored bed wherever the water depth
                    # is below the minimum storable height
                    A = choose(A-z[:] >= self.minimum_storable_height,
                               (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
                #eval( name + '[i,:] = A.astype(self.precision)' )
                #FIXME: But we need a UNIT test for that before refactoring


            #Flush and close
            fid.sync()
            fid.close()


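# Usage sketch (editor's illustration; the domain setup is hypothetical and
# elided -- any initialised anuga.shallow_water Domain will do):
#
#     domain.set_name('runup')                 # base name for the .sww file
#     sww = Data_format_sww(domain, mode='w')
#     sww.store_connectivity()                 # mesh and bed elevation, once
#     for t in domain.evolve(yieldstep=1, finaltime=10):
#         sww.store_timestep(['stage', 'xmomentum', 'ymomentum'])

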
#Class for handling checkpoints data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt)
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        Data_format.__init__(self, domain, 'cpt', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write x,y coordinates and connectivity of triangles
        to the checkpoint file.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                      precision = self.precision)


        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store time and named quantity to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying again'
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        stage = fid.variables['stage']
        i = len(time)

        #Store time
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                  precision = self.precision)

        stage[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()




#Function for storing xya output
#FIXME Not done yet for this version
#This is obsolete.  Use geo_spatial_data instead
class Data_format_xya(Data_format):
    """Generic interface to data formats
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)


    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store_all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        if domain.smooth is True:
            number_of_points = len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes is what is assumed by the xya format

        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momentums) to file
        """
        pass


#### NED is national exposure database

LAT_TITLE = 'LATITUDE'
LONG_TITLE = 'LONGITUDE'
X_TITLE = 'x'
Y_TITLE = 'y'
class Exposure_csv:
    def __init__(self,file_name, latitude_title=LAT_TITLE,
                 longitude_title=LONG_TITLE, is_x_y_locations=None,
                 x_title=X_TITLE, y_title=Y_TITLE,
                 refine_polygon=None):
        """
        This class is for handling the exposure csv file.
        It reads the file in and converts the lats and longs to a geospatial
        data object.
        Use the methods to read and write columns.

        The format of the csv files it reads is:
           The first row is a title row.
           Commas are the delimiters.
           Each column is a 'set' of data.

        Feel free to use/expand it to read other csv files.

        It is not for adding and deleting rows.

        Can geospatial handle string attributes? It's not made for them.
        Currently it can't load and save string attributes.

        So just use geospatial to hold the x, y and georef? Bad, since
        different attributes are in different structures.  Not so bad, the
        info to write if the .csv file is saved is in attribute_dic.

        The location info is in the geospatial attribute.
        """
        self._file_name = file_name
        self._geospatial = None

        # self._attribute_dic is a dictionary.
        #The keys are the column titles.
        #The values are lists of column data

        # self._title_index_dic is a dictionary.
        #The keys are the column titles.
        #The values are the index positions of file columns.
        self._attribute_dic, self._title_index_dic = \
            self._load_exposure_csv(self._file_name)
        try:
            #Have code here that handles caps or lower
            lats = self._attribute_dic[latitude_title]
            longs = self._attribute_dic[longitude_title]

        except KeyError:
            # maybe a warning..
            #Let's see if this works..
            if is_x_y_locations != False:
                is_x_y_locations = True
        else:
            self._geospatial = Geospatial_data(latitudes = lats,
                                               longitudes = longs)

        if is_x_y_locations is True:
            if self._geospatial is not None:
                pass #FIXME throw an error
            try:
                xs = self._attribute_dic[x_title]
                ys = self._attribute_dic[y_title]
                points = [[float(i),float(j)] for i,j in map(None,xs,ys)]
            except KeyError:
                # maybe a warning..
                msg = "Could not find location information."
                raise TitleValueError, msg
            else:
                self._geospatial = Geospatial_data(data_points=points)

        # create a list of points that are in the refining_polygon
        # described by a list of indexes representing the points

    def __cmp__(self, other):
        #Check that other is an instance of this class
        if isinstance(self, type(other)):
            result = cmp(self._attribute_dic, other._attribute_dic)
            if result != 0:
                return result
            # The order of the columns is important. Therefore..
            result = cmp(self._title_index_dic, other._title_index_dic)
            if result != 0:
                return result
            for self_ls, other_ls in map(None, self._attribute_dic,
                                         other._attribute_dic):
                result = cmp(self._attribute_dic[self_ls],
                             other._attribute_dic[other_ls])
                if result != 0:
                    return result
            return 0
        else:
            return 1

    def _load_exposure_csv(self, file_name):
        """
        Load in the csv as a dic, title as key and column info as value.
        Also, create a dic, title as key and column index as value,
        to keep track of the column order.
        """

        attribute_dic = {}
        title_index_dic = {}
        titles_stripped = [] # list of titles

        reader = csv.reader(file(file_name))

        # Read in and manipulate the title info
        titles = reader.next()
        for i,title in enumerate(titles):
            titles_stripped.append(title.strip())
            title_index_dic[title.strip()] = i
        title_count = len(titles_stripped)


        #Create a dic of column values, indexed by column title
        for line in reader:
            if len(line) != title_count:
                raise IOError #FIXME make this nicer
            for i, value in enumerate(line):
                attribute_dic.setdefault(titles_stripped[i],[]).append(value)

        return attribute_dic, title_index_dic

    def get_column(self, column_name, use_refind_polygon=False):
        """
        Given a column name, return a list of the column values.

        Note, the type of the values will be String!
        Do this to change a list of strings to a list of floats:
        time = [float(x) for x in time]

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        if not self._attribute_dic.has_key(column_name):
            msg = 'There is no column called %s!' %column_name
            raise TitleValueError, msg
        return self._attribute_dic[column_name]


    def get_value(self, value_column_name,
                  known_column_name,
                  known_values,
                  use_refind_polygon=False):
        """
        Do linear interpolation on the known_column_name column, using the
        known_values, to return a value of the value_column_name column.
        """
        pass

    def get_location(self, use_refind_polygon=False):
        """
        Return a geospatial object which describes the
        locations of the location file.

        Note, if there is no location info, this returns None.

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        return self._geospatial

    def set_column(self, column_name, column_values, overwrite=False):
        """
        Add a column to the 'end' (with the rightmost column being the end)
        of the csv file.

        Set overwrite to True if you want to overwrite a column.

        Note, in column_name white space is removed and case is not checked.
        Precondition:
        The column_name and column_values cannot have commas in them.
        """

        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        if len(column_values) != value_row_count:
            msg = 'The number of column values must equal the number of rows.'
            raise DataMissingValuesError, msg

        if self._attribute_dic.has_key(column_name):
            if not overwrite:
                msg = 'Column name %s already in use!' %column_name
                raise TitleValueError, msg
        else:
            # New title.  Add it to the title index.
            self._title_index_dic[column_name] = len(self._title_index_dic)
        self._attribute_dic[column_name] = column_values

    def save(self, file_name=None):
        """
        Save the exposure csv file
        """
        if file_name is None:
            file_name = self._file_name

        fd = open(file_name, 'wb')
        writer = csv.writer(fd)

        #Write the title row to the csv file
        line = [None]*len(self._title_index_dic)
        for title in self._title_index_dic.iterkeys():
            line[self._title_index_dic[title]] = title
        writer.writerow(line)

        # Write the values to the csv file
        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        for row_i in range(value_row_count):
            line = [None]*len(self._title_index_dic)
            for title in self._title_index_dic.iterkeys():
                line[self._title_index_dic[title]] = \
                     self._attribute_dic[title][row_i]
            writer.writerow(line)


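# Usage sketch (editor's illustration; the file name and column titles are
# hypothetical):
#
#     exposure = Exposure_csv('houses.csv')     # csv with LATITUDE/LONGITUDE
#     suburbs = exposure.get_column('SUBURB')   # values come back as strings
#     exposure.set_column('MAX_DEPTH', depths)  # one value per row
#     exposure.save('houses_with_depth.csv')

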
#Auxiliary
def write_obj(filename,x,y,z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected.

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N==3  #Assuming three vertices per element

    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))

    for i in range(M):
        base = i*N
        outfile.write("f %d %d %d\n" % (base+1,base+2,base+3))

    outfile.close()


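# Minimal sketch (editor's example): write a single triangle to 'tri.obj'.
#
#     from Numeric import array, Float
#     x = array([[0.0, 1.0, 0.0]], Float)   # one element, three vertices
#     y = array([[0.0, 0.0, 1.0]], Float)
#     z = array([[0.0, 0.0, 0.0]], Float)
#     write_obj('tri', x, y, z)             # creates tri.obj

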
#########################################################
#Conversion routines
#########################################################

def sww2obj(basefilename, size):
    """Convert NetCDF based data output to obj
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN,xx,yy,zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN,xx,yy,zz)


def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?
    """

    import glob, os
    from anuga.config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    for i, line in enumerate(lines):
        tokens = line.split()
        values = map(float,tokens)

        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)


    #Now read all the data files with variable information, combine with
    #x,y info and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print 'Processing %s' % filename

        lines = open(filename, 'r').readlines()  #glob returns full paths
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise DataTimeError, 'Hmmmm'


        for i, line in enumerate(lines):
            tokens = line.split()
            values = map(float,tokens)

            for j in range(3):
                z[i,j] = values[j]

        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)


def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()


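# Illustrative sketch (editor's example; file names are hypothetical):
# keep every fifth timestep of a large sww file to get a smaller one.
#
#     filter_netcdf('big_run.sww', 'small_run.sww', first=0, step=5)

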
#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename
    """

    cls = eval('Data_format_%s' %domain.format)
    return cls(domain, mode)

#FIXME move into geospatial.  There should only be one method that
# reads xya, and writes pts.
def xya2pts(basename_in, basename_out=None, verbose=False,
            stride = 1,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed

    If z_func is specified, it will be applied to the third field
    """

    import os
    from Numeric import Float, arrayrange, concatenate

    root, ext = os.path.splitext(basename_in)

    if ext == '': ext = '.xya'

    #Get NetCDF
    infile = open(root + ext, 'r')  #Open existing xya file for read

    if verbose: print 'Reading xya points from %s' %(root + ext)

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):

        if i % stride != 0: continue

        fields = line.split()

        try:
            assert len(fields) == 3
        except:
            print 'WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line)

        try:
            x = float( fields[0] )
            y = float( fields[1] )
            z = float( fields[2] )
        except:
            continue

        points.append( [x, y] )

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    write_ptsfile(ptsname, points, attribute, attribute_name)



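# Usage sketch (editor's illustration; the file name and the depth-to-
# elevation conversion are hypothetical):
#
#     xya2pts('bathymetry', stride=2, attribute_name='elevation',
#             z_func=lambda z: -z)   # e.g. flip sign of soundings
#     # -> reads bathymetry.xya and writes bathymetry.pts

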
######Obsoleted by export_points in load_mesh
def write_ptsfile(ptsname, points, attribute, attribute_name = None,
                  zone=None, xllcorner=None, yllcorner=None):
    """Write points and associated attribute to pts (NetCDF) format
    """

    print 'WARNING: write_ptsfile is obsolete. Use export_points from load_mesh.loadASCII instead.'

    from Numeric import Float

    if attribute_name is None:
        attribute_name = 'attribute'


    from Scientific.IO.NetCDF import NetCDFFile

    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')


    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and '\
                          'portable storage of spatial point data'


    #Georeferencing
    from anuga.coordinate_transforms.geo_reference import Geo_reference
    if zone is None:
        assert xllcorner is None, 'xllcorner must be None'
        assert yllcorner is None, 'yllcorner must be None'
        Geo_reference().write_NetCDF(outfile)
    else:
        Geo_reference(zone, xllcorner, yllcorner).write_NetCDF(outfile)


    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable(attribute_name, Float, ('number_of_points',))

    # Get handles to the variables
    nc_points = outfile.variables['points']
    nc_attribute = outfile.variables[attribute_name]

    #Store data
    nc_points[:, :] = points
    nc_attribute[:] = attribute

    outfile.close()


def dem2pts(basename_in, basename_out=None,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None,
            use_cache=False, verbose=False):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """


    kwargs = {'basename_out': basename_out,
              'easting_min': easting_min,
              'easting_max': easting_max,
              'northing_min': northing_min,
              'northing_max': northing_max,
              'verbose': verbose}

    if use_cache is True:
        from caching import cache
        result = cache(_dem2pts, basename_in, kwargs,
                       dependencies = [basename_in + '.dem'],
                       verbose = verbose)

    else:
        result = apply(_dem2pts, [basename_in], kwargs)

    return result


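# Usage sketch (editor's illustration; the file name and clipping extents
# are hypothetical):
#
#     dem2pts('perth_dem',                       # reads perth_dem.dem
#             easting_min=380000, easting_max=400000,
#             northing_min=6430000, northing_max=6450000,
#             use_cache=True, verbose=True)      # writes perth_dem.pts

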
def _dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Internal function. See public function dem2pts for details.
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape, sum

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                          'of spatial point data'
    #assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
    totalnopoints = nrows*ncols

    # Calculate number of NODATA_values for each row in clipped region
    #FIXME: use array operations to do faster
    nn = 0
    k = 0
    i1_0 = 0
    j1_0 = 0
    thisj = 0
    thisi = 0
    for i in range(nrows):
        y = (nrows-i-1)*cellsize + yllcorner
        for j in range(ncols):
            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                thisj = j
                thisi = i
                if dem_elevation_r[i,j] == NODATA_value: nn += 1

                if k == 0:
                    i1_0 = i
                    j1_0 = j
                k += 1

    index1 = j1_0
    index2 = thisj

    # dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))

    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
    nopoints = clippednopoints-nn

    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1,j1_0:thisj+1]

    if verbose:
        print 'There are %d values in the elevation' %totalnopoints
        print 'There are %d values in the clipped elevation' %clippednopoints
        print 'There are %d NODATA_values in the clipped elevation' %nn

    outfile.createDimension('number_of_points', nopoints)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    lenv = index2-index1+1
    #Store data
    global_index = 0
    for i in range(i1_0,thisi+1,1):
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        lower_index = global_index

        v = dem_elevation_r[i,index1:index2+1]
        no_NODATA = sum(v == NODATA_value)
        if no_NODATA > 0:
            newcols = lenv - no_NODATA #ncols_in_bounding_box - no_NODATA
        else:
            newcols = lenv #ncols_in_bounding_box

        telev = zeros(newcols, Float)
        tpoints = zeros((newcols, 2), Float)

        local_index = 0

        y = (nrows-i-1)*cellsize + yllcorner
        for j in range(j1_0,index2+1,1):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max and \
               dem_elevation_r[i,j] != NODATA_value:
                tpoints[local_index, :] = [x-easting_min,y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        if upper_index == lower_index + newcols:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nopoints, 'index not equal to number of points'

    infile.close()
    outfile.close()



def _read_hecras_cross_sections(lines):
    """Return block of surface lines for each cross section
    Starts with SURFACE LINE,
    Ends with END CROSS-SECTION
    """

    points = []

    reading_surface = False
    for i, line in enumerate(lines):

        if len(line.strip()) == 0:    #Ignore blanks
            continue

        if lines[i].strip().startswith('SURFACE LINE'):
            reading_surface = True
            continue

        if lines[i].strip().startswith('END') and reading_surface:
            yield points
            reading_surface = False
            points = []

        if reading_surface:
            fields = line.strip().split(',')
            easting = float(fields[0])
            northing = float(fields[1])
            elevation = float(fields[2])
            points.append([easting, northing, elevation])



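# Illustrative sketch (editor's example): the function is a generator, so
# each cross section comes back as a list of [easting, northing, elevation]
# triplets.
#
#     lines = open('river.sdf').readlines()        # hypothetical file
#     for section in _read_hecras_cross_sections(lines):
#         print '%d surface points' %len(section)

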
1616def hecras_cross_sections2pts(basename_in,
1617                              basename_out=None,
1618                              verbose=False):
1619    """Read HEC-RAS Elevation datal from the following ASCII format (.sdf)
1620
1621    Example:
1622
1623
1624# RAS export file created on Mon 15Aug2005 11:42
1625# by HEC-RAS Version 3.1.1
1626
1627BEGIN HEADER:
1628  UNITS: METRIC
1629  DTM TYPE: TIN
1630  DTM: v:\1\cit\perth_topo\river_tin
1631  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
1632  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
1633  MAP PROJECTION: UTM
1634  PROJECTION ZONE: 50
1635  DATUM: AGD66
1636  VERTICAL DATUM:
1637  NUMBER OF REACHES:  19
1638  NUMBER OF CROSS-SECTIONS:  14206
1639END HEADER:
1640
1641
1642Only the SURFACE LINE data of the following form will be utilised
1643
1644  CROSS-SECTION:
1645    STREAM ID:Southern-Wungong
1646    REACH ID:Southern-Wungong
1647    STATION:19040.*
1648    CUT LINE:
1649      405548.671603161 , 6438142.7594925
1650      405734.536092045 , 6438326.10404912
1651      405745.130459356 , 6438331.48627354
1652      405813.89633823 , 6438368.6272789
1653    SURFACE LINE:
1654     405548.67,   6438142.76,   35.37
1655     405552.24,   6438146.28,   35.41
1656     405554.78,   6438148.78,   35.44
1657     405555.80,   6438149.79,   35.44
1658     405559.37,   6438153.31,   35.45
1659     405560.88,   6438154.81,   35.44
1660     405562.93,   6438156.83,   35.42
1661     405566.50,   6438160.35,   35.38
1662     405566.99,   6438160.83,   35.37
1663     ...
1664   END CROSS-SECTION
1665
1666    Convert to NetCDF pts format which is
1667
1668    points:  (Nx2) Float array
1669    elevation: N Float array
1670    """
1671
1672    #FIXME: Can this be written feasibly using write_pts?
1673
1674    import os
1675    from Scientific.IO.NetCDF import NetCDFFile
1676    from Numeric import Float, zeros, reshape
1677
1678    root = basename_in
1679
1680    #Get ASCII file
1681    infile = open(root + '.sdf', 'r')  #Open SDF file for read
1682
1683    if verbose: print 'Reading DEM from %s' %(root + '.sdf')
1684
1685    lines = infile.readlines()
1686    infile.close()
1687
1688    if verbose: print 'Converting to pts format'
1689
1690    i = 0
1691    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
1692        i += 1
1693
1694    assert lines[i].strip().upper() == 'BEGIN HEADER:'
1695    i += 1
1696
1697    assert lines[i].strip().upper().startswith('UNITS:')
1698    units = lines[i].strip().split()[1]
1699    i += 1
1700
1701    assert lines[i].strip().upper().startswith('DTM TYPE:')
1702    i += 1
1703
1704    assert lines[i].strip().upper().startswith('DTM:')
1705    i += 1
1706
1707    assert lines[i].strip().upper().startswith('STREAM')
1708    i += 1
1709
1710    assert lines[i].strip().upper().startswith('CROSS')
1711    i += 1
1712
1713    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
1714    projection = lines[i].strip().split(':')[1]
1715    i += 1
1716
1717    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
1718    zone = int(lines[i].strip().split(':')[1])
1719    i += 1
1720
1721    assert lines[i].strip().upper().startswith('DATUM:')
1722    datum = lines[i].strip().split(':')[1]
1723    i += 1
1724
1725    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
1726    i += 1
1727
1728    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
1729    i += 1
1730
1731    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
1732    number_of_cross_sections = int(lines[i].strip().split(':')[1])
1733    i += 1
1734
1735
1736    #Now read all points
1737    points = []
1738    elevation = []
1739    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
1740        for k, entry in enumerate(entries):
1741            points.append(entry[:2])
1742            elevation.append(entry[2])
1743
1744
    msg = 'Actual number of cross sections %d does not match reported number %d'\
          %(j+1, number_of_cross_sections)
    assert j+1 == number_of_cross_sections, msg
1748
1749    #Get output file
    if basename_out is None:
1751        ptsname = root + '.pts'
1752    else:
1753        ptsname = basename_out + '.pts'
1754
1755    #FIXME (DSG-ON): use loadASCII export_points_file
1756    if verbose: print 'Store to NetCDF file %s' %ptsname
1757    # NetCDF file definition
1758    outfile = NetCDFFile(ptsname, 'w')
1759
1760    #Create new file
1761    outfile.institution = 'Geoscience Australia'
1762    outfile.description = 'NetCDF pts format for compact and portable ' +\
1763                          'storage of spatial point data derived from HEC-RAS'
1764
1765    #Georeferencing
1766    outfile.zone = zone
1767    outfile.xllcorner = 0.0
1768    outfile.yllcorner = 0.0
1769    outfile.false_easting = 500000    #FIXME: Use defaults from e.g. config.py
1770    outfile.false_northing = 1000000
1771
1772    outfile.projection = projection
1773    outfile.datum = datum
1774    outfile.units = units
1775
1776
1777    outfile.createDimension('number_of_points', len(points))
1778    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1779
1780    # variable definitions
1781    outfile.createVariable('points', Float, ('number_of_points',
1782                                             'number_of_dimensions'))
1783    outfile.createVariable('elevation', Float, ('number_of_points',))
1784
1785    # Get handles to the variables
1786    outfile.variables['points'][:] = points
1787    outfile.variables['elevation'][:] = elevation
1788
1789    outfile.close()
1790
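# Example usage (a hedged sketch; 'up_canning' is a hypothetical basename,
# assuming up_canning.sdf exists in the working directory):
#
#     hecras_cross_sections2pts('up_canning', verbose=True)
#     # -> writes up_canning.pts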
1791
1792
1793def sww2dem(basename_in, basename_out = None,
1794            quantity = None,
1795            timestep = None,
1796            reduction = None,
1797            cellsize = 10,
1798            NODATA_value = -9999,
1799            easting_min = None,
1800            easting_max = None,
1801            northing_min = None,
1802            northing_max = None,
1803            verbose = False,
1804            origin = None,
1805            datum = 'WGS84',
            format = 'ers'):

    """Read SWW file and convert to Digital Elevation Model format (.asc or .ers)
1809
1810    Example (ASC):
1811
1812    ncols         3121
1813    nrows         1800
1814    xllcorner     722000
1815    yllcorner     5893000
1816    cellsize      25
1817    NODATA_value  -9999
1818    138.3698 137.4194 136.5062 135.5558 ..........
1819
    In the ASC case an accompanying file with the same basename_out but
    extension .prj is also written; it fixes the UTM zone, datum, false
    northings and eastings.
1822
1823    The prj format is assumed to be as
1824
1825    Projection    UTM
1826    Zone          56
1827    Datum         WGS84
1828    Zunits        NO
1829    Units         METERS
1830    Spheroid      WGS84
1831    Xshift        0.0000000000
1832    Yshift        10000000.0000000000
1833    Parameters
1834
1835
1836    The parameter quantity must be the name of an existing quantity or
1837    an expression involving existing quantities. The default is
1838    'elevation'.
1839
1840    if timestep (an index) is given, output quantity at that timestep
1841
1842    if reduction is given use that to reduce quantity over all timesteps.
1843
    datum sets the geodetic datum written to the output (default 'WGS84')
1845
1846    format can be either 'asc' or 'ers'
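
    Example (a hedged sketch; 'merimbula' is a hypothetical basename,
    assuming merimbula.sww exists):

        sww2dem('merimbula', quantity='stage', cellsize=25, format='asc')
        # -> writes merimbula_stage.asc and merimbula_stage.prj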
1847    """
1848
1849    import sys
1850    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
1851    from Numeric import array2string
1852
1853    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
1854    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
1855
1856    msg = 'Format must be either asc or ers'
1857    assert format.lower() in ['asc', 'ers'], msg
1858
1859
1860    false_easting = 500000
1861    false_northing = 10000000
1862
1863    if quantity is None:
1864        quantity = 'elevation'
1865
1866    if reduction is None:
1867        reduction = max
1868
1869    if basename_out is None:
1870        basename_out = basename_in + '_%s' %quantity
1871
1872    swwfile = basename_in + '.sww'
1873    demfile = basename_out + '.' + format
    # Note the use of a .ers extension is optional (write_ermapper_grid
    # will deal with either option)
1876
1877    #Read sww file
1878    if verbose: print 'Reading from %s' %swwfile
1879    from Scientific.IO.NetCDF import NetCDFFile
1880    fid = NetCDFFile(swwfile)
1881
1882    #Get extent and reference
1883    x = fid.variables['x'][:]
1884    y = fid.variables['y'][:]
1885    volumes = fid.variables['volumes'][:]
1886    if timestep is not None:
1887        times = fid.variables['time'][timestep]
1888    else:
1889        times = fid.variables['time'][:]
1890
1891    number_of_timesteps = fid.dimensions['number_of_timesteps']
1892    number_of_points = fid.dimensions['number_of_points']
1893   
1894    if origin is None:
1895
1896        #Get geo_reference
1897        #sww files don't have to have a geo_ref
1898        try:
1899            geo_reference = Geo_reference(NetCDFObject=fid)
1900        except AttributeError, e:
1901            geo_reference = Geo_reference() #Default georef object
1902
1903        xllcorner = geo_reference.get_xllcorner()
1904        yllcorner = geo_reference.get_yllcorner()
1905        zone = geo_reference.get_zone()
1906    else:
1907        zone = origin[0]
1908        xllcorner = origin[1]
1909        yllcorner = origin[2]
1910
1911
1912
1913    #FIXME: Refactor using code from file_function.statistics
1914    #Something like print swwstats(swwname)
1915    if verbose:
1916        print '------------------------------------------------'
1917        print 'Statistics of SWW file:'
1918        print '  Name: %s' %swwfile
1919        print '  Reference:'
1920        print '    Lower left corner: [%f, %f]'\
1921              %(xllcorner, yllcorner)
1922        if timestep is not None:
1923            print '    Time: %f' %(times)
1924        else:
1925            print '    Start time: %f' %fid.starttime[0]
1926        print '  Extent:'
1927        print '    x [m] in [%f, %f], len(x) == %d'\
1928              %(min(x.flat), max(x.flat), len(x.flat))
1929        print '    y [m] in [%f, %f], len(y) == %d'\
1930              %(min(y.flat), max(y.flat), len(y.flat))
1931        if timestep is not None:
1932            print '    t [s] = %f, len(t) == %d' %(times, 1)
1933        else:
1934            print '    t [s] in [%f, %f], len(t) == %d'\
1935              %(min(times), max(times), len(times))
1936        print '  Quantities [SI units]:'
1937        for name in ['stage', 'xmomentum', 'ymomentum']:
1938            q = fid.variables[name][:].flat
1939            if timestep is not None:
1940                q = q[timestep*len(x):(timestep+1)*len(x)]
1941            if verbose: print '    %s in [%f, %f]' %(name, min(q), max(q))
1942        for name in ['elevation']:
1943            q = fid.variables[name][:].flat
1944            if verbose: print '    %s in [%f, %f]' %(name, min(q), max(q))
1945           
1946    # Get quantity and reduce if applicable
1947    if verbose: print 'Processing quantity %s' %quantity
1948
1949    # Turn NetCDF objects into Numeric arrays
1950    quantity_dict = {}
1951    for name in fid.variables.keys():
1952        quantity_dict[name] = fid.variables[name][:] 
1953
1954
1955    # Convert quantity expression to quantities found in sww file   
1956    q = apply_expression_to_dictionary(quantity, quantity_dict)
1957
1958    if len(q.shape) == 2:
1959        #q has a time component and needs to be reduced along
1960        #the temporal dimension
1961        if verbose: print 'Reducing quantity %s' %quantity
1962        q_reduced = zeros( number_of_points, Float )
1963       
1964        if timestep is not None:
1965            for k in range(number_of_points):
1966                q_reduced[k] = q[timestep,k] 
1967        else:
1968            for k in range(number_of_points):
1969                q_reduced[k] = reduction( q[:,k] )
1970
1971        q = q_reduced
1972
1973    #Post condition: Now q has dimension: number_of_points
1974    assert len(q.shape) == 1
1975    assert q.shape[0] == number_of_points
1976
1977
1978    if verbose:
1979        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
1980
1981
1982    #Create grid and update xll/yll corner and x,y
1983
1984    #Relative extent
1985    if easting_min is None:
1986        xmin = min(x)
1987    else:
1988        xmin = easting_min - xllcorner
1989
1990    if easting_max is None:
1991        xmax = max(x)
1992    else:
1993        xmax = easting_max - xllcorner
1994
1995    if northing_min is None:
1996        ymin = min(y)
1997    else:
1998        ymin = northing_min - yllcorner
1999
2000    if northing_max is None:
2001        ymax = max(y)
2002    else:
2003        ymax = northing_max - yllcorner
2004
2005
2006
2007    if verbose: print 'Creating grid'
2008    ncols = int((xmax-xmin)/cellsize)+1
2009    nrows = int((ymax-ymin)/cellsize)+1
2010
2011
2012    #New absolute reference and coordinates
2013    newxllcorner = xmin+xllcorner
2014    newyllcorner = ymin+yllcorner
2015
2016    x = x+xllcorner-newxllcorner
2017    y = y+yllcorner-newyllcorner
2018   
2019    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2020    assert len(vertex_points.shape) == 2
2021
2022    grid_points = zeros ( (ncols*nrows, 2), Float )
2023
2024
2025    for i in xrange(nrows):
2026        if format.lower() == 'asc':
2027            yg = i*cellsize
        else:
            # This flips the order of the y values for ers
            yg = (nrows-i)*cellsize
2031
2032        for j in xrange(ncols):
2033            xg = j*cellsize
2034            k = i*ncols + j
2035
2036            grid_points[k,0] = xg
2037            grid_points[k,1] = yg
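    # For 'asc' the rows are generated south to north and flipped at write
    # time (see base_index below); for 'ers' the flip happens above via yg.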
2038
2039    #Interpolate
2040    #from least_squares import Interpolation
2041    from anuga.fit_interpolate.interpolate import Interpolate
2042
2043    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2044
2045    #Interpolate using quantity values
2046    if verbose: print 'Interpolating'
2047    grid_values = interp.interpolate(q, grid_points).flat
2048
2049
2050    if verbose:
2051        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
2052                                                      max(grid_values))
2053
2054    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2055    P = interp.mesh.get_boundary_polygon()
2056    outside_indices = outside_polygon(grid_points, P, closed=True)
2057
2058    for i in outside_indices:
2059        grid_values[i] = NODATA_value
2060
2061
2062
2063
2064    if format.lower() == 'ers':
2065        # setup ERS header information
2066        grid_values = reshape(grid_values,(nrows, ncols))
2067        header = {}
2068        header['datum'] = '"' + datum + '"'
2069        # FIXME The use of hardwired UTM and zone number needs to be made optional
2070        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
2071        header['projection'] = '"UTM-' + str(zone) + '"'
2072        header['coordinatetype'] = 'EN'
2073        if header['coordinatetype'] == 'LL':
2074            header['longitude'] = str(newxllcorner)
2075            header['latitude'] = str(newyllcorner)
2076        elif header['coordinatetype'] == 'EN':
2077            header['eastings'] = str(newxllcorner)
2078            header['northings'] = str(newyllcorner)
2079        header['nullcellvalue'] = str(NODATA_value)
2080        header['xdimension'] = str(cellsize)
2081        header['ydimension'] = str(cellsize)
2082        header['value'] = '"' + quantity + '"'
2083        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
2084
2085
2086        #Write
2087        if verbose: print 'Writing %s' %demfile
2088        import ermapper_grids
2089        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
2090
2091        fid.close()
2092    else:
2093        #Write to Ascii format
2094
2095        #Write prj file
2096        prjfile = basename_out + '.prj'
2097
2098        if verbose: print 'Writing %s' %prjfile
2099        prjid = open(prjfile, 'w')
2100        prjid.write('Projection    %s\n' %'UTM')
2101        prjid.write('Zone          %d\n' %zone)
2102        prjid.write('Datum         %s\n' %datum)
2103        prjid.write('Zunits        NO\n')
2104        prjid.write('Units         METERS\n')
2105        prjid.write('Spheroid      %s\n' %datum)
2106        prjid.write('Xshift        %d\n' %false_easting)
2107        prjid.write('Yshift        %d\n' %false_northing)
2108        prjid.write('Parameters\n')
2109        prjid.close()
2110
2111
2112
2113        if verbose: print 'Writing %s' %demfile
2114
2115        ascid = open(demfile, 'w')
2116
2117        ascid.write('ncols         %d\n' %ncols)
2118        ascid.write('nrows         %d\n' %nrows)
2119        ascid.write('xllcorner     %d\n' %newxllcorner)
2120        ascid.write('yllcorner     %d\n' %newyllcorner)
2121        ascid.write('cellsize      %f\n' %cellsize)
2122        ascid.write('NODATA_value  %d\n' %NODATA_value)
2123
2124
2125        #Get bounding polygon from mesh
2126        #P = interp.mesh.get_boundary_polygon()
2127        #inside_indices = inside_polygon(grid_points, P)
2128
2129        for i in range(nrows):
2130            if verbose and i%((nrows+10)/10)==0:
2131                print 'Doing row %d of %d' %(i, nrows)
2132
2133            base_index = (nrows-i-1)*ncols
2134
            row_values = grid_values[base_index:base_index+ncols]
            s = array2string(row_values, max_line_width=sys.maxint)
2137            ascid.write(s[1:-1] + '\n')
2138
2139
2140            #print
2141            #for j in range(ncols):
2142            #    index = base_index+j#
2143            #    print grid_values[index],
2144            #    ascid.write('%f ' %grid_values[index])
2145            #ascid.write('\n')
2146
2147        #Close
2148        ascid.close()
2149        fid.close()
2150
2151#Backwards compatibility
2152def sww2asc(basename_in, basename_out = None,
2153            quantity = None,
2154            timestep = None,
2155            reduction = None,
2156            cellsize = 10,
2157            verbose = False,
2158            origin = None):
    print 'sww2asc is deprecated - please use sww2dem'
2160    sww2dem(basename_in,
2161            basename_out = basename_out,
2162            quantity = quantity,
2163            timestep = timestep,
2164            reduction = reduction,
2165            cellsize = cellsize,
2166            verbose = verbose,
2167            origin = origin,
            datum = 'WGS84',
            format = 'asc')
2170
2171def sww2ers(basename_in, basename_out = None,
2172            quantity = None,
2173            timestep = None,
2174            reduction = None,
2175            cellsize = 10,
2176            verbose = False,
2177            origin = None,
2178            datum = 'WGS84'):
    print 'sww2ers is deprecated - please use sww2dem'
2180    sww2dem(basename_in,
2181            basename_out = basename_out,
2182            quantity = quantity,
2183            timestep = timestep,
2184            reduction = reduction,
2185            cellsize = cellsize,
2186            verbose = verbose,
2187            origin = origin,
2188            datum = datum,
2189            format = 'ers')
2190################################# END COMPATIBILITY ##############
2191
2192
2193
2194def sww2pts(basename_in, basename_out=None,
2195            data_points=None,
2196            quantity=None,
2197            timestep=None,
2198            reduction=None,
2199            NODATA_value=-9999,
2200            verbose=False,
2201            origin=None):
2202            #datum = 'WGS84')
2203
2204
2205    """Read SWW file and convert to interpolated values at selected points
2206
    The parameter 'quantity' must be the name of an existing quantity or
2208    an expression involving existing quantities. The default is
2209    'elevation'.
2210
2211    if timestep (an index) is given, output quantity at that timestep
2212
2213    if reduction is given use that to reduce quantity over all timesteps.
2214
    data_points (Nx2 array) gives the locations of points at which the quantity is to be computed.
2216   
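    Example (a hedged sketch; names are hypothetical and assume gauges.sww
    exists, with coordinates given in the sww file's coordinate system):

        from Numeric import array
        points = array([[355000.0, 6440000.0], [356000.0, 6441000.0]])
        sww2pts('gauges', data_points=points, quantity='elevation')
        # -> writes gauges_elevation.pts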
2217    """
2218
2219    import sys
2220    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
2221    from Numeric import array2string
2222
2223    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
2224    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
2225
2226    from anuga.geospatial_data.geospatial_data import Geospatial_data
2227
2228    if quantity is None:
2229        quantity = 'elevation'
2230
2231    if reduction is None:
2232        reduction = max
2233
2234    if basename_out is None:
2235        basename_out = basename_in + '_%s' %quantity
2236
2237    swwfile = basename_in + '.sww'
2238    ptsfile = basename_out + '.pts'
2239
2240    # Read sww file
2241    if verbose: print 'Reading from %s' %swwfile
2242    from Scientific.IO.NetCDF import NetCDFFile
2243    fid = NetCDFFile(swwfile)
2244
2245    # Get extent and reference
2246    x = fid.variables['x'][:]
2247    y = fid.variables['y'][:]
2248    volumes = fid.variables['volumes'][:]
2249
2250    number_of_timesteps = fid.dimensions['number_of_timesteps']
2251    number_of_points = fid.dimensions['number_of_points']
2252    if origin is None:
2253
2254        # Get geo_reference
2255        # sww files don't have to have a geo_ref
2256        try:
2257            geo_reference = Geo_reference(NetCDFObject=fid)
2258        except AttributeError, e:
2259            geo_reference = Geo_reference() #Default georef object
2260
2261        xllcorner = geo_reference.get_xllcorner()
2262        yllcorner = geo_reference.get_yllcorner()
2263        zone = geo_reference.get_zone()
2264    else:
2265        zone = origin[0]
2266        xllcorner = origin[1]
2267        yllcorner = origin[2]
2268
2269
2270
2271    # FIXME: Refactor using code from file_function.statistics
2272    # Something like print swwstats(swwname)
2273    if verbose:
2274        x = fid.variables['x'][:]
2275        y = fid.variables['y'][:]
2276        times = fid.variables['time'][:]
2277        print '------------------------------------------------'
2278        print 'Statistics of SWW file:'
2279        print '  Name: %s' %swwfile
2280        print '  Reference:'
2281        print '    Lower left corner: [%f, %f]'\
2282              %(xllcorner, yllcorner)
2283        print '    Start time: %f' %fid.starttime[0]
2284        print '  Extent:'
2285        print '    x [m] in [%f, %f], len(x) == %d'\
2286              %(min(x.flat), max(x.flat), len(x.flat))
2287        print '    y [m] in [%f, %f], len(y) == %d'\
2288              %(min(y.flat), max(y.flat), len(y.flat))
2289        print '    t [s] in [%f, %f], len(t) == %d'\
2290              %(min(times), max(times), len(times))
2291        print '  Quantities [SI units]:'
2292        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2293            q = fid.variables[name][:].flat
2294            print '    %s in [%f, %f]' %(name, min(q), max(q))
2295
2296
2297
2298    # Get quantity and reduce if applicable
2299    if verbose: print 'Processing quantity %s' %quantity
2300
2301    # Turn NetCDF objects into Numeric arrays
2302    quantity_dict = {}
2303    for name in fid.variables.keys():
2304        quantity_dict[name] = fid.variables[name][:]
2305
2306
2307
2308    # Convert quantity expression to quantities found in sww file   
2309    q = apply_expression_to_dictionary(quantity, quantity_dict)
2310
2311
2312
2313    if len(q.shape) == 2:
2314        # q has a time component and needs to be reduced along
2315        # the temporal dimension
2316        if verbose: print 'Reducing quantity %s' %quantity
2317        q_reduced = zeros( number_of_points, Float )
2318
2319        for k in range(number_of_points):
2320            q_reduced[k] = reduction( q[:,k] )
2321
2322        q = q_reduced
2323
2324    # Post condition: Now q has dimension: number_of_points
2325    assert len(q.shape) == 1
2326    assert q.shape[0] == number_of_points
2327
2328
2329    if verbose:
2330        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
2331
2332
2333    # Create grid and update xll/yll corner and x,y
2334    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2335    assert len(vertex_points.shape) == 2
2336
2337    # Interpolate
2338    from anuga.fit_interpolate.interpolate import Interpolate
2339    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2340
2341    # Interpolate using quantity values
2342    if verbose: print 'Interpolating'
2343    interpolated_values = interp.interpolate(q, data_points).flat
2344
2345
2346    if verbose:
2347        print 'Interpolated values are in [%f, %f]' %(min(interpolated_values),
2348                                                      max(interpolated_values))
2349
2350    # Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2351    P = interp.mesh.get_boundary_polygon()
2352    outside_indices = outside_polygon(data_points, P, closed=True)
2353
2354    for i in outside_indices:
2355        interpolated_values[i] = NODATA_value
2356
2357
2358    # Store results   
2359    G = Geospatial_data(data_points=data_points,
2360                        attributes=interpolated_values)
2361
2362    G.export_points_file(ptsfile, absolute = True)
2363
2364    fid.close()
2365
2366
2367def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2368                                  use_cache = False,
2369                                  verbose = False):
2370    """Read Digitial Elevation model from the following ASCII format (.asc)
2371
2372    Example:
2373
2374    ncols         3121
2375    nrows         1800
2376    xllcorner     722000
2377    yllcorner     5893000
2378    cellsize      25
2379    NODATA_value  -9999
2380    138.3698 137.4194 136.5062 135.5558 ..........
2381
2382    Convert basename_in + '.asc' to NetCDF format (.dem)
2383    mimicking the ASCII format closely.
2384
2385
2386    An accompanying file with same basename_in but extension .prj must exist
2387    and is used to fix the UTM zone, datum, false northings and eastings.
2388
2389    The prj format is assumed to be as
2390
2391    Projection    UTM
2392    Zone          56
2393    Datum         WGS84
2394    Zunits        NO
2395    Units         METERS
2396    Spheroid      WGS84
2397    Xshift        0.0000000000
2398    Yshift        10000000.0000000000
2399    Parameters
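
    Example (a hedged sketch; 'topo' is a hypothetical basename assuming
    topo.asc and topo.prj exist):

        convert_dem_from_ascii2netcdf('topo', use_cache=True, verbose=True)
        # -> writes topo.dem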
2400    """
2401
2402
2403
2404    kwargs = {'basename_out': basename_out, 'verbose': verbose}
2405
2406    if use_cache is True:
2407        from caching import cache
2408        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
2409                       dependencies = [basename_in + '.asc'],
2410                       verbose = verbose)
2411
2412    else:
2413        result = apply(_convert_dem_from_ascii2netcdf, [basename_in], kwargs)
2414
2415    return result
2416
2417
2418
2419
2420
2421
2422def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2423                                  verbose = False):
2424    """Read Digitial Elevation model from the following ASCII format (.asc)
2425
2426    Internal function. See public function convert_dem_from_ascii2netcdf for details.
2427    """
2428
2429    import os
2430    from Scientific.IO.NetCDF import NetCDFFile
2431    from Numeric import Float, array
2432
2433    #root, ext = os.path.splitext(basename_in)
2434    root = basename_in
2435
2436    ###########################################
2437    # Read Meta data
2438    if verbose: print 'Reading METADATA from %s' %root + '.prj'
2439    metadatafile = open(root + '.prj')
2440    metalines = metadatafile.readlines()
2441    metadatafile.close()
2442
2443    L = metalines[0].strip().split()
2444    assert L[0].strip().lower() == 'projection'
2445    projection = L[1].strip()                   #TEXT
2446
2447    L = metalines[1].strip().split()
2448    assert L[0].strip().lower() == 'zone'
2449    zone = int(L[1].strip())
2450
2451    L = metalines[2].strip().split()
2452    assert L[0].strip().lower() == 'datum'
2453    datum = L[1].strip()                        #TEXT
2454
2455    L = metalines[3].strip().split()
2456    assert L[0].strip().lower() == 'zunits'     #IGNORE
2457    zunits = L[1].strip()                       #TEXT
2458
2459    L = metalines[4].strip().split()
2460    assert L[0].strip().lower() == 'units'
2461    units = L[1].strip()                        #TEXT
2462
2463    L = metalines[5].strip().split()
2464    assert L[0].strip().lower() == 'spheroid'   #IGNORE
2465    spheroid = L[1].strip()                     #TEXT
2466
2467    L = metalines[6].strip().split()
2468    assert L[0].strip().lower() == 'xshift'
2469    false_easting = float(L[1].strip())
2470
2471    L = metalines[7].strip().split()
2472    assert L[0].strip().lower() == 'yshift'
2473    false_northing = float(L[1].strip())
2474
2475    #print false_easting, false_northing, zone, datum
2476
2477
2478    ###########################################
2479    #Read DEM data
2480
2481    datafile = open(basename_in + '.asc')
2482
2483    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
2484    lines = datafile.readlines()
2485    datafile.close()
2486
2487    if verbose: print 'Got', len(lines), ' lines'
2488
2489    ncols = int(lines[0].split()[1].strip())
2490    nrows = int(lines[1].split()[1].strip())
2491    xllcorner = float(lines[2].split()[1].strip())
2492    yllcorner = float(lines[3].split()[1].strip())
2493    cellsize = float(lines[4].split()[1].strip())
2494    NODATA_value = int(lines[5].split()[1].strip())
2495
2496    assert len(lines) == nrows + 6
2497
2498
2499    ##########################################
2500
2501
    if basename_out is None:
2503        netcdfname = root + '.dem'
2504    else:
2505        netcdfname = basename_out + '.dem'
2506
2507    if verbose: print 'Store to NetCDF file %s' %netcdfname
2508    # NetCDF file definition
2509    fid = NetCDFFile(netcdfname, 'w')
2510
2511    #Create new file
2512    fid.institution = 'Geoscience Australia'
2513    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
2514                      'of spatial point data'
2515
2516    fid.ncols = ncols
2517    fid.nrows = nrows
2518    fid.xllcorner = xllcorner
2519    fid.yllcorner = yllcorner
2520    fid.cellsize = cellsize
2521    fid.NODATA_value = NODATA_value
2522
2523    fid.zone = zone
2524    fid.false_easting = false_easting
2525    fid.false_northing = false_northing
2526    fid.projection = projection
2527    fid.datum = datum
2528    fid.units = units
2529
2530
2531    # dimension definitions
2532    fid.createDimension('number_of_rows', nrows)
2533    fid.createDimension('number_of_columns', ncols)
2534
2535    # variable definitions
2536    fid.createVariable('elevation', Float, ('number_of_rows',
2537                                            'number_of_columns'))
2538
2539    # Get handles to the variables
2540    elevation = fid.variables['elevation']
2541
2542    #Store data
2543    n = len(lines[6:])
2544    for i, line in enumerate(lines[6:]):
2545        fields = line.split()
2546        if verbose and i%((n+10)/10)==0:
2547            print 'Processing row %d of %d' %(i, nrows)
2548
2549        elevation[i, :] = array([float(x) for x in fields])
2550
2551    fid.close()
2552
2553
2554
2555
2556
2557def ferret2sww(basename_in, basename_out = None,
2558               verbose = False,
2559               minlat = None, maxlat = None,
2560               minlon = None, maxlon = None,
2561               mint = None, maxt = None, mean_stage = 0,
2562               origin = None, zscale = 1,
2563               fail_on_NaN = True,
2564               NaN_filler = 0,
2565               elevation = None,
2566               inverted_bathymetry = True
2567               ): #FIXME: Bathymetry should be obtained
2568                                  #from MOST somehow.
2569                                  #Alternatively from elsewhere
2570                                  #or, as a last resort,
2571                                  #specified here.
2572                                  #The value of -100 will work
2573                                  #for the Wollongong tsunami
2574                                  #scenario but is very hacky
2575    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
2576    sww format native to abstract_2d_finite_volumes.
2577
2578    Specify only basename_in and read files of the form
2579    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
2580    relative height, x-velocity and y-velocity, respectively.
2581
2582    Also convert latitude and longitude to UTM. All coordinates are
2583    assumed to be given in the GDA94 datum.
2584
    min's and max's: If omitted - full extent is used.
    To include a value min may equal it, while max must exceed it.
    Lat and lon are assumed to be in decimal degrees.
2588
2589    origin is a 3-tuple with geo referenced
2590    UTM coordinates (zone, easting, northing)
2591
2592    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2593    which means that longitude is the fastest
2594    varying dimension (row major order, so to speak)
2595
2596    ferret2sww uses grid points as vertices in a triangular grid
2597    counting vertices from lower left corner upwards, then right
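
    Example (a hedged sketch; 'most_output' is a hypothetical basename,
    assuming most_output_ha.nc, most_output_ua.nc, most_output_va.nc and
    most_output_e.nc exist):

        ferret2sww('most_output', minlat=-21.0, maxlat=-20.0,
                   mint=0, maxt=3600, verbose=True)
        # -> writes most_output.sww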
2598    """
2599
2600    import os
2601    from Scientific.IO.NetCDF import NetCDFFile
2602    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2603    from Numeric import allclose, around
2604
2605    precision = Float
2606
2607    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
2608
    if minlat is not None:
        assert -90 < minlat < 90 , msg
    if maxlat is not None:
        assert -90 < maxlat < 90 , msg
        if minlat is not None:
            assert maxlat > minlat
    if minlon is not None:
        assert -180 < minlon < 180 , msg
    if maxlon is not None:
        assert -180 < maxlon < 180 , msg
        if minlon is not None:
            assert maxlon > minlon
2621       
2622
2623
2624    #Get NetCDF data
2625    if verbose: print 'Reading files %s_*.nc' %basename_in
2626    #print "basename_in + '_ha.nc'",basename_in + '_ha.nc'
2627    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2628    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2629    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2630    file_e = NetCDFFile(basename_in + '_e.nc', 'r')  #Elevation (z) (m)
2631
2632    if basename_out is None:
2633        swwname = basename_in + '.sww'
2634    else:
2635        swwname = basename_out + '.sww'
2636
    # Get dimensions of file_h
2638    for dimension in file_h.dimensions.keys():
2639        if dimension[:3] == 'LON':
2640            dim_h_longitude = dimension
2641        if dimension[:3] == 'LAT':
2642            dim_h_latitude = dimension
2643        if dimension[:4] == 'TIME':
2644            dim_h_time = dimension
2645
2646#    print 'long:', dim_h_longitude
2647#    print 'lats:', dim_h_latitude
2648#    print 'times:', dim_h_time
2649
2650    times = file_h.variables[dim_h_time]
2651    latitudes = file_h.variables[dim_h_latitude]
2652    longitudes = file_h.variables[dim_h_longitude]
2653   
    # Get dimensions for file_e
2655    for dimension in file_e.dimensions.keys():
2656        if dimension[:3] == 'LON':
2657            dim_e_longitude = dimension
2658        if dimension[:3] == 'LAT':
2659            dim_e_latitude = dimension
2660
    # Get dimensions for file_u
2662    for dimension in file_u.dimensions.keys():
2663        if dimension[:3] == 'LON':
2664            dim_u_longitude = dimension
2665        if dimension[:3] == 'LAT':
2666            dim_u_latitude = dimension
2667        if dimension[:4] == 'TIME':
2668            dim_u_time = dimension
2669           
    # Get dimensions for file_v
2671    for dimension in file_v.dimensions.keys():
2672        if dimension[:3] == 'LON':
2673            dim_v_longitude = dimension
2674        if dimension[:3] == 'LAT':
2675            dim_v_latitude = dimension
2676        if dimension[:4] == 'TIME':
2677            dim_v_time = dimension
2678
2679
2680    #Precision used by most for lat/lon is 4 or 5 decimals
2681    e_lat = around(file_e.variables[dim_e_latitude][:], 5)
2682    e_lon = around(file_e.variables[dim_e_longitude][:], 5)
2683
2684    #Check that files are compatible
2685    assert allclose(latitudes, file_u.variables[dim_u_latitude])
2686    assert allclose(latitudes, file_v.variables[dim_v_latitude])
2687    assert allclose(latitudes, e_lat)
2688
2689    assert allclose(longitudes, file_u.variables[dim_u_longitude])
2690    assert allclose(longitudes, file_v.variables[dim_v_longitude])
2691    assert allclose(longitudes, e_lon)
2692
    if mint is None:
        jmin = 0
    else:
        jmin = searchsorted(times, mint)

    if maxt is None:
        jmax = len(times)
    else:
        jmax = searchsorted(times, maxt)
2702
2703    #print "latitudes[:]",latitudes[:]
2704    #print "longitudes[:]",longitudes [:]
2705    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],
2706                                                  longitudes[:],
2707                                                 minlat, maxlat,
2708                                                 minlon, maxlon)
2709
2710
2711#    print' j', jmin, jmax
2712    times = times[jmin:jmax]
2713    latitudes = latitudes[kmin:kmax]
2714    longitudes = longitudes[lmin:lmax]
2715
2716    #print "latitudes[:]",latitudes[:]
2717    #print "longitudes[:]",longitudes [:]
2718
2719    if verbose: print 'cropping'
2720    zname = 'ELEVATION'
2721
2722    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2723    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2724    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2725    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2726
2727    #    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
2728    #        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
2729    #    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
2730    #        from Numeric import asarray
2731    #        elevations=elevations.tolist()
2732    #        elevations.reverse()
2733    #        elevations=asarray(elevations)
2734    #    else:
2735    #        from Numeric import asarray
2736    #        elevations=elevations.tolist()
2737    #        elevations.reverse()
2738    #        elevations=asarray(elevations)
2739    #        'print hmmm'
2740
2741
2742
2743    #Get missing values
2744    nan_ha = file_h.variables['HA'].missing_value[0]
2745    nan_ua = file_u.variables['UA'].missing_value[0]
2746    nan_va = file_v.variables['VA'].missing_value[0]
2747    if hasattr(file_e.variables[zname],'missing_value'):
2748        nan_e  = file_e.variables[zname].missing_value[0]
2749    else:
2750        nan_e = None
2751
2752    #Cleanup
2753    from Numeric import sometrue
2754
2755    missing = (amplitudes == nan_ha)
2756    if sometrue (missing):
2757        if fail_on_NaN:
2758            msg = 'NetCDFFile %s contains missing values'\
2759                  %(basename_in+'_ha.nc')
2760            raise DataMissingValuesError, msg
2761        else:
2762            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2763
2764    missing = (uspeed == nan_ua)
2765    if sometrue (missing):
2766        if fail_on_NaN:
2767            msg = 'NetCDFFile %s contains missing values'\
2768                  %(basename_in+'_ua.nc')
2769            raise DataMissingValuesError, msg
2770        else:
2771            uspeed = uspeed*(missing==0) + missing*NaN_filler
2772
2773    missing = (vspeed == nan_va)
2774    if sometrue (missing):
2775        if fail_on_NaN:
2776            msg = 'NetCDFFile %s contains missing values'\
2777                  %(basename_in+'_va.nc')
2778            raise DataMissingValuesError, msg
2779        else:
2780            vspeed = vspeed*(missing==0) + missing*NaN_filler
2781
2782
2783    missing = (elevations == nan_e)
2784    if sometrue (missing):
2785        if fail_on_NaN:
2786            msg = 'NetCDFFile %s contains missing values'\
2787                  %(basename_in+'_e.nc')
2788            raise DataMissingValuesError, msg
2789        else:
2790            elevations = elevations*(missing==0) + missing*NaN_filler
2791
2792    #######
2793
2794
2795
2796    number_of_times = times.shape[0]
2797    number_of_latitudes = latitudes.shape[0]
2798    number_of_longitudes = longitudes.shape[0]
2799
2800    assert amplitudes.shape[0] == number_of_times
2801    assert amplitudes.shape[1] == number_of_latitudes
2802    assert amplitudes.shape[2] == number_of_longitudes
2803
2804    if verbose:
2805        print '------------------------------------------------'
2806        print 'Statistics:'
2807        print '  Extent (lat/lon):'
2808        print '    lat in [%f, %f], len(lat) == %d'\
2809              %(min(latitudes.flat), max(latitudes.flat),
2810                len(latitudes.flat))
2811        print '    lon in [%f, %f], len(lon) == %d'\
2812              %(min(longitudes.flat), max(longitudes.flat),
2813                len(longitudes.flat))
2814        print '    t in [%f, %f], len(t) == %d'\
2815              %(min(times.flat), max(times.flat), len(times.flat))
2816
        q = amplitudes.flat
        name = 'Amplitudes (ha) [cm]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = uspeed.flat
        name = 'Speeds (ua) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = vspeed.flat
        name = 'Speeds (va) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = elevations.flat
        name = 'Elevations (e) [m]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2832
2833
2834    #print number_of_latitudes, number_of_longitudes
2835    number_of_points = number_of_latitudes*number_of_longitudes
2836    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2837
2838
2839    file_h.close()
2840    file_u.close()
2841    file_v.close()
2842    file_e.close()
2843
2844
2845    # NetCDF file definition
2846    outfile = NetCDFFile(swwname, 'w')
2847
2848    #Create new file
2849    outfile.institution = 'Geoscience Australia'
2850    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
2851                          %(basename_in + '_ha.nc',
2852                            basename_in + '_ua.nc',
2853                            basename_in + '_va.nc',
2854                            basename_in + '_e.nc')
2855
2856
2857    #For sww compatibility
2858    outfile.smoothing = 'Yes'
2859    outfile.order = 1
2860
2861    #Start time in seconds since the epoch (midnight 1/1/1970)
2862    outfile.starttime = starttime = times[0]
2863    times = times - starttime  #Store relative times
2864
2865    # dimension definitions
2866    outfile.createDimension('number_of_volumes', number_of_volumes)
2867
2868    outfile.createDimension('number_of_vertices', 3)
2869    outfile.createDimension('number_of_points', number_of_points)
2870
2871
    outfile.createDimension('number_of_timesteps', len(times))
2874
2875    # variable definitions
2876    outfile.createVariable('x', precision, ('number_of_points',))
2877    outfile.createVariable('y', precision, ('number_of_points',))
2878    outfile.createVariable('elevation', precision, ('number_of_points',))
2879
2880    #FIXME: Backwards compatibility
2881    outfile.createVariable('z', precision, ('number_of_points',))
2882    #################################
2883
2884    outfile.createVariable('volumes', Int, ('number_of_volumes',
2885                                            'number_of_vertices'))
2886
2887    outfile.createVariable('time', precision,
2888                           ('number_of_timesteps',))
2889
2890    outfile.createVariable('stage', precision,
2891                           ('number_of_timesteps',
2892                            'number_of_points'))
2893
2894    outfile.createVariable('xmomentum', precision,
2895                           ('number_of_timesteps',
2896                            'number_of_points'))
2897
2898    outfile.createVariable('ymomentum', precision,
2899                           ('number_of_timesteps',
2900                            'number_of_points'))
2901
2902
2903    #Store
2904    from anuga.coordinate_transforms.redfearn import redfearn
2905    x = zeros(number_of_points, Float)  #Easting
2906    y = zeros(number_of_points, Float)  #Northing
2907
2908
2909    if verbose: print 'Making triangular grid'
2910    #Check zone boundaries
2911    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2912
2913    vertices = {}
2914    i = 0
2915    for k, lat in enumerate(latitudes):       #Y direction
2916        for l, lon in enumerate(longitudes):  #X direction
2917
2918            vertices[l,k] = i
2919
2920            zone, easting, northing = redfearn(lat,lon)
2921
            msg = 'Zone boundary crossed at longitude = %f' %lon
2923            #assert zone == refzone, msg
2924            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2925            x[i] = easting
2926            y[i] = northing
2927            i += 1
2928
2929
2930    #Construct 2 triangles per 'rectangular' element
2931    volumes = []
2932    for l in range(number_of_longitudes-1):    #X direction
2933        for k in range(number_of_latitudes-1): #Y direction
2934            v1 = vertices[l,k+1]
2935            v2 = vertices[l,k]
2936            v3 = vertices[l+1,k+1]
2937            v4 = vertices[l+1,k]
2938
2939            volumes.append([v1,v2,v3]) #Upper element
2940            volumes.append([v4,v3,v2]) #Lower element
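    # Each rectangular cell is split along its diagonal as follows
    # (a sketch of the vertex labelling used above):
    #
    #   v1 ---- v3      v1 = (l,   k+1)    upper element: v1, v2, v3
    #    |    / |       v2 = (l,   k)      lower element: v4, v3, v2
    #    |   /  |       v3 = (l+1, k+1)
    #    |  /   |       v4 = (l+1, k)
    #   v2 ---- v4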
2941
2942    volumes = array(volumes)
2943
    if origin is None:
2945        zone = refzone
2946        xllcorner = min(x)
2947        yllcorner = min(y)
2948    else:
2949        zone = origin[0]
2950        xllcorner = origin[1]
2951        yllcorner = origin[2]
2952
2953
2954    outfile.xllcorner = xllcorner
2955    outfile.yllcorner = yllcorner
2956    outfile.zone = zone
2957
2958
2959    if elevation is not None:
2960        z = elevation
2961    else:
2962        if inverted_bathymetry:
2963            z = -1*elevations
2964        else:
2965            z = elevations
2966    #FIXME: z should be obtained from MOST and passed in here
2967
2968    from Numeric import resize
2969    z = resize(z,outfile.variables['z'][:].shape)
2970    outfile.variables['x'][:] = x - xllcorner
2971    outfile.variables['y'][:] = y - yllcorner
    outfile.variables['z'][:] = z             #FIXME HACK for backwards compat.
2973    outfile.variables['elevation'][:] = z
2974    outfile.variables['time'][:] = times   #Store time relative
2975    outfile.variables['volumes'][:] = volumes.astype(Int32) #For Opteron 64
2976
2977
2978
2979    #Time stepping
2980    stage = outfile.variables['stage']
2981    xmomentum = outfile.variables['xmomentum']
2982    ymomentum = outfile.variables['ymomentum']
2983
2984    if verbose: print 'Converting quantities'
2985    n = len(times)
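    # HA is stored in cm and UA/VA in cm/s, hence the divisions by 100
    # below; momenta are depth-integrated velocities: uh = u*h, vh = v*h.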
2986    for j in range(n):
2987        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2988        i = 0
2989        for k in range(number_of_latitudes):      #Y direction
2990            for l in range(number_of_longitudes): #X direction
2991                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2992                stage[j,i] = w
2993                h = w - z[i]
2994                xmomentum[j,i] = uspeed[j,k,l]/100*h
2995                ymomentum[j,i] = vspeed[j,k,l]/100*h
2996                i += 1
2997
2998    #outfile.close()
2999
3000    #FIXME: Refactor using code from file_function.statistics
3001    #Something like print swwstats(swwname)
3002    if verbose:
3003        x = outfile.variables['x'][:]
3004        y = outfile.variables['y'][:]
3005        times = outfile.variables['time'][:]
3006        print '------------------------------------------------'
3007        print 'Statistics of output file:'
3008        print '  Name: %s' %swwname
3009        print '  Reference:'
3010        print '    Lower left corner: [%f, %f]'\
3011              %(xllcorner, yllcorner)
3012        print '    Start time: %f' %starttime
3013        print '  Extent:'
3014        print '    x [m] in [%f, %f], len(x) == %d'\
3015              %(min(x.flat), max(x.flat), len(x.flat))
3016        print '    y [m] in [%f, %f], len(y) == %d'\
3017              %(min(y.flat), max(y.flat), len(y.flat))
3018        print '    t [s] in [%f, %f], len(t) == %d'\
3019              %(min(times), max(times), len(times))
3020        print '  Quantities [SI units]:'
3021        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
3022            q = outfile.variables[name][:].flat
3023            print '    %s in [%f, %f]' %(name, min(q), max(q))
3024
3025
3026
3027    outfile.close()
3028
3029
3030
3031
3032
3033def timefile2netcdf(filename, quantity_names = None):
3034    """Template for converting typical text files with time series to
3035    NetCDF tms file.
3036
3037
    The file format is assumed to be two fields separated by a comma:
3039
3040        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...
3041
3042    E.g
3043
3044      31/08/04 00:00:00, 1.328223 0 0
3045      31/08/04 00:15:00, 1.292912 0 0
3046
3047    will provide a time dependent function f(t) with three attributes
3048
    filename is assumed to be the rootname, with extensions .txt (input) and .tms (output)
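
    Example (a hedged sketch; 'tide_gauge' is a hypothetical rootname,
    assuming tide_gauge.txt exists in the format above):

        timefile2netcdf('tide_gauge', quantity_names=['stage'])
        # -> writes tide_gauge.tms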
3050    """
3051
3052    import time, calendar
3053    from Numeric import array
3054    from anuga.config import time_format
3055    from anuga.utilities.numerical_tools import ensure_numeric
3056
3057
3058
3059    fid = open(filename + '.txt')
3060    line = fid.readline()
3061    fid.close()
3062
3063    fields = line.split(',')
    msg = 'File %s must have the format date, value0 value1 value2 ...' %filename
    assert len(fields) == 2, msg
3066
3067    try:
3068        starttime = calendar.timegm(time.strptime(fields[0], time_format))
3069    except ValueError:
3070        msg = 'First field in file %s must be' %filename
3071        msg += ' date-time with format %s.\n' %time_format
3072        msg += 'I got %s instead.' %fields[0]
3073        raise DataTimeError, msg
3074
3075
3076    #Split values
3077    values = []
3078    for value in fields[1].split():
3079        values.append(float(value))
3080
3081    q = ensure_numeric(values)
3082
3083    msg = 'ERROR: File must contain at least one independent value'
3084    assert len(q.shape) == 1, msg
3085
3086
3087
3088    #Read times proper
3089    from Numeric import zeros, Float, alltrue
3090    from anuga.config import time_format
3091    import time, calendar
3092
3093    fid = open(filename + '.txt')
3094    lines = fid.readlines()
3095    fid.close()
3096
3097    N = len(lines)
3098    d = len(q)
3099
3100    T = zeros(N, Float)       #Time
3101    Q = zeros((N, d), Float)  #Values
3102
3103    for i, line in enumerate(lines):
3104        fields = line.split(',')
3105        realtime = calendar.timegm(time.strptime(fields[0], time_format))
3106
3107        T[i] = realtime - starttime
3108
3109        for j, value in enumerate(fields[1].split()):
3110            Q[i, j] = float(value)
3111
    msg = 'File %s must list time as a monotonically ' %filename
3113    msg += 'increasing sequence'
3114    assert alltrue( T[1:] - T[:-1] > 0 ), msg
3115
3116
3117    #Create NetCDF file
3118    from Scientific.IO.NetCDF import NetCDFFile
3119
3120    fid = NetCDFFile(filename + '.tms', 'w')
3121
3122
3123    fid.institution = 'Geoscience Australia'
3124    fid.description = 'Time series'
3125
3126
3127    #Reference point
3128    #Start time in seconds since the epoch (midnight 1/1/1970)
3129    #FIXME: Use Georef
3130    fid.starttime = starttime
3131
3132
3133    # dimension definitions
3134    #fid.createDimension('number_of_volumes', self.number_of_volumes)
3135    #fid.createDimension('number_of_vertices', 3)
3136
3137
3138    fid.createDimension('number_of_timesteps', len(T))
3139
3140    fid.createVariable('time', Float, ('number_of_timesteps',))
3141
3142    fid.variables['time'][:] = T
3143
3144    for i in range(Q.shape[1]):
3145        try:
3146            name = quantity_names[i]
3147        except:
3148            name = 'Attribute%d'%i
3149
3150        fid.createVariable(name, Float, ('number_of_timesteps',))
3151        fid.variables[name][:] = Q[:,i]
3152
3153    fid.close()
3154
3155
3156def extent_sww(file_name):
3157    """
3158    Read in an sww file.
3159
    Input:
3161    file_name - the sww file
3162
    Output:
3164    z - Vector of bed elevation
3165    volumes - Array.  Each row has 3 values, representing
3166    the vertices that define the volume
3167    time - Vector of the times where there is stage information
3168    stage - array with respect to time and vertices (x,y)
3169    """
3170
3171
3172    from Scientific.IO.NetCDF import NetCDFFile
3173
3174    #Check contents
3175    #Get NetCDF
3176    fid = NetCDFFile(file_name, 'r')
3177
3178    # Get the variables
3179    x = fid.variables['x'][:]
3180    y = fid.variables['y'][:]
3181    stage = fid.variables['stage'][:]
3182    #print "stage",stage
3183    #print "stage.shap",stage.shape
3184    #print "min(stage.flat), mpythonax(stage.flat)",min(stage.flat), max(stage.flat)
3185    #print "min(stage)",min(stage)
3186
3187    fid.close()
3188
3189    return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
3190
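# Hedged usage sketch (assumes model_output.sww exists):
#
#     xmin, xmax, ymin, ymax, stage_min, stage_max = \
#           extent_sww('model_output.sww')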
3191
def sww2domain(filename, boundary=None, t=None,
               fail_if_NaN=True, NaN_filler=0,
               verbose=False, very_verbose=False):
3195    """
3196    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))
3197
3198    Boundary is not recommended if domain.smooth is not selected, as it
3199    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatible with the coordinates, and will
3201    give a different final boundary, or crash.
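
    Example (a hedged sketch; assumes model_output.sww exists):

        domain = sww2domain('model_output.sww', verbose=True)
        # domain is initialised at the last stored timestep by default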
3202    """
    NaN = 9.969209968386869e+036
    # Initialise NaN to the netCDF default fill value.
3205
3206    from Scientific.IO.NetCDF import NetCDFFile
3207    from shallow_water import Domain
3208    from Numeric import asarray, transpose, resize
3209
3210    if verbose: print 'Reading from ', filename
3211    fid = NetCDFFile(filename, 'r')    #Open existing file for read
3212    time = fid.variables['time']       #Timesteps
3213    if t is None:
3214        t = time[-1]
3215    time_interp = get_time_interp(time,t)
3216
3217    # Get the variables as Numeric arrays
3218    x = fid.variables['x'][:]             #x-coordinates of vertices
3219    y = fid.variables['y'][:]             #y-coordinates of vertices
3220    elevation = fid.variables['elevation']     #Elevation
3221    stage = fid.variables['stage']     #Water level
3222    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
3223    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
3224
3225    starttime = fid.starttime[0]
3226    volumes = fid.variables['volumes'][:] #Connectivity
3227    coordinates=transpose(asarray([x.tolist(),y.tolist()]))
3228
3229    conserved_quantities = []
3230    interpolated_quantities = {}
3231    other_quantities = []
3232
3233    # get geo_reference
3234    #sww files don't have to have a geo_ref
3235    try:
3236        geo_reference = Geo_reference(NetCDFObject=fid)
3237    except: #AttributeError, e:
3238        geo_reference = None
3239
3240    if verbose: print '    getting quantities'
3241    for quantity in fid.variables.keys():
3242        dimensions = fid.variables[quantity].dimensions
3243        if 'number_of_timesteps' in dimensions:
3244            conserved_quantities.append(quantity)
3245            interpolated_quantities[quantity]=\
3246                  interpolated_quantity(fid.variables[quantity][:],time_interp)
3247        else: other_quantities.append(quantity)
3248
3249    other_quantities.remove('x')
3250    other_quantities.remove('y')
3251    other_quantities.remove('z')
3252    other_quantities.remove('volumes')
3253
3254    conserved_quantities.remove('time')
3255
3256    if verbose: print '    building domain'
3257    #    From domain.Domain:
3258    #    domain = Domain(coordinates, volumes,\
3259    #                    conserved_quantities = conserved_quantities,\
3260    #                    other_quantities = other_quantities,zone=zone,\
3261    #                    xllcorner=xllcorner, yllcorner=yllcorner)
3262
3263    #   From shallow_water.Domain:
3264    coordinates=coordinates.tolist()
3265    volumes=volumes.tolist()
3266    #FIXME:should this be in mesh?(peter row)
3267    if fid.smoothing == 'Yes': unique = False
3268    else: unique = True
3269    if unique:
3270        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)
3271
3272
3273    try:
3274        domain = Domain(coordinates, volumes, boundary)
3275    except AssertionError, e:
3276        fid.close()
3277        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
3278        raise DataDomainError, msg
3279
3280    if not boundary is None:
3281        domain.boundary = boundary
3282
3283    domain.geo_reference = geo_reference
3284
3285    domain.starttime=float(starttime)+float(t)
3286    domain.time=0.0
3287
3288    for quantity in other_quantities:
3289        try:
3290            NaN = fid.variables[quantity].missing_value
3291        except AttributeError:
3292            NaN = None #quantity has no missing_value number
3293        X = fid.variables[quantity][:]
3294        if very_verbose:
3295            print '       ',quantity
3296            print '        NaN =',NaN
3297            print '        max(X)'
3298            print '       ',max(X)
3299            print '        max(X)==NaN'
3300            print '       ',max(X)==NaN
3301            print ''
3302        if NaN is not None and ((max(X)==NaN) or (min(X)==NaN)):
3303            if fail_if_NaN:
3304                msg = 'quantity "%s" contains no_data entry'%quantity
3305                raise DataMissingValuesError, msg
3306            else:
3307                data = (X!=NaN)
3308                X = (X*data)+(data==0)*NaN_filler
3309        if unique:
3310            X = resize(X,(len(X)/3,3))
3311        domain.set_quantity(quantity,X)
3312    #
3313    for quantity in conserved_quantities:
3314        try:
3315            NaN = fid.variables[quantity].missing_value
3316        except AttributeError:
3317            NaN = None #quantity has no missing_value number
3318        X = interpolated_quantities[quantity]
3319        if very_verbose:
3320            print '       ',quantity
3321            print '        NaN =',NaN
3322            print '        max(X)'
3323            print '       ',max(X)
3324            print '        max(X)==NaN'
3325            print '       ',max(X)==NaN
3326            print ''
3327        if NaN is not None and ((max(X)==NaN) or (min(X)==NaN)):
3328            if fail_if_NaN:
3329                msg = 'quantity "%s" contains no_data entry'%quantity
3330                raise DataMissingValuesError, msg
3331            else:
3332                data = (X!=NaN)
3333                X = (X*data)+(data==0)*NaN_filler
3334        if unique:
3335            X = resize(X,(X.shape[0]/3,3))
3336        domain.set_quantity(quantity,X)
3337
3338    fid.close()
3339    return domain
3340
3341def interpolated_quantity(saved_quantity,time_interp):
3342
3343    #given an index and ratio, interpolate quantity with respect to time.
3344    index,ratio = time_interp
3345    Q = saved_quantity
3346    if ratio > 0:
3347        q = (1-ratio)*Q[index]+ ratio*Q[index+1]
3348    else:
3349        q = Q[index]
3350    #Return vector of interpolated values
3351    return q
3352
3353def get_time_interp(time,t=None):
3354    #Finds the ratio and index for time interpolation.
3355    #It is borrowed from previous abstract_2d_finite_volumes code.
3356    if t is None:
3357        t=time[-1]
3358        index = -1
3359        ratio = 0.
3360    else:
3361        T = time
3362        tau = t
3363        index=0
3364        msg = 'Time interval derived from file %s [%s:%s]'\
3365            %('FIXMEfilename', T[0], T[-1])
3366        msg += ' does not match model time: %s' %tau
3367        if tau < time[0]: raise DataTimeError, msg
3368        if tau > time[-1]: raise DataTimeError, msg
3369        while tau > time[index]: index += 1
3370        while tau < time[index]: index -= 1
3371        if tau == time[index]:
3372            #Protect against case where tau == time[-1] (last time)
3373            # - also works in general when tau == time[i]
3374            ratio = 0
3375        else:
3376            #t is now between index and index+1
3377            ratio = (tau - time[index])/(time[index+1] - time[index])
3378    return (index,ratio)
3379
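# Example (illustrative sketch, not from the original code): how
# get_time_interp and interpolated_quantity cooperate. The times and
# quantity values below are made up.
def _example_time_interpolation():
    times = [0.0, 10.0, 20.0]
    index, ratio = get_time_interp(times, t=15.0)
    assert (index, ratio) == (1, 0.5)       # halfway between 10.0 and 20.0

    stage = array([[0.0, 0.0], [2.0, 4.0], [4.0, 8.0]])  # f(time, point)
    q = interpolated_quantity(stage, (index, ratio))
    assert allclose(q, [3.0, 6.0])          # linear interpolation at t=15.0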
3380
3381def weed(coordinates,volumes,boundary = None):
3382    if not isinstance(coordinates, list):
3383        coordinates = coordinates.tolist()
3384    if not isinstance(volumes, list):
3385        volumes = volumes.tolist()
3386
3387    unique = False
3388    point_dict = {}
3389    same_point = {}
3390    for i in range(len(coordinates)):
3391        point = tuple(coordinates[i])
3392        if point_dict.has_key(point):
3393            unique = True
3394            same_point[i]=point
3395            #to change all point i references to point j
3396        else:
3397            point_dict[point]=i
3398            same_point[i]=point
3399
3400    coordinates = []
3401    i = 0
3402    for point in point_dict.keys():
3403        point = tuple(point)
3404        coordinates.append(list(point))
3405        point_dict[point]=i
3406        i+=1
3407
3408
3409    for volume in volumes:
3410        for i in range(len(volume)):
3411            index = volume[i]
3412            if index>-1:
3413                volume[i]=point_dict[same_point[index]]
3414
3415    new_boundary = {}
3416    if not boundary is None:
3417        for segment in boundary.keys():
3418            point0 = point_dict[same_point[segment[0]]]
3419            point1 = point_dict[same_point[segment[1]]]
3420            label = boundary[segment]
3421            #FIXME: should the boundary attributes be concatenated
3422            #('exterior, pond') or replaced ('pond')? (Peter Row)
3423
3424            if new_boundary.has_key((point0,point1)):
3425                new_boundary[(point0,point1)]=new_boundary[(point0,point1)]#\
3426                                              #+','+label
3427
3428            elif new_boundary.has_key((point1,point0)):
3429                new_boundary[(point1,point0)]=new_boundary[(point1,point0)]#\
3430                                              #+','+label
3431            else: new_boundary[(point0,point1)]=label
3432
3433        boundary = new_boundary
3434
3435    return coordinates,volumes,boundary
3436
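# Example (illustrative sketch, not from the original code): what weed()
# does to a mesh with a duplicated vertex. The coordinates are made up.
def _example_weed():
    coords = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0],
              [1.0, 0.0]]                  # last point duplicates point 1
    vols = [[0, 1, 2], [2, 3, 0]]          # second triangle uses the duplicate
    coords, vols, _ = weed(coords, vols)
    assert len(coords) == 3                # duplicate point has been merged
    for vol in vols:
        for index in vol:
            assert 0 <= index < 3          # indices remapped to surviving points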
3437
3438def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
3439                 verbose=False):
3440    """Read Digitial Elevation model from the following NetCDF format (.dem)
3441
3442    Example:
3443
3444    ncols         3121
3445    nrows         1800
3446    xllcorner     722000
3447    yllcorner     5893000
3448    cellsize      25
3449    NODATA_value  -9999
3450    138.3698 137.4194 136.5062 135.5558 ..........
3451
3452    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
3453    """
3454
3455    import os
3456    from Scientific.IO.NetCDF import NetCDFFile
3457    from Numeric import Float, zeros, sum, reshape, equal
3458
3459    root = basename_in
3460    inname = root + '.dem'
3461
3462    #Open existing netcdf file to read
3463    infile = NetCDFFile(inname, 'r')
3464    if verbose: print 'Reading DEM from %s' %inname
3465
3466    #Read metadata
3467    ncols = infile.ncols[0]
3468    nrows = infile.nrows[0]
3469    xllcorner = infile.xllcorner[0]
3470    yllcorner = infile.yllcorner[0]
3471    cellsize = infile.cellsize[0]
3472    NODATA_value = infile.NODATA_value[0]
3473    zone = infile.zone[0]
3474    false_easting = infile.false_easting[0]
3475    false_northing = infile.false_northing[0]
3476    projection = infile.projection
3477    datum = infile.datum
3478    units = infile.units
3479
3480    dem_elevation = infile.variables['elevation']
3481
3482    #Get output file name
3483    if basename_out is None:
3484        outname = root + '_' + repr(cellsize_new) + '.dem'
3485    else:
3486        outname = basename_out + '.dem'
3487
3488    if verbose: print 'Write decimated NetCDF file to %s' %outname
3489
3490    #Determine some dimensions for decimated grid
3491    (nrows_stencil, ncols_stencil) = stencil.shape
3492    x_offset = ncols_stencil / 2
3493    y_offset = nrows_stencil / 2
3494    cellsize_ratio = int(cellsize_new / cellsize)
3495    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
3496    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio
3497
3498    #Open netcdf file for output
3499    outfile = NetCDFFile(outname, 'w')
3500
3501    #Create new file
3502    outfile.institution = 'Geoscience Australia'
3503    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
3504                           'of spatial point data'
3505    #Georeferencing
3506    outfile.zone = zone
3507    outfile.projection = projection
3508    outfile.datum = datum
3509    outfile.units = units
3510
3511    outfile.cellsize = cellsize_new
3512    outfile.NODATA_value = NODATA_value
3513    outfile.false_easting = false_easting
3514    outfile.false_northing = false_northing
3515
3516    outfile.xllcorner = xllcorner + (x_offset * cellsize)
3517    outfile.yllcorner = yllcorner + (y_offset * cellsize)
3518    outfile.ncols = ncols_new
3519    outfile.nrows = nrows_new
3520
3521    # dimension definition
3522    outfile.createDimension('number_of_points', nrows_new*ncols_new)
3523
3524    # variable definition
3525    outfile.createVariable('elevation', Float, ('number_of_points',))
3526
3527    # Get handle to the variable
3528    elevation = outfile.variables['elevation']
3529
3530    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
3531
3532    #Store data
3533    global_index = 0
3534    for i in range(nrows_new):
3535        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
3536        lower_index = global_index
3537        telev =  zeros(ncols_new, Float)
3538        local_index = 0
3539        trow = i * cellsize_ratio
3540
3541        for j in range(ncols_new):
3542            tcol = j * cellsize_ratio
3543            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]
3544
3545            #if dem contains 1 or more NODATA_values set value in
3546            #decimated dem to NODATA_value, else compute decimated
3547            #value using stencil
3548            if sum(sum(equal(tmp, NODATA_value))) > 0:
3549                telev[local_index] = NODATA_value
3550            else:
3551                telev[local_index] = sum(sum(tmp * stencil))
3552
3553            global_index += 1
3554            local_index += 1
3555
3556        upper_index = global_index
3557
3558        elevation[lower_index:upper_index] = telev
3559
3560    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'
3561
3562    infile.close()
3563    outfile.close()
3564
3565
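# Example (hypothetical call sketch, not from the original code): 'topo' is
# an assumed basename of an existing topo.dem file with cellsize 25, so
# cellsize_new=75 gives a whole-number cellsize ratio. The 3x3 stencil
# below simply averages the cells it covers.
#
#     from Numeric import ones
#     stencil = ones((3, 3), Float) / 9.0
#     decimate_dem('topo', stencil, cellsize_new=75, verbose=True)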
3566
3567
3568def tsh2sww(filename, verbose=False): #test_tsh2sww
3569    """
3570    Convert a tsh/msh file to an sww file, to check that the mesh 'looks' good.
3571    """
3572
3573
3574    if verbose: print 'Creating domain from', filename
3575    domain = pmesh_to_domain_instance(filename, Domain)
3576    if verbose: print "Number of triangles = ", len(domain)
3577
3578    domain.smooth = True
3579    domain.format = 'sww'   #Native netcdf visualisation format
3580    file_path, filename = path.split(filename)
3581    filename, ext = path.splitext(filename)
3582    domain.set_name(filename)
3583    domain.reduction = mean
3584    if verbose: print "file_path", file_path
3585    if file_path == "": file_path = "."
3586    domain.set_datadir(file_path)
3587
3588    if verbose:
3589        print "Output written to " + domain.get_datadir() + sep + \
3590              domain.get_name() + "." + domain.format
3591    sww = get_dataobject(domain)
3592    sww.store_connectivity()
3593    sww.store_timestep('stage')
3594
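# Example (hypothetical usage sketch, not from the original code);
# 'channel.tsh' is an assumed mesh file.
#
#     tsh2sww('channel.tsh', verbose=True)   # writes channel.sww next to it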
3595
3596def asc_csiro2sww(bath_dir,
3597                  elevation_dir,
3598                  ucur_dir,
3599                  vcur_dir,
3600                  sww_file,
3601                  minlat = None, maxlat = None,
3602                  minlon = None, maxlon = None,
3603                  zscale=1,
3604                  mean_stage = 0,
3605                  fail_on_NaN = True,
3606                  elevation_NaN_filler = 0,
3607                  bath_prefix='ba',
3608                  elevation_prefix='el',
3609                  verbose=False):
3610    """
3611    Produce an sww boundary file from esri ascii data from CSIRO.
3612
3613    Also convert latitude and longitude to UTM. All coordinates are
3614    assumed to be given in the GDA94 datum.
3615
3616    Assume:
3617    All files are in esri ascii format.
3618
3619    4 types of information:
3620    bathymetry
3621    elevation
3622    u velocity
3623    v velocity
3624
3625    Assumptions:
3626    The metadata of all the files is the same.
3627    Each type is in a separate directory.
3628    One bath file with extension .000.
3629    The time period is less than 24hrs and uniform.
3630    """
3631    from Scientific.IO.NetCDF import NetCDFFile
3632
3633    from anuga.coordinate_transforms.redfearn import redfearn
3634
3635    precision = Float # So if we want to change the precision, it's done here
3636
3637    # go in to the bath dir and load the only file,
3638    bath_files = os.listdir(bath_dir)
3639
3640    bath_file = bath_files[0]
3641    bath_dir_file =  bath_dir + os.sep + bath_file
3642    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
3643
3644    #Use the date.time of the bath file as a basis for
3645    #the start time for other files
3646    base_start = bath_file[-12:]
3647
3648    #go into the elevation dir and load the 000 file
3649    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
3650                         + base_start
3651
3652    elevation_files = os.listdir(elevation_dir)
3653    ucur_files = os.listdir(ucur_dir)
3654    vcur_files = os.listdir(vcur_dir)
3655    elevation_files.sort()
3656    # the first elevation file should be the
3657    # file with the same base name as the bath data
3658    assert elevation_files[0] == elevation_prefix + base_start
3659
3660    number_of_latitudes = bath_grid.shape[0]
3661    number_of_longitudes = bath_grid.shape[1]
3662    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3663
3664    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
3665                  for x in range(number_of_longitudes)]
3666    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
3667                 for y in range(number_of_latitudes)]
3668
3669    # reverse order of lat, so the first lat represents the first grid row
3670    latitudes.reverse()
3671
3672    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],longitudes[:],
3673                                                 minlat=minlat, maxlat=maxlat,
3674                                                 minlon=minlon, maxlon=maxlon)
3675
3676
3677    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
3678    latitudes = latitudes[kmin:kmax]
3679    longitudes = longitudes[lmin:lmax]
3680    number_of_latitudes = len(latitudes)
3681    number_of_longitudes = len(longitudes)
3682    number_of_times = len(os.listdir(elevation_dir))
3683    number_of_points = number_of_latitudes*number_of_longitudes
3684    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3685
3686    #Work out the times
3687    if len(elevation_files) > 1:
3688        # Assume: The time period is less than 24hrs.
3689        time_period = (int(elevation_files[1][-3:]) - \
3690                      int(elevation_files[0][-3:]))*60*60
3691        times = [x*time_period for x in range(len(elevation_files))]
3692    else:
3693        times = [0.0]
3694
3695
3696    if verbose:
3697        print '------------------------------------------------'
3698        print 'Statistics:'
3699        print '  Extent (lat/lon):'
3700        print '    lat in [%f, %f], len(lat) == %d'\
3701              %(min(latitudes), max(latitudes),
3702                len(latitudes))
3703        print '    lon in [%f, %f], len(lon) == %d'\
3704              %(min(longitudes), max(longitudes),
3705                len(longitudes))
3706        print '    t in [%f, %f], len(t) == %d'\
3707              %(min(times), max(times), len(times))
3708
3709    ######### WRITE THE SWW FILE #############
3710    # NetCDF file definition
3711    outfile = NetCDFFile(sww_file, 'w')
3712
3713    #Create new file
3714    outfile.institution = 'Geoscience Australia'
3715    outfile.description = 'Converted from XXX'
3716
3717
3718    #For sww compatibility
3719    outfile.smoothing = 'Yes'
3720    outfile.order = 1
3721
3722    #Start time in seconds since the epoch (midnight 1/1/1970)
3723    outfile.starttime = starttime = times[0]
3724
3725
3726    # dimension definitions
3727    outfile.createDimension('number_of_volumes', number_of_volumes)
3728
3729    outfile.createDimension('number_of_vertices', 3)
3730    outfile.createDimension('number_of_points', number_of_points)
3731    outfile.createDimension('number_of_timesteps', number_of_times)
3732
3733    # variable definitions
3734    outfile.createVariable('x', precision, ('number_of_points',))
3735    outfile.createVariable('y', precision, ('number_of_points',))
3736    outfile.createVariable('elevation', precision, ('number_of_points',))
3737
3738    #FIXME: Backwards compatibility
3739    outfile.createVariable('z', precision, ('number_of_points',))
3740    #################################
3741
3742    outfile.createVariable('volumes', Int, ('number_of_volumes',
3743                                            'number_of_vertices'))
3744
3745    outfile.createVariable('time', precision,
3746                           ('number_of_timesteps',))
3747
3748    outfile.createVariable('stage', precision,
3749                           ('number_of_timesteps',
3750                            'number_of_points'))
3751
3752    outfile.createVariable('xmomentum', precision,
3753                           ('number_of_timesteps',
3754                            'number_of_points'))
3755
3756    outfile.createVariable('ymomentum', precision,
3757                           ('number_of_timesteps',
3758                            'number_of_points'))
3759
3760    #Store
3761    from anuga.coordinate_transforms.redfearn import redfearn
3762    x = zeros(number_of_points, Float)  #Easting
3763    y = zeros(number_of_points, Float)  #Northing
3764
3765    if verbose: print 'Making triangular grid'
3766    #Get zone of 1st point.
3767    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
3768
3769    vertices = {}
3770    i = 0
3771    for k, lat in enumerate(latitudes):
3772        for l, lon in enumerate(longitudes):
3773
3774            vertices[l,k] = i
3775
3776            zone, easting, northing = redfearn(lat,lon)
3777
3778            msg = 'Zone boundary crossed at longitude = %s' %lon
3779            #assert zone == refzone, msg
3780            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
3781            x[i] = easting
3782            y[i] = northing
3783            i += 1
3784
3785
3786    #Construct 2 triangles per 'rectangular' element
3787    volumes = []
3788    for l in range(number_of_longitudes-1):    #X direction
3789        for k in range(number_of_latitudes-1): #Y direction
3790            v1 = vertices[l,k+1]
3791            v2 = vertices[l,k]
3792            v3 = vertices[l+1,k+1]
3793            v4 = vertices[l+1,k]
3794
3795            #Note, this is different to the ferret2sww code
3796            #since the order of the lats is reversed.
3797            volumes.append([v1,v3,v2]) #Upper element
3798            volumes.append([v4,v2,v3]) #Lower element
3799
3800    volumes = array(volumes)
3801
3802    geo_ref = Geo_reference(refzone,min(x),min(y))
3803    geo_ref.write_NetCDF(outfile)
3804
3805    # This will put the geo ref in the middle
3806    #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
3807
3808
3809    if verbose:
3810        print '------------------------------------------------'
3811        print 'More Statistics:'
3812        print '  Extent (easting/northing):'
3813        print '    x in [%f, %f], len(x) == %d'\
3814              %(min(x), max(x),
3815                len(x))
3816        print '    y in [%f, %f], len(y) == %d'\
3817              %(min(y), max(y),
3818                len(y))
3819        print 'geo_ref: ',geo_ref
3820
3821    z = resize(bath_grid,outfile.variables['z'][:].shape)
3822    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
3823    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
3824    outfile.variables['z'][:] = z
3825    outfile.variables['elevation'][:] = z  #FIXME HACK
3826    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
3827
3828    stage = outfile.variables['stage']
3829    xmomentum = outfile.variables['xmomentum']
3830    ymomentum = outfile.variables['ymomentum']
3831
3832    outfile.variables['time'][:] = times   #Store time relative
3833
3834    if verbose: print 'Converting quantities'
3835    n = number_of_times
3836    for j in range(number_of_times):
3837        # load in files
3838        elevation_meta, elevation_grid = \
3839           _read_asc(elevation_dir + os.sep + elevation_files[j])
3840
3841        _, u_momentum_grid =  _read_asc(ucur_dir + os.sep + ucur_files[j])
3842        _, v_momentum_grid =  _read_asc(vcur_dir + os.sep + vcur_files[j])
3843
3844        #cut matrix to desired size
3845        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
3846        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
3847        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
3848       
3849        # handle missing values
3850        missing = (elevation_grid == elevation_meta['NODATA_value'])
3851        if sometrue(missing):
3852            if fail_on_NaN:
3853                msg = 'File %s contains missing values'\
3854                      %(elevation_files[j])
3855                raise DataMissingValuesError, msg
3856            else:
3857                elevation_grid = elevation_grid*(missing==0) + \
3858                                 missing*elevation_NaN_filler
3859
3860
3861        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
3862        i = 0
3863        for k in range(number_of_latitudes):      #Y direction
3864            for l in range(number_of_longitudes): #X direction
3865                w = zscale*elevation_grid[k,l] + mean_stage
3866                stage[j,i] = w
3867                h = w - z[i]
3868                xmomentum[j,i] = u_momentum_grid[k,l]*h
3869                ymomentum[j,i] = v_momentum_grid[k,l]*h
3870                i += 1
3871    outfile.close()
3872
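# Example (hypothetical call sketch, not from the original code); the
# directory names and the extent below are made up.
#
#     asc_csiro2sww('bathys', 'elevations', 'ucur', 'vcur', 'test.sww',
#                   minlat=-34.39, maxlat=-34.37,
#                   minlon=150.90, maxlon=150.92,
#                   mean_stage=-0.5, verbose=True)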
3873def _get_min_max_indexes(latitudes_ref,longitudes_ref,
3874                        minlat=None, maxlat=None,
3875                        minlon=None, maxlon=None):
3876    """
3877    Return max, min indexes (for slicing) of the lat and long arrays to
3878    cover the area specified with min/max lat/long.
3879
3880    Think of the latitudes and longitudes describing a 2d surface.
3881    The area returned is, if possible, just big enough to cover the
3882    input max/min area. (This will not be possible if the max/min area
3883    has a section outside of the latitudes/longitudes area.)
3884
3885    Assert longitudes are sorted,
3886    long - from low to high (west to east, eg 148 - 151)
3887    Assert latitudes are sorted, ascending or descending
3888    """
3889    latitudes = latitudes_ref[:]
3890    longitudes = longitudes_ref[:]
3891
3892    latitudes = ensure_numeric(latitudes)
3893    longitudes = ensure_numeric(longitudes)
3894   
3895    assert allclose(sort(longitudes), longitudes)
3896   
3897    lat_ascending = True
3898    if not allclose(sort(latitudes), latitudes):
3899        lat_ascending = False
3900        # reverse order of lat, so it's in ascending order         
3901        latitudes = latitudes[::-1]
3902        assert allclose(sort(latitudes), latitudes)
3903    #print "latitudes  in funct", latitudes
3904   
3905    largest_lat_index = len(latitudes)-1
3906    #Cut out a smaller extent.
3907    if minlat is None:
3908        lat_min_index = 0
3909    else:
3910        lat_min_index = searchsorted(latitudes, minlat)-1
3911        if lat_min_index < 0:
3912            lat_min_index = 0
3913
3914
3915    if maxlat is None:
3916        lat_max_index = largest_lat_index #len(latitudes)
3917    else:
3918        lat_max_index = searchsorted(latitudes, maxlat)
3919        if lat_max_index > largest_lat_index:
3920            lat_max_index = largest_lat_index
3921
3922    if minlon is None:
3923        lon_min_index = 0
3924    else:
3925        lon_min_index = searchsorted(longitudes, minlon)-1
3926        if lon_min_index < 0:
3927            lon_min_index = 0
3928
3929    if maxlon is None:
3930        lon_max_index = len(longitudes)
3931    else:
3932        lon_max_index = searchsorted(longitudes, maxlon)
3933
3934    # Reversing the indexes, if the lat array is descending
3935    if lat_ascending is False:
3936        lat_min_index, lat_max_index = largest_lat_index - lat_max_index , \
3937                                       largest_lat_index - lat_min_index
3938    lat_max_index = lat_max_index + 1 # taking into account how slicing works
3939    lon_max_index = lon_max_index + 1 # taking into account how slicing works
3940
3941    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
3942
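# Example (illustrative sketch, not from the original code): the slicing
# indexes returned by _get_min_max_indexes. The coordinates are made up.
def _example_get_min_max_indexes():
    lats = [-36.0, -35.0, -34.0, -33.0]
    lons = [148.0, 149.0, 150.0, 151.0]
    kmin, kmax, lmin, lmax = _get_min_max_indexes(lats, lons,
                                                  minlat=-34.5, maxlat=-33.5,
                                                  minlon=149.5, maxlon=150.5)
    # the returned slices are just big enough to cover the requested area
    assert (kmin, kmax, lmin, lmax) == (1, 4, 1, 4)
    assert lats[kmin:kmax] == [-35.0, -34.0, -33.0]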
3943
3944def _read_asc(filename, verbose=False):
3945    """Read esri file from the following ASCII format (.asc)
3946
3947    Example:
3948
3949    ncols         3121
3950    nrows         1800
3951    xllcorner     722000
3952    yllcorner     5893000
3953    cellsize      25
3954    NODATA_value  -9999
3955    138.3698 137.4194 136.5062 135.5558 ..........
3956
3957    """
3958
3959    datafile = open(filename)
3960
3961    if verbose: print 'Reading DEM from %s' %(filename)
3962    lines = datafile.readlines()
3963    datafile.close()
3964
3965    if verbose: print 'Got', len(lines), ' lines'
3966
3967    ncols = int(lines.pop(0).split()[1].strip())
3968    nrows = int(lines.pop(0).split()[1].strip())
3969    xllcorner = float(lines.pop(0).split()[1].strip())
3970    yllcorner = float(lines.pop(0).split()[1].strip())
3971    cellsize = float(lines.pop(0).split()[1].strip())
3972    NODATA_value = float(lines.pop(0).split()[1].strip())
3973
3974    assert len(lines) == nrows
3975
3976    #Store data
3977    grid = []
3978
3979    n = len(lines)
3980    for i, line in enumerate(lines):
3981        cells = line.split()
3982        assert len(cells) == ncols
3983        grid.append(array([float(x) for x in cells]))
3984    grid = array(grid)
3985
3986    return {'xllcorner':xllcorner,
3987            'yllcorner':yllcorner,
3988            'cellsize':cellsize,
3989            'NODATA_value':NODATA_value}, grid
3990
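# Example (illustrative sketch, not from the original code): the values
# _read_asc returns. It writes a tiny made-up grid to 'tiny.asc' in the
# current directory and reads it back.
def _example_read_asc():
    f = open('tiny.asc', 'w')
    f.write('ncols 3\nnrows 2\nxllcorner 0.0\nyllcorner 0.0\n')
    f.write('cellsize 25.0\nNODATA_value -9999\n')
    f.write('1.0 2.0 3.0\n4.0 5.0 6.0\n')
    f.close()
    metadata, grid = _read_asc('tiny.asc')
    assert metadata['cellsize'] == 25.0
    assert grid.shape == (2, 3)            # one grid row per data line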
3991
3992
3993    ####  URS 2 SWW  ###
3994
3995lon_name = 'LON'
3996lat_name = 'LAT'
3997time_name = 'TIME'
3998precision = Float # So if we want to change the precision, it's done here
3999class Write_nc:
4000    """
4001    Write an nc file.
4002   
4003    Note, this should be checked to meet cdc netcdf conventions for gridded
4004    data. http://www.cdc.noaa.gov/cdc/conventions/cdc_netcdf_standard.shtml
4005   
4006    """
4007    def __init__(self,
4008                 quantity_name,
4009                 file_name,
4010                 time_step_count,
4011                 time_step,
4012                 lon,
4013                 lat):
4014        """
4015        time_step_count is the number of time steps.
4016        time_step is the time step size
4017       
4018        pre-condition: quantity_name must be 'HA', 'UA' or 'VA'.
4019        """
4020        self.quantity_name = quantity_name
4021        quantity_units = {'HA':'METERS',
4022                          'UA':'METERS/SECOND',
4023                          'VA':'METERS/SECOND'}
4024       
4025        #self.file_name = file_name
4026        self.time_step_count = time_step_count
4027        self.time_step = time_step
4028
4029        # NetCDF file definition
4030        self.outfile = NetCDFFile(file_name, 'w')
4031        outfile = self.outfile       
4032
4033        #Create new file
4034        nc_lon_lat_header(outfile, lon, lat)
4035   
4036        # TIME
4037        outfile.createDimension(time_name, None)
4038        outfile.createVariable(time_name, precision, (time_name,))
4039
4040        #QUANTITY
4041        outfile.createVariable(self.quantity_name, precision,
4042                               (time_name, lat_name, lon_name))
4043        outfile.variables[self.quantity_name].missing_value=-1.e+034
4044        outfile.variables[self.quantity_name].units= \
4045                                 quantity_units[self.quantity_name]
4046        outfile.variables[lon_name][:]= ensure_numeric(lon)
4047        outfile.variables[lat_name][:]= ensure_numeric(lat)
4048
4049        #Assume no one will be wanting to read this, while we are writing
4050        #outfile.close()
4051       
4052    def store_timestep(self, quantity_slice):
4053        """
4054        quantity_slice is the data to be stored at this time step
4055        """
4056       
4057        outfile = self.outfile
4058       
4059        # Get the variables
4060        time = outfile.variables[time_name]
4061        quantity = outfile.variables[self.quantity_name]
4062           
4063        i = len(time)
4064
4065        #Store time
4066        time[i] = i*self.time_step #self.domain.time
4067        quantity[i,:] = quantity_slice*100 # To convert from m to cm
4068                                           # And m/s to cm/sec
4069       
4070    def close(self):
4071        self.outfile.close()
4072
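# Example (illustrative sketch, not from the original code): how Write_nc
# is driven. The file name, extent and data below are made up.
def _example_write_nc():
    lon = [150.0, 150.5, 151.0]
    lat = [-34.0, -33.5]
    nc = Write_nc('HA', 'example_ha.nc', time_step_count=2,
                  time_step=0.5, lon=lon, lat=lat)
    for i in range(2):
        # one (lat, lon) slice per time step, in metres (stored as cm)
        nc.store_timestep(zeros((len(lat), len(lon)), Float))
    nc.close()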
4073def urs2sww(basename_in='o', basename_out=None, verbose=False,
4074            remove_nc_files=True,
4075            minlat=None, maxlat=None,
4076            minlon= None, maxlon=None,
4077            mint=None, maxt=None,
4078            mean_stage=0,
4079            origin = None,
4080            zscale=1,
4081            fail_on_NaN=True,
4082            NaN_filler=0,
4083            elevation=None,
4084            gridded=True):
4085    """
4086    Convert URS C binary format for wave propagation to
4087    sww format native to abstract_2d_finite_volumes.
4088
4089    Specify only basename_in and read files of the form
4090    basefilename-z-mux, basefilename-e-mux and basefilename-n-mux containing
4091    relative height, x-velocity and y-velocity, respectively.
4092
4093    Also convert latitude and longitude to UTM. All coordinates are
4094    assumed to be given in the GDA94 datum. The latitude and longitude
4095    information is for  a grid.
4096
4097    min's and max's: If omitted - full extent is used.
4098    To include a value min may equal it, while max must exceed it.
4099    Lat and lon are assumed to be in decimal degrees.
4100    NOTE: minlon is the most easterly boundary.
4101
4102    origin is a 3-tuple with geo referenced
4103    UTM coordinates (zone, easting, northing).
4104    It will be the origin of the sww file. This shouldn't be used,
4105    since all of anuga should be able to handle an arbitrary origin.
4106
4107
4108    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE,
4109    which means that latitude is the fastest
4110    varying dimension (row major order, so to speak).
4111
4112    In URS C binary the latitudes and longitudes are in ascending order.
4113    """
4114    if gridded is False:
4115        #FIXME: the ungridded conversion is not hooked up yet
4116        pass #urs_ungridded2sww()
4117       
4118    if basename_out is None:
4119        basename_out = basename_in
4120    files_out = urs2nc(basename_in, basename_out)
4121    ferret2sww(basename_out,
4122               minlat=minlat,
4123               maxlat=maxlat,
4124               minlon=minlon,
4125               maxlon=maxlon,
4126               mint=mint,
4127               maxt=maxt,
4128               mean_stage=mean_stage,
4129               origin=origin,
4130               zscale=zscale,
4131               fail_on_NaN=fail_on_NaN,
4132               NaN_filler=NaN_filler,
4133               inverted_bathymetry=True,
4134               verbose=verbose)
4135    #print "files_out",files_out
4136    if remove_nc_files:
4137        for file_out in files_out:
4138            os.remove(file_out)
4139   
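# Example (hypothetical call sketch, not from the original code); it assumes
# o_test-z-mux, o_test-e-mux and o_test-n-mux exist.
#
#     urs2sww('o_test', basename_out='o_test', mean_stage=0.0,
#             zscale=1, verbose=True)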
4140def urs2nc(basename_in = 'o', basename_out = 'urs'):
4141    """
4142    Convert the 3 urs files to 4 nc files.
4143
4144    The urs file names must be:
4145    [basename_in]-z-mux
4146    [basename_in]-e-mux
4147    [basename_in]-n-mux
4148   
4149    """
4150    files_in = [basename_in+'-z-mux',
4151                basename_in+'-e-mux',
4152                basename_in+'-n-mux']
4153    files_out = [basename_out+'_ha.nc',
4154                 basename_out+'_ua.nc',
4155                 basename_out+'_va.nc']
4156    quantities = ['HA','UA','VA']
4157
4158    for file_name in files_in:
4159        if os.access(file_name, os.F_OK) == 0 :
4160            msg = 'File %s does not exist or is not accessible' %file_name
4161            raise IOError, msg
4162       
4163    hashed_elevation = None
4164    for file_in, file_out, quantity in map(None, files_in,
4165                                           files_out,
4166                                           quantities):
4167        lonlatdep, lon, lat, depth = _binary_c2nc(file_in,
4168                                         file_out,
4169                                         quantity)
4170        #print "lonlatdep", lonlatdep
4171        if hashed_elevation is None:
4172            elevation_file = basename_out+'_e.nc'
4173            write_elevation_nc(elevation_file,
4174                                lon,
4175                                lat,
4176                                depth)
4177            hashed_elevation = myhash(lonlatdep)
4178        else:
4179            msg = "The elevation information in the mux files is inconsistent"
4180            assert hashed_elevation == myhash(lonlatdep), msg
4181    files_out.append(elevation_file)
4182    return files_out
4183   
4184def _binary_c2nc(file_in, file_out, quantity):
4185    """
4186    Reads in a quantity urs file and writes a quantity nc file.
4187    Additionally, returns the depth and lat/long info,
4188    so it can be written to a file.
4189    """
4190    columns = 3 # long, lat , depth
4191    mux_file = open(file_in, 'rb')
4192   
4193    # Number of points/stations
4194    (points_num,)= unpack('i',mux_file.read(4))
4195
4196    # nt, int - Number of time steps
4197    (time_step_count,)= unpack('i',mux_file.read(4))
4198
4199    #dt, float - time step, seconds
4200    (time_step,) = unpack('f', mux_file.read(4))
4201   
4202    msg = "Bad data in the mux file."
4203    if points_num < 0:
4204        mux_file.close()
4205        raise ANUGAError, msg
4206    if time_step_count < 0:
4207        mux_file.close()
4208        raise ANUGAError, msg
4209    if time_step < 0:
4210        mux_file.close()
4211        raise ANUGAError, msg
4212   
4213    lonlatdep = p_array.array('f')
4214    lonlatdep.read(mux_file, columns * points_num)
4215    lonlatdep = array(lonlatdep, typecode=Float)   
4216    lonlatdep = reshape(lonlatdep, (points_num, columns))
4217   
4218    lon, lat, depth = lon_lat2grid(lonlatdep)
4219    lon_sorted = list(lon)
4220    lon_sorted.sort()
4221
4222    if not lon == lon_sorted:
4223        msg = "Longitudes in mux file are not in ascending order"
4224        raise IOError, msg
4225    lat_sorted = list(lat)
4226    lat_sorted.sort()
4227
4228    if not lat == lat_sorted:
4229        msg = "Latitudes in mux file are not in ascending order"
4230        raise IOError, msg

4231    nc_file = Write_nc(quantity,
4232                       file_out,
4233                       time_step_count,
4234                       time_step,
4235                       lon,
4236                       lat)
4237
4238    for i in range(time_step_count):
4239        #Read in a time slice  from mux file 
4240        hz_p_array = p_array.array('f')
4241        hz_p_array.read(mux_file, points_num)
4242        hz_p = array(hz_p_array, typecode=Float)
4243        hz_p = reshape(hz_p, (len(lon), len(lat)))
4244        hz_p = transpose(hz_p) #mux has lat varying fastest, nc has long v.f.
4245
4246        #write time slice to nc file
4247        nc_file.store_timestep(hz_p)
4248    mux_file.close()
4249    nc_file.close()
4250
4251    return lonlatdep, lon, lat, depth
4252   
4253
4254def write_elevation_nc(file_out, lon, lat, depth_vector):
4255    """
4256    Write an nc elevation file.
4257    """
4258   
4259    # NetCDF file definition
4260    outfile = NetCDFFile(file_out, 'w')
4261
4262    #Create new file
4263    nc_lon_lat_header(outfile, lon, lat)
4264   
4265    # ELEVATION
4266    zname = 'ELEVATION'
4267    outfile.createVariable(zname, precision, (lat_name, lon_name))
4268    outfile.variables[zname].units='CENTIMETERS'
4269    outfile.variables[zname].missing_value=-1.e+034
4270
4271    outfile.variables[lon_name][:]= ensure_numeric(lon)
4272    outfile.variables[lat_name][:]= ensure_numeric(lat)
4273
4274    depth = reshape(depth_vector, ( len(lat), len(lon)))
4275    outfile.variables[zname][:]= depth
4276   
4277    outfile.close()
4278   
4279def nc_lon_lat_header(outfile, lon, lat):
4280    """
4281    outfile is the netcdf file handle.
4282    lon - a list/array of the longitudes
4283    lat - a list/array of the latitudes
4284    """
4285   
4286    outfile.institution = 'Geoscience Australia'
4287    outfile.description = 'Converted from URS binary C'
4288   
4289    # Longitude
4290    outfile.createDimension(lon_name, len(lon))
4291    outfile.createVariable(lon_name, precision, (lon_name,))
4292    outfile.variables[lon_name].point_spacing='uneven'
4293    outfile.variables[lon_name].units='degrees_east'
4294    outfile.variables[lon_name].assignValue(lon)
4295
4296
4297    # Latitude
4298    outfile.createDimension(lat_name, len(lat))
4299    outfile.createVariable(lat_name, precision, (lat_name,))
4300    outfile.variables[lat_name].point_spacing='uneven'
4301    outfile.variables[lat_name].units='degrees_north'
4302    outfile.variables[lat_name].assignValue(lat)
4303
4304
4305   
4306def lon_lat2grid(long_lat_dep):
4307    """
4308    Given a list of points that are assumed to be on a grid,
4309    return the long's and lat's of the grid.
4310    long_lat_dep is an array where each row is a position.
4311    The first column is longitudes.
4312    The second column is latitudes.
4313
4314    The latitude is the fastest varying dimension - in mux files
4315    """
4316    LONG = 0
4317    LAT = 1
4318    QUANTITY = 2
4319
4320    long_lat_dep = ensure_numeric(long_lat_dep, Float)
4321   
4322    num_points = long_lat_dep.shape[0]
4323    this_rows_long = long_lat_dep[0,LONG]
4324
4325    # Count the length of unique latitudes
4326    i = 0
4327    while i < num_points and long_lat_dep[i,LONG] == this_rows_long:
4328        i += 1
4329    # determine the lats and longs from the grid
4330    lat = long_lat_dep[:i, LAT]       
4331    long = long_lat_dep[::i, LONG]
4332   
4333    lenlong = len(long)
4334    lenlat = len(lat)
4335    #print 'len lat', lat, len(lat)
4336    #print 'len long', long, len(long)
4337         
4338    msg = 'Input data is not gridded'     
4339    assert num_points % lenlat == 0, msg
4340    assert num_points % lenlong == 0, msg
4341         
4342    # Test that data is gridded       
4343    for i in range(lenlong):
4344        msg = 'Data is not gridded.  It must be for this operation'
4345        first = i*lenlat
4346        last = first + lenlat
4347               
4348        assert allclose(long_lat_dep[first:last,LAT], lat), msg
4349        assert allclose(long_lat_dep[first:last,LONG], long[i]), msg
4350   
4351   
4352#    print 'range long', min(long), max(long)
4353#    print 'range lat', min(lat), max(lat)
4354#    print 'ref long', min(long_lat_dep[:,0]), max(long_lat_dep[:,0])
4355#    print 'ref lat', min(long_lat_dep[:,1]), max(long_lat_dep[:,1])
4356   
4357   
4358   
4359    msg = 'Out of range latitudes/longitudes'
4360    for l in lat: assert -90 < l < 90, msg
4361    for l in long: assert -180 < l < 180, msg
4362
4363    # Changing quantity from lat being the fastest varying dimension to
4364    # long being the fastest varying dimension
4365    # FIXME - make this faster/do this a better way
4366    # use numeric transpose, after reshaping the quantity vector
4367#    quantity = zeros(len(long_lat_dep), Float)
4368    quantity = zeros(num_points, Float)
4369   
4370#    print 'num',num_points
4371    for lat_i, _ in enumerate(lat):
4372        for long_i, _ in enumerate(long):
4373            q_index = lat_i*lenlong+long_i
4374            lld_index = long_i*lenlat+lat_i
4375#            print 'lat_i', lat_i, 'long_i',long_i, 'q_index', q_index, 'lld_index', lld_index
4376            temp = long_lat_dep[lld_index, QUANTITY]
4377            quantity[q_index] = temp
4378           
4379    return long, lat, quantity
4380
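# Example (illustrative sketch, not from the original code): the regridding
# lon_lat2grid performs. The points are made up, with latitude varying
# fastest, as in mux files.
def _example_lon_lat2grid():
    lld = [[150.0, -34.0, 1.0],
           [150.0, -33.5, 2.0],
           [150.0, -33.0, 3.0],
           [151.0, -34.0, 4.0],
           [151.0, -33.5, 5.0],
           [151.0, -33.0, 6.0]]
    longs, lats, quantity = lon_lat2grid(lld)
    assert list(longs) == [150.0, 151.0]
    assert list(lats) == [-34.0, -33.5, -33.0]
    # the quantity is reordered so that longitude varies fastest
    assert list(quantity) == [1.0, 4.0, 2.0, 5.0, 3.0, 6.0]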
4381    ####  END URS 2 SWW  ###
4382
4383    #### URS UNGRIDDED 2 SWW ###
4384
4385    ### PRODUCING THE POINTS NEEDED FILE ###
4386def URS_points_needed_to_file(file_name, boundary_polygon, ll_lat, ll_long,
4387                              grid_spacing, 
4388                      lat_amount, long_amount, zone=None):
4389    """
4390    file_name - name of the urs file produced for David.
4391    boundary_polygon - a list of points that describes a polygon.
4392                      The last point is assumed to join the first point.
4393                      This is in UTM (lat long would be better though)
4394
4395    ll_lat - lower left latitude, in decimal degrees
4396    ll_long - lower left longitude, in decimal degrees
4397    grid_spacing - in decimal degrees
4398
4399
4400    Don't add the file extension.  It will be added.
4401    """
4402    geo = URS_points_needed(boundary_polygon, ll_lat, ll_long, grid_spacing, 
4403                      lat_amount, long_amount, zone=zone)
4404    if not file_name[-4:] == ".urs":
4405        file_name += ".urs"
4406    geo.export_points_file(file_name)
4407   
4408def URS_points_needed(boundary_polygon, ll_lat, ll_long, grid_spacing, 
4409                      lat_amount, long_amount, zone=None):
4410    """
4411
4412    boundary_polygon - a list of points that describes a polygon.
4413                      The last point is assumed to join the first point.
4414                      This is in UTM (lat long would be better though)
4415
4416    ll_lat - lower left latitude, in decimal degrees
4417    ll_long - lower left longitude, in decimal degrees
4418    grid_spacing - in decimal degrees
4419
4420    """
4421   
4422    from sets import ImmutableSet
4423   
4424    msg = "grid_spacing cannot be zero"
4425    assert grid_spacing != 0, msg
4426    a = boundary_polygon
4427    # List of segments.  Each segment is two points.
4428    segs = [i and [a[i-1], a[i]] or [a[len(a)-1], a[0]] for i in range(len(a))]
4429
4430    # convert the segs to Lat's and longs.
4431    if zone is None:
4432        # Assume the zone of the segments is the same as the lower left
4433        # corner of the lat long data
4434        zone, _, _ = redfearn(ll_lat, ll_long)
4435    lat_long_set = ImmutableSet()
4436    for seg in segs:
4437        points_lat_long = points_needed(seg, ll_lat, ll_long, grid_spacing, 
4438                      lat_amount, long_amount, zone)
4439        lat_long_set |= ImmutableSet(points_lat_long)
4440    #print "lat_long_set",lat_long_set
4441    geo = Geospatial_data(data_points=list(lat_long_set),
4442                              points_are_lats_longs=True)
4443    return geo
4444   
4445def points_needed(seg, ll_lat, ll_long, grid_spacing, 
4446                  lat_amount, long_amount, zone):
4447    """
4448    return a list of the points, in lats and longs that are needed to
4449    interpolate any point on the segment.
4450    """
4451    #print "zone",zone
4452    geo_reference = Geo_reference(zone=zone)
4453    #print "seg",seg
4454    geo = Geospatial_data(seg,geo_reference=geo_reference)
4455    seg_lat_long = geo.get_data_points(as_lat_long=True)
4456    #print "seg_lat_long", seg_lat_long
4457    # 1.415 = 2^0.5, rounded up....
4458    buffer = 1.415 * grid_spacing
4459   
4460    #
4461   
4462    max_lat = max(seg_lat_long[0][0], seg_lat_long[1][0]) + buffer
4463    max_long = max(seg_lat_long[0][1], seg_lat_long[1][1]) + buffer
4464    min_lat = min(seg_lat_long[0][0], seg_lat_long[1][0]) - buffer
4465    min_long = min(seg_lat_long[0][1], seg_lat_long[1][1]) - buffer
4466
4467    #print "ll_lat", ll_lat
4468    #print "ll_long", ll_long
4469    #print "max_lat", max_lat
4470    #print "max_long", max_long
4471    #print "min_lat", min_lat
4472    #print "min_long", min_long
4473    first_row = (min_long - ll_long)/grid_spacing
4474    # To round up
4475    first_row_long = int(round(first_row + 0.5))
4476    #print "first_row", first_row_long
4477
4478    last_row = (max_long - ll_long)/grid_spacing # round down
4479    last_row_long = int(round(last_row))
4480    #print "last_row",last_row _long
4481
4482   
4483    first_row = (min_lat - ll_lat)/grid_spacing
4484    # To round up
4485    first_row_lat = int(round(first_row + 0.5))
4486    #print "first_row", first_row_lat
4487
4488    last_row = (max_lat - ll_lat)/grid_spacing # round down
4489    last_row_lat = int(round(last_row))
4490    #print "last_row",last_row_lat
4491
4492    points_lat_long = []
4493    # Create a list of the lat long points to include.
4494    for index_lat in range(first_row_lat, last_row_lat + 1):
4495        for index_long in range(first_row_long, last_row_long + 1):
4496            lat = ll_lat + index_lat*grid_spacing
4497            long = ll_long + index_long*grid_spacing
4498            points_lat_long.append((lat, long)) #must be hashable
4499    #print "points_lat_long", points_lat_long
4500    return points_lat_long   
4501
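# Example (hypothetical call sketch, not from the original code); the
# polygon (in UTM) and the grid description below are made up.
#
#     polygon = [[398000, 6224000], [398000, 6255000], [430000, 6255000]]
#     URS_points_needed_to_file('boundary_points', polygon,
#                               ll_lat=-34.5, ll_long=150.0,
#                               grid_spacing=0.0083333,
#                               lat_amount=360, long_amount=360, zone=56)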
4502    #### CONVERTING UNGRIDDED URS DATA TO AN SWW FILE ####
4503def urs_ungridded2sww_link(basename_in='o', basename_out=None, verbose=False,
4504            remove_nc_files=True,
4505            minlat=None, maxlat=None,
4506            minlon= None, maxlon=None,
4507            mint=None, maxt=None,
4508            mean_stage=0,
4509            origin = None,
4510            zscale=1,
4511            fail_on_NaN=True,
4512            NaN_filler=0,
4513            elevation=None):
4514    """
4515    validation stuff on parrameters not used
4516    """
4517    pass
4518   
4519def urs_ungridded2sww(basename_in='o', basename_out=None, verbose=False,
4520            #mint=None, maxt=None,
4521            mean_stage=0,
4522            origin = None,
4523            zscale=1,
4524            fail_on_NaN=True,
4525            NaN_filler=0):
4526    """
4527    Parameters are not validated and not used yet.
4528    NaN_filler
4529    origin
4530    #mint=None, maxt=None,
4531    """
4532    # Convert to utm
4533   
4534    files_in = [basename_in+'-z-mux',
4535                basename_in+'-e-mux',
4536                basename_in+'-n-mux']
4537    quantities = ['HA','UA','VA']
4538
4539    # instantiate Urs_points for each of the three mux files.
4540    mux = {}
4541    quality_slices = {}
4542    for quantity, file in map(None, quantities, files_in):
4543        mux[quantity] = Urs_points(file)
4544        quality_slices[quantity] = []
4545        for slice in mux[quantity]:
4546            quality_slices[quantity].append(slice)
4547       
4548       
4549    # FIXME: check that the depth is the same. (hashing)
4550
4551    # handle to a mux file to do depth stuff
4552    a_mux = mux[quantities[0]]
4553   
4554    # Convert to utm
4555    lat = a_mux.lonlatdep[:,1]
4556    long = a_mux.lonlatdep[:,0]
4557    points_utm, zone = convert_from_latlon_to_utm( \
4558        latitudes=lat, longitudes=long)
4559    #print "points_utm", points_utm
4560    #print "zone", zone
4561
4562    elevation = a_mux.lonlatdep[:,2] * -1 #depth is in column 2; elevation = -depth
4563   
4564    # grid ( create a mesh from the selected points)
4565    mesh = Mesh()
4566    mesh.add_vertices(points_utm)
4567    mesh.auto_segment()
4568    mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
4569    mesh_dic = mesh.Mesh2MeshList()
4570
4571    #mesh.export_mesh_file(basename_in + '.tsh')
4572
4573    times = []
4574    for i in range(a_mux.time_step_count):
4575        times.append(a_mux.time_step * i)
4576       
4577    points_utm=ensure_numeric(points_utm)
4578    #print "mesh_dic['generatedpointlist']", mesh_dic['generatedpointlist']
4579    #print "points_utm", points_utm
4580    assert ensure_numeric(mesh_dic['generatedpointlist']) == \
4581           ensure_numeric(points_utm)
4582   
4583    volumes = mesh_dic['generatedtrianglelist']
4584   
4585    # write sww intro and grid stuff.   
4586    if basename_out is None:
4587        swwname = basename_in + '.sww'
4588    else:
4589        swwname = basename_out + '.sww'
4590
4591    outfile = NetCDFFile(swwname, 'w')
4592    # For a different way of doing this, check out tsh2sww
4593    write_sww_header(outfile, times, len(volumes), len(points_utm))
4594    write_sww_triangulation(outfile, points_utm, volumes,
4595                            elevation, zone,  verbose=verbose)
4596
4597    write_sww_time_slice(outfile, quality_slices['HA'],
4598                         quality_slices['UA'], quality_slices['VA'],
4599                         elevation,
4600                         mean_stage=mean_stage, zscale=zscale,
4601                         verbose=verbose)
4602    #
4603    # Do some conversions while writing the sww file
4604
4605
4606
4607def write_sww_header(outfile, times, number_of_volumes, number_of_points ):
4608    """
4609    outfile - handle to the open NetCDF file to be written to
4610    times - A list of the time slice times
4611    number_of_volumes - the number of triangles
4612    """
4613    number_of_times = len(times)
4614   
4615    #Create new file
4616    outfile.institution = 'Geoscience Australia'
4617    outfile.description = 'Converted from XXX'
4618
4619
4620    #For sww compatibility
4621    outfile.smoothing = 'Yes'
4622    outfile.order = 1
4623
4624    #Start time in seconds since the epoch (midnight 1/1/1970)
4625    outfile.starttime = starttime = times[0]
4626
4627
4628    # dimension definitions
4629    outfile.createDimension('number_of_volumes', number_of_volumes)
4630
4631    outfile.createDimension('number_of_vertices', 3)
4632    outfile.createDimension('number_of_points', number_of_points)
4633    outfile.createDimension('number_of_timesteps', number_of_times)
4634
4635    # variable definitions
4636    outfile.createVariable('x', precision, ('number_of_points',))
4637    outfile.createVariable('y', precision, ('number_of_points',))
4638    outfile.createVariable('elevation', precision, ('number_of_points',))
4639
4640    #FIXME: Backwards compatibility
4641    outfile.createVariable('z', precision, ('number_of_points',))
4642    #################################
4643
4644    outfile.createVariable('volumes', Int, ('number_of_volumes',
4645                                            'number_of_vertices'))
4646
4647    outfile.createVariable('time', precision,
4648                           ('number_of_timesteps',))
4649
4650    outfile.createVariable('stage', precision,
4651                           ('number_of_timesteps',
4652                            'number_of_points'))
4653
4654    outfile.createVariable('xmomentum', precision,
4655                           ('number_of_timesteps',
4656                            'number_of_points'))
4657
4658    outfile.createVariable('ymomentum', precision,
4659                           ('number_of_timesteps',
4660                            'number_of_points'))
4661
4662class Urs_points:
4663    """
4664    Read the info in URS mux files.
4665
4666    For the quantities, here's a correlation between the file names and
4667    what they mean:
4668    z-mux is height above sea level, m
4669    e-mux is velocity in the Eastern direction, m/s
4670    n-mux is velocity in the Northern direction, m/s
4671
4672   
4673    """
4674    def __init__(self,urs_file):
4675        self.iterated = False
4676        columns = 3 # long, lat , depth
4677        mux_file = open(urs_file, 'rb')
4678       
4679        # Number of points/stations
4680        (self.points_num,)= unpack('i',mux_file.read(4))
4681       
4682        # nt, int - Number of time steps
4683        (self.time_step_count,)= unpack('i',mux_file.read(4))
4684
4685        #dt, float - time step, seconds
4686        (self.time_step,) = unpack('f', mux_file.read(4))
4687   
4688        msg = "Bad data in the urs file."
4689        if self.points_num < 0:
4690            mux_file.close()
4691            raise ANUGAError, msg
4692        if self.time_step_count < 0:
4693            mux_file.close()
4694            raise ANUGAError, msg
4695        if self.time_step < 0:
4696            mux_file.close()
4697            raise ANUGAError, msg
4698
4699        # the depth is in meters, and it is the distance from the ocean
4700        # to the sea bottom.
4701        lonlatdep = p_array.array('f')
4702        lonlatdep.read(mux_file, columns * self.points_num)
4703        lonlatdep = array(lonlatdep, typecode=Float)   
4704        lonlatdep = reshape(lonlatdep, (self.points_num, columns))
4705        #print 'lonlatdep',lonlatdep
4706        self.lonlatdep = lonlatdep
4707       
4708        self.mux_file = mux_file
4709        # check this array
4710
4711    def __iter__(self):
4712        """
4713        iterate over quantity data which is with respect to time.
4714
4715        Note: You can only iterate once over an object
4716
4717        returns quantity information for each time slice
4718        """
4719        msg = "You can only iterate once over a urs file."
4720        assert not self.iterated, msg
4721        self.time_step_counter = 0 #separate counter; self.time_step is the dt
4722        self.iterated = True
4723        return self
4724   
4725    def next(self):
4726        if self.time_step_count == self.time_step_counter:
4727            self.close()
4728            raise StopIteration
4729        #Read in a time slice  from mux file 
4730        hz_p_array = p_array.array('f')
4731        hz_p_array.read(self.mux_file, self.points_num)
4732        hz_p = array(hz_p_array, typecode=Float)
4733        self.time_step_counter += 1
4734       
4735        return hz_p
4736
4737    def close(self):
4738        self.mux_file.close()
4739
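# Example (illustrative sketch, not from the original code): writes a tiny
# made-up mux file and iterates over it with Urs_points.
def _example_urs_points():
    from struct import pack
    f = open('tiny-z-mux', 'wb')
    f.write(pack('i', 2))                  # number of points
    f.write(pack('i', 3))                  # number of time steps
    f.write(pack('f', 1.0))                # time step size, seconds
    for lon, lat, depth in [(150.0, -34.0, 20.0), (151.0, -33.0, 30.0)]:
        f.write(pack('fff', lon, lat, depth))
    for t in range(3):                     # one value per point per time step
        f.write(pack('ff', t*0.1, t*0.2))
    f.close()

    points = Urs_points('tiny-z-mux')
    for hz_p in points:
        print 'time slice', hz_p           # array of 2 heights per time step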
4740
4741def write_sww_triangulation(outfile, points_utm, volumes,
4742                            elevation, zone, verbose=False):
4743
4744    number_of_points = len(points_utm)
4745    #Store eastings and northings taken from the utm points
4746    x = ensure_numeric(points_utm)[:,0]  #Easting
4747    y = ensure_numeric(points_utm)[:,1]  #Northing
4748
4749    volumes = array(volumes)
4750
4751    geo_ref = Geo_reference(zone,min(x),min(y))
4752    geo_ref.write_NetCDF(outfile)
4753
4754    # This will put the geo ref in the middle
4755    #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
4756
4757
4758    if verbose:
4759        print '------------------------------------------------'
4760        print 'More Statistics:'
4761        print '  Extent (easting/northing):'
4762        print '    x in [%f, %f], len(x) == %d'\
4763              %(min(x), max(x),
4764                len(x))
4765        print '    y in [%f, %f], len(y) == %d'\
4766              %(min(y), max(y),
4767                len(y))
4768        print '    z in [%f, %f], len(z) == %d'\
4769              %(min(elevation), max(elevation),
4770                len(elevation))
4771        print 'geo_ref: ',geo_ref
4772        print '------------------------------------------------'
4773
4774    #z = resize(bath_grid,outfile.variables['z'][:].shape)
4775    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
4776    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
4777    outfile.variables['z'][:] = elevation
4778    outfile.variables['elevation'][:] = elevation  #FIXME HACK
4779    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
4780
4781
4782    # FIXME: Don't store the whole speeds and stage in  memory.
4783    # block it?
4784def write_sww_time_slice(outfile, has, uas, vas, elevation,
4785                         mean_stage=0, zscale=1,
4786                         verbose=False):   
4787    #Time stepping
4788    stage = outfile.variables['stage']
4789    xmomentum = outfile.variables['xmomentum']
4790    ymomentum = outfile.variables['ymomentum']
4791
4792    if verbose: print 'Converting quantities'
4793    n = len(has)
4794    j = 0
4795   
4796    for ha, ua, va in map(None, has, uas, vas):
4797        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
4798        w = zscale*ha + mean_stage
4799        stage[j] = w
4800        h = w - elevation
4801        xmomentum[j] = ua*h
4802        ymomentum[j] = va*h
4803        j += 1
4804    outfile.close()
4805   
4806    #### END URS UNGRIDDED 2 SWW ###
4807
4808#-------------------------------------------------------------
4809if __name__ == "__main__":
4810    points=Urs_points('urs2sww_test/o_velocity-e-mux')
4811    for i in points:
4812        print 'i', i
4814