source: anuga_core/source/anuga/shallow_water/data_manager.py @ 4254

Last change on this file since 4254 was 4254, checked in by duncan, 18 years ago

the point file blocking line size can now be changed in domain

File size: 139.6 KB
1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topograhies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdinis ascii geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from anuga.pmesh)
34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
41NC -> SWW: Conversion of MOST bundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
47TSH + Boundary SWW -> SWW: Simluation using abstract_2d_finite_volumes
48
49"""

import exceptions
class TitleValueError(exceptions.Exception): pass
class DataMissingValuesError(exceptions.Exception): pass
class DataFileNotOpenError(exceptions.Exception): pass
class DataTimeError(exceptions.Exception): pass
class DataDomainError(exceptions.Exception): pass



import csv
import os
from struct import unpack
import array as p_array

from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
     searchsorted, zeros, allclose, around, reshape, transpose, sort
from Scientific.IO.NetCDF import NetCDFFile


from anuga.coordinate_transforms.redfearn import redfearn
from anuga.coordinate_transforms.geo_reference import Geo_reference
from anuga.geospatial_data.geospatial_data import Geospatial_data
from anuga.config import minimum_storable_height as default_minimum_storable_height
from anuga.utilities.numerical_tools import ensure_numeric
from anuga.caching.caching import myhash
from anuga.utilities.anuga_exceptions import ANUGAError

def make_filename(s):
    """Transform argument string into a suitable filename
    """

    s = s.strip()
    s = s.replace(' ', '_')
    s = s.replace('(', '')
    s = s.replace(')', '')
    s = s.replace('__', '_')

    return s

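# Example (illustrative, doctest-style; not executed on import):
# >>> make_filename(' Broome (tide = 1.2) ')
# 'Broome_tide_=_1.2'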

def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       check_dir(path, verbose)

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os, sys
    import os.path

    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
        unix = 0
    else:
        unix = 1


    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
    # Bitwise OR of the mode flags: require both read and write access
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            exitcode = os.mkdir(path)

            # Change access rights if possible
            #
            if unix:
                exitcode = os.system('chmod 775 ' + path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print 'MESSAGE: Directory', path, 'created.'

        except:
            print 'WARNING: Directory', path, 'could not be created.'
            if unix:
                path = '/tmp/'
            else:
                path = 'C:'

            print 'Using directory %s instead' %path

    return path



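# Example (illustrative):
# >>> d = check_dir('~/anuga_output', verbose=1)   # created if missing
# >>> d.endswith(os.sep)
# True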
def del_dir(path):
    """Recursively delete directory path and all its contents
    """

    import os

    if os.path.isdir(path):
        for file in os.listdir(path):
            X = os.path.join(path, file)


            if os.path.isdir(X) and not os.path.islink(X):
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except:
                    print "Could not remove file %s" %X

        os.rmdir(path)



def create_filename(datadir, filename, format, size=None, time=None):

    import os
    #from anuga.config import data_dir

    FN = check_dir(datadir) + filename

    if size is not None:
        FN += '_size%d' %size

    if time is not None:
        FN += '_time%.2f' %time

    FN += '.' + format
    return FN

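# Example (illustrative):
# >>> create_filename('.', 'runup', 'sww', size=4, time=1.5)
# './runup_size4_time1.50.sww'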
def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format
    """

    import glob

    import os
    #from anuga.config import data_dir

    dir = check_dir(datadir)

    # Note: check_dir returns a trailing separator and create_filename
    # uses '_size%d', so match that pattern here.
    pattern = dir + filename + '_size%d*.%s' %(size, format)
    return glob.glob(pattern)



#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        assert mode in ['r', 'w', 'a'], \
               "Mode %s must be either 'w' (write), 'r' (read) or 'a' (append)" % mode

        #Create filename
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        #print 'F', self.filename
        self.timestep = 0
        self.domain = domain



        # Exclude ghosts in case this is a parallel domain
        self.number_of_nodes = domain.number_of_full_nodes
        self.number_of_volumes = domain.number_of_full_triangles
        #self.number_of_volumes = len(domain)




        #FIXME: Should we have a general set_precision function?



#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data:

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision for quantities
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)

        if hasattr(domain, 'minimum_storable_height'):
            self.minimum_storable_height = domain.minimum_storable_height
        else:
            self.minimum_storable_height = default_minimum_storable_height

        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from anuga.abstract_2d_finite_volumes suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            if domain.smooth is True:
                #fid.createDimension('number_of_points', len(domain.vertexlist))
                fid.createDimension('number_of_points', self.domain.number_of_full_nodes)

                # FIXME(Ole): This will cause sww files for parallel domains to
                # have ghost nodes stored (but not used by triangles).
                # To clean this up, we have to change get_vertex_values and friends in
                # quantity.py (but I can't be bothered right now)
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', Float,  # Always use full precision lest two timesteps
                                               # close to each other may appear as the same step
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


    def store_connectivity(self):
        """Specialisation of store_connectivity for NetCDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision=self.precision)

        volumes[:] = V.astype(volumes.typecode())
        x[:] = X
        y[:] = Y
        z[:] = Z

        #FIXME: Backwards compatibility
        z = fid.variables['z']
        z[:] = Z
        ################################



        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        from Numeric import choose

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase = file_size/i
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #In order to get the file name and start time correct,
            #I change the domain.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #Write a filename addon that won't break Swollen's reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #Remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.get_name()
            if not self.recursion:
                self.domain.set_name(old_domain_filename+filename_ext)


            #Change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #Build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #Set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #Restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.set_name(old_domain_filename)
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)


                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    #print z[:]
                    #print A-z[:]


                    A = choose(A-z[:] >= self.minimum_storable_height,
                               (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
            #eval( name + '[i,:] = A.astype(self.precision)' )
            #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()



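# Example (illustrative): typical use of the sww writer, assuming a
# configured ``domain`` object:
# >>> writer = Data_format_sww(domain)
# >>> writer.store_connectivity()                       # static mesh + bathymetry
# >>> writer.store_timestep(['stage', 'xmomentum', 'ymomentum'])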
#Class for handling checkpoints data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt)
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        Data_format.__init__(self, domain, 'cpt', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write x,y coordinates and connectivity of
        the triangles constituting the mesh.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        #FIXME: No static 'z' variable is defined for this format;
        #elevation is stored per timestep with the other quantities.
        #z[:] = Z.astype(self.precision)

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store time and named quantity to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                  %self.filename
                msg += ' - trying again'
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        stage = fid.variables['stage']
        i = len(time)

        #Store stage
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                  precision = self.precision)

        stage[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()




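# Example (illustrative): checkpointing follows the same pattern as the
# sww writer above:
# >>> cpt = Data_format_cpt(domain)
# >>> cpt.store_checkpoint()          # mesh geometry and connectivity
# >>> cpt.store_timestep('stage')     # one quantity per call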
#Function for storing xya output
#FIXME Not done yet for this version
#This is obsolete.  Use geo_spatial_data instead
class Data_format_xya(Data_format):
    """Interface to the obsolete ASCII .xya format
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store_all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        if domain.smooth is True:
            number_of_points = len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes is what is assumed by the xya format

        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momenta) to file
        """
        pass


#### NED is national exposure database

LAT_TITLE = 'LATITUDE'
LONG_TITLE = 'LONGITUDE'
X_TITLE = 'x'
Y_TITLE = 'y'
class Exposure_csv:
    def __init__(self,file_name, latitude_title=LAT_TITLE,
                 longitude_title=LONG_TITLE, is_x_y_locations=None,
                 x_title=X_TITLE, y_title=Y_TITLE,
                 refine_polygon=None):
        """
        This class is for handling the exposure csv file.
        It reads the file in and converts the lats and longs to a geospatial
        data object.
        Use the methods to read and write columns.

        The format of the csv files it reads is:
           The first row is a title row.
           Commas are the delimiters.
           Each column is a 'set' of data.

        Feel free to use/expand it to read other csv files.


        It is not for adding and deleting rows.

        Can geospatial handle string attributes? It's not made for them.
        Currently it can't load and save string attributes.

        So just use geospatial to hold the x, y and georef? Bad, since
        different attributes are in different structures.  Not so bad, the
        info to write if the .csv file is saved is in attribute_dic.

        The location info is in the geospatial attribute.


        """
        self._file_name = file_name
        self._geospatial = None

        # self._attribute_dic is a dictionary.
        #The keys are the column titles.
        #The values are lists of column data

        # self._title_index_dic is a dictionary.
        #The keys are the column titles.
        #The values are the index positions of file columns.
        self._attribute_dic, self._title_index_dic = \
        self._load_exposure_csv(self._file_name)
        try:
            #Have code here that handles caps or lower
            lats = self._attribute_dic[latitude_title]
            longs = self._attribute_dic[longitude_title]

        except KeyError:
            # maybe a warning..
            #Let's see if this works..
            if is_x_y_locations is not False:
                is_x_y_locations = True
            pass
        else:
            self._geospatial = Geospatial_data(latitudes = lats,
                 longitudes = longs)

        if is_x_y_locations is True:
            if self._geospatial is not None:
                pass #fixme throw an error
            try:
                xs = self._attribute_dic[x_title]
                ys = self._attribute_dic[y_title]
                points = [[float(i),float(j)] for i,j in map(None,xs,ys)]
            except KeyError:
                # maybe a warning..
                msg = "Could not find location information."
                raise TitleValueError, msg
            else:
                self._geospatial = Geospatial_data(data_points=points)

        # create a list of points that are in the refining_polygon
        # described by a list of indexes representing the points

    def __cmp__(self, other):
        #print "self._attribute_dic",self._attribute_dic
        #print "other._attribute_dic",other._attribute_dic
        #print "self._title_index_dic", self._title_index_dic
        #print "other._title_index_dic", other._title_index_dic

        #check that a is an instance of this class
        if isinstance(self, type(other)):
            result = cmp(self._attribute_dic, other._attribute_dic)
            if result != 0:
                return result
            # The order of the columns is important. Therefore..
            result = cmp(self._title_index_dic, other._title_index_dic)
            if result != 0:
                return result
            for self_ls, other_ls in map(None, self._attribute_dic, \
                    other._attribute_dic):
                result = cmp(self._attribute_dic[self_ls],
                             other._attribute_dic[other_ls])
                if result != 0:
                    return result
            return 0
        else:
            return 1

    def _load_exposure_csv(self, file_name):
        """
        Load in the csv as a dic, title as key and column info as value.
        Also, create a dic, title as key and column index as value,
        to keep track of the column order.
        """
        #
        attribute_dic = {}
        title_index_dic = {}
        titles_stripped = [] # list of titles

        reader = csv.reader(file(file_name))

        # Read in and manipulate the title info
        titles = reader.next()
        for i,title in enumerate(titles):
            titles_stripped.append(title.strip())
            title_index_dic[title.strip()] = i
        title_count = len(titles_stripped)
        #print "title_index_dic",title_index_dic


        #create a dic of column values, indexed by column title
        for line in reader:
            if len(line) != title_count:
                raise IOError #FIXME make this nicer
            for i, value in enumerate(line):
                attribute_dic.setdefault(titles_stripped[i],[]).append(value)

        return attribute_dic, title_index_dic

    def get_column(self, column_name, use_refind_polygon=False):
        """
        Given a column name return a list of the column values.

        Note, the type of the values will be String!
        Do this to change a list of strings to a list of floats:
        time = [float(x) for x in time]

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        if not self._attribute_dic.has_key(column_name):
            msg = 'There is no column called %s!' %column_name
            raise TitleValueError, msg
        return self._attribute_dic[column_name]


    def get_value(self, value_column_name,
                  known_column_name,
                  known_values,
                  use_refind_polygon=False):
        """
        Do linear interpolation on the known_column_name column, using the
        known_values, to return a value of the value_column_name column.
        """
        pass


    def get_location(self, use_refind_polygon=False):
        """
        Return a geospatial object which describes the
        locations of the location file.

        Note, if there is no location info, this returns None.

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        return self._geospatial

    def set_column(self, column_name, column_values, overwrite=False):
        """
        Add a column to the 'end' (with the right most column being the end)
        of the csv file.

        Set overwrite to True if you want to overwrite a column.

        Note, in column_name white space is removed and case is not checked.
        Precondition:
        The column_name and column_values cannot have commas in them.
        """

        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        if len(column_values) != value_row_count:
            msg = 'The number of column values must equal the number of rows.'
            raise DataMissingValuesError, msg

        if self._attribute_dic.has_key(column_name):
            if not overwrite:
                msg = 'Column name %s already in use!' %column_name
                raise TitleValueError, msg
        else:
            # New title.  Add it to the title index.
            self._title_index_dic[column_name] = len(self._title_index_dic)
        self._attribute_dic[column_name] = column_values
        #print "self._title_index_dic[column_name]",self._title_index_dic

    def save(self, file_name=None):
        """
        Save the exposure csv file
        """
        if file_name is None:
            file_name = self._file_name

        fd = open(file_name,'wb')
        writer = csv.writer(fd)

        #Write the title to a csv file
        line = [None]* len(self._title_index_dic)
        for title in self._title_index_dic.iterkeys():
            line[self._title_index_dic[title]]= title
        writer.writerow(line)

        # Write the values to a csv file
        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        for row_i in range(value_row_count):
            line = [None]* len(self._title_index_dic)
            for title in self._title_index_dic.iterkeys():
                line[self._title_index_dic[title]]= \
                     self._attribute_dic[title][row_i]
            writer.writerow(line)


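# Example (illustrative; assumes 'exposure.csv' has LATITUDE/LONGITUDE
# columns and a hypothetical 'DEPTH' column):
# >>> exp = Exposure_csv('exposure.csv')
# >>> depths = exp.get_column('DEPTH')
# >>> exp.set_column('DEPTH (m)', depths)
# >>> exp.save('exposure_out.csv')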
#Auxiliary
def write_obj(filename,x,y,z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected.

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """
    #print 'Writing obj to %s' % filename

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N==3  #Assuming three vertices per element

    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))

    for i in range(M):
        base = i*N
        outfile.write("f %d %d %d\n" % (base+1,base+2,base+3))

    outfile.close()

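# Example (illustrative): write a single unit triangle in the z=0 plane.
# >>> from Numeric import array
# >>> x = array([[0.0, 1.0, 0.0]])
# >>> y = array([[0.0, 0.0, 1.0]])
# >>> z = array([[0.0, 0.0, 0.0]])
# >>> write_obj('triangle', x, y, z)   # creates triangle.obj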

#########################################################
#Conversion routines
########################################################

def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN,xx,yy,zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN,xx,yy,zz)


def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?
    """

    import glob, os
    from anuga.config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    ##i = 0
    for i, line in enumerate(lines):
        tokens = line.split()
        values = map(float,tokens)

        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]

        ##i += 1


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)


    #Now read all the data files with variable information, combine with
    #x,y info
    #and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print 'Processing %s' % filename

        # Note: glob has already returned the full path, so don't
        # prepend data_dir again here.
        lines = open(filename, 'r').readlines()
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise DataTimeError, 'Could not extract time from filename %s' % filename



        ##i = 0
        for i, line in enumerate(lines):
            tokens = line.split()
            values = map(float,tokens)

            for j in range(3):
                z[i,j] = values[j]

            ##i += 1

        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)


def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    NetCDF file filename2
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()


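# Example (illustrative): keep every second timestep of the first 100.
# >>> filter_netcdf('full_run.sww', 'thinned.sww', first=0, last=100, step=2)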
#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of the data format class matching domain.format
    (e.g. Data_format_sww for 'sww'), opened with the given mode.
    """

    cls = eval('Data_format_%s' %domain.format)
    return cls(domain, mode)

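# Example (illustrative):
# >>> writer = get_dataobject(domain)   # domain.format is 'sww' by default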
#FIXME move into geospatial.  There should only be one method that
# reads xya, and writes pts.
def xya2pts(basename_in, basename_out=None, verbose=False,
            #easting_min=None, easting_max=None,
            #northing_min=None, northing_max=None,
            stride = 1,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed.

    If z_func is specified, it will be applied to the third field.
    """

    import os
    #from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, arrayrange, concatenate

    root, ext = os.path.splitext(basename_in)

    if ext == '': ext = '.xya'

    #Get NetCDF
    infile = open(root + ext, 'r')  #Open existing xya file for read

    if verbose: print 'Reading xya points from %s' %(root + ext)

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):

        if i % stride != 0: continue

        fields = line.split()

        try:
            assert len(fields) == 3
        except:
            print 'WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line)

        try:
            x = float( fields[0] )
            y = float( fields[1] )
            z = float( fields[2] )
        except:
            continue

        points.append( [x, y] )

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    write_ptsfile(ptsname, points, attribute, attribute_name)



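# Example (illustrative): convert depths-below-datum to elevations while
# thinning to every second point.
# >>> xya2pts('bathymetry', stride=2, z_func=lambda z: -z)
# reads bathymetry.xya and writes bathymetry.pts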
######Obsoleted by export_points in load_mesh
def write_ptsfile(ptsname, points, attribute, attribute_name = None,
                  zone=None, xllcorner=None, yllcorner=None):
    """Write points and associated attribute to pts (NetCDF) format
    """

    print 'WARNING: write_ptsfile is obsolete. Use export_points from load_mesh.loadASCII instead.'

    from Numeric import Float

    if attribute_name is None:
        attribute_name = 'attribute'


    from Scientific.IO.NetCDF import NetCDFFile

    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')


    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and '\
                          'portable storage of spatial point data'


    #Georeferencing
    from anuga.coordinate_transforms.geo_reference import Geo_reference
    if zone is None:
        assert xllcorner is None, 'xllcorner must be None'
        assert yllcorner is None, 'yllcorner must be None'
        Geo_reference().write_NetCDF(outfile)
    else:
        Geo_reference(zone, xllcorner, yllcorner).write_NetCDF(outfile)



    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable(attribute_name, Float, ('number_of_points',))

    # Get handles to the variables
    nc_points = outfile.variables['points']
    nc_attribute = outfile.variables[attribute_name]

    #Store data
    nc_points[:, :] = points
    nc_attribute[:] = attribute

    outfile.close()


def dem2pts(basename_in, basename_out=None,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None,
            use_cache=False, verbose=False,):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """



    kwargs = {'basename_out': basename_out,
              'easting_min': easting_min,
              'easting_max': easting_max,
              'northing_min': northing_min,
              'northing_max': northing_max,
              'verbose': verbose}

    if use_cache is True:
        from caching import cache
        result = cache(_dem2pts, basename_in, kwargs,
                       dependencies = [basename_in + '.dem'],
                       verbose = verbose)

    else:
        result = apply(_dem2pts, [basename_in], kwargs)

    return result


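# Example (illustrative): clip the DEM to a bounding box while converting.
# >>> dem2pts('perth', easting_min=380000, easting_max=400000,
# ...         northing_min=6430000, northing_max=6460000,
# ...         use_cache=True, verbose=True)
# reads perth.dem and writes perth.pts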
def _dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Internal function. See public function dem2pts for details.
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape, sum

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                      'of spatial point data'
    #assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
    totalnopoints = nrows*ncols

    # calculating number of NODATA_values for each row in clipped region
    #FIXME: use array operations to do faster
    nn = 0
    k = 0
    i1_0 = 0
    j1_0 = 0
    thisj = 0
    thisi = 0
    for i in range(nrows):
        y = (nrows-i-1)*cellsize + yllcorner
        for j in range(ncols):
            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                thisj = j
                thisi = i
                if dem_elevation_r[i,j] == NODATA_value: nn += 1

                if k == 0:
                    i1_0 = i
                    j1_0 = j
                k += 1

    index1 = j1_0
    index2 = thisj

    # dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))

    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
    nopoints = clippednopoints-nn

    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1,j1_0:thisj+1]

    if verbose:
        print 'There are %d values in the elevation' %totalnopoints
        print 'There are %d values in the clipped elevation' %clippednopoints
        print 'There are %d NODATA_values in the clipped elevation' %nn

    outfile.createDimension('number_of_points', nopoints)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    lenv = index2-index1+1
    #Store data
    global_index = 0
    #for i in range(nrows):
    for i in range(i1_0,thisi+1,1):
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        lower_index = global_index

        v = dem_elevation_r[i,index1:index2+1]
        no_NODATA = sum(v == NODATA_value)
        if no_NODATA > 0:
            newcols = lenv - no_NODATA #ncols_in_bounding_box - no_NODATA
        else:
            newcols = lenv #ncols_in_bounding_box

        telev = zeros(newcols, Float)
        tpoints = zeros((newcols, 2), Float)

        local_index = 0

        y = (nrows-i-1)*cellsize + yllcorner
        #for j in range(ncols):
        for j in range(j1_0,index2+1,1):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max and \
               dem_elevation_r[i,j] != NODATA_value:
                tpoints[local_index, :] = [x-easting_min,y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        if upper_index == lower_index + newcols:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nopoints, 'index not equal to number of points'

    infile.close()
    outfile.close()



def _read_hecras_cross_sections(lines):
    """Return block of surface lines for each cross section
    Starts with SURFACE LINE,
    Ends with END CROSS-SECTION
    """

    points = []

    reading_surface = False
    for i, line in enumerate(lines):

        if len(line.strip()) == 0:    #Ignore blanks
            continue

        if lines[i].strip().startswith('SURFACE LINE'):
            reading_surface = True
            continue

        if lines[i].strip().startswith('END') and reading_surface:
            yield points
            reading_surface = False
            points = []

        if reading_surface:
            fields = line.strip().split(',')
            easting = float(fields[0])
            northing = float(fields[1])
            elevation = float(fields[2])
            points.append([easting, northing, elevation])




1609def hecras_cross_sections2pts(basename_in,
1610                              basename_out=None,
1611                              verbose=False):
1612    """Read HEC-RAS Elevation datal from the following ASCII format (.sdf)
1613
1614    Example:
1615
1616
1617# RAS export file created on Mon 15Aug2005 11:42
1618# by HEC-RAS Version 3.1.1
1619
1620BEGIN HEADER:
1621  UNITS: METRIC
1622  DTM TYPE: TIN
1623  DTM: v:\1\cit\perth_topo\river_tin
1624  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
1625  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
1626  MAP PROJECTION: UTM
1627  PROJECTION ZONE: 50
1628  DATUM: AGD66
1629  VERTICAL DATUM:
1630  NUMBER OF REACHES:  19
1631  NUMBER OF CROSS-SECTIONS:  14206
1632END HEADER:
1633
1634
1635Only the SURFACE LINE data of the following form will be utilised
1636
1637  CROSS-SECTION:
1638    STREAM ID:Southern-Wungong
1639    REACH ID:Southern-Wungong
1640    STATION:19040.*
1641    CUT LINE:
1642      405548.671603161 , 6438142.7594925
1643      405734.536092045 , 6438326.10404912
1644      405745.130459356 , 6438331.48627354
1645      405813.89633823 , 6438368.6272789
1646    SURFACE LINE:
1647     405548.67,   6438142.76,   35.37
1648     405552.24,   6438146.28,   35.41
1649     405554.78,   6438148.78,   35.44
1650     405555.80,   6438149.79,   35.44
1651     405559.37,   6438153.31,   35.45
1652     405560.88,   6438154.81,   35.44
1653     405562.93,   6438156.83,   35.42
1654     405566.50,   6438160.35,   35.38
1655     405566.99,   6438160.83,   35.37
1656     ...
1657   END CROSS-SECTION
1658
1659    Convert to NetCDF pts format which is
1660
1661    points:  (Nx2) Float array
1662    elevation: N Float array
1663    """
1664
1665    #FIXME: Can this be written feasibly using write_pts?
1666
1667    import os
1668    from Scientific.IO.NetCDF import NetCDFFile
1669    from Numeric import Float, zeros, reshape
1670
1671    root = basename_in
1672
1673    #Get ASCII file
1674    infile = open(root + '.sdf', 'r')  #Open SDF file for read
1675
1676    if verbose: print 'Reading DEM from %s' %(root + '.sdf')
1677
1678    lines = infile.readlines()
1679    infile.close()
1680
1681    if verbose: print 'Converting to pts format'
1682
1683    i = 0
1684    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
1685        i += 1
1686
1687    assert lines[i].strip().upper() == 'BEGIN HEADER:'
1688    i += 1
1689
1690    assert lines[i].strip().upper().startswith('UNITS:')
1691    units = lines[i].strip().split()[1]
1692    i += 1
1693
1694    assert lines[i].strip().upper().startswith('DTM TYPE:')
1695    i += 1
1696
1697    assert lines[i].strip().upper().startswith('DTM:')
1698    i += 1
1699
1700    assert lines[i].strip().upper().startswith('STREAM')
1701    i += 1
1702
1703    assert lines[i].strip().upper().startswith('CROSS')
1704    i += 1
1705
1706    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
1707    projection = lines[i].strip().split(':')[1]
1708    i += 1
1709
1710    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
1711    zone = int(lines[i].strip().split(':')[1])
1712    i += 1
1713
1714    assert lines[i].strip().upper().startswith('DATUM:')
1715    datum = lines[i].strip().split(':')[1]
1716    i += 1
1717
1718    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
1719    i += 1
1720
1721    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
1722    i += 1
1723
1724    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
1725    number_of_cross_sections = int(lines[i].strip().split(':')[1])
1726    i += 1
1727
1728
1729    #Now read all points
1730    points = []
1731    elevation = []
1732    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
1733        for k, entry in enumerate(entries):
1734            points.append(entry[:2])
1735            elevation.append(entry[2])
1736
1737
    msg = 'Actual number of cross sections == %d, header reported %d'\
          %(j+1, number_of_cross_sections)
    assert j+1 == number_of_cross_sections, msg
1741
    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'
1747
1748    #FIXME (DSG-ON): use loadASCII export_points_file
1749    if verbose: print 'Store to NetCDF file %s' %ptsname
1750    # NetCDF file definition
1751    outfile = NetCDFFile(ptsname, 'w')
1752
1753    #Create new file
1754    outfile.institution = 'Geoscience Australia'
1755    outfile.description = 'NetCDF pts format for compact and portable ' +\
1756                          'storage of spatial point data derived from HEC-RAS'
1757
1758    #Georeferencing
1759    outfile.zone = zone
1760    outfile.xllcorner = 0.0
1761    outfile.yllcorner = 0.0
1762    outfile.false_easting = 500000    #FIXME: Use defaults from e.g. config.py
1763    outfile.false_northing = 1000000
1764
1765    outfile.projection = projection
1766    outfile.datum = datum
1767    outfile.units = units
1768
1769
1770    outfile.createDimension('number_of_points', len(points))
1771    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1772
1773    # variable definitions
1774    outfile.createVariable('points', Float, ('number_of_points',
1775                                             'number_of_dimensions'))
1776    outfile.createVariable('elevation', Float, ('number_of_points',))
1777
1778    # Get handles to the variables
1779    outfile.variables['points'][:] = points
1780    outfile.variables['elevation'][:] = elevation
1781
1782    outfile.close()
1783
1784
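def _example_hecras_cross_sections2pts():
    # Usage sketch (illustrative only; 'perth_xs' is a hypothetical
    # basename): reads perth_xs.sdf and writes perth_xs.pts.
    hecras_cross_sections2pts('perth_xs', verbose=True)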
1785
1786def sww2dem(basename_in, basename_out = None,
1787            quantity = None,
1788            timestep = None,
1789            reduction = None,
1790            cellsize = 10,
1791            NODATA_value = -9999,
1792            easting_min = None,
1793            easting_max = None,
1794            northing_min = None,
1795            northing_max = None,
1796            verbose = False,
1797            origin = None,
1798            datum = 'WGS84',
            format = 'ers'):
1800
1801    """Read SWW file and convert to Digitial Elevation model format (.asc or .ers)
1802
1803    Example (ASC):
1804
1805    ncols         3121
1806    nrows         1800
1807    xllcorner     722000
1808    yllcorner     5893000
1809    cellsize      25
1810    NODATA_value  -9999
1811    138.3698 137.4194 136.5062 135.5558 ..........
1812
    For the asc format an accompanying file with same basename_out but
    extension .prj is also written. It fixes the UTM zone, datum, false
    northings and eastings.
1815
1816    The prj format is assumed to be as
1817
1818    Projection    UTM
1819    Zone          56
1820    Datum         WGS84
1821    Zunits        NO
1822    Units         METERS
1823    Spheroid      WGS84
1824    Xshift        0.0000000000
1825    Yshift        10000000.0000000000
1826    Parameters
1827
1828
    The parameter quantity must be the name of an existing quantity or
    an expression involving existing quantities. The default is
    'elevation'.

    If timestep (an index) is given, output quantity at that timestep.

    If reduction is given, use that to reduce quantity over all timesteps.

    datum is the datum name written to the output georeference
    (default 'WGS84').

    format can be either 'asc' or 'ers'
    """
1841
1842    import sys
1843    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
1844    from Numeric import array2string
1845
1846    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
1847    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
1848
1849    msg = 'Format must be either asc or ers'
1850    assert format.lower() in ['asc', 'ers'], msg
1851
1852
1853    false_easting = 500000
1854    false_northing = 10000000
1855
1856    if quantity is None:
1857        quantity = 'elevation'
1858
1859    if reduction is None:
1860        reduction = max
1861
1862    if basename_out is None:
1863        basename_out = basename_in + '_%s' %quantity
1864
1865    swwfile = basename_in + '.sww'
1866    demfile = basename_out + '.' + format
    # Note the use of a .ers extension is optional (write_ermapper_grid will
    # deal with either option)
1869
1870    #Read sww file
1871    if verbose: print 'Reading from %s' %swwfile
1872    from Scientific.IO.NetCDF import NetCDFFile
1873    fid = NetCDFFile(swwfile)
1874
1875    #Get extent and reference
1876    x = fid.variables['x'][:]
1877    y = fid.variables['y'][:]
1878    volumes = fid.variables['volumes'][:]
1879    if timestep is not None:
1880        times = fid.variables['time'][timestep]
1881    else:
1882        times = fid.variables['time'][:]
1883
1884    number_of_timesteps = fid.dimensions['number_of_timesteps']
1885    number_of_points = fid.dimensions['number_of_points']
1886   
1887    if origin is None:
1888
1889        #Get geo_reference
1890        #sww files don't have to have a geo_ref
1891        try:
1892            geo_reference = Geo_reference(NetCDFObject=fid)
1893        except AttributeError, e:
1894            geo_reference = Geo_reference() #Default georef object
1895
1896        xllcorner = geo_reference.get_xllcorner()
1897        yllcorner = geo_reference.get_yllcorner()
1898        zone = geo_reference.get_zone()
1899    else:
1900        zone = origin[0]
1901        xllcorner = origin[1]
1902        yllcorner = origin[2]
1903
1904
1905
1906    #FIXME: Refactor using code from file_function.statistics
1907    #Something like print swwstats(swwname)
1908    if verbose:
1909        print '------------------------------------------------'
1910        print 'Statistics of SWW file:'
1911        print '  Name: %s' %swwfile
1912        print '  Reference:'
1913        print '    Lower left corner: [%f, %f]'\
1914              %(xllcorner, yllcorner)
1915        if timestep is not None:
1916            print '    Time: %f' %(times)
1917        else:
1918            print '    Start time: %f' %fid.starttime[0]
1919        print '  Extent:'
1920        print '    x [m] in [%f, %f], len(x) == %d'\
1921              %(min(x.flat), max(x.flat), len(x.flat))
1922        print '    y [m] in [%f, %f], len(y) == %d'\
1923              %(min(y.flat), max(y.flat), len(y.flat))
1924        if timestep is not None:
1925            print '    t [s] = %f, len(t) == %d' %(times, 1)
1926        else:
1927            print '    t [s] in [%f, %f], len(t) == %d'\
1928              %(min(times), max(times), len(times))
1929        print '  Quantities [SI units]:'
1930        for name in ['stage', 'xmomentum', 'ymomentum']:
1931            q = fid.variables[name][:].flat
1932            if timestep is not None:
1933                q = q[timestep*len(x):(timestep+1)*len(x)]
1934            if verbose: print '    %s in [%f, %f]' %(name, min(q), max(q))
1935        for name in ['elevation']:
1936            q = fid.variables[name][:].flat
1937            if verbose: print '    %s in [%f, %f]' %(name, min(q), max(q))
1938           
1939    # Get quantity and reduce if applicable
1940    if verbose: print 'Processing quantity %s' %quantity
1941
1942    # Turn NetCDF objects into Numeric arrays
1943    quantity_dict = {}
1944    for name in fid.variables.keys():
1945        quantity_dict[name] = fid.variables[name][:] 
1946
1947
1948    # Convert quantity expression to quantities found in sww file   
1949    q = apply_expression_to_dictionary(quantity, quantity_dict)
1950
1951    if len(q.shape) == 2:
1952        #q has a time component and needs to be reduced along
1953        #the temporal dimension
1954        if verbose: print 'Reducing quantity %s' %quantity
1955        q_reduced = zeros( number_of_points, Float )
1956       
1957        if timestep is not None:
1958            for k in range(number_of_points):
1959                q_reduced[k] = q[timestep,k] 
1960        else:
1961            for k in range(number_of_points):
1962                q_reduced[k] = reduction( q[:,k] )
1963
1964        q = q_reduced
1965
1966    #Post condition: Now q has dimension: number_of_points
1967    assert len(q.shape) == 1
1968    assert q.shape[0] == number_of_points
1969
1970
1971    if verbose:
1972        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
1973
1974
1975    #Create grid and update xll/yll corner and x,y
1976
1977    #Relative extent
1978    if easting_min is None:
1979        xmin = min(x)
1980    else:
1981        xmin = easting_min - xllcorner
1982
1983    if easting_max is None:
1984        xmax = max(x)
1985    else:
1986        xmax = easting_max - xllcorner
1987
1988    if northing_min is None:
1989        ymin = min(y)
1990    else:
1991        ymin = northing_min - yllcorner
1992
1993    if northing_max is None:
1994        ymax = max(y)
1995    else:
1996        ymax = northing_max - yllcorner
1997
1998
1999
2000    if verbose: print 'Creating grid'
2001    ncols = int((xmax-xmin)/cellsize)+1
2002    nrows = int((ymax-ymin)/cellsize)+1
2003
2004
2005    #New absolute reference and coordinates
2006    newxllcorner = xmin+xllcorner
2007    newyllcorner = ymin+yllcorner
2008
2009    x = x+xllcorner-newxllcorner
2010    y = y+yllcorner-newyllcorner
2011   
2012    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2013    assert len(vertex_points.shape) == 2
2014
2015    grid_points = zeros ( (ncols*nrows, 2), Float )
2016
2017
    for i in xrange(nrows):
        if format.lower() == 'asc':
            yg = i*cellsize
        else:
            #This will flip the order of the y values for ers
            yg = (nrows-i)*cellsize
2024
2025        for j in xrange(ncols):
2026            xg = j*cellsize
2027            k = i*ncols + j
2028
2029            grid_points[k,0] = xg
2030            grid_points[k,1] = yg
2031
2032    #Interpolate
2033    #from least_squares import Interpolation
2034    from anuga.fit_interpolate.interpolate import Interpolate
2035
2036    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2037
2038    #Interpolate using quantity values
2039    if verbose: print 'Interpolating'
2040    grid_values = interp.interpolate(q, grid_points).flat
2041
2042
2043    if verbose:
2044        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
2045                                                      max(grid_values))
2046
2047    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2048    P = interp.mesh.get_boundary_polygon()
2049    outside_indices = outside_polygon(grid_points, P, closed=True)
2050
2051    for i in outside_indices:
2052        grid_values[i] = NODATA_value
2053
2054
2055
2056
2057    if format.lower() == 'ers':
2058        # setup ERS header information
2059        grid_values = reshape(grid_values,(nrows, ncols))
2060        header = {}
2061        header['datum'] = '"' + datum + '"'
2062        # FIXME The use of hardwired UTM and zone number needs to be made optional
2063        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
2064        header['projection'] = '"UTM-' + str(zone) + '"'
2065        header['coordinatetype'] = 'EN'
2066        if header['coordinatetype'] == 'LL':
2067            header['longitude'] = str(newxllcorner)
2068            header['latitude'] = str(newyllcorner)
2069        elif header['coordinatetype'] == 'EN':
2070            header['eastings'] = str(newxllcorner)
2071            header['northings'] = str(newyllcorner)
2072        header['nullcellvalue'] = str(NODATA_value)
2073        header['xdimension'] = str(cellsize)
2074        header['ydimension'] = str(cellsize)
2075        header['value'] = '"' + quantity + '"'
2076        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
2077
2078
2079        #Write
2080        if verbose: print 'Writing %s' %demfile
2081        import ermapper_grids
2082        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
2083
2084        fid.close()
2085    else:
2086        #Write to Ascii format
2087
2088        #Write prj file
2089        prjfile = basename_out + '.prj'
2090
2091        if verbose: print 'Writing %s' %prjfile
2092        prjid = open(prjfile, 'w')
2093        prjid.write('Projection    %s\n' %'UTM')
2094        prjid.write('Zone          %d\n' %zone)
2095        prjid.write('Datum         %s\n' %datum)
2096        prjid.write('Zunits        NO\n')
2097        prjid.write('Units         METERS\n')
2098        prjid.write('Spheroid      %s\n' %datum)
2099        prjid.write('Xshift        %d\n' %false_easting)
2100        prjid.write('Yshift        %d\n' %false_northing)
2101        prjid.write('Parameters\n')
2102        prjid.close()
2103
2104
2105
2106        if verbose: print 'Writing %s' %demfile
2107
2108        ascid = open(demfile, 'w')
2109
2110        ascid.write('ncols         %d\n' %ncols)
2111        ascid.write('nrows         %d\n' %nrows)
2112        ascid.write('xllcorner     %d\n' %newxllcorner)
2113        ascid.write('yllcorner     %d\n' %newyllcorner)
2114        ascid.write('cellsize      %f\n' %cellsize)
2115        ascid.write('NODATA_value  %d\n' %NODATA_value)
2116
2117
2118        #Get bounding polygon from mesh
2119        #P = interp.mesh.get_boundary_polygon()
2120        #inside_indices = inside_polygon(grid_points, P)
2121
2122        for i in range(nrows):
2123            if verbose and i%((nrows+10)/10)==0:
2124                print 'Doing row %d of %d' %(i, nrows)
2125
2126            base_index = (nrows-i-1)*ncols
2127
            row = grid_values[base_index:base_index+ncols]
            s = array2string(row, max_line_width=sys.maxint)
2130            ascid.write(s[1:-1] + '\n')
2131
2132
2139
2140        #Close
2141        ascid.close()
2142        fid.close()
2143
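def _example_sww2dem():
    # Usage sketch (illustrative only; 'runup' is a hypothetical sww
    # basename): exports the maximum water depth over all timesteps to
    # runup_depth.asc (plus runup_depth.prj).
    sww2dem('runup',
            basename_out='runup_depth',
            quantity='stage - elevation',
            reduction=max,
            cellsize=5,
            format='asc',
            verbose=True)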
2144#Backwards compatibility
2145def sww2asc(basename_in, basename_out = None,
2146            quantity = None,
2147            timestep = None,
2148            reduction = None,
2149            cellsize = 10,
2150            verbose = False,
2151            origin = None):
2152    print 'sww2asc will soon be obsoleted - please use sww2dem'
2153    sww2dem(basename_in,
2154            basename_out = basename_out,
2155            quantity = quantity,
2156            timestep = timestep,
2157            reduction = reduction,
2158            cellsize = cellsize,
2159            verbose = verbose,
            origin = origin,
            datum = 'WGS84',
            format = 'asc')
2163
2164def sww2ers(basename_in, basename_out = None,
2165            quantity = None,
2166            timestep = None,
2167            reduction = None,
2168            cellsize = 10,
2169            verbose = False,
2170            origin = None,
2171            datum = 'WGS84'):
2172    print 'sww2ers will soon be obsoleted - please use sww2dem'
2173    sww2dem(basename_in,
2174            basename_out = basename_out,
2175            quantity = quantity,
2176            timestep = timestep,
2177            reduction = reduction,
2178            cellsize = cellsize,
2179            verbose = verbose,
2180            origin = origin,
2181            datum = datum,
2182            format = 'ers')
2183################################# END COMPATIBILITY ##############
2184
2185
2186
2187def sww2pts(basename_in, basename_out=None,
2188            data_points=None,
2189            quantity=None,
2190            timestep=None,
2191            reduction=None,
2192            NODATA_value=-9999,
2193            verbose=False,
2194            origin=None):
2195            #datum = 'WGS84')
2196
2197
2198    """Read SWW file and convert to interpolated values at selected points
2199
2200    The parameter quantity' must be the name of an existing quantity or
2201    an expression involving existing quantities. The default is
2202    'elevation'.
2203
2204    if timestep (an index) is given, output quantity at that timestep
2205
2206    if reduction is given use that to reduce quantity over all timesteps.
2207
2208    data_points (Nx2 array) give locations of points where quantity is to be computed.
2209   
2210    """
2211
2212    import sys
2213    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
2214    from Numeric import array2string
2215
2216    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
2217    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
2218
2219    from anuga.geospatial_data.geospatial_data import Geospatial_data
2220
2221    if quantity is None:
2222        quantity = 'elevation'
2223
2224    if reduction is None:
2225        reduction = max
2226
2227    if basename_out is None:
2228        basename_out = basename_in + '_%s' %quantity
2229
2230    swwfile = basename_in + '.sww'
2231    ptsfile = basename_out + '.pts'
2232
2233    # Read sww file
2234    if verbose: print 'Reading from %s' %swwfile
2235    from Scientific.IO.NetCDF import NetCDFFile
2236    fid = NetCDFFile(swwfile)
2237
2238    # Get extent and reference
2239    x = fid.variables['x'][:]
2240    y = fid.variables['y'][:]
2241    volumes = fid.variables['volumes'][:]
2242
2243    number_of_timesteps = fid.dimensions['number_of_timesteps']
2244    number_of_points = fid.dimensions['number_of_points']
2245    if origin is None:
2246
2247        # Get geo_reference
2248        # sww files don't have to have a geo_ref
2249        try:
2250            geo_reference = Geo_reference(NetCDFObject=fid)
2251        except AttributeError, e:
2252            geo_reference = Geo_reference() #Default georef object
2253
2254        xllcorner = geo_reference.get_xllcorner()
2255        yllcorner = geo_reference.get_yllcorner()
2256        zone = geo_reference.get_zone()
2257    else:
2258        zone = origin[0]
2259        xllcorner = origin[1]
2260        yllcorner = origin[2]
2261
2262
2263
2264    # FIXME: Refactor using code from file_function.statistics
2265    # Something like print swwstats(swwname)
2266    if verbose:
2267        x = fid.variables['x'][:]
2268        y = fid.variables['y'][:]
2269        times = fid.variables['time'][:]
2270        print '------------------------------------------------'
2271        print 'Statistics of SWW file:'
2272        print '  Name: %s' %swwfile
2273        print '  Reference:'
2274        print '    Lower left corner: [%f, %f]'\
2275              %(xllcorner, yllcorner)
2276        print '    Start time: %f' %fid.starttime[0]
2277        print '  Extent:'
2278        print '    x [m] in [%f, %f], len(x) == %d'\
2279              %(min(x.flat), max(x.flat), len(x.flat))
2280        print '    y [m] in [%f, %f], len(y) == %d'\
2281              %(min(y.flat), max(y.flat), len(y.flat))
2282        print '    t [s] in [%f, %f], len(t) == %d'\
2283              %(min(times), max(times), len(times))
2284        print '  Quantities [SI units]:'
2285        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2286            q = fid.variables[name][:].flat
2287            print '    %s in [%f, %f]' %(name, min(q), max(q))
2288
2289
2290
2291    # Get quantity and reduce if applicable
2292    if verbose: print 'Processing quantity %s' %quantity
2293
2294    # Turn NetCDF objects into Numeric arrays
2295    quantity_dict = {}
2296    for name in fid.variables.keys():
2297        quantity_dict[name] = fid.variables[name][:]
2298
2299
2300
2301    # Convert quantity expression to quantities found in sww file   
2302    q = apply_expression_to_dictionary(quantity, quantity_dict)
2303
2304
2305
    if len(q.shape) == 2:
        # q has a time component and needs to be reduced along
        # the temporal dimension
        if verbose: print 'Reducing quantity %s' %quantity
        q_reduced = zeros( number_of_points, Float )

        if timestep is not None:
            for k in range(number_of_points):
                q_reduced[k] = q[timestep,k]
        else:
            for k in range(number_of_points):
                q_reduced[k] = reduction( q[:,k] )

        q = q_reduced
2316
2317    # Post condition: Now q has dimension: number_of_points
2318    assert len(q.shape) == 1
2319    assert q.shape[0] == number_of_points
2320
2321
2322    if verbose:
2323        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
2324
2325
2326    # Create grid and update xll/yll corner and x,y
2327    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2328    assert len(vertex_points.shape) == 2
2329
2330    # Interpolate
2331    from anuga.fit_interpolate.interpolate import Interpolate
2332    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2333
2334    # Interpolate using quantity values
2335    if verbose: print 'Interpolating'
2336    interpolated_values = interp.interpolate(q, data_points).flat
2337
2338
2339    if verbose:
2340        print 'Interpolated values are in [%f, %f]' %(min(interpolated_values),
2341                                                      max(interpolated_values))
2342
2343    # Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2344    P = interp.mesh.get_boundary_polygon()
2345    outside_indices = outside_polygon(data_points, P, closed=True)
2346
2347    for i in outside_indices:
2348        interpolated_values[i] = NODATA_value
2349
2350
2351    # Store results   
2352    G = Geospatial_data(data_points=data_points,
2353                        attributes=interpolated_values)
2354
2355    G.export_points_file(ptsfile, absolute = True)
2356
2357    fid.close()
2358
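def _example_sww2pts():
    # Usage sketch (illustrative only; 'runup' is a hypothetical sww
    # basename and the coordinates are made up): samples peak stage at
    # two arbitrary points and writes runup_stage.pts.
    sww2pts('runup',
            quantity='stage',
            data_points=[[400.0, 120.0], [405.0, 130.0]],
            reduction=max,
            verbose=True)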
2359
2360def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2361                                  use_cache = False,
2362                                  verbose = False):
2363    """Read Digitial Elevation model from the following ASCII format (.asc)
2364
2365    Example:
2366
2367    ncols         3121
2368    nrows         1800
2369    xllcorner     722000
2370    yllcorner     5893000
2371    cellsize      25
2372    NODATA_value  -9999
2373    138.3698 137.4194 136.5062 135.5558 ..........
2374
2375    Convert basename_in + '.asc' to NetCDF format (.dem)
2376    mimicking the ASCII format closely.
2377
2378
2379    An accompanying file with same basename_in but extension .prj must exist
2380    and is used to fix the UTM zone, datum, false northings and eastings.
2381
2382    The prj format is assumed to be as
2383
2384    Projection    UTM
2385    Zone          56
2386    Datum         WGS84
2387    Zunits        NO
2388    Units         METERS
2389    Spheroid      WGS84
2390    Xshift        0.0000000000
2391    Yshift        10000000.0000000000
2392    Parameters
2393    """
2394
2395
2396
2397    kwargs = {'basename_out': basename_out, 'verbose': verbose}
2398
2399    if use_cache is True:
2400        from caching import cache
2401        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
2402                       dependencies = [basename_in + '.asc'],
2403                       verbose = verbose)
2404
2405    else:
2406        result = apply(_convert_dem_from_ascii2netcdf, [basename_in], kwargs)
2407
2408    return result
2409
2410
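def _example_convert_dem_from_ascii2netcdf():
    # Usage sketch (illustrative only; 'perth_dem' is a hypothetical
    # basename): reads perth_dem.asc and perth_dem.prj and writes
    # perth_dem.dem, caching the result for subsequent calls.
    convert_dem_from_ascii2netcdf('perth_dem', use_cache=True, verbose=True)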
2411
2412
2413
2414
2415def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2416                                  verbose = False):
2417    """Read Digitial Elevation model from the following ASCII format (.asc)
2418
2419    Internal function. See public function convert_dem_from_ascii2netcdf for details.
2420    """
2421
2422    import os
2423    from Scientific.IO.NetCDF import NetCDFFile
2424    from Numeric import Float, array
2425
2426    #root, ext = os.path.splitext(basename_in)
2427    root = basename_in
2428
2429    ###########################################
2430    # Read Meta data
2431    if verbose: print 'Reading METADATA from %s' %root + '.prj'
2432    metadatafile = open(root + '.prj')
2433    metalines = metadatafile.readlines()
2434    metadatafile.close()
2435
2436    L = metalines[0].strip().split()
2437    assert L[0].strip().lower() == 'projection'
2438    projection = L[1].strip()                   #TEXT
2439
2440    L = metalines[1].strip().split()
2441    assert L[0].strip().lower() == 'zone'
2442    zone = int(L[1].strip())
2443
2444    L = metalines[2].strip().split()
2445    assert L[0].strip().lower() == 'datum'
2446    datum = L[1].strip()                        #TEXT
2447
2448    L = metalines[3].strip().split()
2449    assert L[0].strip().lower() == 'zunits'     #IGNORE
2450    zunits = L[1].strip()                       #TEXT
2451
2452    L = metalines[4].strip().split()
2453    assert L[0].strip().lower() == 'units'
2454    units = L[1].strip()                        #TEXT
2455
2456    L = metalines[5].strip().split()
2457    assert L[0].strip().lower() == 'spheroid'   #IGNORE
2458    spheroid = L[1].strip()                     #TEXT
2459
2460    L = metalines[6].strip().split()
2461    assert L[0].strip().lower() == 'xshift'
2462    false_easting = float(L[1].strip())
2463
2464    L = metalines[7].strip().split()
2465    assert L[0].strip().lower() == 'yshift'
2466    false_northing = float(L[1].strip())
2467
2468    #print false_easting, false_northing, zone, datum
2469
2470
2471    ###########################################
2472    #Read DEM data
2473
2474    datafile = open(basename_in + '.asc')
2475
2476    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
2477    lines = datafile.readlines()
2478    datafile.close()
2479
2480    if verbose: print 'Got', len(lines), ' lines'
2481
2482    ncols = int(lines[0].split()[1].strip())
2483    nrows = int(lines[1].split()[1].strip())
2484    xllcorner = float(lines[2].split()[1].strip())
2485    yllcorner = float(lines[3].split()[1].strip())
2486    cellsize = float(lines[4].split()[1].strip())
2487    NODATA_value = int(lines[5].split()[1].strip())
2488
2489    assert len(lines) == nrows + 6
2490
2491
2492    ##########################################
2493
2494
    if basename_out is None:
2496        netcdfname = root + '.dem'
2497    else:
2498        netcdfname = basename_out + '.dem'
2499
2500    if verbose: print 'Store to NetCDF file %s' %netcdfname
2501    # NetCDF file definition
2502    fid = NetCDFFile(netcdfname, 'w')
2503
2504    #Create new file
2505    fid.institution = 'Geoscience Australia'
2506    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
2507                      'of spatial point data'
2508
2509    fid.ncols = ncols
2510    fid.nrows = nrows
2511    fid.xllcorner = xllcorner
2512    fid.yllcorner = yllcorner
2513    fid.cellsize = cellsize
2514    fid.NODATA_value = NODATA_value
2515
2516    fid.zone = zone
2517    fid.false_easting = false_easting
2518    fid.false_northing = false_northing
2519    fid.projection = projection
2520    fid.datum = datum
2521    fid.units = units
2522
2523
2524    # dimension definitions
2525    fid.createDimension('number_of_rows', nrows)
2526    fid.createDimension('number_of_columns', ncols)
2527
2528    # variable definitions
2529    fid.createVariable('elevation', Float, ('number_of_rows',
2530                                            'number_of_columns'))
2531
2532    # Get handles to the variables
2533    elevation = fid.variables['elevation']
2534
2535    #Store data
2536    n = len(lines[6:])
2537    for i, line in enumerate(lines[6:]):
2538        fields = line.split()
2539        if verbose and i%((n+10)/10)==0:
2540            print 'Processing row %d of %d' %(i, nrows)
2541
2542        elevation[i, :] = array([float(x) for x in fields])
2543
2544    fid.close()
2545
2546
2547
2548
2549
2550def ferret2sww(basename_in, basename_out = None,
2551               verbose = False,
2552               minlat = None, maxlat = None,
2553               minlon = None, maxlon = None,
2554               mint = None, maxt = None, mean_stage = 0,
2555               origin = None, zscale = 1,
2556               fail_on_NaN = True,
2557               NaN_filler = 0,
2558               elevation = None,
2559               inverted_bathymetry = True
2560               ): #FIXME: Bathymetry should be obtained
2561                                  #from MOST somehow.
2562                                  #Alternatively from elsewhere
2563                                  #or, as a last resort,
2564                                  #specified here.
2565                                  #The value of -100 will work
2566                                  #for the Wollongong tsunami
2567                                  #scenario but is very hacky
2568    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
2569    sww format native to abstract_2d_finite_volumes.
2570
2571    Specify only basename_in and read files of the form
2572    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
2573    relative height, x-velocity and y-velocity, respectively.
2574
2575    Also convert latitude and longitude to UTM. All coordinates are
2576    assumed to be given in the GDA94 datum.
2577
    min's and max's: If omitted - full extent is used.
    To include a value min may equal it, while max must exceed it.
    Lat and lon are assumed to be in decimal degrees.
2581
2582    origin is a 3-tuple with geo referenced
2583    UTM coordinates (zone, easting, northing)
2584
2585    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2586    which means that longitude is the fastest
2587    varying dimension (row major order, so to speak)
2588
2589    ferret2sww uses grid points as vertices in a triangular grid
2590    counting vertices from lower left corner upwards, then right
2591    """
2592
2593    import os
2594    from Scientific.IO.NetCDF import NetCDFFile
2595    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2596    from Numeric import allclose, around
2597
2598    precision = Float
2599
2600    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
2601
    if minlat is not None:
        assert -90 < minlat < 90 , msg
    if maxlat is not None:
        assert -90 < maxlat < 90 , msg
        if minlat is not None:
            assert maxlat > minlat
    if minlon is not None:
        assert -180 < minlon < 180 , msg
    if maxlon is not None:
        assert -180 < maxlon < 180 , msg
        if minlon is not None:
            assert maxlon > minlon
2614       
2615
2616
2617    #Get NetCDF data
2618    if verbose: print 'Reading files %s_*.nc' %basename_in
2619    #print "basename_in + '_ha.nc'",basename_in + '_ha.nc'
2620    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2621    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2622    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2623    file_e = NetCDFFile(basename_in + '_e.nc', 'r')  #Elevation (z) (m)
2624
2625    if basename_out is None:
2626        swwname = basename_in + '.sww'
2627    else:
2628        swwname = basename_out + '.sww'
2629
    # Get dimensions of file_h
2631    for dimension in file_h.dimensions.keys():
2632        if dimension[:3] == 'LON':
2633            dim_h_longitude = dimension
2634        if dimension[:3] == 'LAT':
2635            dim_h_latitude = dimension
2636        if dimension[:4] == 'TIME':
2637            dim_h_time = dimension
2638
2642
2643    times = file_h.variables[dim_h_time]
2644    latitudes = file_h.variables[dim_h_latitude]
2645    longitudes = file_h.variables[dim_h_longitude]
2646   
    # Get dimensions for file_e
2648    for dimension in file_e.dimensions.keys():
2649        if dimension[:3] == 'LON':
2650            dim_e_longitude = dimension
2651        if dimension[:3] == 'LAT':
2652            dim_e_latitude = dimension
2653
    # Get dimensions for file_u
2655    for dimension in file_u.dimensions.keys():
2656        if dimension[:3] == 'LON':
2657            dim_u_longitude = dimension
2658        if dimension[:3] == 'LAT':
2659            dim_u_latitude = dimension
2660        if dimension[:4] == 'TIME':
2661            dim_u_time = dimension
2662           
    # Get dimensions for file_v
2664    for dimension in file_v.dimensions.keys():
2665        if dimension[:3] == 'LON':
2666            dim_v_longitude = dimension
2667        if dimension[:3] == 'LAT':
2668            dim_v_latitude = dimension
2669        if dimension[:4] == 'TIME':
2670            dim_v_time = dimension
2671
2672
2673    #Precision used by most for lat/lon is 4 or 5 decimals
2674    e_lat = around(file_e.variables[dim_e_latitude][:], 5)
2675    e_lon = around(file_e.variables[dim_e_longitude][:], 5)
2676
2677    #Check that files are compatible
2678    assert allclose(latitudes, file_u.variables[dim_u_latitude])
2679    assert allclose(latitudes, file_v.variables[dim_v_latitude])
2680    assert allclose(latitudes, e_lat)
2681
2682    assert allclose(longitudes, file_u.variables[dim_u_longitude])
2683    assert allclose(longitudes, file_v.variables[dim_v_longitude])
2684    assert allclose(longitudes, e_lon)
2685
    if mint is None:
        jmin = 0
    else:
        jmin = searchsorted(times, mint)

    if maxt is None:
        jmax = len(times)
    else:
        jmax = searchsorted(times, maxt)
2695
2696    #print "latitudes[:]",latitudes[:]
2697    #print "longitudes[:]",longitudes [:]
2698    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],
2699                                                  longitudes[:],
2700                                                 minlat, maxlat,
2701                                                 minlon, maxlon)
2702
2703
2704#    print' j', jmin, jmax
2705    times = times[jmin:jmax]
2706    latitudes = latitudes[kmin:kmax]
2707    longitudes = longitudes[lmin:lmax]
2708
2709    #print "latitudes[:]",latitudes[:]
2710    #print "longitudes[:]",longitudes [:]
2711
2712    if verbose: print 'cropping'
2713    zname = 'ELEVATION'
2714
2715    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2716    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2717    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2718    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2719
2733
2734
2735
2736    #Get missing values
2737    nan_ha = file_h.variables['HA'].missing_value[0]
2738    nan_ua = file_u.variables['UA'].missing_value[0]
2739    nan_va = file_v.variables['VA'].missing_value[0]
2740    if hasattr(file_e.variables[zname],'missing_value'):
2741        nan_e  = file_e.variables[zname].missing_value[0]
2742    else:
2743        nan_e = None
2744
2745    #Cleanup
2746    from Numeric import sometrue
2747
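    # Note on the filling idiom used below: 'missing' is a 0/1 mask, so
    # quantity*(missing==0) + missing*NaN_filler keeps valid entries
    # unchanged and replaces flagged entries with NaN_filler, all
    # elementwise.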
2748    missing = (amplitudes == nan_ha)
2749    if sometrue (missing):
2750        if fail_on_NaN:
2751            msg = 'NetCDFFile %s contains missing values'\
2752                  %(basename_in+'_ha.nc')
2753            raise DataMissingValuesError, msg
2754        else:
2755            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2756
2757    missing = (uspeed == nan_ua)
2758    if sometrue (missing):
2759        if fail_on_NaN:
2760            msg = 'NetCDFFile %s contains missing values'\
2761                  %(basename_in+'_ua.nc')
2762            raise DataMissingValuesError, msg
2763        else:
2764            uspeed = uspeed*(missing==0) + missing*NaN_filler
2765
2766    missing = (vspeed == nan_va)
2767    if sometrue (missing):
2768        if fail_on_NaN:
2769            msg = 'NetCDFFile %s contains missing values'\
2770                  %(basename_in+'_va.nc')
2771            raise DataMissingValuesError, msg
2772        else:
2773            vspeed = vspeed*(missing==0) + missing*NaN_filler
2774
2775
2776    missing = (elevations == nan_e)
2777    if sometrue (missing):
2778        if fail_on_NaN:
2779            msg = 'NetCDFFile %s contains missing values'\
2780                  %(basename_in+'_e.nc')
2781            raise DataMissingValuesError, msg
2782        else:
2783            elevations = elevations*(missing==0) + missing*NaN_filler
2784
2785    #######
2786
2787
2788
2789    number_of_times = times.shape[0]
2790    number_of_latitudes = latitudes.shape[0]
2791    number_of_longitudes = longitudes.shape[0]
2792
2793    assert amplitudes.shape[0] == number_of_times
2794    assert amplitudes.shape[1] == number_of_latitudes
2795    assert amplitudes.shape[2] == number_of_longitudes
2796
2797    if verbose:
2798        print '------------------------------------------------'
2799        print 'Statistics:'
2800        print '  Extent (lat/lon):'
2801        print '    lat in [%f, %f], len(lat) == %d'\
2802              %(min(latitudes.flat), max(latitudes.flat),
2803                len(latitudes.flat))
2804        print '    lon in [%f, %f], len(lon) == %d'\
2805              %(min(longitudes.flat), max(longitudes.flat),
2806                len(longitudes.flat))
2807        print '    t in [%f, %f], len(t) == %d'\
2808              %(min(times.flat), max(times.flat), len(times.flat))
2809
        q = amplitudes.flat
        name = 'Amplitudes (ha) [cm]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = uspeed.flat
        name = 'Speeds (ua) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = vspeed.flat
        name = 'Speeds (va) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = elevations.flat
        name = 'Elevations (e) [m]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2825
2826
2827    #print number_of_latitudes, number_of_longitudes
2828    number_of_points = number_of_latitudes*number_of_longitudes
2829    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2830
2831
2832    file_h.close()
2833    file_u.close()
2834    file_v.close()
2835    file_e.close()
2836
2837
2838    # NetCDF file definition
2839    outfile = NetCDFFile(swwname, 'w')
2840
2841    #Create new file
2842    outfile.institution = 'Geoscience Australia'
2843    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
2844                          %(basename_in + '_ha.nc',
2845                            basename_in + '_ua.nc',
2846                            basename_in + '_va.nc',
2847                            basename_in + '_e.nc')
2848
2849
2850    #For sww compatibility
2851    outfile.smoothing = 'Yes'
2852    outfile.order = 1
2853
2854    #Start time in seconds since the epoch (midnight 1/1/1970)
2855    outfile.starttime = starttime = times[0]
2856    times = times - starttime  #Store relative times
2857
2858    # dimension definitions
2859    outfile.createDimension('number_of_volumes', number_of_volumes)
2860
2861    outfile.createDimension('number_of_vertices', 3)
2862    outfile.createDimension('number_of_points', number_of_points)
2863
2864
    outfile.createDimension('number_of_timesteps', len(times))
2867
2868    # variable definitions
2869    outfile.createVariable('x', precision, ('number_of_points',))
2870    outfile.createVariable('y', precision, ('number_of_points',))
2871    outfile.createVariable('elevation', precision, ('number_of_points',))
2872
2873    #FIXME: Backwards compatibility
2874    outfile.createVariable('z', precision, ('number_of_points',))
2875    #################################
2876
2877    outfile.createVariable('volumes', Int, ('number_of_volumes',
2878                                            'number_of_vertices'))
2879
2880    outfile.createVariable('time', precision,
2881                           ('number_of_timesteps',))
2882
2883    outfile.createVariable('stage', precision,
2884                           ('number_of_timesteps',
2885                            'number_of_points'))
2886
2887    outfile.createVariable('xmomentum', precision,
2888                           ('number_of_timesteps',
2889                            'number_of_points'))
2890
2891    outfile.createVariable('ymomentum', precision,
2892                           ('number_of_timesteps',
2893                            'number_of_points'))
2894
2895
2896    #Store
2897    from anuga.coordinate_transforms.redfearn import redfearn
2898    x = zeros(number_of_points, Float)  #Easting
2899    y = zeros(number_of_points, Float)  #Northing
2900
2901
2902    if verbose: print 'Making triangular grid'
2903    #Check zone boundaries
2904    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2905
2906    vertices = {}
2907    i = 0
2908    for k, lat in enumerate(latitudes):       #Y direction
2909        for l, lon in enumerate(longitudes):  #X direction
2910
2911            vertices[l,k] = i
2912
2913            zone, easting, northing = redfearn(lat,lon)
2914
            msg = 'Zone boundary crossed at longitude = %f' %lon
2916            #assert zone == refzone, msg
2917            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2918            x[i] = easting
2919            y[i] = northing
2920            i += 1
2921
2922
2923    #Construct 2 triangles per 'rectangular' element
2924    volumes = []
2925    for l in range(number_of_longitudes-1):    #X direction
2926        for k in range(number_of_latitudes-1): #Y direction
2927            v1 = vertices[l,k+1]
2928            v2 = vertices[l,k]
2929            v3 = vertices[l+1,k+1]
2930            v4 = vertices[l+1,k]
2931
2932            volumes.append([v1,v2,v3]) #Upper element
2933            volumes.append([v4,v3,v2]) #Lower element
2934
2935    volumes = array(volumes)
2936
    if origin is None:
2938        zone = refzone
2939        xllcorner = min(x)
2940        yllcorner = min(y)
2941    else:
2942        zone = origin[0]
2943        xllcorner = origin[1]
2944        yllcorner = origin[2]
2945
2946
2947    outfile.xllcorner = xllcorner
2948    outfile.yllcorner = yllcorner
2949    outfile.zone = zone
2950
2951
2952    if elevation is not None:
2953        z = elevation
2954    else:
2955        if inverted_bathymetry:
2956            z = -1*elevations
2957        else:
2958            z = elevations
2959    #FIXME: z should be obtained from MOST and passed in here
2960
2961    from Numeric import resize
2962    z = resize(z,outfile.variables['z'][:].shape)
2963    outfile.variables['x'][:] = x - xllcorner
2964    outfile.variables['y'][:] = y - yllcorner
2965    outfile.variables['z'][:] = z             #FIXME HACK for bacwards compat.
2966    outfile.variables['elevation'][:] = z
2967    outfile.variables['time'][:] = times   #Store time relative
2968    outfile.variables['volumes'][:] = volumes.astype(Int32) #For Opteron 64
2969
2970
2971
2972    #Time stepping
2973    stage = outfile.variables['stage']
2974    xmomentum = outfile.variables['xmomentum']
2975    ymomentum = outfile.variables['ymomentum']
2976
2977    if verbose: print 'Converting quantities'
2978    n = len(times)
2979    for j in range(n):
2980        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2981        i = 0
2982        for k in range(number_of_latitudes):      #Y direction
2983            for l in range(number_of_longitudes): #X direction
2984                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2985                stage[j,i] = w
2986                h = w - z[i]
2987                xmomentum[j,i] = uspeed[j,k,l]/100*h
2988                ymomentum[j,i] = vspeed[j,k,l]/100*h
2989                i += 1
2990
2991    #outfile.close()
2992
2993    #FIXME: Refactor using code from file_function.statistics
2994    #Something like print swwstats(swwname)
2995    if verbose:
2996        x = outfile.variables['x'][:]
2997        y = outfile.variables['y'][:]
2998        times = outfile.variables['time'][:]
2999        print '------------------------------------------------'
3000        print 'Statistics of output file:'
3001        print '  Name: %s' %swwname
3002        print '  Reference:'
3003        print '    Lower left corner: [%f, %f]'\
3004              %(xllcorner, yllcorner)
3005        print '    Start time: %f' %starttime
3006        print '  Extent:'
3007        print '    x [m] in [%f, %f], len(x) == %d'\
3008              %(min(x.flat), max(x.flat), len(x.flat))
3009        print '    y [m] in [%f, %f], len(y) == %d'\
3010              %(min(y.flat), max(y.flat), len(y.flat))
3011        print '    t [s] in [%f, %f], len(t) == %d'\
3012              %(min(times), max(times), len(times))
3013        print '  Quantities [SI units]:'
3014        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
3015            q = outfile.variables[name][:].flat
3016            print '    %s in [%f, %f]' %(name, min(q), max(q))
3017
3018
3019
3020    outfile.close()
3021
3022
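def _example_ferret2sww():
    # Usage sketch (illustrative only; 'most_output' is a hypothetical
    # basename): expects most_output_ha.nc, most_output_ua.nc,
    # most_output_va.nc and most_output_e.nc and writes most_output.sww,
    # cropped to the given lat/lon window.
    ferret2sww('most_output',
               minlat=-35.0, maxlat=-34.0,
               minlon=150.0, maxlon=151.0,
               mean_stage=0.0,
               verbose=True)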
3023
3024
3025
3026def timefile2netcdf(filename, quantity_names = None):
3027    """Template for converting typical text files with time series to
3028    NetCDF tms file.
3029
3030
    The file format is assumed to be two fields separated by a comma:

        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...

    E.g.

      31/08/04 00:00:00, 1.328223 0 0
      31/08/04 00:15:00, 1.292912 0 0

    will provide a time dependent function f(t) with three attributes.

    filename is assumed to be the rootname with extensions .txt and .tms
3043    """
3044
3045    import time, calendar
3046    from Numeric import array
3047    from anuga.config import time_format
3048    from anuga.utilities.numerical_tools import ensure_numeric
3049
3050
3051
3052    fid = open(filename + '.txt')
3053    line = fid.readline()
3054    fid.close()
3055
3056    fields = line.split(',')
    msg = 'File %s must have the format date, value0 value1 value2 ...'\
          %filename
    assert len(fields) == 2, msg
3059
3060    try:
3061        starttime = calendar.timegm(time.strptime(fields[0], time_format))
3062    except ValueError:
3063        msg = 'First field in file %s must be' %filename
3064        msg += ' date-time with format %s.\n' %time_format
3065        msg += 'I got %s instead.' %fields[0]
3066        raise DataTimeError, msg
3067
3068
3069    #Split values
3070    values = []
3071    for value in fields[1].split():
3072        values.append(float(value))
3073
3074    q = ensure_numeric(values)
3075
3076    msg = 'ERROR: File must contain at least one independent value'
3077    assert len(q.shape) == 1, msg
3078
3079
3080
3081    #Read times proper
3082    from Numeric import zeros, Float, alltrue
3083    from anuga.config import time_format
3084    import time, calendar
3085
3086    fid = open(filename + '.txt')
3087    lines = fid.readlines()
3088    fid.close()
3089
3090    N = len(lines)
3091    d = len(q)
3092
3093    T = zeros(N, Float)       #Time
3094    Q = zeros((N, d), Float)  #Values
3095
3096    for i, line in enumerate(lines):
3097        fields = line.split(',')
3098        realtime = calendar.timegm(time.strptime(fields[0], time_format))
3099
3100        T[i] = realtime - starttime
3101
3102        for j, value in enumerate(fields[1].split()):
3103            Q[i, j] = float(value)
3104
    msg = 'File %s must list time as a monotonically ' %filename
3106    msg += 'increasing sequence'
3107    assert alltrue( T[1:] - T[:-1] > 0 ), msg
3108
3109
3110    #Create NetCDF file
3111    from Scientific.IO.NetCDF import NetCDFFile
3112
3113    fid = NetCDFFile(filename + '.tms', 'w')
3114
3115
3116    fid.institution = 'Geoscience Australia'
3117    fid.description = 'Time series'
3118
3119
3120    #Reference point
3121    #Start time in seconds since the epoch (midnight 1/1/1970)
3122    #FIXME: Use Georef
3123    fid.starttime = starttime
3124
3125
3126    # dimension definitions
3127    #fid.createDimension('number_of_volumes', self.number_of_volumes)
3128    #fid.createDimension('number_of_vertices', 3)
3129
3130
3131    fid.createDimension('number_of_timesteps', len(T))
3132
3133    fid.createVariable('time', Float, ('number_of_timesteps',))
3134
3135    fid.variables['time'][:] = T
3136
3137    for i in range(Q.shape[1]):
        try:
            name = quantity_names[i]
        except (TypeError, IndexError):
            name = 'Attribute%d'%i
3142
3143        fid.createVariable(name, Float, ('number_of_timesteps',))
3144        fid.variables[name][:] = Q[:,i]
3145
3146    fid.close()
3147
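def _example_timefile2netcdf():
    # Usage sketch (illustrative only; 'gauge' is a hypothetical
    # rootname): writes a two-line time series in the expected format,
    # then converts gauge.txt to gauge.tms.
    fid = open('gauge.txt', 'w')
    fid.write('31/08/04 00:00:00, 1.328223 0 0\n')
    fid.write('31/08/04 00:15:00, 1.292912 0 0\n')
    fid.close()
    timefile2netcdf('gauge', quantity_names=['stage', 'xmom', 'ymom'])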
3148
def extent_sww(file_name):
    """
    Read in an sww file and return its extent.

    Input:
    file_name - the sww file

    Output:
    list of [min(x), max(x), min(y), max(y), min(stage), max(stage)]
    """
3163
3164
3165    from Scientific.IO.NetCDF import NetCDFFile
3166
3167    #Check contents
3168    #Get NetCDF
3169    fid = NetCDFFile(file_name, 'r')
3170
3171    # Get the variables
3172    x = fid.variables['x'][:]
3173    y = fid.variables['y'][:]
3174    stage = fid.variables['stage'][:]
3175    #print "stage",stage
3176    #print "stage.shap",stage.shape
3177    #print "min(stage.flat), mpythonax(stage.flat)",min(stage.flat), max(stage.flat)
3178    #print "min(stage)",min(stage)
3179
3180    fid.close()
3181
3182    return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
3183
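def _example_extent_sww():
    # Usage sketch (illustrative only; 'runup.sww' is a hypothetical
    # file): report the spatial extent and stage range.
    xmin, xmax, ymin, ymax, stage_min, stage_max = extent_sww('runup.sww')
    print 'x in [%f, %f], y in [%f, %f]' %(xmin, xmax, ymin, ymax)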
3184
3185def sww2domain(filename,boundary=None,t=None,\
3186               fail_if_NaN=True,NaN_filler=0\
3187               ,verbose = False,very_verbose = False):
3188    """
3189    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))
3190
3191    Boundary is not recommended if domain.smooth is not selected, as it
3192    uses unique coordinates, but not unique boundaries. This means that
3193    the boundary file will not be compatable with the coordinates, and will
3194    give a different final boundary, or crash.
3195    """
    #Initialise NaN with the default NetCDF fill value
    NaN = 9.969209968386869e+036
3198
3199    from Scientific.IO.NetCDF import NetCDFFile
3200    from shallow_water import Domain
3201    from Numeric import asarray, transpose, resize
3202
3203    if verbose: print 'Reading from ', filename
3204    fid = NetCDFFile(filename, 'r')    #Open existing file for read
3205    time = fid.variables['time']       #Timesteps
3206    if t is None:
3207        t = time[-1]
3208    time_interp = get_time_interp(time,t)
3209
3210    # Get the variables as Numeric arrays
3211    x = fid.variables['x'][:]             #x-coordinates of vertices
3212    y = fid.variables['y'][:]             #y-coordinates of vertices
3213    elevation = fid.variables['elevation']     #Elevation
3214    stage = fid.variables['stage']     #Water level
3215    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
3216    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
3217
3218    starttime = fid.starttime[0]
3219    volumes = fid.variables['volumes'][:] #Connectivity
3220    coordinates=transpose(asarray([x.tolist(),y.tolist()]))
3221
3222    conserved_quantities = []
3223    interpolated_quantities = {}
3224    other_quantities = []
3225
3226    # get geo_reference
3227    #sww files don't have to have a geo_ref
3228    try:
3229        geo_reference = Geo_reference(NetCDFObject=fid)
3230    except: #AttributeError, e:
3231        geo_reference = None
3232
3233    if verbose: print '    getting quantities'
3234    for quantity in fid.variables.keys():
3235        dimensions = fid.variables[quantity].dimensions
3236        if 'number_of_timesteps' in dimensions:
3237            conserved_quantities.append(quantity)
3238            interpolated_quantities[quantity]=\
3239                  interpolated_quantity(fid.variables[quantity][:],time_interp)
3240        else: other_quantities.append(quantity)
3241
3242    other_quantities.remove('x')
3243    other_quantities.remove('y')
3244    other_quantities.remove('z')
3245    other_quantities.remove('volumes')
3246
3247    conserved_quantities.remove('time')
3248
3249    if verbose: print '    building domain'
3250    #    From domain.Domain:
3251    #    domain = Domain(coordinates, volumes,\
3252    #                    conserved_quantities = conserved_quantities,\
3253    #                    other_quantities = other_quantities,zone=zone,\
3254    #                    xllcorner=xllcorner, yllcorner=yllcorner)
3255
3256    #   From shallow_water.Domain:
3257    coordinates=coordinates.tolist()
3258    volumes=volumes.tolist()
3259    #FIXME:should this be in mesh?(peter row)
3260    if fid.smoothing == 'Yes': unique = False
3261    else: unique = True
3262    if unique:
3263        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)
3264
3265
3266    try:
3267        domain = Domain(coordinates, volumes, boundary)
3268    except AssertionError, e:
3269        fid.close()
3270        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
3271        raise DataDomainError, msg
3272
3273    if not boundary is None:
3274        domain.boundary = boundary
3275
3276    domain.geo_reference = geo_reference
3277
3278    domain.starttime=float(starttime)+float(t)
3279    domain.time=0.0
3280
3281    for quantity in other_quantities:
        try:
            NaN = fid.variables[quantity].missing_value
        except AttributeError:
            pass #quantity has no missing_value number
3286        X = fid.variables[quantity][:]
3287        if very_verbose:
3288            print '       ',quantity
3289            print '        NaN =',NaN
3290            print '        max(X)'
3291            print '       ',max(X)
3292            print '        max(X)==NaN'
3293            print '       ',max(X)==NaN
3294            print ''
3295        if (max(X)==NaN) or (min(X)==NaN):
3296            if fail_if_NaN:
3297                msg = 'quantity "%s" contains no_data entry'%quantity
3298                raise DataMissingValuesError, msg
3299            else:
                data = (X != NaN)
3301                X = (X*data)+(data==0)*NaN_filler
3302        if unique:
3303            X = resize(X,(len(X)/3,3))
3304        domain.set_quantity(quantity,X)
3305    #
3306    for quantity in conserved_quantities:
3307        try:
3308            NaN = fid.variables[quantity].missing_value
3309        except AttributeError:
3310            NaN = None #quantity has no missing_value number
3311        X = interpolated_quantities[quantity]
3312        if very_verbose:
3313            print '       ',quantity
3314            print '        NaN =',NaN
3315            print '        max(X)'
3316            print '       ',max(X)
3317            print '        max(X)==NaN'
3318            print '       ',max(X)==NaN
3319            print ''
3320        if NaN is not None and ((max(X)==NaN) or (min(X)==NaN)):
3321            if fail_if_NaN:
3322                msg = 'quantity "%s" contains no_data entry'%quantity
3323                raise DataMissingValuesError, msg
3324            else:
3325                data = (X!=NaN)
3326                X = (X*data)+(data==0)*NaN_filler
3327        if unique:
3328            X = resize(X,(X.shape[0]/3,3))
3329        domain.set_quantity(quantity,X)
3330
3331    fid.close()
3332    return domain
3333
3334def interpolated_quantity(saved_quantity,time_interp):
3335
3336    #given an index and ratio, interpolate quantity with respect to time.
3337    index,ratio = time_interp
3338    Q = saved_quantity
3339    if ratio > 0:
3340        q = (1-ratio)*Q[index]+ ratio*Q[index+1]
3341    else:
3342        q = Q[index]
3343    #Return vector of interpolated values
3344    return q
3345
3346def get_time_interp(time,t=None):
3347    #Finds the ratio and index for time interpolation.
3348    #It is borrowed from previous abstract_2d_finite_volumes code.
3349    if t is None:
3350        t=time[-1]
3351        index = -1
3352        ratio = 0.
3353    else:
3354        T = time
3355        tau = t
3356        index=0
3357        msg = 'Time interval derived from file %s [%s:%s]'\
3358            %('FIXMEfilename', T[0], T[-1])
3359        msg += ' does not match model time: %s' %tau
3360        if tau < time[0]: raise DataTimeError, msg
3361        if tau > time[-1]: raise DataTimeError, msg
3362        while tau > time[index]: index += 1
3363        while tau < time[index]: index -= 1
3364        if tau == time[index]:
3365            #Protect against case where tau == time[-1] (last time)
3366            # - also works in general when tau == time[i]
3367            ratio = 0
3368        else:
3369            #t is now between index and index+1
3370            ratio = (tau - time[index])/(time[index+1] - time[index])
3371    return (index,ratio)
3372
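#Illustrative example (not part of the original module): how get_time_interp
#and interpolated_quantity cooperate. The (index, ratio) pair selects the two
#stored timesteps bracketing t and blends them linearly. Values are made up.
def _example_time_interpolation():
    times = [0.0, 10.0, 20.0]            #Times as stored in an sww file
    Q = array([[0.0, 0.0],               #Quantity at t=0 (two points)
               [2.0, 4.0],               #Quantity at t=10
               [4.0, 8.0]])              #Quantity at t=20
    index, ratio = get_time_interp(times, t=15.0)  #-> (1, 0.5)
    q = interpolated_quantity(Q, (index, ratio))   #-> [3.0, 6.0]
    print 'Values at t=15.0:', q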
3373
3374def weed(coordinates,volumes,boundary = None):
3375    if type(coordinates).__name__ == 'array':
3376        coordinates = coordinates.tolist()
3377    if type(volumes).__name__ == 'array':
3378        volumes = volumes.tolist()
3379
3380    unique = False
3381    point_dict = {}
3382    same_point = {}
3383    for i in range(len(coordinates)):
3384        point = tuple(coordinates[i])
3385        if point_dict.has_key(point):
3386            unique = True
3387            same_point[i]=point
3388            #to change all point i references to point j
3389        else:
3390            point_dict[point]=i
3391            same_point[i]=point
3392
3393    coordinates = []
3394    i = 0
3395    for point in point_dict.keys():
3396        point = tuple(point)
3397        coordinates.append(list(point))
3398        point_dict[point]=i
3399        i+=1
3400
3401
3402    for volume in volumes:
3403        for i in range(len(volume)):
3404            index = volume[i]
3405            if index>-1:
3406                volume[i]=point_dict[same_point[index]]
3407
3408    new_boundary = {}
3409    if boundary is not None:
3410        for segment in boundary.keys():
3411            point0 = point_dict[same_point[segment[0]]]
3412            point1 = point_dict[same_point[segment[1]]]
3413            label = boundary[segment]
3414            #FIXME should the boundary attributes be concatenated
3415            #('exterior, pond') or replaced ('pond')(peter row)
3416
3417            if new_boundary.has_key((point0,point1)):
3418                pass #keep the existing label
3419                     #(could instead concatenate: +','+label)
3420
3421            elif new_boundary.has_key((point1,point0)):
3422                pass #keep the existing label
3423                     #(could instead concatenate: +','+label)
3424            else: new_boundary[(point0,point1)]=label
3425
3426        boundary = new_boundary
3427
3428    return coordinates,volumes,boundary
3429
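#Illustrative example (not part of the original module): weed collapses
#duplicated vertices - as stored by smoothed sww files, where every triangle
#keeps its own copy of each corner - and remaps the volumes to match.
def _example_weed():
    coordinates = [[0.0,0.0], [1.0,0.0], [0.0,1.0],
                   [1.0,0.0], [1.0,1.0], [0.0,1.0]] #points 3,5 repeat 1,2
    volumes = [[0,1,2], [3,4,5]]
    coordinates, volumes, _ = weed(coordinates, volumes)
    print len(coordinates), 'unique points'         #4
    print 'remapped volumes:', volumes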
3430
3431def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
3432                 verbose=False):
3433    """Read Digitial Elevation model from the following NetCDF format (.dem)
3434
3435    Example:
3436
3437    ncols         3121
3438    nrows         1800
3439    xllcorner     722000
3440    yllcorner     5893000
3441    cellsize      25
3442    NODATA_value  -9999
3443    138.3698 137.4194 136.5062 135.5558 ..........
3444
3445    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
3446    """
3447
3448    import os
3449    from Scientific.IO.NetCDF import NetCDFFile
3450    from Numeric import Float, zeros, sum, reshape, equal
3451
3452    root = basename_in
3453    inname = root + '.dem'
3454
3455    #Open existing netcdf file to read
3456    infile = NetCDFFile(inname, 'r')
3457    if verbose: print 'Reading DEM from %s' %inname
3458
3459    #Read metadata
3460    ncols = infile.ncols[0]
3461    nrows = infile.nrows[0]
3462    xllcorner = infile.xllcorner[0]
3463    yllcorner = infile.yllcorner[0]
3464    cellsize = infile.cellsize[0]
3465    NODATA_value = infile.NODATA_value[0]
3466    zone = infile.zone[0]
3467    false_easting = infile.false_easting[0]
3468    false_northing = infile.false_northing[0]
3469    projection = infile.projection
3470    datum = infile.datum
3471    units = infile.units
3472
3473    dem_elevation = infile.variables['elevation']
3474
3475    #Get output file name
3476    if basename_out is None:
3477        outname = root + '_' + repr(cellsize_new) + '.dem'
3478    else:
3479        outname = basename_out + '.dem'
3480
3481    if verbose: print 'Write decimated NetCDF file to %s' %outname
3482
3483    #Determine some dimensions for decimated grid
3484    (nrows_stencil, ncols_stencil) = stencil.shape
3485    x_offset = ncols_stencil / 2
3486    y_offset = nrows_stencil / 2
3487    cellsize_ratio = int(cellsize_new / cellsize)
3488    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
3489    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio
3490
3491    #Open netcdf file for output
3492    outfile = NetCDFFile(outname, 'w')
3493
3494    #Create new file
3495    outfile.institution = 'Geoscience Australia'
3496    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
3497                           'of spatial point data'
3498    #Georeferencing
3499    outfile.zone = zone
3500    outfile.projection = projection
3501    outfile.datum = datum
3502    outfile.units = units
3503
3504    outfile.cellsize = cellsize_new
3505    outfile.NODATA_value = NODATA_value
3506    outfile.false_easting = false_easting
3507    outfile.false_northing = false_northing
3508
3509    outfile.xllcorner = xllcorner + (x_offset * cellsize)
3510    outfile.yllcorner = yllcorner + (y_offset * cellsize)
3511    outfile.ncols = ncols_new
3512    outfile.nrows = nrows_new
3513
3514    # dimension definition
3515    outfile.createDimension('number_of_points', nrows_new*ncols_new)
3516
3517    # variable definition
3518    outfile.createVariable('elevation', Float, ('number_of_points',))
3519
3520    # Get handle to the variable
3521    elevation = outfile.variables['elevation']
3522
3523    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
3524
3525    #Store data
3526    global_index = 0
3527    for i in range(nrows_new):
3528        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
3529        lower_index = global_index
3530        telev =  zeros(ncols_new, Float)
3531        local_index = 0
3532        trow = i * cellsize_ratio
3533
3534        for j in range(ncols_new):
3535            tcol = j * cellsize_ratio
3536            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]
3537
3538            #if dem contains 1 or more NODATA_values set value in
3539            #decimated dem to NODATA_value, else compute decimated
3540            #value using stencil
3541            if sum(sum(equal(tmp, NODATA_value))) > 0:
3542                telev[local_index] = NODATA_value
3543            else:
3544                telev[local_index] = sum(sum(tmp * stencil))
3545
3546            global_index += 1
3547            local_index += 1
3548
3549        upper_index = global_index
3550
3551        elevation[lower_index:upper_index] = telev
3552
3553    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'
3554
3555    infile.close()
3556    outfile.close()
3557
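#Illustrative usage (file names hypothetical): a 3x3 averaging stencil takes
#a 25m DEM down to 75m cells. Each output cell is the stencil-weighted sum
#of the input cells under it, so the weights should sum to 1.
def _example_decimate_dem():
    from Numeric import ones
    stencil = ones((3,3), Float)/9.0
    decimate_dem('my_dem', stencil, cellsize_new=75,
                 basename_out='my_dem_75', verbose=True)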
3558
3559
3560
3561def tsh2sww(filename, verbose=False): #test_tsh2sww
3562    """
3563    to check if a tsh/msh file 'looks' good.
3564    """
3565
3566    from shallow_water import Domain
3567    from anuga.abstract_2d_finite_volumes.pmesh2domain import pmesh_to_domain_instance
3568    import time, os
3569    #from data_manager import get_dataobject
3570    from os import sep, path
3571    from anuga.utilities.numerical_tools import mean
3572
3573    if verbose: print 'Creating domain from', filename
3574    domain = pmesh_to_domain_instance(filename, Domain)
3575    if verbose: print "Number of triangles = ", len(domain)
3576
3577    domain.smooth = True
3578    domain.format = 'sww'   #Native netcdf visualisation format
3579    file_path, filename = path.split(filename)
3580    filename, ext = path.splitext(filename)
3581    domain.set_name(filename)
3582    domain.reduction = mean
3583    if verbose: print "file_path", file_path
3584    if file_path == "": file_path = "."
3585    domain.set_datadir(file_path)
3586
3587    if verbose:
3588        print "Output written to " + domain.get_datadir() + sep + \
3589              domain.get_name() + "." + domain.format
3590    sww = get_dataobject(domain)
3591    sww.store_connectivity()
3592    sww.store_timestep('stage')
3593
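#Illustrative usage (file name hypothetical): write mymesh.sww next to
#mymesh.tsh so the mesh can be eyeballed in the Swollen viewer.
def _example_tsh2sww():
    tsh2sww('mymesh.tsh', verbose=True)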
3594
3595def asc_csiro2sww(bath_dir,
3596                  elevation_dir,
3597                  ucur_dir,
3598                  vcur_dir,
3599                  sww_file,
3600                  minlat = None, maxlat = None,
3601                  minlon = None, maxlon = None,
3602                  zscale=1,
3603                  mean_stage = 0,
3604                  fail_on_NaN = True,
3605                  elevation_NaN_filler = 0,
3606                  bath_prefix='ba',
3607                  elevation_prefix='el',
3608                  verbose=False):
3609    """
3610    Produce an sww boundary file, from esri ascii data from CSIRO.
3611
3612    Also convert latitude and longitude to UTM. All coordinates are
3613    assumed to be given in the GDA94 datum.
3614
3615    assume:
3616    All files are in esri ascii format
3617
3618    4 types of information
3619    bathymetry
3620    elevation
3621    u velocity
3622    v velocity
3623
3624    Assumptions
3625    The metadata of all the files is the same
3626    Each type is in a separate directory
3627    One bath file with extension .000
3628    The time period is less than 24hrs and uniform.
3629    """
3630    from Scientific.IO.NetCDF import NetCDFFile
3631
3632    from anuga.coordinate_transforms.redfearn import redfearn
3633
3634    precision = Float # So if we want to change the precision its done here
3635
3636    # go in to the bath dir and load the only file,
3637    bath_files = os.listdir(bath_dir)
3638    #print "bath_files",bath_files
3639
3640    #fixme: make more general?
3641    bath_file = bath_files[0]
3642    bath_dir_file =  bath_dir + os.sep + bath_file
3643    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
3644    #print "bath_metadata",bath_metadata
3645    #print "bath_grid",bath_grid
3646
3647    #Use the date.time of the bath file as a basis for
3648    #the start time for other files
3649    base_start = bath_file[-12:]
3650
3651    #go into the elevation dir and load the 000 file
3652    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
3653                         + base_start
3654    #elevation_metadata,elevation_grid =  _read_asc(elevation_dir_file)
3655    #print "elevation_dir_file",elevation_dir_file
3656    #print "elevation_grid", elevation_grid
3657
3658    elevation_files = os.listdir(elevation_dir)
3659    ucur_files = os.listdir(ucur_dir)
3660    vcur_files = os.listdir(vcur_dir)
3661    elevation_files.sort()
3662    # the first elevation file should be the
3663    # file with the same base name as the bath data
3664    #print "elevation_files",elevation_files
3665    #print "'el' + base_start",'el' + base_start
3666    assert elevation_files[0] == 'el' + base_start
3667
3668    #assert bath_metadata == elevation_metadata
3669
3670
3671
3672    number_of_latitudes = bath_grid.shape[0]
3673    number_of_longitudes = bath_grid.shape[1]
3674    #number_of_times = len(os.listdir(elevation_dir))
3675    #number_of_points = number_of_latitudes*number_of_longitudes
3676    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3677
3678    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
3679                  for x in range(number_of_longitudes)]
3680    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
3681                 for y in range(number_of_latitudes)]
3682
3683     # reverse order of lat, so the first lat represents the first grid row
3684    latitudes.reverse()
3685
3686    #print "latitudes - before _get_min_max_indexes",latitudes
3687    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],longitudes[:],
3688                                                 minlat=minlat, maxlat=maxlat,
3689                                                 minlon=minlon, maxlon=maxlon)
3690
3691
3692    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
3693    #print "bath_grid",bath_grid
3694    latitudes = latitudes[kmin:kmax]
3695    longitudes = longitudes[lmin:lmax]
3696    number_of_latitudes = len(latitudes)
3697    number_of_longitudes = len(longitudes)
3698    number_of_times = len(os.listdir(elevation_dir))
3699    number_of_points = number_of_latitudes*number_of_longitudes
3700    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3701    #print "number_of_points",number_of_points
3702
3703    #Work out the times
3704    if len(elevation_files) > 1:
3705        # Assume: The time period is less than 24hrs.
3706        time_period = (int(elevation_files[1][-3:]) - \
3707                      int(elevation_files[0][-3:]))*60*60
3708        times = [x*time_period for x in range(len(elevation_files))]
3709    else:
3710        times = [0.0]
3711    #print "times", times
3712    #print "number_of_latitudes", number_of_latitudes
3713    #print "number_of_longitudes", number_of_longitudes
3714    #print "number_of_times", number_of_times
3715    #print "latitudes", latitudes
3716    #print "longitudes", longitudes
3717
3718
3719    if verbose:
3720        print '------------------------------------------------'
3721        print 'Statistics:'
3722        print '  Extent (lat/lon):'
3723        print '    lat in [%f, %f], len(lat) == %d'\
3724              %(min(latitudes), max(latitudes),
3725                len(latitudes))
3726        print '    lon in [%f, %f], len(lon) == %d'\
3727              %(min(longitudes), max(longitudes),
3728                len(longitudes))
3729        print '    t in [%f, %f], len(t) == %d'\
3730              %(min(times), max(times), len(times))
3731
3732    ######### WRITE THE SWW FILE #############
3733    # NetCDF file definition
3734    outfile = NetCDFFile(sww_file, 'w')
3735
3736    #Create new file
3737    outfile.institution = 'Geoscience Australia'
3738    outfile.description = 'Converted from XXX'
3739
3740
3741    #For sww compatibility
3742    outfile.smoothing = 'Yes'
3743    outfile.order = 1
3744
3745    #Start time in seconds since the epoch (midnight 1/1/1970)
3746    outfile.starttime = starttime = times[0]
3747
3748
3749    # dimension definitions
3750    outfile.createDimension('number_of_volumes', number_of_volumes)
3751
3752    outfile.createDimension('number_of_vertices', 3)
3753    outfile.createDimension('number_of_points', number_of_points)
3754    outfile.createDimension('number_of_timesteps', number_of_times)
3755
3756    # variable definitions
3757    outfile.createVariable('x', precision, ('number_of_points',))
3758    outfile.createVariable('y', precision, ('number_of_points',))
3759    outfile.createVariable('elevation', precision, ('number_of_points',))
3760
3761    #FIXME: Backwards compatibility
3762    outfile.createVariable('z', precision, ('number_of_points',))
3763    #################################
3764
3765    outfile.createVariable('volumes', Int, ('number_of_volumes',
3766                                            'number_of_vertices'))
3767
3768    outfile.createVariable('time', precision,
3769                           ('number_of_timesteps',))
3770
3771    outfile.createVariable('stage', precision,
3772                           ('number_of_timesteps',
3773                            'number_of_points'))
3774
3775    outfile.createVariable('xmomentum', precision,
3776                           ('number_of_timesteps',
3777                            'number_of_points'))
3778
3779    outfile.createVariable('ymomentum', precision,
3780                           ('number_of_timesteps',
3781                            'number_of_points'))
3782
3783    #Store
3784    from anuga.coordinate_transforms.redfearn import redfearn
3785    x = zeros(number_of_points, Float)  #Easting
3786    y = zeros(number_of_points, Float)  #Northing
3787
3788    if verbose: print 'Making triangular grid'
3789    #Get zone of 1st point.
3790    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
3791
3792    vertices = {}
3793    i = 0
3794    for k, lat in enumerate(latitudes):
3795        for l, lon in enumerate(longitudes):
3796
3797            vertices[l,k] = i
3798
3799            zone, easting, northing = redfearn(lat,lon)
3800
3801            msg = 'Zone boundary crossed at longitude = %s' %lon
3802            #assert zone == refzone, msg
3803            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
3804            x[i] = easting
3805            y[i] = northing
3806            i += 1
3807
3808
3809    #Construct 2 triangles per 'rectangular' element
3810    volumes = []
3811    for l in range(number_of_longitudes-1):    #X direction
3812        for k in range(number_of_latitudes-1): #Y direction
3813            v1 = vertices[l,k+1]
3814            v2 = vertices[l,k]
3815            v3 = vertices[l+1,k+1]
3816            v4 = vertices[l+1,k]
3817
3818            #Note, this is different to the ferret2sww code
3819            #since the order of the lats is reversed.
3820            volumes.append([v1,v3,v2]) #Upper element
3821            volumes.append([v4,v2,v3]) #Lower element
3822
3823    volumes = array(volumes)
3824
3825    geo_ref = Geo_reference(refzone,min(x),min(y))
3826    geo_ref.write_NetCDF(outfile)
3827
3828    # This will put the geo ref in the middle
3829    #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
3830
3831
3832    if verbose:
3833        print '------------------------------------------------'
3834        print 'More Statistics:'
3835        print '  Extent (x/y):'
3836        print '    x in [%f, %f], len(x) == %d'\
3837              %(min(x), max(x),
3838                len(x))
3839        print '    y in [%f, %f], len(y) == %d'\
3840              %(min(y), max(y),
3841                len(y))
3842        print 'geo_ref: ',geo_ref
3843
3844    z = resize(bath_grid,outfile.variables['z'][:].shape)
3845    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
3846    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
3847    outfile.variables['z'][:] = z
3848    outfile.variables['elevation'][:] = z  #FIXME HACK
3849    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
3850
3851    # do this to create an ok sww file.
3852    #outfile.variables['time'][:] = [0]   #Store time relative
3853    #outfile.variables['stage'] = z
3854    # put the next line up in the code after outfile.order = 1
3855    #number_of_times = 1
3856
3857    stage = outfile.variables['stage']
3858    xmomentum = outfile.variables['xmomentum']
3859    ymomentum = outfile.variables['ymomentum']
3860
3861    outfile.variables['time'][:] = times   #Store time relative
3862
3863    if verbose: print 'Converting quantities'
3864    n = number_of_times
3865    for j in range(number_of_times):
3866        # load in files
3867        elevation_meta, elevation_grid = \
3868           _read_asc(elevation_dir + os.sep + elevation_files[j])
3869
3870        _, u_momentum_grid =  _read_asc(ucur_dir + os.sep + ucur_files[j])
3871        _, v_momentum_grid =  _read_asc(vcur_dir + os.sep + vcur_files[j])
3872
3873        #print "elevation_grid",elevation_grid
3874        #cut matrix to desired size
3875        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
3876        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
3877        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
3878        #print "elevation_grid",elevation_grid
3879        # handle missing values
3880        missing = (elevation_grid == elevation_meta['NODATA_value'])
3881        if sometrue(missing):
3882            if fail_on_NaN:
3883                msg = 'File %s contains missing values'\
3884                      %(elevation_files[j])
3885                raise DataMissingValuesError, msg
3886            else:
3887                elevation_grid = elevation_grid*(missing==0) + \
3888                                 missing*elevation_NaN_filler
3889
3890
3891        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
3892        i = 0
3893        for k in range(number_of_latitudes):      #Y direction
3894            for l in range(number_of_longitudes): #X direction
3895                w = zscale*elevation_grid[k,l] + mean_stage
3896                stage[j,i] = w
3897                h = w - z[i]
3898                xmomentum[j,i] = u_momentum_grid[k,l]*h
3899                ymomentum[j,i] = v_momentum_grid[k,l]*h
3900                i += 1
3901    outfile.close()
3902
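#Illustrative usage (directory names hypothetical): convert a set of CSIRO
#esri ascii grids (bathymetry 'ba*', elevation 'el*', u and v currents) into
#a single boundary sww, clipped to a lat/lon window.
def _example_asc_csiro2sww():
    asc_csiro2sww('bath_dir', 'elevation_dir', 'ucur_dir', 'vcur_dir',
                  'boundary.sww',
                  minlat=-38.0, maxlat=-37.0,
                  minlon=148.0, maxlon=149.0,
                  verbose=True)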
3903def _get_min_max_indexes(latitudes_ref,longitudes_ref,
3904                        minlat=None, maxlat=None,
3905                        minlon=None, maxlon=None):
3906    """
3907    return min, max indexes (for slicing) of the lat and long arrays to
3908    cover the area specified with min/max lat/long
3909
3910    Think of the latitudes and longitudes describing a 2d surface.
3911    The area returned is, if possible, just big enough to cover the
3912    input max/min area. (This will not be possible if the max/min area
3913    has a section outside of the latitudes/longitudes area.)
3914
3915    assert longitudes are sorted,
3916    long - from low to high (west to east, eg 148 - 151)
3917    assert latitudes are sorted, ascending or descending
3918    """
3919    latitudes = latitudes_ref[:]
3920    longitudes = longitudes_ref[:]
3921
3922    latitudes = ensure_numeric(latitudes)
3923    longitudes = ensure_numeric(longitudes)
3924   
3925    assert allclose(sort(longitudes), longitudes)
3926   
3927    lat_ascending = True
3928    if not allclose(sort(latitudes), latitudes):
3929        lat_ascending = False
3930        # reverse order of lat, so it's in ascending order         
3931        latitudes = latitudes[::-1]
3932        assert allclose(sort(latitudes), latitudes)
3933    #print "latitudes  in funct", latitudes
3934   
3935    largest_lat_index = len(latitudes)-1
3936    #Cut out a smaller extent.
3937    if minlat is None:
3938        lat_min_index = 0
3939    else:
3940        lat_min_index = searchsorted(latitudes, minlat)-1
3941        if lat_min_index <0:
3942            lat_min_index = 0
3943
3944
3945    if maxlat is None:
3946        lat_max_index = largest_lat_index #len(latitudes)
3947    else:
3948        lat_max_index = searchsorted(latitudes, maxlat)
3949        if lat_max_index > largest_lat_index:
3950            lat_max_index = largest_lat_index
3951
3952    if minlon is None:
3953        lon_min_index = 0
3954    else:
3955        lon_min_index = searchsorted(longitudes, minlon)-1
3956        if lon_min_index <0:
3957            lon_min_index = 0
3958
3959    if maxlon is None:
3960        lon_max_index = len(longitudes)
3961    else:
3962        lon_max_index = searchsorted(longitudes, maxlon)
3963
3964    # Reversing the indexes, if the lat array is descending
3965    if lat_ascending is False:
3966        lat_min_index, lat_max_index = largest_lat_index - lat_max_index , \
3967                                       largest_lat_index - lat_min_index
3968    lat_max_index = lat_max_index + 1 # taking into account how slicing works
3969    lon_max_index = lon_max_index + 1 # taking into account how slicing works
3970
3971    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
3972
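#Illustrative example (not part of the original module): the returned
#indexes are for slicing, so each max index is one past the last row/column
#needed to cover the requested window.
def _example_get_min_max_indexes():
    latitudes = [-35.0, -36.0, -37.0, -38.0]   #descending, as in the grids
    longitudes = [148.0, 149.0, 150.0, 151.0]
    kmin, kmax, lmin, lmax = _get_min_max_indexes(
        latitudes, longitudes,
        minlat=-37.5, maxlat=-36.5, minlon=149.5, maxlon=150.5)
    print latitudes[kmin:kmax]   #[-36.0, -37.0, -38.0]
    print longitudes[lmin:lmax]  #[149.0, 150.0, 151.0]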
3973
3974def _read_asc(filename, verbose=False):
3975    """Read esri file from the following ASCII format (.asc)
3976
3977    Example:
3978
3979    ncols         3121
3980    nrows         1800
3981    xllcorner     722000
3982    yllcorner     5893000
3983    cellsize      25
3984    NODATA_value  -9999
3985    138.3698 137.4194 136.5062 135.5558 ..........
3986
3987    """
3988
3989    datafile = open(filename)
3990
3991    if verbose: print 'Reading DEM from %s' %(filename)
3992    lines = datafile.readlines()
3993    datafile.close()
3994
3995    if verbose: print 'Got', len(lines), ' lines'
3996
3997    ncols = int(lines.pop(0).split()[1].strip())
3998    nrows = int(lines.pop(0).split()[1].strip())
3999    xllcorner = float(lines.pop(0).split()[1].strip())
4000    yllcorner = float(lines.pop(0).split()[1].strip())
4001    cellsize = float(lines.pop(0).split()[1].strip())
4002    NODATA_value = float(lines.pop(0).split()[1].strip())
4003
4004    assert len(lines) == nrows
4005
4006    #Store data
4007    grid = []
4008
4009    n = len(lines)
4010    for i, line in enumerate(lines):
4011        cells = line.split()
4012        assert len(cells) == ncols
4013        grid.append(array([float(x) for x in cells]))
4014    grid = array(grid)
4015
4016    return {'xllcorner':xllcorner,
4017            'yllcorner':yllcorner,
4018            'cellsize':cellsize,
4019            'NODATA_value':NODATA_value}, grid
4020
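#Illustrative example (not part of the original module): write a minimal
#esri ascii grid to a temporary file and read it back with _read_asc.
def _example_read_asc():
    import tempfile
    fd, name = tempfile.mkstemp(suffix='.asc')
    os.close(fd)
    f = open(name, 'w')
    f.write('ncols 2\nnrows 2\nxllcorner 722000\nyllcorner 5893000\n'
            'cellsize 25\nNODATA_value -9999\n'
            '1.0 2.0\n3.0 4.0\n')
    f.close()
    metadata, grid = _read_asc(name)
    print metadata['cellsize']   #25.0
    print grid                   #[[ 1. 2.] [ 3. 4.]]
    os.remove(name)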
4021
4022
4023    ####  URS 2 SWW  ###
4024
4025lon_name = 'LON'
4026lat_name = 'LAT'
4027time_name = 'TIME'
4028precision = Float # So if we want to change the precision it's done here
4029class Write_nc:
4030    """
4031    Write an nc file.
4032   
4033    Note, this should be checked to meet cdc netcdf conventions for gridded
4034    data. http://www.cdc.noaa.gov/cdc/conventions/cdc_netcdf_standard.shtml
4035   
4036    """
4037    def __init__(self,
4038                 quantity_name,
4039                 file_name,
4040                 time_step_count,
4041                 time_step,
4042                 lon,
4043                 lat):
4044        """
4045        time_step_count is the number of time steps.
4046        time_step is the time step size
4047       
4048        pre-condition: quantity_name must be 'HA', 'UA' or 'VA'.
4049        """
4050        self.quantity_name = quantity_name
4051        quantity_units= {'HA':'METERS',
4052                              'UA':'METERS/SECOND',
4053                              'VA':'METERS/SECOND'}
4054       
4055        #self.file_name = file_name
4056        self.time_step_count = time_step_count
4057        self.time_step = time_step
4058
4059        # NetCDF file definition
4060        self.outfile = NetCDFFile(file_name, 'w')
4061        outfile = self.outfile       
4062
4063        #Create new file
4064        nc_lon_lat_header(outfile, lon, lat)
4065   
4066        # TIME
4067        outfile.createDimension(time_name, None)
4068        outfile.createVariable(time_name, precision, (time_name,))
4069
4070        #QUANTITY
4071        outfile.createVariable(self.quantity_name, precision,
4072                               (time_name, lat_name, lon_name))
4073        outfile.variables[self.quantity_name].missing_value=-1.e+034
4074        outfile.variables[self.quantity_name].units= \
4075                                 quantity_units[self.quantity_name]
4076        outfile.variables[lon_name][:]= ensure_numeric(lon)
4077        outfile.variables[lat_name][:]= ensure_numeric(lat)
4078
4079        #Assume no one will be wanting to read this, while we are writing
4080        #outfile.close()
4081       
4082    def store_timestep(self, quantity_slice):
4083        """
4084        quantity_slice is the data to be stored at this time step
4085        """
4086       
4087        outfile = self.outfile
4088       
4089        # Get the variables
4090        time = outfile.variables[time_name]
4091        quantity = outfile.variables[self.quantity_name]
4092           
4093        i = len(time)
4094
4095        #Store time
4096        time[i] = i*self.time_step #self.domain.time
4097        quantity[i,:] = quantity_slice*100 # To convert from m to cm
4098                                           # And m/s to cm/sec
4099       
4100    def close(self):
4101        self.outfile.close()
4102
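#Illustrative usage (file name hypothetical): Write_nc stores one quantity
#slice per store_timestep call; values are scaled by 100 on the way in
#(m to cm, m/s to cm/s).
def _example_write_nc():
    lon = [148.0, 149.0]
    lat = [-35.0, -34.0]
    nc_file = Write_nc('HA', 'example_ha.nc',
                       time_step_count=2, time_step=60.0, lon=lon, lat=lat)
    for i in range(2):
        nc_file.store_timestep(zeros((len(lat), len(lon)), Float))
    nc_file.close()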
4103def urs2sww(basename_in='o', basename_out=None, verbose=False,
4104            remove_nc_files=True,
4105            minlat=None, maxlat=None,
4106            minlon= None, maxlon=None,
4107            mint=None, maxt=None,
4108            mean_stage=0,
4109            origin = None,
4110            zscale=1,
4111            fail_on_NaN=True,
4112            NaN_filler=0,
4113            elevation=None):
4114    """
4115    Convert URS C binary format for wave propagation to
4116    sww format native to abstract_2d_finite_volumes.
4117
4118    Specify only basename_in and read files of the form
4119    basefilename-z-mux, basefilename-e-mux and basefilename-n-mux containing
4120    relative height, x-velocity and y-velocity, respectively.
4121
4122    Also convert latitude and longitude to UTM. All coordinates are
4123    assumed to be given in the GDA94 datum. The latitude and longitude
4124    information is for  a grid.
4125
4126    min's and max's: If omitted - full extent is used.
4127    To include a value min may equal it, while max must exceed it.
4128    Lat and lon are assumed to be in decimal degrees.
4129    NOTE: minlon is the most western boundary.
4130   
4131    origin is a 3-tuple with geo referenced
4132    UTM coordinates (zone, easting, northing)
4133    It will be the origin of the sww file. This shouldn't be used,
4134    since all of anuga should be able to handle an arbitrary origin.
4135
4136
4137    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
4138    which means that latitude is the fastest
4139    varying dimension (row major order, so to speak)
4140
4141    In URS C binary the latitudes and longitudes are in ascending order.
4142    """
4143   
4144    if basename_out is None:
4145        basename_out = basename_in
4146    files_out = urs2nc(basename_in, basename_out)
4147    ferret2sww(basename_out,
4148               minlat=minlat,
4149               maxlat=maxlat,
4150               minlon=minlon,
4151               maxlon=maxlon,
4152               mint=mint,
4153               maxt=maxt,
4154               mean_stage=mean_stage,
4155               origin=origin,
4156               zscale=zscale,
4157               fail_on_NaN=fail_on_NaN,
4158               NaN_filler=NaN_filler,
4159               inverted_bathymetry=True,
4160               verbose=verbose)
4161    #print "files_out",files_out
4162    if remove_nc_files:
4163        for file_out in files_out:
4164            os.remove(file_out)
4165   
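#Illustrative usage (basenames hypothetical): read o-z-mux, o-e-mux and
#o-n-mux, write intermediate nc files and a final wave.sww, clipped in
#space and time.
def _example_urs2sww():
    urs2sww(basename_in='o', basename_out='wave',
            minlat=-38.0, maxlat=-37.0, minlon=148.0, maxlon=149.0,
            mint=0.0, maxt=3600.0, verbose=True)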
4166def urs2nc(basename_in = 'o', basename_out = 'urs'):
4167    """
4168    Convert the 3 urs files to 4 nc files.
4169
4170    The name of the urs file names must be;
4171    [basename_in]-z-mux
4172    [basename_in]-e-mux
4173    [basename_in]-n-mux
4174   
4175    """
4176    files_in = [basename_in+'-z-mux',
4177                basename_in+'-e-mux',
4178                basename_in+'-n-mux']
4179    files_out = [basename_out+'_ha.nc',
4180                 basename_out+'_ua.nc',
4181                 basename_out+'_va.nc']
4182    quantities = ['HA','UA','VA']
4183
4184    for file_name in files_in:
4185        if not os.access(file_name, os.F_OK):
4186            msg = 'File %s does not exist or is not accessible' %file_name
4187            raise IOError, msg
4188       
4189    hashed_elevation = None
4190    for file_in, file_out, quantity in zip(files_in,
4191                                           files_out,
4192                                           quantities):
4193        lonlatdep, lon, lat, depth = _binary_c2nc(file_in,
4194                                         file_out,
4195                                         quantity)
4196        #print "lonlatdep", lonlatdep
4197        if hashed_elevation is None:
4198            elevation_file = basename_out+'_e.nc'
4199            write_elevation_nc(elevation_file,
4200                                lon,
4201                                lat,
4202                                depth)
4203            hashed_elevation = myhash(lonlatdep)
4204        else:
4205            msg = "The elevation information in the mux files is inconsistent"
4206            assert hashed_elevation == myhash(lonlatdep), msg
4207    files_out.append(elevation_file)
4208    return files_out
4209   
4210def _binary_c2nc(file_in, file_out, quantity):
4211    """
4212    Reads in a quantity urs file and writes a quantity nc file.
4213    additionally, returns the depth and lat, long info,
4214    so it can be written to a file.
4215    """
4216    columns = 3 # long, lat , depth
4217    mux_file = open(file_in, 'rb')
4218   
4219    # Number of points/stations
4220    (points_num,)= unpack('i',mux_file.read(4))
4221
4222    # nt, int - Number of time steps
4223    (time_step_count,)= unpack('i',mux_file.read(4))
4224
4225    #dt, float - time step, seconds
4226    (time_step,) = unpack('f', mux_file.read(4))
4227   
4228    msg = "Bad data in the mux file."
4229    if points_num < 0:
4230        mux_file.close()
4231        raise ANUGAError, msg
4232    if time_step_count < 0:
4233        mux_file.close()
4234        raise ANUGAError, msg
4235    if time_step < 0:
4236        mux_file.close()
4237        raise ANUGAError, msg
4238   
4239    lonlatdep = p_array.array('f')
4240    lonlatdep.read(mux_file, columns * points_num)
4241    lonlatdep = array(lonlatdep, typecode=Float)   
4242    lonlatdep = reshape(lonlatdep, (points_num, columns))
4243   
4244    lon, lat, depth = lon_lat2grid(lonlatdep)
4245    lon_sorted = list(lon)
4246    lon_sorted.sort()
4247
4248    if not list(lon) == lon_sorted:
4249        msg = "Longitudes in mux file are not in ascending order"
4250        raise IOError, msg
4251    lat_sorted = list(lat)
4252    lat_sorted.sort()
4253
4254    if not list(lat) == lat_sorted:
4255        msg = "Latitudes in mux file are not in ascending order"
4256        raise IOError, msg
4257    nc_file = Write_nc(quantity,
4258                       file_out,
4259                       time_step_count,
4260                       time_step,
4261                       lon,
4262                       lat)
4263
4264    for i in range(time_step_count):
4265        #Read in a time slice  from mux file 
4266        hz_p_array = p_array.array('f')
4267        hz_p_array.read(mux_file, points_num)
4268        hz_p = array(hz_p_array, typecode=Float)
4269        hz_p = reshape(hz_p, (len(lon), len(lat)))
4270        hz_p = transpose(hz_p) #mux has lat varying fastest, nc has long v.f.
4271
4272        #write time slice to nc file
4273        nc_file.store_timestep(hz_p)
4274    mux_file.close()
4275    nc_file.close()
4276
4277    return lonlatdep, lon, lat, depth
4278   
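#Illustrative sketch (not part of the original module) of the mux layout
#that _binary_c2nc expects: number of points (int), number of timesteps
#(int), timestep in seconds (float), then (lon, lat, depth) float triples,
#one per point, followed by time_step_count slices of points_num floats.
def _example_write_mux_header(file_name, points_num, time_step_count,
                              time_step, lonlatdep_triples):
    from struct import pack
    mux_file = open(file_name, 'wb')
    mux_file.write(pack('i', points_num))
    mux_file.write(pack('i', time_step_count))
    mux_file.write(pack('f', time_step))
    for lon, lat, depth in lonlatdep_triples:
        mux_file.write(pack('fff', lon, lat, depth))
    mux_file.close()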
4279
4280def write_elevation_nc(file_out, lon, lat, depth_vector):
4281    """
4282    Write an nc elevation file.
4283    """
4284   
4285    # NetCDF file definition
4286    outfile = NetCDFFile(file_out, 'w')
4287
4288    #Create new file
4289    nc_lon_lat_header(outfile, lon, lat)
4290   
4291    # ELEVATION
4292    zname = 'ELEVATION'
4293    outfile.createVariable(zname, precision, (lat_name, lon_name))
4294    outfile.variables[zname].units='CENTIMETERS'
4295    outfile.variables[zname].missing_value=-1.e+034
4296
4297    outfile.variables[lon_name][:]= ensure_numeric(lon)
4298    outfile.variables[lat_name][:]= ensure_numeric(lat)
4299
4300    depth = reshape(depth_vector, ( len(lat), len(lon)))
4301    outfile.variables[zname][:]= depth
4302   
4303    outfile.close()
4304   
4305def nc_lon_lat_header(outfile, lon, lat):
4306    """
4307    outfile is the netcdf file handle.
4308    lon - a list/array of the longitudes
4309    lat - a list/array of the latitudes
4310    """
4311   
4312    outfile.institution = 'Geoscience Australia'
4313    outfile.description = 'Converted from URS binary C'
4314   
4315    # Longitude
4316    outfile.createDimension(lon_name, len(lon))
4317    outfile.createVariable(lon_name, precision, (lon_name,))
4318    outfile.variables[lon_name].point_spacing='uneven'
4319    outfile.variables[lon_name].units='degrees_east'
4320    outfile.variables[lon_name].assignValue(lon)
4321
4322
4323    # Latitude
4324    outfile.createDimension(lat_name, len(lat))
4325    outfile.createVariable(lat_name, precision, (lat_name,))
4326    outfile.variables[lat_name].point_spacing='uneven'
4327    outfile.variables[lat_name].units='degrees_north'
4328    outfile.variables[lat_name].assignValue(lat)
4329
4330
4331   
4332def lon_lat2grid(long_lat_dep):
4333    """
4334    given a list of points that are assumed to be an a grid,
4335    return the long's and lat's of the grid.
4336    long_lat_dep is an array where each row is a position.
4337    The first column is longitudes.
4338    The second column is latitudes.
4339
4340    The latitude is the fastest varying dimension - in mux files
4341    """
4342    LONG = 0
4343    LAT = 1
4344    QUANTITY = 2
4345
4346    long_lat_dep = ensure_numeric(long_lat_dep, Float)
4347   
4348    num_points = long_lat_dep.shape[0]
4349    this_rows_long = long_lat_dep[0,LONG]
4350
4351    # Count the length of unique latitudes
4352    i = 0
4353    while i < num_points and long_lat_dep[i,LONG] == this_rows_long:
4354        i += 1
4355    # determine the lats and longs from the grid
4356    lat = long_lat_dep[:i, LAT]       
4357    long = long_lat_dep[::i, LONG]
4358   
4359    lenlong = len(long)
4360    lenlat = len(lat)
4361    #print 'len lat', lat, len(lat)
4362    #print 'len long', long, len(long)
4363         
4364    msg = 'Input data is not gridded'     
4365    assert num_points % lenlat == 0, msg
4366    assert num_points % lenlong == 0, msg
4367         
4368    # Test that data is gridded       
4369    for i in range(lenlong):
4370        msg = 'Data is not gridded.  It must be for this operation'
4371        first = i*lenlat
4372        last = first + lenlat
4373               
4374        assert allclose(long_lat_dep[first:last,LAT], lat), msg
4375        assert allclose(long_lat_dep[first:last,LONG], long[i]), msg
4376   
4377   
4378#    print 'range long', min(long), max(long)
4379#    print 'range lat', min(lat), max(lat)
4380#    print 'ref long', min(long_lat_dep[:,0]), max(long_lat_dep[:,0])
4381#    print 'ref lat', min(long_lat_dep[:,1]), max(long_lat_dep[:,1])
4382   
4383   
4384   
4385    msg = 'Out of range latitudes/longitudes'
4386    for l in lat:assert -90 < l < 90 , msg
4387    for l in long:assert -180 < l < 180 , msg
4388
4389    # Changing quantity from lat being the fastest varying dimension to
4390    # long being the fastest varying dimension
4391    # FIXME - make this faster/do this a better way
4392    # use numeric transpose, after reshaping the quantity vector
4393#    quantity = zeros(len(long_lat_dep), Float)
4394    quantity = zeros(num_points, Float)
4395   
4396#    print 'num',num_points
4397    for lat_i, _ in enumerate(lat):
4398        for long_i, _ in enumerate(long):
4399            q_index = lat_i*lenlong+long_i
4400            lld_index = long_i*lenlat+lat_i
4401#            print 'lat_i', lat_i, 'long_i',long_i, 'q_index', q_index, 'lld_index', lld_index
4402            temp = long_lat_dep[lld_index, QUANTITY]
4403            quantity[q_index] = temp
4404           
4405    return long, lat, quantity
4406
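#Illustrative example (not part of the original module): four points on a
#2x2 grid with latitude varying fastest, as in mux files. lon_lat2grid
#recovers the axes and reorders the quantity so longitude varies fastest.
def _example_lon_lat2grid():
    long_lat_dep = [[148.0, -35.0, 10.0],
                    [148.0, -34.0, 20.0],
                    [149.0, -35.0, 30.0],
                    [149.0, -34.0, 40.0]]
    long, lat, quantity = lon_lat2grid(long_lat_dep)
    print long       #148., 149.
    print lat        #-35., -34.
    print quantity   #10., 30., 20., 40.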
4407    ####  END URS 2 SWW  ###
4408
4409    #### URS UNGRIDDED 2 SWW ###
4410
4411def URS_points_needed_to_file(file_name, boundary_polygon, ll_lat, ll_long,
4412                              grid_spacing, 
4413                      lat_amount, long_amount, zone=None):
4414    """
4415    file_name - name of the urs file produced for David.
4416    boundary_polygon - a list of points that describes a polygon.
4417                      The last point is assumed to join the first point.
4418                      This is in UTM (lat long would be better though)
4419
4420    ll_lat - lower left latitude, in decimal degrees
4421    ll-long - lower left longitude, in decimal degrees
4422    grid_spacing - in decimal degrees
4423
4424
4425    Don't add the file extension.  It will be added.
4426    """
4427    geo = URS_points_needed(boundary_polygon, ll_lat, ll_long, grid_spacing, 
4428                      lat_amount, long_amount, zone=zone)
4429    if not file_name[-4:] == ".urs":
4430        file_name += ".urs"
4431    geo.export_points_file(file_name)
4432   
4433def URS_points_needed(boundary_polygon, ll_lat, ll_long, grid_spacing, 
4434                      lat_amount, long_amount, zone=None):
4435    """
4436
4437    boundary_polygon - a list of points that describes a polygon.
4438                      The last point is assumed to join the first point.
4439                      This is in UTM (lat long would be better though)
4440
4441    ll_lat - lower left latitude, in decimal degrees
4442    ll-long - lower left longitude, in decimal degrees
4443    grid_spacing - in decimal degrees
4444
4445    """
4446   
4447    from sets import ImmutableSet
4448   
4449    msg = "grid_spacing can not be zero"
4450    assert not grid_spacing ==0, msg
4451    a = boundary_polygon
4452    # List of segments.  Each segment is two points.
4453    segs = [i and [a[i-1], a[i]] or [a[len(a)-1], a[0]] for i in range(len(a))]
4454
4455    # convert the segs to Lat's and longs.
4456    if zone is None:
4457        # Assume the zone of the segments is the same as the lower left
4458        # corner of the lat long data
4459        zone, _, _ = redfearn(ll_lat, ll_long)
4460    lat_long_set = ImmutableSet()
4461    for seg in segs:
4462        points_lat_long = points_needed(seg, ll_lat, ll_long, grid_spacing, 
4463                      lat_amount, long_amount, zone)
4464        lat_long_set |= ImmutableSet(points_lat_long)
4465    #print "lat_long_set",lat_long_set
4466    geo = Geospatial_data(data_points=list(lat_long_set),
4467                              points_are_lats_longs=True)
4468    return geo
4469   
4470def points_needed(seg, ll_lat, ll_long, grid_spacing, 
4471                  lat_amount, long_amount, zone):
4472    """
4473    return a list of the points, in lats and longs that are needed to
4474    interpolate any point on the segment.
4475    """
4476    #print "zone",zone
4477    geo_reference = Geo_reference(zone=zone)
4478    #print "seg",seg
4479    geo = Geospatial_data(seg,geo_reference=geo_reference)
4480    seg_lat_long = geo.get_data_points(as_lat_long=True)
4481    #print "seg_lat_long", seg_lat_long
4482    # 1.415 = 2^0.5, rounded up....
4483    buffer = 1.415 * grid_spacing
4484   
4485    #
4486   
4487    max_lat = max(seg_lat_long[0][0], seg_lat_long[1][0]) + buffer
4488    max_long = max(seg_lat_long[0][1], seg_lat_long[1][1]) + buffer
4489    min_lat = min(seg_lat_long[0][0], seg_lat_long[1][0]) - buffer
4490    min_long = min(seg_lat_long[0][1], seg_lat_long[1][1]) - buffer
4491
4492    #print "ll_lat", ll_lat
4493    #print "ll_long", ll_long
4494    #print "max_lat", max_lat
4495    #print "max_long", max_long
4496    #print "min_lat", min_lat
4497    #print "min_long", min_long
4498    first_row = (min_long - ll_long)/grid_spacing
4499    # To round up
4500    first_row_long = int(round(first_row + 0.5))
4501    #print "first_row", first_row_long
4502
4503    last_row = (max_long - ll_long)/grid_spacing # round down
4504    last_row_long = int(round(last_row))
4505    #print "last_row",last_row _long
4506
4507   
4508    first_row = (min_lat - ll_lat)/grid_spacing
4509    # To round up
4510    first_row_lat = int(round(first_row + 0.5))
4511    #print "first_row", first_row_lat
4512
4513    last_row = (max_lat - ll_lat)/grid_spacing # round down
4514    last_row_lat = int(round(last_row))
4515    #print "last_row",last_row_lat
4516
4517    points_lat_long = []
4518    # Create a list of the lat long points to include.
4519    for index_lat in range(first_row_lat, last_row_lat + 1):
4520        for index_long in range(first_row_long, last_row_long + 1):
4521            lat = ll_lat + index_lat*grid_spacing
4522            long = ll_long + index_long*grid_spacing
4523            points_lat_long.append((lat, long)) #must be hashable
4524    #print "points_lat_long", points_lat_long
4525    return points_lat_long   
4526   
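#Illustrative example (not part of the original module): the returned
#lat/long points are the URS grid nodes needed to interpolate anywhere on
#the UTM segment, padded by about one diagonal grid cell.
def _example_points_needed():
    zone, easting, northing = redfearn(-34.5, 150.5)
    seg = [[easting, northing], [easting + 1000.0, northing + 1000.0]]
    points = points_needed(seg, ll_lat=-35.0, ll_long=150.0,
                           grid_spacing=0.1, lat_amount=20, long_amount=20,
                           zone=zone)
    print len(points), 'grid points needed'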
4527    #### END URS UNGRIDDED 2 SWW ###
4528
4529#-------------------------------------------------------------
4530if __name__ == "__main__":
4531    pass
4532