source: anuga_core/source/anuga/shallow_water/data_manager.py @ 4150

Last change on this file since 4150 was 4059, checked in by duncan, 18 years ago

checking in, so I can checkout in another repository...

File size: 135.2 KB
1"""datamanager.py - input output for AnuGA
2
3
4This module takes care of reading and writing datafiles such as topograhies,
5model output, etc
6
7
8Formats used within AnuGA:
9
10.sww: Netcdf format for storing model output f(t,x,y)
11.tms: Netcdf format for storing time series f(t)
12
13.xya: ASCII format for storing arbitrary points and associated attributes
14.pts: NetCDF format for storing arbitrary points and associated attributes
15
16.asc: ASCII format of regular DEMs as output from ArcView
17.prj: Associated ArcView file giving more meta data for asc format
18.ers: ERMapper header format of regular DEMs for ArcView
19
20.dem: NetCDF representation of regular DEM data
21
22.tsh: ASCII format for storing meshes and associated boundary and region info
23.msh: NetCDF format for storing meshes and associated boundary and region info
24
25.nc: Native ferret NetCDF format
26.geo: Houdinis ascii geometry format (?)
27
28
29A typical dataflow can be described as follows
30
31Manually created files:
32ASC, PRJ:     Digital elevation models (gridded)
33TSH:          Triangular meshes (e.g. created from anuga.pmesh)
34NC            Model outputs for use as boundary conditions (e.g from MOST)
35
36
37AUTOMATICALLY CREATED FILES:
38
39ASC, PRJ  ->  DEM  ->  PTS: Conversion of DEM's to native pts file
40
41NC -> SWW: Conversion of MOST bundary files to boundary sww
42
43PTS + TSH -> TSH with elevation: Least squares fit
44
45TSH -> SWW:  Conversion of TSH to sww viewable using Swollen
46
47TSH + Boundary SWW -> SWW: Simluation using abstract_2d_finite_volumes
48
49"""

import exceptions
class TitleValueError(exceptions.Exception): pass
class DataMissingValuesError(exceptions.Exception): pass
class DataFileNotOpenError(exceptions.Exception): pass
class DataTimeError(exceptions.Exception): pass
class DataDomainError(exceptions.Exception): pass



import csv
import os
from struct import unpack
import array as p_array

from Numeric import concatenate, array, Float, Int, Int32, resize, sometrue, \
     searchsorted, zeros, allclose, around, reshape, transpose, sort
from Scientific.IO.NetCDF import NetCDFFile


from anuga.coordinate_transforms.geo_reference import Geo_reference
from anuga.geospatial_data.geospatial_data import Geospatial_data
from anuga.config import minimum_storable_height as default_minimum_storable_height
from anuga.utilities.numerical_tools import ensure_numeric
from anuga.caching.caching import myhash
from anuga.utilities.anuga_exceptions import ANUGAError
def make_filename(s):
    """Transform argument string into a suitable filename
    """

    s = s.strip()
    s = s.replace(' ', '_')
    s = s.replace('(', '')
    s = s.replace(')', '')
    s = s.replace('__', '_')

    return s

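# For example (worked through by hand, not executed here):
#
#     make_filename(' Fancy run (test) ')  ->  'Fancy_run_test'
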

def check_dir(path, verbose=None):
    """Check that specified path exists.
    If path does not exist it will be created if possible

    USAGE:
       check_dir(path, verbose)

    ARGUMENTS:
        path -- Directory
        verbose -- Flag verbose output (default: None)

    RETURN VALUE:
        Verified path including trailing separator

    """

    import os, sys
    import os.path

    if sys.platform in ['nt', 'dos', 'win32', 'what else?']:
        unix = 0
    else:
        unix = 1


    if path[-1] != os.sep:
        path = path + os.sep  # Add separator for directories

    path = os.path.expanduser(path) # Expand ~ or ~user in pathname
    if not (os.access(path, os.R_OK | os.W_OK) or path == ''):
        try:
            exitcode=os.mkdir(path)

            # Change access rights if possible
            #
            if unix:
                exitcode=os.system('chmod 775 '+path)
            else:
                pass  # FIXME: What about access rights under Windows?

            if verbose: print 'MESSAGE: Directory', path, 'created.'

        except:
            print 'WARNING: Directory', path, 'could not be created.'
            if unix:
                path = '/tmp/'
            else:
                path = 'C:'

            print 'Using directory %s instead' %path

    return(path)

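# Example (sketch; '~/anuga_output' is a hypothetical directory):
#
#     datadir = check_dir('~/anuga_output', verbose=1)
#     # datadir now ends with os.sep and is guaranteed to exist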


def del_dir(path):
    """Recursively delete directory path and all its contents
    """

    import os

    if os.path.isdir(path):
        for file in os.listdir(path):
            X = os.path.join(path, file)


            if os.path.isdir(X) and not os.path.islink(X):
                del_dir(X)
            else:
                try:
                    os.remove(X)
                except:
                    print "Could not remove file %s" %X

        os.rmdir(path)



def create_filename(datadir, filename, format, size=None, time=None):

    import os
    #from anuga.config import data_dir

    FN = check_dir(datadir) + filename

    if size is not None:
        FN += '_size%d' %size

    if time is not None:
        FN += '_time%.2f' %time

    FN += '.' + format
    return FN

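# For example (worked through by hand, assuming a Unix path separator):
#
#     create_filename('.', 'run1', 'sww', size=4, time=1.5)
#     ->  './run1_size4_time1.50.sww'
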

def get_files(datadir, filename, format, size):
    """Get all file (names) with given name, size and format
    """

    import glob

    import os
    #from anuga.config import data_dir

    dir = check_dir(datadir)

    #check_dir returns a trailing separator; create_filename encodes size as '_size%d'
    pattern = dir + filename + '_size%d*.%s' %(size, format)
    return glob.glob(pattern)



#Generic class for storing output to e.g. visualisation or checkpointing
class Data_format:
    """Generic interface to data formats
    """


    def __init__(self, domain, extension, mode = 'w'):
        assert mode in ['r', 'w', 'a'], '''Mode %s must be either:''' %mode +\
                                        '''   'w' (write)'''+\
                                        '''   'r' (read)''' +\
                                        '''   'a' (append)'''

        #Create filename
        self.filename = create_filename(domain.get_datadir(),
                                        domain.get_name(), extension)

        #print 'F', self.filename
        self.timestep = 0
        self.domain = domain



        # Exclude ghosts in case this is a parallel domain
        self.number_of_nodes = domain.number_of_full_nodes
        self.number_of_volumes = domain.number_of_full_triangles
        #self.number_of_volumes = len(domain)




        #FIXME: Should we have a general set_precision function?



#Class for storing output to e.g. visualisation
class Data_format_sww(Data_format):
    """Interface to native NetCDF format (.sww) for storing model output

    There are two kinds of data

    1: Constant data: Vertex coordinates and field values. Stored once
    2: Variable data: Conserved quantities. Stored once per timestep.

    All data is assumed to reside at vertex locations.
    """


    def __init__(self, domain, mode = 'w',\
                 max_size = 2000000000,
                 recursion = False):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision for quantities
        if hasattr(domain, 'max_size'):
            self.max_size = domain.max_size #file size max is 2Gig
        else:
            self.max_size = max_size
        self.recursion = recursion
        self.mode = mode

        Data_format.__init__(self, domain, 'sww', mode)

        if hasattr(domain, 'minimum_storable_height'):
            self.minimum_storable_height = domain.minimum_storable_height
        else:
            self.minimum_storable_height = default_minimum_storable_height

        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':

            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Output from anuga.abstract_2d_finite_volumes suitable for plotting'

            if domain.smooth:
                fid.smoothing = 'Yes'
            else:
                fid.smoothing = 'No'

            fid.order = domain.default_order

            if hasattr(domain, 'texture'):
                fid.texture = domain.texture
            #else:
            #    fid.texture = 'None'

            #Reference point
            #Start time in seconds since the epoch (midnight 1/1/1970)
            #FIXME: Use Georef
            fid.starttime = domain.starttime

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            if domain.smooth is True:
                #fid.createDimension('number_of_points', len(domain.vertexlist))
                fid.createDimension('number_of_points', self.domain.number_of_full_nodes)

                # FIXME(Ole): This will cause sww files for parallel domains to
                # have ghost nodes stored (but not used by triangles).
                # To clean this up, we have to change get_vertex_values and friends in
                # quantity.py (but I can't be bothered right now)
            else:
                fid.createDimension('number_of_points', 3*self.number_of_volumes)

            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))
            fid.createVariable('elevation', self.precision, ('number_of_points',))
            if domain.geo_reference is not None:
                domain.geo_reference.write_NetCDF(fid)

            #FIXME: Backwards compatibility
            fid.createVariable('z', self.precision, ('number_of_points',))
            #################################

            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', Float,  # Always use full precision lest two timesteps
                                               # close to each other may appear as the same step
                               ('number_of_timesteps',))

            fid.createVariable('stage', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('xmomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

            fid.createVariable('ymomentum', self.precision,
                               ('number_of_timesteps',
                                'number_of_points'))

        #Close
        fid.close()


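    # Example (sketch): reading back an sww file produced by this class;
    # 'run1.sww' is a hypothetical filename.
    #
    #     fid = NetCDFFile('run1.sww', 'r')
    #     stage = fid.variables['stage']   # shape (n_timesteps, n_points)
    #     print stage[0,:]                 # stage at the first stored step
    #     fid.close()
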
    def store_connectivity(self):
        """Specialisation of store_connectivity for NetCDF format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate, Int

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                                      precision=self.precision)

        volumes[:] = V.astype(volumes.typecode())
        x[:] = X
        y[:] = Y
        z[:] = Z

        #FIXME: Backwards compatibility
        z = fid.variables['z']
        z[:] = Z
        ################################



        #Close
        fid.close()

    def store_timestep(self, names):
        """Store time and named quantities to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        import types
        from time import sleep
        from os import stat

        from Numeric import choose

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                      %self.filename
                msg += ' - trying step %s again' %self.domain.time
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg



        #Check to see if the file is already too big:
        time = fid.variables['time']
        i = len(time)+1
        file_size = stat(self.filename)[6]
        file_size_increase = file_size/i
        if file_size + file_size_increase > self.max_size*(2**self.recursion):
            #In order to get the file name and start time correct,
            #I change the domain.filename and domain.starttime.
            #This is the only way to do this without changing
            #other modules (I think).

            #Write a filename addon that won't break Swollen's reader
            #(10.sww is bad)
            filename_ext = '_time_%s'%self.domain.time
            filename_ext = filename_ext.replace('.', '_')
            #Remember the old filename, then give domain a
            #name with the extension
            old_domain_filename = self.domain.get_name()
            if not self.recursion:
                self.domain.set_name(old_domain_filename+filename_ext)


            #Change the domain starttime to the current time
            old_domain_starttime = self.domain.starttime
            self.domain.starttime = self.domain.time

            #Build a new data_structure.
            next_data_structure=\
                Data_format_sww(self.domain, mode=self.mode,\
                                max_size = self.max_size,\
                                recursion = self.recursion+1)
            if not self.recursion:
                print '    file_size = %s'%file_size
                print '    saving file to %s'%next_data_structure.filename
            #Set up the new data_structure
            self.domain.writer = next_data_structure

            #FIXME - could be cleaner to use domain.store_timestep etc.
            next_data_structure.store_connectivity()
            next_data_structure.store_timestep(names)
            fid.sync()
            fid.close()

            #Restore the old starttime and filename
            self.domain.starttime = old_domain_starttime
            self.domain.set_name(old_domain_filename)
        else:
            self.recursion = False
            domain = self.domain

            # Get the variables
            time = fid.variables['time']
            stage = fid.variables['stage']
            xmomentum = fid.variables['xmomentum']
            ymomentum = fid.variables['ymomentum']
            i = len(time)

            #Store time
            time[i] = self.domain.time


            if type(names) not in [types.ListType, types.TupleType]:
                names = [names]

            for name in names:
                # Get quantity
                Q = domain.quantities[name]
                A,V = Q.get_vertex_values(xy = False,
                                          precision = self.precision)


                #FIXME: Make this general (see below)
                if name == 'stage':
                    z = fid.variables['elevation']
                    #print z[:]
                    #print A-z[:]


                    A = choose(A-z[:] >= self.minimum_storable_height,
                               (z[:], A))
                    stage[i,:] = A.astype(self.precision)
                elif name == 'xmomentum':
                    xmomentum[i,:] = A.astype(self.precision)
                elif name == 'ymomentum':
                    ymomentum[i,:] = A.astype(self.precision)

                #As in....
            #eval( name + '[i,:] = A.astype(self.precision)' )
            #FIXME: But we need a UNIT test for that before refactoring



            #Flush and close
            fid.sync()
            fid.close()


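# Example of how this writer is typically driven (sketch; 'domain' is a
# hypothetical Domain instance whose format attribute is 'sww'):
#
#     writer = get_dataobject(domain)   # see get_dataobject further down
#     writer.store_connectivity()
#     writer.store_timestep(['stage', 'xmomentum', 'ymomentum'])
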
#Class for handling checkpoints data
class Data_format_cpt(Data_format):
    """Interface to native NetCDF format (.cpt)
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float

        self.precision = Float #Use full precision

        Data_format.__init__(self, domain, 'cpt', mode)


        # NetCDF file definition
        fid = NetCDFFile(self.filename, mode)

        if mode == 'w':
            #Create new file
            fid.institution = 'Geoscience Australia'
            fid.description = 'Checkpoint data'
            #fid.smooth = domain.smooth
            fid.order = domain.default_order

            # dimension definitions
            fid.createDimension('number_of_volumes', self.number_of_volumes)
            fid.createDimension('number_of_vertices', 3)

            #Store info at all vertices (no smoothing)
            fid.createDimension('number_of_points', 3*self.number_of_volumes)
            fid.createDimension('number_of_timesteps', None) #extensible

            # variable definitions

            #Mesh
            fid.createVariable('x', self.precision, ('number_of_points',))
            fid.createVariable('y', self.precision, ('number_of_points',))


            fid.createVariable('volumes', Int, ('number_of_volumes',
                                                'number_of_vertices'))

            fid.createVariable('time', self.precision,
                               ('number_of_timesteps',))

            #Allocate space for all quantities
            for name in domain.quantities.keys():
                fid.createVariable(name, self.precision,
                                   ('number_of_timesteps',
                                    'number_of_points'))

        #Close
        fid.close()


    def store_checkpoint(self):
        """Write x,y coordinates and connectivity of the triangles
        constituting the mesh.
        """

        from Scientific.IO.NetCDF import NetCDFFile

        from Numeric import concatenate

        domain = self.domain

        #Get NetCDF
        fid = NetCDFFile(self.filename, 'a')  #Open existing file for append

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']

        volumes = fid.variables['volumes']

        # Get X, Y and bed elevation Z
        Q = domain.quantities['elevation']
        X,Y,Z,V = Q.get_vertex_values(xy=True,
                      precision = self.precision)



        x[:] = X.astype(self.precision)
        y[:] = Y.astype(self.precision)
        #Elevation Z is not stored here; quantities (including elevation)
        #are stored per timestep by store_timestep

        volumes[:] = V

        #Close
        fid.close()


    def store_timestep(self, name):
        """Store time and named quantity to file
        """
        from Scientific.IO.NetCDF import NetCDFFile
        from time import sleep

        #Get NetCDF
        retries = 0
        file_open = False
        while not file_open and retries < 10:
            try:
                fid = NetCDFFile(self.filename, 'a') #Open existing file
            except IOError:
                #This could happen if someone was reading the file.
                #In that case, wait a while and try again
                msg = 'Warning (store_timestep): File %s could not be opened'\
                  %self.filename
                msg += ' - trying again'
                print msg
                retries += 1
                sleep(1)
            else:
                file_open = True

        if not file_open:
            msg = 'File %s could not be opened for append' %self.filename
            raise DataFileNotOpenError, msg


        domain = self.domain

        # Get the variables
        time = fid.variables['time']
        quantity = fid.variables[name]  #Variable holding the named quantity
        i = len(time)

        #Store time
        time[i] = self.domain.time

        # Get quantity
        Q = domain.quantities[name]
        A,V = Q.get_vertex_values(xy=False,
                  precision = self.precision)

        quantity[i,:] = A.astype(self.precision)

        #Flush and close
        fid.sync()
        fid.close()





#Function for storing xya output
#FIXME Not done yet for this version
#This is obsolete.  Use geo_spatial_data instead
class Data_format_xya(Data_format):
    """Generic interface to data formats
    """


    def __init__(self, domain, mode = 'w'):
        from Scientific.IO.NetCDF import NetCDFFile
        from Numeric import Int, Float, Float32

        self.precision = Float32 #Use single precision

        Data_format.__init__(self, domain, 'xya', mode)



    #FIXME -This is the old xya format
    def store_all(self):
        """Specialisation of store_all for xya format

        Writes x,y,z coordinates of triangles constituting
        the bed elevation.
        """

        from Numeric import concatenate

        domain = self.domain

        fd = open(self.filename, 'w')


        if domain.smooth is True:
            number_of_points = len(domain.vertexlist)
        else:
            number_of_points = 3*self.number_of_volumes

        numVertAttrib = 3 #Three attributes is what is assumed by the xya format

        fd.write(str(number_of_points) + " " + str(numVertAttrib) +\
                 " # <vertex #> <x> <y> [attributes]" + "\n")


        # Get X, Y, bed elevation and friction (index=0,1)
        X,Y,A,V = domain.get_vertex_values(xy=True, value_array='field_values',
                                           indices = (0,1), precision = self.precision)

        bed_eles = A[:,0]
        fricts = A[:,1]

        # Get stage (index=0)
        B,V = domain.get_vertex_values(xy=False, value_array='conserved_quantities',
                                       indices = (0,), precision = self.precision)

        stages = B[:,0]

        #<vertex #> <x> <y> [attributes]
        for x, y, bed_ele, stage, frict in map(None, X, Y, bed_eles,
                                               stages, fricts):

            s = '%.6f %.6f %.6f %.6f %.6f\n' %(x, y, bed_ele, stage, frict)
            fd.write(s)

        #close
        fd.close()


    def store_timestep(self, t, V0, V1, V2):
        """Store time, water heights (and momentums) to file
        """
        pass


#### NED is national exposure database

LAT_TITLE = 'LATITUDE'
LONG_TITLE = 'LONGITUDE'
X_TITLE = 'x'
Y_TITLE = 'y'
class Exposure_csv:
    def __init__(self,file_name, latitude_title=LAT_TITLE,
                 longitude_title=LONG_TITLE, is_x_y_locations=None,
                 x_title=X_TITLE, y_title=Y_TITLE,
                 refine_polygon=None):
        """
        This class is for handling the exposure csv file.
        It reads the file in and converts the lats and longs to a geospatial
        data object.
        Use the methods to read and write columns.

        The format of the csv files it reads is:
           The first row is a title row.
           Commas are the delimiters.
           Each column is a 'set' of data.

        Feel free to use/expand it to read other csv files.

        It is not for adding and deleting rows.

        Can geospatial handle string attributes? It's not made for them.
        Currently it can't load and save string attributes.

        So just use geospatial to hold the x, y and georef? Bad, since
        different attributes are in different structures.  Not so bad, the
        info to write if the .csv file is saved is in attribute_dic.

        The location info is in the geospatial attribute.
        """
        self._file_name = file_name
        self._geospatial = None #

        # self._attribute_dic is a dictionary.
        #The keys are the column titles.
        #The values are lists of column data

        # self._title_index_dic is a dictionary.
        #The keys are the column titles.
        #The values are the index positions of file columns.
        self._attribute_dic, self._title_index_dic = \
        self._load_exposure_csv(self._file_name)
        try:
            #Have code here that handles caps or lower
            lats = self._attribute_dic[latitude_title]
            longs = self._attribute_dic[longitude_title]

        except KeyError:
            # maybe a warning..
            #Let's see if this works..
            if False != is_x_y_locations:
                is_x_y_locations = True
            pass
        else:
            self._geospatial = Geospatial_data(latitudes = lats,
                 longitudes = longs)

        if is_x_y_locations is True:
            if self._geospatial is not None:
                pass #fixme throw an error
            try:
                xs = self._attribute_dic[x_title]
                ys = self._attribute_dic[y_title]
                points = [[float(i),float(j)] for i,j in map(None,xs,ys)]
            except KeyError:
                # maybe a warning..
                msg = "Could not find location information."
                raise TitleValueError, msg
            else:
                self._geospatial = Geospatial_data(data_points=points)

        # create a list of points that are in the refining_polygon
        # described by a list of indexes representing the points

    def __cmp__(self, other):
        #print "self._attribute_dic",self._attribute_dic
        #print "other._attribute_dic",other._attribute_dic
        #print "self._title_index_dic", self._title_index_dic
        #print "other._title_index_dic", other._title_index_dic

        #check that a is an instance of this class
        if isinstance(self, type(other)):
            result = cmp(self._attribute_dic, other._attribute_dic)
            if result <> 0:
                return result
            # The order of the columns is important. Therefore..
            result = cmp(self._title_index_dic, other._title_index_dic)
            if result <> 0:
                return result
            for self_ls, other_ls in map(None,self._attribute_dic, \
                    other._attribute_dic):
                result = cmp(self._attribute_dic[self_ls],
                             other._attribute_dic[other_ls])
                if result <> 0:
                    return result
            return 0
        else:
            return 1

    def _load_exposure_csv(self, file_name):
        """
        Load in the csv as a dic, title as key and column info as value.
        Also, create a dic, title as key and column index as value,
        to keep track of the column order.
        """
        #
        attribute_dic = {}
        title_index_dic = {}
        titles_stripped = [] # list of titles

        reader = csv.reader(file(file_name))

        # Read in and manipulate the title info
        titles = reader.next()
        for i,title in enumerate(titles):
            titles_stripped.append(title.strip())
            title_index_dic[title.strip()] = i
        title_count = len(titles_stripped)
        #print "title_index_dic",title_index_dic


        #create a dic of column values, indexed by column title
        for line in reader:
            if len(line) <> title_count:
                raise IOError #FIXME make this nicer
            for i, value in enumerate(line):
                attribute_dic.setdefault(titles_stripped[i],[]).append(value)

        return attribute_dic, title_index_dic

    def get_column(self, column_name, use_refind_polygon=False):
        """
        Given a column name return a list of the column values.

        Note, the type of the values will be String!
        Do this to change a list of strings to a list of floats:
        time = [float(x) for x in time]

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        if not self._attribute_dic.has_key(column_name):
            msg = 'There is no column called %s!' %column_name
            raise TitleValueError, msg
        return self._attribute_dic[column_name]


    def get_value(self, value_column_name,
                  known_column_name,
                  known_values,
                  use_refind_polygon=False):
        """
        Do linear interpolation on the known_column, using the known_values,
        to return a value of the value_column_name.
        """
        pass


    def get_location(self, use_refind_polygon=False):
        """
        Return a geospatial object which describes the
        locations in the file.

        Note, if there is no location info, this returns None.

        Not implemented:
        if use_refind_polygon is True, only return values in the
        refined polygon
        """
        return self._geospatial

    def set_column(self, column_name, column_values, overwrite=False):
        """
        Add a column to the 'end' (with the rightmost column being the end)
        of the csv file.

        Set overwrite to True if you want to overwrite a column.

        Note, in column_name white space is removed and case is not checked.

        Precondition:
        The column_name and column_values cannot have commas in them.
        """

        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        if len(column_values) <> value_row_count:
            msg = 'The number of column values must equal the number of rows.'
            raise DataMissingValuesError, msg

        if self._attribute_dic.has_key(column_name):
            if not overwrite:
                msg = 'Column name %s already in use!' %column_name
                raise TitleValueError, msg
        else:
            # New title.  Add it to the title index.
            self._title_index_dic[column_name] = len(self._title_index_dic)
        self._attribute_dic[column_name] = column_values
        #print "self._title_index_dic[column_name]",self._title_index_dic

    def save(self, file_name=None):
        """
        Save the exposure csv file
        """
        if file_name is None:
            file_name = self._file_name

        fd = open(file_name,'wb')
        writer = csv.writer(fd)

        #Write the titles to the csv file
        line = [None]* len(self._title_index_dic)
        for title in self._title_index_dic.iterkeys():
            line[self._title_index_dic[title]]= title
        writer.writerow(line)

        # Write the values to the csv file
        value_row_count = \
            len(self._attribute_dic[self._title_index_dic.keys()[0]])
        for row_i in range(value_row_count):
            line = [None]* len(self._title_index_dic)
            for title in self._title_index_dic.iterkeys():
                line[self._title_index_dic[title]]= \
                     self._attribute_dic[title][row_i]
            writer.writerow(line)

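# Example (sketch; 'exposure.csv' is a hypothetical file with LATITUDE and
# LONGITUDE columns; 'depths' is a hypothetical list with one value per row):
#
#     exposure = Exposure_csv('exposure.csv')
#     values = [float(x) for x in exposure.get_column('STRUCTURE_VALUE')]
#     exposure.set_column('DEPTH', depths)
#     exposure.save('exposure_with_depth.csv')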

#Auxiliary
def write_obj(filename,x,y,z):
    """Store x,y,z vectors into filename (obj format)
       Vectors are assumed to have dimension (M,3) where
       M corresponds to the number of elements.
       Triangles are assumed to be disconnected

       The three numbers in each vector correspond to three vertices,

       e.g. the x coordinate of vertex 1 of element i is in x[i,1]

    """
    #print 'Writing obj to %s' % filename

    import os.path

    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'


    outfile = open(FN, 'wb')
    outfile.write("# Triangulation as an obj file\n")

    M, N = x.shape
    assert N==3  #Assuming three vertices per element

    for i in range(M):
        for j in range(N):
            outfile.write("v %f %f %f\n" % (x[i,j],y[i,j],z[i,j]))

    for i in range(M):
        base = i*N
        outfile.write("f %d %d %d\n" % (base+1,base+2,base+3))

    outfile.close()

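# Example (sketch): write a single flat triangle to tri.obj
#
#     from Numeric import array, Float
#     x = array([[0.0, 1.0, 0.0]], Float)   # one element, three vertices
#     y = array([[0.0, 0.0, 1.0]], Float)
#     z = array([[0.0, 0.0, 0.0]], Float)
#     write_obj('tri', x, y, z)             # creates tri.obj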


#########################################################
#Conversion routines
########################################################

def sww2obj(basefilename, size):
    """Convert netcdf based data output to obj
    """
    from Scientific.IO.NetCDF import NetCDFFile

    from Numeric import Float, zeros

    #Get NetCDF
    FN = create_filename('.', basefilename, 'sww', size)
    print 'Reading from ', FN
    fid = NetCDFFile(FN, 'r')  #Open existing file for read


    # Get the variables
    x = fid.variables['x']
    y = fid.variables['y']
    z = fid.variables['elevation']
    time = fid.variables['time']
    stage = fid.variables['stage']

    M = size  #Number of lines
    xx = zeros((M,3), Float)
    yy = zeros((M,3), Float)
    zz = zeros((M,3), Float)

    for i in range(M):
        for j in range(3):
            xx[i,j] = x[i+j*M]
            yy[i,j] = y[i+j*M]
            zz[i,j] = z[i+j*M]

    #Write obj for bathymetry
    FN = create_filename('.', basefilename, 'obj', size)
    write_obj(FN,xx,yy,zz)


    #Now read all the data with variable information, combine with
    #x,y info and store as obj

    for k in range(len(time)):
        t = time[k]
        print 'Processing timestep %f' %t

        for i in range(M):
            for j in range(3):
                zz[i,j] = stage[k,i+j*M]


        #Write obj for variable data
        #FN = create_filename(basefilename, 'obj', size, time=t)
        FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
        write_obj(FN,xx,yy,zz)


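# Example (sketch; 'run1' is a hypothetical basename and 4 the number of
# triangles encoded in the filename):
#
#     sww2obj('run1', 4)   # reads ./run1_size4.sww, writes ./run1_size4.obj
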
def dat2obj(basefilename):
    """Convert line based data output to obj
    FIXME: Obsolete?
    """

    import glob, os
    from anuga.config import data_dir


    #Get bathymetry and x,y's
    lines = open(data_dir+os.sep+basefilename+'_geometry.dat', 'r').readlines()

    from Numeric import zeros, Float

    M = len(lines)  #Number of lines
    x = zeros((M,3), Float)
    y = zeros((M,3), Float)
    z = zeros((M,3), Float)

    for i, line in enumerate(lines):
        tokens = line.split()
        values = map(float,tokens)

        for j in range(3):
            x[i,j] = values[j*3]
            y[i,j] = values[j*3+1]
            z[i,j] = values[j*3+2]


    #Write obj for bathymetry
    write_obj(data_dir+os.sep+basefilename+'_geometry',x,y,z)


    #Now read all the data files with variable information, combine with
    #x,y info and store as obj

    files = glob.glob(data_dir+os.sep+basefilename+'*.dat')

    for filename in files:
        print 'Processing %s' % filename

        lines = open(data_dir+os.sep+filename,'r').readlines()
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        #Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            #Skip bathymetry file
            continue

        i0 += 6  #Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            raise DataTimeError, 'Could not extract time from filename %s' %filename


        for i, line in enumerate(lines):
            tokens = line.split()
            values = map(float,tokens)

            for j in range(3):
                z[i,j] = values[j]

        #Write obj for variable data
        write_obj(data_dir+os.sep+basefilename+'_time=%.4f' %t,x,y,z)


def filter_netcdf(filename1, filename2, first=0, last=None, step = 1):
    """Read netcdf filename1, pick timesteps first:step:last and save to
    netcdf file filename2
    """
    from Scientific.IO.NetCDF import NetCDFFile

    #Get NetCDF
    infile = NetCDFFile(filename1, 'r')  #Open existing file for read
    outfile = NetCDFFile(filename2, 'w')  #Open new file


    #Copy dimensions
    for d in infile.dimensions:
        outfile.createDimension(d, infile.dimensions[d])

    for name in infile.variables:
        var = infile.variables[name]
        outfile.createVariable(name, var.typecode(), var.dimensions)


    #Copy the static variables
    for name in infile.variables:
        if name == 'time' or name == 'stage':
            pass
        else:
            #Copy
            outfile.variables[name][:] = infile.variables[name][:]

    #Copy selected timesteps
    time = infile.variables['time']
    stage = infile.variables['stage']

    newtime = outfile.variables['time']
    newstage = outfile.variables['stage']

    if last is None:
        last = len(time)

    selection = range(first, last, step)
    for i, j in enumerate(selection):
        print 'Copying timestep %d of %d (%f)' %(j, last-first, time[j])
        newtime[i] = time[j]
        newstage[i,:] = stage[j,:]

    #Close
    infile.close()
    outfile.close()

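# Example (sketch; filenames are hypothetical): keep every second timestep
#
#     filter_netcdf('run1.sww', 'run1_thinned.sww', first=0, step=2)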

#Get data objects
def get_dataobject(domain, mode='w'):
    """Return instance of class of given format using filename
    """

    cls = eval('Data_format_%s' %domain.format)
    return cls(domain, mode)

#FIXME move into geospatial.  There should only be one method that
# reads xya, and writes pts.
def xya2pts(basename_in, basename_out=None, verbose=False,
            #easting_min=None, easting_max=None,
            #northing_min=None, northing_max=None,
            stride = 1,
            attribute_name = 'elevation',
            z_func = None):
    """Read points data from ascii (.xya)

    Example:

              x(m)        y(m)        z(m)
         0.00000e+00  0.00000e+00  1.3535000e-01
         0.00000e+00  1.40000e-02  1.3535000e-01



    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array

    Only lines that contain three numeric values are processed

    If z_func is specified, it will be applied to the third field
    """

    import os
    #from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, arrayrange, concatenate

    root, ext = os.path.splitext(basename_in)

    if ext == '': ext = '.xya'

    #Get NetCDF
    infile = open(root + ext, 'r')  #Open existing xya file for read

    if verbose: print 'Reading xya points from %s' %(root + ext)

    points = []
    attribute = []
    for i, line in enumerate(infile.readlines()):

        if i % stride != 0: continue

        fields = line.split()

        try:
            assert len(fields) == 3
        except:
            print 'WARNING: Line %d doesn\'t have 3 elements: %s' %(i, line)

        try:
            x = float( fields[0] )
            y = float( fields[1] )
            z = float( fields[2] )
        except:
            continue

        points.append( [x, y] )

        if callable(z_func):
            attribute.append(z_func(z))
        else:
            attribute.append(z)


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    write_ptsfile(ptsname, points, attribute, attribute_name)

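# Example (sketch; 'bathymetry.xya' is a hypothetical file):
#
#     xya2pts('bathymetry', stride=2,          # keep every second point
#             z_func=lambda z: -z)             # e.g. turn depths into elevations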


######Obsoleted by export_points in load_mesh
def write_ptsfile(ptsname, points, attribute, attribute_name = None,
                  zone=None, xllcorner=None, yllcorner=None):
    """Write points and associated attribute to pts (NetCDF) format
    """

    print 'WARNING: write_ptsfile is obsolete. Use export_points from load_mesh.loadASCII instead.'

    from Numeric import Float

    if attribute_name is None:
        attribute_name = 'attribute'


    from Scientific.IO.NetCDF import NetCDFFile

    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')


    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and '\
                          'portable storage of spatial point data'


    #Georeferencing
    from anuga.coordinate_transforms.geo_reference import Geo_reference
    if zone is None:
        assert xllcorner is None, 'xllcorner must be None'
        assert yllcorner is None, 'yllcorner must be None'
        Geo_reference().write_NetCDF(outfile)
    else:
        Geo_reference(zone, xllcorner, yllcorner).write_NetCDF(outfile)



    outfile.createDimension('number_of_points', len(points))
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable(attribute_name, Float, ('number_of_points',))

    # Get handles to the variables
    nc_points = outfile.variables['points']
    nc_attribute = outfile.variables[attribute_name]

    #Store data
    nc_points[:, :] = points
    nc_attribute[:] = attribute

    outfile.close()


def dem2pts(basename_in, basename_out=None,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None,
            use_cache=False, verbose=False,):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Convert to NetCDF pts format which is

    points:  (Nx2) Float array
    elevation: N Float array
    """



    kwargs = {'basename_out': basename_out,
              'easting_min': easting_min,
              'easting_max': easting_max,
              'northing_min': northing_min,
              'northing_max': northing_max,
              'verbose': verbose}

    if use_cache is True:
        from caching import cache
        result = cache(_dem2pts, basename_in, kwargs,
                       dependencies = [basename_in + '.dem'],
                       verbose = verbose)

    else:
        result = apply(_dem2pts, [basename_in], kwargs)

    return result
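
# Example (sketch; 'perth_topo' is a hypothetical basename): clip the DEM
# while converting and cache the result for reuse:
#
#     dem2pts('perth_topo', basename_out='perth_topo_clipped',
#             easting_min=722000, easting_max=730000,
#             northing_min=5893000, northing_max=5899000,
#             use_cache=True, verbose=True)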


def _dem2pts(basename_in, basename_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digital Elevation model from the following NetCDF format (.dem)

    Internal function. See public function dem2pts for details.
    """

    #FIXME: Can this be written feasibly using write_pts?

    import os
    from Scientific.IO.NetCDF import NetCDFFile
    from Numeric import Float, zeros, reshape, sum

    root = basename_in

    #Get NetCDF
    infile = NetCDFFile(root + '.dem', 'r')  #Open existing netcdf file for read

    if verbose: print 'Reading DEM from %s' %(root + '.dem')

    ncols = infile.ncols[0]
    nrows = infile.nrows[0]
    xllcorner = infile.xllcorner[0]  #Easting of lower left corner
    yllcorner = infile.yllcorner[0]  #Northing of lower left corner
    cellsize = infile.cellsize[0]
    NODATA_value = infile.NODATA_value[0]
    dem_elevation = infile.variables['elevation']

    zone = infile.zone[0]
    false_easting = infile.false_easting[0]
    false_northing = infile.false_northing[0]

    #Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units


    #Get output file
    if basename_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = basename_out + '.pts'

    if verbose: print 'Store to NetCDF file %s' %ptsname
    # NetCDF file definition
    outfile = NetCDFFile(ptsname, 'w')

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable storage ' +\
                      'of spatial point data'
    #assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize

    #compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    #Georeferencing
    outfile.zone = zone
    outfile.xllcorner = easting_min #Easting of lower left corner
    outfile.yllcorner = northing_min #Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units


    #Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows

    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
    totalnopoints = nrows*ncols

    # calculating number of NODATA_values for each row in clipped region
    #FIXME: use array operations to do faster
    nn = 0
    k = 0
    i1_0 = 0
    j1_0 = 0
    thisj = 0
    thisi = 0
    for i in range(nrows):
        y = (nrows-i-1)*cellsize + yllcorner
        for j in range(ncols):
            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max:
                thisj = j
                thisi = i
                if dem_elevation_r[i,j] == NODATA_value: nn += 1

                if k == 0:
                    i1_0 = i
                    j1_0 = j
                k += 1

    index1 = j1_0
    index2 = thisj

    # dimension definitions
    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))

    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
    nopoints = clippednopoints-nn

    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1,j1_0:thisj+1]

    if verbose:
        print 'There are %d values in the elevation' %totalnopoints
        print 'There are %d values in the clipped elevation' %clippednopoints
        print 'There are %d NODATA_values in the clipped elevation' %nn

    outfile.createDimension('number_of_points', nopoints)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # variable definitions
    outfile.createVariable('points', Float, ('number_of_points',
                                             'number_of_dimensions'))
    outfile.createVariable('elevation', Float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    lenv = index2-index1+1
    #Store data
    global_index = 0
    for i in range(i1_0,thisi+1,1):
        if verbose and i%((nrows+10)/10)==0:
            print 'Processing row %d of %d' %(i, nrows)

        lower_index = global_index

        v = dem_elevation_r[i,index1:index2+1]
        no_NODATA = sum(v == NODATA_value)
        if no_NODATA > 0:
            newcols = lenv - no_NODATA #ncols_in_bounding_box - no_NODATA
        else:
            newcols = lenv #ncols_in_bounding_box

        telev = zeros(newcols, Float)
        tpoints = zeros((newcols, 2), Float)

        local_index = 0

        y = (nrows-i-1)*cellsize + yllcorner
        for j in range(j1_0,index2+1,1):

            x = j*cellsize + xllcorner
            if easting_min <= x <= easting_max and \
               northing_min <= y <= northing_max and \
               dem_elevation_r[i,j] <> NODATA_value:
                tpoints[local_index, :] = [x-easting_min,y-northing_min]
                telev[local_index] = dem_elevation_r[i, j]
                global_index += 1
                local_index += 1

        upper_index = global_index

        if upper_index == lower_index + newcols:
            points[lower_index:upper_index, :] = tpoints
            elevation[lower_index:upper_index] = telev

    assert global_index == nopoints, 'index not equal to number of points'

    infile.close()
    outfile.close()



def _read_hecras_cross_sections(lines):
    """Return block of surface lines for each cross section
    Starts with SURFACE LINE,
    Ends with END CROSS-SECTION
    """

    points = []

    reading_surface = False
    for i, line in enumerate(lines):

        if len(line.strip()) == 0:    #Ignore blanks
            continue

        if lines[i].strip().startswith('SURFACE LINE'):
            reading_surface = True
            continue

        if lines[i].strip().startswith('END') and reading_surface:
            yield points
            reading_surface = False
            points = []

        if reading_surface:
            fields = line.strip().split(',')
            easting = float(fields[0])
            northing = float(fields[1])
            elevation = float(fields[2])
            points.append([easting, northing, elevation])
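
# Example (sketch): each yielded block is a list of [easting, northing,
# elevation] triplets for one cross section; 'lines' would come from
# readlines() as in hecras_cross_sections2pts below.
#
#     for section in _read_hecras_cross_sections(lines):
#         print '%d points in cross section' %len(section)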




1608def hecras_cross_sections2pts(basename_in,
1609                              basename_out=None,
1610                              verbose=False):
1611    """Read HEC-RAS Elevation datal from the following ASCII format (.sdf)
1612
1613    Example:
1614
1615
1616# RAS export file created on Mon 15Aug2005 11:42
1617# by HEC-RAS Version 3.1.1
1618
1619BEGIN HEADER:
1620  UNITS: METRIC
1621  DTM TYPE: TIN
1622  DTM: v:\1\cit\perth_topo\river_tin
1623  STREAM LAYER: c:\local\hecras\21_02_03\up_canning_cent3d.shp
1624  CROSS-SECTION LAYER: c:\local\hecras\21_02_03\up_can_xs3d.shp
1625  MAP PROJECTION: UTM
1626  PROJECTION ZONE: 50
1627  DATUM: AGD66
1628  VERTICAL DATUM:
1629  NUMBER OF REACHES:  19
1630  NUMBER OF CROSS-SECTIONS:  14206
1631END HEADER:
1632
1633
1634Only the SURFACE LINE data of the following form will be utilised
1635
1636  CROSS-SECTION:
1637    STREAM ID:Southern-Wungong
1638    REACH ID:Southern-Wungong
1639    STATION:19040.*
1640    CUT LINE:
1641      405548.671603161 , 6438142.7594925
1642      405734.536092045 , 6438326.10404912
1643      405745.130459356 , 6438331.48627354
1644      405813.89633823 , 6438368.6272789
1645    SURFACE LINE:
1646     405548.67,   6438142.76,   35.37
1647     405552.24,   6438146.28,   35.41
1648     405554.78,   6438148.78,   35.44
1649     405555.80,   6438149.79,   35.44
1650     405559.37,   6438153.31,   35.45
1651     405560.88,   6438154.81,   35.44
1652     405562.93,   6438156.83,   35.42
1653     405566.50,   6438160.35,   35.38
1654     405566.99,   6438160.83,   35.37
1655     ...
1656   END CROSS-SECTION
1657
1658    Convert to NetCDF pts format which is
1659
1660    points:  (Nx2) Float array
1661    elevation: N Float array
1662    """
1663
1664    #FIXME: Can this be written feasibly using write_pts?
1665
1666    import os
1667    from Scientific.IO.NetCDF import NetCDFFile
1668    from Numeric import Float, zeros, reshape
1669
1670    root = basename_in
1671
1672    #Get ASCII file
1673    infile = open(root + '.sdf', 'r')  #Open SDF file for read
1674
1675    if verbose: print 'Reading DEM from %s' %(root + '.sdf')
1676
1677    lines = infile.readlines()
1678    infile.close()
1679
1680    if verbose: print 'Converting to pts format'
1681
1682    i = 0
1683    while lines[i].strip() == '' or lines[i].strip().startswith('#'):
1684        i += 1
1685
1686    assert lines[i].strip().upper() == 'BEGIN HEADER:'
1687    i += 1
1688
1689    assert lines[i].strip().upper().startswith('UNITS:')
1690    units = lines[i].strip().split()[1]
1691    i += 1
1692
1693    assert lines[i].strip().upper().startswith('DTM TYPE:')
1694    i += 1
1695
1696    assert lines[i].strip().upper().startswith('DTM:')
1697    i += 1
1698
1699    assert lines[i].strip().upper().startswith('STREAM')
1700    i += 1
1701
1702    assert lines[i].strip().upper().startswith('CROSS')
1703    i += 1
1704
1705    assert lines[i].strip().upper().startswith('MAP PROJECTION:')
1706    projection = lines[i].strip().split(':')[1]
1707    i += 1
1708
1709    assert lines[i].strip().upper().startswith('PROJECTION ZONE:')
1710    zone = int(lines[i].strip().split(':')[1])
1711    i += 1
1712
1713    assert lines[i].strip().upper().startswith('DATUM:')
1714    datum = lines[i].strip().split(':')[1]
1715    i += 1
1716
1717    assert lines[i].strip().upper().startswith('VERTICAL DATUM:')
1718    i += 1
1719
1720    assert lines[i].strip().upper().startswith('NUMBER OF REACHES:')
1721    i += 1
1722
1723    assert lines[i].strip().upper().startswith('NUMBER OF CROSS-SECTIONS:')
1724    number_of_cross_sections = int(lines[i].strip().split(':')[1])
1725    i += 1
1726
1727
1728    #Now read all points
1729    points = []
1730    elevation = []
1731    for j, entries in enumerate(_read_hecras_cross_sections(lines[i:])):
1732        for k, entry in enumerate(entries):
1733            points.append(entry[:2])
1734            elevation.append(entry[2])
1735
1736
    msg = 'Actual number of cross sections read == %d, but header reported %d'\
          %(j+1, number_of_cross_sections)
1739    assert j+1 == number_of_cross_sections, msg
1740
1741    #Get output file
    if basename_out is None:
1743        ptsname = root + '.pts'
1744    else:
1745        ptsname = basename_out + '.pts'
1746
1747    #FIXME (DSG-ON): use loadASCII export_points_file
1748    if verbose: print 'Store to NetCDF file %s' %ptsname
1749    # NetCDF file definition
1750    outfile = NetCDFFile(ptsname, 'w')
1751
1752    #Create new file
1753    outfile.institution = 'Geoscience Australia'
1754    outfile.description = 'NetCDF pts format for compact and portable ' +\
1755                          'storage of spatial point data derived from HEC-RAS'
1756
1757    #Georeferencing
1758    outfile.zone = zone
1759    outfile.xllcorner = 0.0
1760    outfile.yllcorner = 0.0
1761    outfile.false_easting = 500000    #FIXME: Use defaults from e.g. config.py
1762    outfile.false_northing = 1000000
1763
1764    outfile.projection = projection
1765    outfile.datum = datum
1766    outfile.units = units
1767
1768
1769    outfile.createDimension('number_of_points', len(points))
1770    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
1771
1772    # variable definitions
1773    outfile.createVariable('points', Float, ('number_of_points',
1774                                             'number_of_dimensions'))
1775    outfile.createVariable('elevation', Float, ('number_of_points',))
1776
1777    # Get handles to the variables
1778    outfile.variables['points'][:] = points
1779    outfile.variables['elevation'][:] = elevation
1780
1781    outfile.close()
1782
1783
1784
1785def sww2dem(basename_in, basename_out = None,
1786            quantity = None,
1787            timestep = None,
1788            reduction = None,
1789            cellsize = 10,
1790            NODATA_value = -9999,
1791            easting_min = None,
1792            easting_max = None,
1793            northing_min = None,
1794            northing_max = None,
1795            verbose = False,
1796            origin = None,
1797            datum = 'WGS84',
            format = 'ers'):
1799
    """Read SWW file and convert to Digital Elevation Model format (.asc or .ers)
1801
1802    Example (ASC):
1803
1804    ncols         3121
1805    nrows         1800
1806    xllcorner     722000
1807    yllcorner     5893000
1808    cellsize      25
1809    NODATA_value  -9999
1810    138.3698 137.4194 136.5062 135.5558 ..........
1811
    In the 'asc' case, also write an accompanying file with same basename_out
    but extension .prj used to fix the UTM zone, datum, false northings and
    eastings.
1814
1815    The prj format is assumed to be as
1816
1817    Projection    UTM
1818    Zone          56
1819    Datum         WGS84
1820    Zunits        NO
1821    Units         METERS
1822    Spheroid      WGS84
1823    Xshift        0.0000000000
1824    Yshift        10000000.0000000000
1825    Parameters
1826
1827
1828    The parameter quantity must be the name of an existing quantity or
1829    an expression involving existing quantities. The default is
1830    'elevation'.
1831
    If timestep (an index) is given, output quantity at that timestep.

    If reduction is given, use that to reduce quantity over all timesteps.

    datum specifies the datum recorded with the output (default 'WGS84').

    format can be either 'asc' or 'ers'.
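
    Example usage (basenames are hypothetical):

        sww2dem('runup', basename_out='runup_depth',
                quantity='stage - elevation',
                cellsize=25, format='asc')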
1839    """
1840
1841    import sys
1842    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
1843    from Numeric import array2string
1844
1845    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
1846    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
1847
1848    msg = 'Format must be either asc or ers'
1849    assert format.lower() in ['asc', 'ers'], msg
1850
1851
1852    false_easting = 500000
1853    false_northing = 10000000
1854
1855    if quantity is None:
1856        quantity = 'elevation'
1857
1858    if reduction is None:
1859        reduction = max
1860
1861    if basename_out is None:
1862        basename_out = basename_in + '_%s' %quantity
1863
1864    swwfile = basename_in + '.sww'
1865    demfile = basename_out + '.' + format
    # Note that use of a .ers extension is optional (write_ermapper_grid will
    # deal with either option).
1868
1869    #Read sww file
1870    if verbose: print 'Reading from %s' %swwfile
1871    from Scientific.IO.NetCDF import NetCDFFile
1872    fid = NetCDFFile(swwfile)
1873
1874    #Get extent and reference
1875    x = fid.variables['x'][:]
1876    y = fid.variables['y'][:]
1877    volumes = fid.variables['volumes'][:]
1878    if timestep is not None:
1879        times = fid.variables['time'][timestep]
1880    else:
1881        times = fid.variables['time'][:]
1882
1883    number_of_timesteps = fid.dimensions['number_of_timesteps']
1884    number_of_points = fid.dimensions['number_of_points']
1885   
1886    if origin is None:
1887
1888        #Get geo_reference
1889        #sww files don't have to have a geo_ref
1890        try:
1891            geo_reference = Geo_reference(NetCDFObject=fid)
1892        except AttributeError, e:
1893            geo_reference = Geo_reference() #Default georef object
1894
1895        xllcorner = geo_reference.get_xllcorner()
1896        yllcorner = geo_reference.get_yllcorner()
1897        zone = geo_reference.get_zone()
1898    else:
1899        zone = origin[0]
1900        xllcorner = origin[1]
1901        yllcorner = origin[2]
1902
1903
1904
1905    #FIXME: Refactor using code from file_function.statistics
1906    #Something like print swwstats(swwname)
1907    if verbose:
1908        print '------------------------------------------------'
1909        print 'Statistics of SWW file:'
1910        print '  Name: %s' %swwfile
1911        print '  Reference:'
1912        print '    Lower left corner: [%f, %f]'\
1913              %(xllcorner, yllcorner)
1914        if timestep is not None:
1915            print '    Time: %f' %(times)
1916        else:
1917            print '    Start time: %f' %fid.starttime[0]
1918        print '  Extent:'
1919        print '    x [m] in [%f, %f], len(x) == %d'\
1920              %(min(x.flat), max(x.flat), len(x.flat))
1921        print '    y [m] in [%f, %f], len(y) == %d'\
1922              %(min(y.flat), max(y.flat), len(y.flat))
1923        if timestep is not None:
1924            print '    t [s] = %f, len(t) == %d' %(times, 1)
1925        else:
1926            print '    t [s] in [%f, %f], len(t) == %d'\
1927              %(min(times), max(times), len(times))
1928        print '  Quantities [SI units]:'
1929        for name in ['stage', 'xmomentum', 'ymomentum']:
1930            q = fid.variables[name][:].flat
1931            if timestep is not None:
1932                q = q[timestep*len(x):(timestep+1)*len(x)]
1933            if verbose: print '    %s in [%f, %f]' %(name, min(q), max(q))
1934        for name in ['elevation']:
1935            q = fid.variables[name][:].flat
1936            if verbose: print '    %s in [%f, %f]' %(name, min(q), max(q))
1937           
1938    # Get quantity and reduce if applicable
1939    if verbose: print 'Processing quantity %s' %quantity
1940
1941    # Turn NetCDF objects into Numeric arrays
1942    quantity_dict = {}
1943    for name in fid.variables.keys():
1944        quantity_dict[name] = fid.variables[name][:] 
1945
1946
1947    # Convert quantity expression to quantities found in sww file   
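    # (e.g. a plain name such as 'elevation', or a hypothetical expression
    # such as 'stage - elevation' evaluated over the arrays collected above)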
1948    q = apply_expression_to_dictionary(quantity, quantity_dict)
1949
1950    if len(q.shape) == 2:
1951        #q has a time component and needs to be reduced along
1952        #the temporal dimension
1953        if verbose: print 'Reducing quantity %s' %quantity
1954        q_reduced = zeros( number_of_points, Float )
1955       
1956        if timestep is not None:
1957            for k in range(number_of_points):
1958                q_reduced[k] = q[timestep,k] 
1959        else:
1960            for k in range(number_of_points):
1961                q_reduced[k] = reduction( q[:,k] )
1962
1963        q = q_reduced
1964
1965    #Post condition: Now q has dimension: number_of_points
1966    assert len(q.shape) == 1
1967    assert q.shape[0] == number_of_points
1968
1969
1970    if verbose:
1971        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
1972
1973
1974    #Create grid and update xll/yll corner and x,y
1975
1976    #Relative extent
1977    if easting_min is None:
1978        xmin = min(x)
1979    else:
1980        xmin = easting_min - xllcorner
1981
1982    if easting_max is None:
1983        xmax = max(x)
1984    else:
1985        xmax = easting_max - xllcorner
1986
1987    if northing_min is None:
1988        ymin = min(y)
1989    else:
1990        ymin = northing_min - yllcorner
1991
1992    if northing_max is None:
1993        ymax = max(y)
1994    else:
1995        ymax = northing_max - yllcorner
1996
1997
1998
1999    if verbose: print 'Creating grid'
2000    ncols = int((xmax-xmin)/cellsize)+1
2001    nrows = int((ymax-ymin)/cellsize)+1
2002
2003
2004    #New absolute reference and coordinates
2005    newxllcorner = xmin+xllcorner
2006    newyllcorner = ymin+yllcorner
2007
2008    x = x+xllcorner-newxllcorner
2009    y = y+yllcorner-newyllcorner
2010   
2011    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2012    assert len(vertex_points.shape) == 2
2013
2014    grid_points = zeros ( (ncols*nrows, 2), Float )
2015
2016
2017    for i in xrange(nrows):
2018        if format.lower() == 'asc':
2019            yg = i*cellsize
        else:
            #This will flip the order of the y values for ers
            yg = (nrows-i)*cellsize
2023
2024        for j in xrange(ncols):
2025            xg = j*cellsize
2026            k = i*ncols + j
2027
2028            grid_points[k,0] = xg
2029            grid_points[k,1] = yg
2030
2031    #Interpolate
2032    #from least_squares import Interpolation
2033    from anuga.fit_interpolate.interpolate import Interpolate
2034
2035    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2036
2037    #Interpolate using quantity values
2038    if verbose: print 'Interpolating'
2039    grid_values = interp.interpolate(q, grid_points).flat
2040
2041
2042    if verbose:
2043        print 'Interpolated values are in [%f, %f]' %(min(grid_values),
2044                                                      max(grid_values))
2045
2046    #Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2047    P = interp.mesh.get_boundary_polygon()
2048    outside_indices = outside_polygon(grid_points, P, closed=True)
2049
2050    for i in outside_indices:
2051        grid_values[i] = NODATA_value
2052
2053
2054
2055
2056    if format.lower() == 'ers':
2057        # setup ERS header information
2058        grid_values = reshape(grid_values,(nrows, ncols))
2059        header = {}
2060        header['datum'] = '"' + datum + '"'
2061        # FIXME The use of hardwired UTM and zone number needs to be made optional
2062        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
2063        header['projection'] = '"UTM-' + str(zone) + '"'
2064        header['coordinatetype'] = 'EN'
2065        if header['coordinatetype'] == 'LL':
2066            header['longitude'] = str(newxllcorner)
2067            header['latitude'] = str(newyllcorner)
2068        elif header['coordinatetype'] == 'EN':
2069            header['eastings'] = str(newxllcorner)
2070            header['northings'] = str(newyllcorner)
2071        header['nullcellvalue'] = str(NODATA_value)
2072        header['xdimension'] = str(cellsize)
2073        header['ydimension'] = str(cellsize)
2074        header['value'] = '"' + quantity + '"'
2075        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test
2076
2077
2078        #Write
2079        if verbose: print 'Writing %s' %demfile
2080        import ermapper_grids
2081        ermapper_grids.write_ermapper_grid(demfile, grid_values, header)
2082
2083        fid.close()
2084    else:
2085        #Write to Ascii format
2086
2087        #Write prj file
2088        prjfile = basename_out + '.prj'
2089
2090        if verbose: print 'Writing %s' %prjfile
2091        prjid = open(prjfile, 'w')
2092        prjid.write('Projection    %s\n' %'UTM')
2093        prjid.write('Zone          %d\n' %zone)
2094        prjid.write('Datum         %s\n' %datum)
2095        prjid.write('Zunits        NO\n')
2096        prjid.write('Units         METERS\n')
2097        prjid.write('Spheroid      %s\n' %datum)
2098        prjid.write('Xshift        %d\n' %false_easting)
2099        prjid.write('Yshift        %d\n' %false_northing)
2100        prjid.write('Parameters\n')
2101        prjid.close()
2102
2103
2104
2105        if verbose: print 'Writing %s' %demfile
2106
2107        ascid = open(demfile, 'w')
2108
2109        ascid.write('ncols         %d\n' %ncols)
2110        ascid.write('nrows         %d\n' %nrows)
2111        ascid.write('xllcorner     %d\n' %newxllcorner)
2112        ascid.write('yllcorner     %d\n' %newyllcorner)
2113        ascid.write('cellsize      %f\n' %cellsize)
2114        ascid.write('NODATA_value  %d\n' %NODATA_value)
2115
2116
2120
2121        for i in range(nrows):
2122            if verbose and i%((nrows+10)/10)==0:
2123                print 'Doing row %d of %d' %(i, nrows)
2124
2125            base_index = (nrows-i-1)*ncols
2126
            row = grid_values[base_index:base_index+ncols]
            s = array2string(row, max_line_width=sys.maxint)
2129            ascid.write(s[1:-1] + '\n')
2130
2131
2138
2139        #Close
2140        ascid.close()
2141        fid.close()
2142
2143#Backwards compatibility
2144def sww2asc(basename_in, basename_out = None,
2145            quantity = None,
2146            timestep = None,
2147            reduction = None,
2148            cellsize = 10,
2149            verbose = False,
2150            origin = None):
2151    print 'sww2asc will soon be obsoleted - please use sww2dem'
2152    sww2dem(basename_in,
2153            basename_out = basename_out,
2154            quantity = quantity,
2155            timestep = timestep,
2156            reduction = reduction,
2157            cellsize = cellsize,
2158            verbose = verbose,
2159            origin = origin,
            datum = 'WGS84',
            format = 'asc')
2162
2163def sww2ers(basename_in, basename_out = None,
2164            quantity = None,
2165            timestep = None,
2166            reduction = None,
2167            cellsize = 10,
2168            verbose = False,
2169            origin = None,
2170            datum = 'WGS84'):
2171    print 'sww2ers will soon be obsoleted - please use sww2dem'
2172    sww2dem(basename_in,
2173            basename_out = basename_out,
2174            quantity = quantity,
2175            timestep = timestep,
2176            reduction = reduction,
2177            cellsize = cellsize,
2178            verbose = verbose,
2179            origin = origin,
2180            datum = datum,
2181            format = 'ers')
2182################################# END COMPATIBILITY ##############
2183
2184
2185
2186def sww2pts(basename_in, basename_out=None,
2187            data_points=None,
2188            quantity=None,
2189            timestep=None,
2190            reduction=None,
2191            NODATA_value=-9999,
2192            verbose=False,
2193            origin=None):
2194            #datum = 'WGS84')
2195
2196
2197    """Read SWW file and convert to interpolated values at selected points
2198
    The parameter 'quantity' must be the name of an existing quantity or
    an expression involving existing quantities. The default is
    'elevation'.

    If timestep (an index) is given, output quantity at that timestep.

    If reduction is given, use that to reduce quantity over all timesteps.

    data_points (Nx2 array) gives the locations of points where quantity
    is to be computed.
2208   
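
    Example usage (basename and point locations are hypothetical):

        sww2pts('runup', quantity='elevation',
                data_points=[[460000, 7770000], [462000, 7771000]])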
2209    """
2210
2211    import sys
2212    from Numeric import array, Float, concatenate, NewAxis, zeros, reshape, sometrue
2213    from Numeric import array2string
2214
2215    from anuga.utilities.polygon import inside_polygon, outside_polygon, separate_points_by_polygon
2216    from anuga.abstract_2d_finite_volumes.util import apply_expression_to_dictionary
2217
2218    from anuga.geospatial_data.geospatial_data import Geospatial_data
2219
2220    if quantity is None:
2221        quantity = 'elevation'
2222
2223    if reduction is None:
2224        reduction = max
2225
2226    if basename_out is None:
2227        basename_out = basename_in + '_%s' %quantity
2228
2229    swwfile = basename_in + '.sww'
2230    ptsfile = basename_out + '.pts'
2231
2232    # Read sww file
2233    if verbose: print 'Reading from %s' %swwfile
2234    from Scientific.IO.NetCDF import NetCDFFile
2235    fid = NetCDFFile(swwfile)
2236
2237    # Get extent and reference
2238    x = fid.variables['x'][:]
2239    y = fid.variables['y'][:]
2240    volumes = fid.variables['volumes'][:]
2241
2242    number_of_timesteps = fid.dimensions['number_of_timesteps']
2243    number_of_points = fid.dimensions['number_of_points']
2244    if origin is None:
2245
2246        # Get geo_reference
2247        # sww files don't have to have a geo_ref
2248        try:
2249            geo_reference = Geo_reference(NetCDFObject=fid)
2250        except AttributeError, e:
2251            geo_reference = Geo_reference() #Default georef object
2252
2253        xllcorner = geo_reference.get_xllcorner()
2254        yllcorner = geo_reference.get_yllcorner()
2255        zone = geo_reference.get_zone()
2256    else:
2257        zone = origin[0]
2258        xllcorner = origin[1]
2259        yllcorner = origin[2]
2260
2261
2262
2263    # FIXME: Refactor using code from file_function.statistics
2264    # Something like print swwstats(swwname)
2265    if verbose:
2266        x = fid.variables['x'][:]
2267        y = fid.variables['y'][:]
2268        times = fid.variables['time'][:]
2269        print '------------------------------------------------'
2270        print 'Statistics of SWW file:'
2271        print '  Name: %s' %swwfile
2272        print '  Reference:'
2273        print '    Lower left corner: [%f, %f]'\
2274              %(xllcorner, yllcorner)
2275        print '    Start time: %f' %fid.starttime[0]
2276        print '  Extent:'
2277        print '    x [m] in [%f, %f], len(x) == %d'\
2278              %(min(x.flat), max(x.flat), len(x.flat))
2279        print '    y [m] in [%f, %f], len(y) == %d'\
2280              %(min(y.flat), max(y.flat), len(y.flat))
2281        print '    t [s] in [%f, %f], len(t) == %d'\
2282              %(min(times), max(times), len(times))
2283        print '  Quantities [SI units]:'
2284        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
2285            q = fid.variables[name][:].flat
2286            print '    %s in [%f, %f]' %(name, min(q), max(q))
2287
2288
2289
2290    # Get quantity and reduce if applicable
2291    if verbose: print 'Processing quantity %s' %quantity
2292
2293    # Turn NetCDF objects into Numeric arrays
2294    quantity_dict = {}
2295    for name in fid.variables.keys():
2296        quantity_dict[name] = fid.variables[name][:]
2297
2298
2299
2300    # Convert quantity expression to quantities found in sww file   
2301    q = apply_expression_to_dictionary(quantity, quantity_dict)
2302
2303
2304
2305    if len(q.shape) == 2:
2306        # q has a time component and needs to be reduced along
2307        # the temporal dimension
2308        if verbose: print 'Reducing quantity %s' %quantity
2309        q_reduced = zeros( number_of_points, Float )
2310
2311        for k in range(number_of_points):
2312            q_reduced[k] = reduction( q[:,k] )
2313
2314        q = q_reduced
2315
2316    # Post condition: Now q has dimension: number_of_points
2317    assert len(q.shape) == 1
2318    assert q.shape[0] == number_of_points
2319
2320
2321    if verbose:
2322        print 'Processed values for %s are in [%f, %f]' %(quantity, min(q), max(q))
2323
2324
2325    # Create grid and update xll/yll corner and x,y
2326    vertex_points = concatenate ((x[:, NewAxis] ,y[:, NewAxis]), axis = 1)
2327    assert len(vertex_points.shape) == 2
2328
2329    # Interpolate
2330    from anuga.fit_interpolate.interpolate import Interpolate
2331    interp = Interpolate(vertex_points, volumes, verbose = verbose)
2332
2333    # Interpolate using quantity values
2334    if verbose: print 'Interpolating'
2335    interpolated_values = interp.interpolate(q, data_points).flat
2336
2337
2338    if verbose:
2339        print 'Interpolated values are in [%f, %f]' %(min(interpolated_values),
2340                                                      max(interpolated_values))
2341
2342    # Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)
2343    P = interp.mesh.get_boundary_polygon()
2344    outside_indices = outside_polygon(data_points, P, closed=True)
2345
2346    for i in outside_indices:
2347        interpolated_values[i] = NODATA_value
2348
2349
2350    # Store results   
2351    G = Geospatial_data(data_points=data_points,
2352                        attributes=interpolated_values)
2353
2354    G.export_points_file(ptsfile, absolute = True)
2355
2356    fid.close()
2357
2358
2359def convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2360                                  use_cache = False,
2361                                  verbose = False):
    """Read Digital Elevation Model from the following ASCII format (.asc)
2363
2364    Example:
2365
2366    ncols         3121
2367    nrows         1800
2368    xllcorner     722000
2369    yllcorner     5893000
2370    cellsize      25
2371    NODATA_value  -9999
2372    138.3698 137.4194 136.5062 135.5558 ..........
2373
2374    Convert basename_in + '.asc' to NetCDF format (.dem)
2375    mimicking the ASCII format closely.
2376
2377
2378    An accompanying file with same basename_in but extension .prj must exist
2379    and is used to fix the UTM zone, datum, false northings and eastings.
2380
2381    The prj format is assumed to be as
2382
2383    Projection    UTM
2384    Zone          56
2385    Datum         WGS84
2386    Zunits        NO
2387    Units         METERS
2388    Spheroid      WGS84
2389    Xshift        0.0000000000
2390    Yshift        10000000.0000000000
2391    Parameters
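
    Example usage (basename is hypothetical; reads perth_dem.asc and
    perth_dem.prj, writes perth_dem.dem):

        convert_dem_from_ascii2netcdf('perth_dem', use_cache=True,
                                      verbose=True)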
2392    """
2393
2394
2395
2396    kwargs = {'basename_out': basename_out, 'verbose': verbose}
2397
2398    if use_cache is True:
2399        from caching import cache
2400        result = cache(_convert_dem_from_ascii2netcdf, basename_in, kwargs,
2401                       dependencies = [basename_in + '.asc'],
2402                       verbose = verbose)
2403
2404    else:
2405        result = apply(_convert_dem_from_ascii2netcdf, [basename_in], kwargs)
2406
2407    return result
2408
2409
2410
2411
2412
2413
2414def _convert_dem_from_ascii2netcdf(basename_in, basename_out = None,
2415                                  verbose = False):
    """Read Digital Elevation Model from the following ASCII format (.asc)
2417
2418    Internal function. See public function convert_dem_from_ascii2netcdf for details.
2419    """
2420
2421    import os
2422    from Scientific.IO.NetCDF import NetCDFFile
2423    from Numeric import Float, array
2424
2425    #root, ext = os.path.splitext(basename_in)
2426    root = basename_in
2427
2428    ###########################################
2429    # Read Meta data
2430    if verbose: print 'Reading METADATA from %s' %root + '.prj'
2431    metadatafile = open(root + '.prj')
2432    metalines = metadatafile.readlines()
2433    metadatafile.close()
2434
2435    L = metalines[0].strip().split()
2436    assert L[0].strip().lower() == 'projection'
2437    projection = L[1].strip()                   #TEXT
2438
2439    L = metalines[1].strip().split()
2440    assert L[0].strip().lower() == 'zone'
2441    zone = int(L[1].strip())
2442
2443    L = metalines[2].strip().split()
2444    assert L[0].strip().lower() == 'datum'
2445    datum = L[1].strip()                        #TEXT
2446
2447    L = metalines[3].strip().split()
2448    assert L[0].strip().lower() == 'zunits'     #IGNORE
2449    zunits = L[1].strip()                       #TEXT
2450
2451    L = metalines[4].strip().split()
2452    assert L[0].strip().lower() == 'units'
2453    units = L[1].strip()                        #TEXT
2454
2455    L = metalines[5].strip().split()
2456    assert L[0].strip().lower() == 'spheroid'   #IGNORE
2457    spheroid = L[1].strip()                     #TEXT
2458
2459    L = metalines[6].strip().split()
2460    assert L[0].strip().lower() == 'xshift'
2461    false_easting = float(L[1].strip())
2462
2463    L = metalines[7].strip().split()
2464    assert L[0].strip().lower() == 'yshift'
2465    false_northing = float(L[1].strip())
2466
2467    #print false_easting, false_northing, zone, datum
2468
2469
2470    ###########################################
2471    #Read DEM data
2472
2473    datafile = open(basename_in + '.asc')
2474
2475    if verbose: print 'Reading DEM from %s' %(basename_in + '.asc')
2476    lines = datafile.readlines()
2477    datafile.close()
2478
2479    if verbose: print 'Got', len(lines), ' lines'
2480
2481    ncols = int(lines[0].split()[1].strip())
2482    nrows = int(lines[1].split()[1].strip())
2483    xllcorner = float(lines[2].split()[1].strip())
2484    yllcorner = float(lines[3].split()[1].strip())
2485    cellsize = float(lines[4].split()[1].strip())
2486    NODATA_value = int(lines[5].split()[1].strip())
2487
2488    assert len(lines) == nrows + 6
2489
2490
2491    ##########################################
2492
2493
    if basename_out is None:
2495        netcdfname = root + '.dem'
2496    else:
2497        netcdfname = basename_out + '.dem'
2498
2499    if verbose: print 'Store to NetCDF file %s' %netcdfname
2500    # NetCDF file definition
2501    fid = NetCDFFile(netcdfname, 'w')
2502
2503    #Create new file
2504    fid.institution = 'Geoscience Australia'
2505    fid.description = 'NetCDF DEM format for compact and portable storage ' +\
2506                      'of spatial point data'
2507
2508    fid.ncols = ncols
2509    fid.nrows = nrows
2510    fid.xllcorner = xllcorner
2511    fid.yllcorner = yllcorner
2512    fid.cellsize = cellsize
2513    fid.NODATA_value = NODATA_value
2514
2515    fid.zone = zone
2516    fid.false_easting = false_easting
2517    fid.false_northing = false_northing
2518    fid.projection = projection
2519    fid.datum = datum
2520    fid.units = units
2521
2522
2523    # dimension definitions
2524    fid.createDimension('number_of_rows', nrows)
2525    fid.createDimension('number_of_columns', ncols)
2526
2527    # variable definitions
2528    fid.createVariable('elevation', Float, ('number_of_rows',
2529                                            'number_of_columns'))
2530
2531    # Get handles to the variables
2532    elevation = fid.variables['elevation']
2533
2534    #Store data
2535    n = len(lines[6:])
2536    for i, line in enumerate(lines[6:]):
2537        fields = line.split()
2538        if verbose and i%((n+10)/10)==0:
2539            print 'Processing row %d of %d' %(i, nrows)
2540
2541        elevation[i, :] = array([float(x) for x in fields])
2542
2543    fid.close()
2544
2545
2546
2547
2548
2549def ferret2sww(basename_in, basename_out = None,
2550               verbose = False,
2551               minlat = None, maxlat = None,
2552               minlon = None, maxlon = None,
2553               mint = None, maxt = None, mean_stage = 0,
2554               origin = None, zscale = 1,
2555               fail_on_NaN = True,
2556               NaN_filler = 0,
2557               elevation = None,
2558               inverted_bathymetry = True
2559               ): #FIXME: Bathymetry should be obtained
2560                                  #from MOST somehow.
2561                                  #Alternatively from elsewhere
2562                                  #or, as a last resort,
2563                                  #specified here.
2564                                  #The value of -100 will work
2565                                  #for the Wollongong tsunami
2566                                  #scenario but is very hacky
2567    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
2568    sww format native to abstract_2d_finite_volumes.
2569
2570    Specify only basename_in and read files of the form
2571    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
2572    relative height, x-velocity and y-velocity, respectively.
2573
2574    Also convert latitude and longitude to UTM. All coordinates are
2575    assumed to be given in the GDA94 datum.
2576
    min's and max's: If omitted, the full extent is used.
    To include a value, min may equal it, while max must exceed it.
    Lat and lon are assumed to be in decimal degrees.
2580
2581    origin is a 3-tuple with geo referenced
2582    UTM coordinates (zone, easting, northing)
2583
2584    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
2585    which means that longitude is the fastest
2586    varying dimension (row major order, so to speak)
2587
2588    ferret2sww uses grid points as vertices in a triangular grid
2589    counting vertices from lower left corner upwards, then right
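
    Example usage (basename is hypothetical; reads wollongong_ha.nc,
    wollongong_ua.nc, wollongong_va.nc and wollongong_e.nc):

        ferret2sww('wollongong', minlat=-34.5, maxlat=-34.0,
                   minlon=150.5, maxlon=151.0,
                   mean_stage=0.0, zscale=1, verbose=True)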
2590    """
2591
2592    import os
2593    from Scientific.IO.NetCDF import NetCDFFile
2594    from Numeric import Float, Int, Int32, searchsorted, zeros, array
2595    from Numeric import allclose, around
2596
2597    precision = Float
2598
2599    msg = 'Must use latitudes and longitudes for minlat, maxlon etc'
2600
    if minlat is not None:
        assert -90 < minlat < 90 , msg
    if maxlat is not None:
        assert -90 < maxlat < 90 , msg
        if minlat is not None:
            assert maxlat > minlat
    if minlon is not None:
        assert -180 < minlon < 180 , msg
    if maxlon is not None:
        assert -180 < maxlon < 180 , msg
        if minlon is not None:
            assert maxlon > minlon
2613       
2614
2615
2616    #Get NetCDF data
2617    if verbose: print 'Reading files %s_*.nc' %basename_in
2619    file_h = NetCDFFile(basename_in + '_ha.nc', 'r') #Wave amplitude (cm)
2620    file_u = NetCDFFile(basename_in + '_ua.nc', 'r') #Velocity (x) (cm/s)
2621    file_v = NetCDFFile(basename_in + '_va.nc', 'r') #Velocity (y) (cm/s)
2622    file_e = NetCDFFile(basename_in + '_e.nc', 'r')  #Elevation (z) (m)
2623
2624    if basename_out is None:
2625        swwname = basename_in + '.sww'
2626    else:
2627        swwname = basename_out + '.sww'
2628
    # Get dimensions of file_h
2630    for dimension in file_h.dimensions.keys():
2631        if dimension[:3] == 'LON':
2632            dim_h_longitude = dimension
2633        if dimension[:3] == 'LAT':
2634            dim_h_latitude = dimension
2635        if dimension[:4] == 'TIME':
2636            dim_h_time = dimension
2637
2641
2642    times = file_h.variables[dim_h_time]
2643    latitudes = file_h.variables[dim_h_latitude]
2644    longitudes = file_h.variables[dim_h_longitude]
2645   
    # Get dimensions of file_e
2647    for dimension in file_e.dimensions.keys():
2648        if dimension[:3] == 'LON':
2649            dim_e_longitude = dimension
2650        if dimension[:3] == 'LAT':
2651            dim_e_latitude = dimension
2652
    # Get dimensions of file_u
2654    for dimension in file_u.dimensions.keys():
2655        if dimension[:3] == 'LON':
2656            dim_u_longitude = dimension
2657        if dimension[:3] == 'LAT':
2658            dim_u_latitude = dimension
2659        if dimension[:4] == 'TIME':
2660            dim_u_time = dimension
2661           
    # Get dimensions of file_v
2663    for dimension in file_v.dimensions.keys():
2664        if dimension[:3] == 'LON':
2665            dim_v_longitude = dimension
2666        if dimension[:3] == 'LAT':
2667            dim_v_latitude = dimension
2668        if dimension[:4] == 'TIME':
2669            dim_v_time = dimension
2670
2671
2672    #Precision used by most for lat/lon is 4 or 5 decimals
2673    e_lat = around(file_e.variables[dim_e_latitude][:], 5)
2674    e_lon = around(file_e.variables[dim_e_longitude][:], 5)
2675
2676    #Check that files are compatible
2677    assert allclose(latitudes, file_u.variables[dim_u_latitude])
2678    assert allclose(latitudes, file_v.variables[dim_v_latitude])
2679    assert allclose(latitudes, e_lat)
2680
2681    assert allclose(longitudes, file_u.variables[dim_u_longitude])
2682    assert allclose(longitudes, file_v.variables[dim_v_longitude])
2683    assert allclose(longitudes, e_lon)
2684
    if mint is None:
        jmin = 0
    else:
        jmin = searchsorted(times, mint)

    if maxt is None:
        jmax = len(times)
    else:
        jmax = searchsorted(times, maxt)
2694
2697    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],
2698                                                  longitudes[:],
                                                  minlat, maxlat,
                                                  minlon, maxlon)
2701
2702
2704    times = times[jmin:jmax]
2705    latitudes = latitudes[kmin:kmax]
2706    longitudes = longitudes[lmin:lmax]
2707
2710
2711    if verbose: print 'cropping'
2712    zname = 'ELEVATION'
2713
2714    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
2715    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
2716    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
2717    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]
2718
2719    #    if latitudes2[0]==latitudes[0] and latitudes2[-1]==latitudes[-1]:
2720    #        elevations = file_e.variables['ELEVATION'][kmin:kmax, lmin:lmax]
2721    #    elif latitudes2[0]==latitudes[-1] and latitudes2[-1]==latitudes[0]:
2722    #        from Numeric import asarray
2723    #        elevations=elevations.tolist()
2724    #        elevations.reverse()
2725    #        elevations=asarray(elevations)
2726    #    else:
2727    #        from Numeric import asarray
2728    #        elevations=elevations.tolist()
2729    #        elevations.reverse()
2730    #        elevations=asarray(elevations)
2731    #        'print hmmm'
2732
2733
2734
2735    #Get missing values
2736    nan_ha = file_h.variables['HA'].missing_value[0]
2737    nan_ua = file_u.variables['UA'].missing_value[0]
2738    nan_va = file_v.variables['VA'].missing_value[0]
2739    if hasattr(file_e.variables[zname],'missing_value'):
2740        nan_e  = file_e.variables[zname].missing_value[0]
2741    else:
2742        nan_e = None
2743
2744    #Cleanup
2745    from Numeric import sometrue
2746
2747    missing = (amplitudes == nan_ha)
2748    if sometrue (missing):
2749        if fail_on_NaN:
2750            msg = 'NetCDFFile %s contains missing values'\
2751                  %(basename_in+'_ha.nc')
2752            raise DataMissingValuesError, msg
2753        else:
2754            amplitudes = amplitudes*(missing==0) + missing*NaN_filler
2755
2756    missing = (uspeed == nan_ua)
2757    if sometrue (missing):
2758        if fail_on_NaN:
2759            msg = 'NetCDFFile %s contains missing values'\
2760                  %(basename_in+'_ua.nc')
2761            raise DataMissingValuesError, msg
2762        else:
2763            uspeed = uspeed*(missing==0) + missing*NaN_filler
2764
2765    missing = (vspeed == nan_va)
2766    if sometrue (missing):
2767        if fail_on_NaN:
2768            msg = 'NetCDFFile %s contains missing values'\
2769                  %(basename_in+'_va.nc')
2770            raise DataMissingValuesError, msg
2771        else:
2772            vspeed = vspeed*(missing==0) + missing*NaN_filler
2773
2774
2775    missing = (elevations == nan_e)
2776    if sometrue (missing):
2777        if fail_on_NaN:
2778            msg = 'NetCDFFile %s contains missing values'\
2779                  %(basename_in+'_e.nc')
2780            raise DataMissingValuesError, msg
2781        else:
2782            elevations = elevations*(missing==0) + missing*NaN_filler
2783
2784    #######
2785
2786
2787
2788    number_of_times = times.shape[0]
2789    number_of_latitudes = latitudes.shape[0]
2790    number_of_longitudes = longitudes.shape[0]
2791
2792    assert amplitudes.shape[0] == number_of_times
2793    assert amplitudes.shape[1] == number_of_latitudes
2794    assert amplitudes.shape[2] == number_of_longitudes
2795
2796    if verbose:
2797        print '------------------------------------------------'
2798        print 'Statistics:'
2799        print '  Extent (lat/lon):'
2800        print '    lat in [%f, %f], len(lat) == %d'\
2801              %(min(latitudes.flat), max(latitudes.flat),
2802                len(latitudes.flat))
2803        print '    lon in [%f, %f], len(lon) == %d'\
2804              %(min(longitudes.flat), max(longitudes.flat),
2805                len(longitudes.flat))
2806        print '    t in [%f, %f], len(t) == %d'\
2807              %(min(times.flat), max(times.flat), len(times.flat))
2808
        q = amplitudes.flat
        name = 'Amplitudes (ha) [cm]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = uspeed.flat
        name = 'Speeds (ua) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = vspeed.flat
        name = 'Speeds (va) [cm/s]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))

        q = elevations.flat
        name = 'Elevations (e) [m]'
        print '    %s in [%f, %f]' %(name, min(q), max(q))
2824
2825
2826    #print number_of_latitudes, number_of_longitudes
2827    number_of_points = number_of_latitudes*number_of_longitudes
2828    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
2829
2830
2831    file_h.close()
2832    file_u.close()
2833    file_v.close()
2834    file_e.close()
2835
2836
2837    # NetCDF file definition
2838    outfile = NetCDFFile(swwname, 'w')
2839
2840    #Create new file
2841    outfile.institution = 'Geoscience Australia'
2842    outfile.description = 'Converted from Ferret files: %s, %s, %s, %s'\
2843                          %(basename_in + '_ha.nc',
2844                            basename_in + '_ua.nc',
2845                            basename_in + '_va.nc',
2846                            basename_in + '_e.nc')
2847
2848
2849    #For sww compatibility
2850    outfile.smoothing = 'Yes'
2851    outfile.order = 1
2852
2853    #Start time in seconds since the epoch (midnight 1/1/1970)
2854    outfile.starttime = starttime = times[0]
2855    times = times - starttime  #Store relative times
2856
2857    # dimension definitions
2858    outfile.createDimension('number_of_volumes', number_of_volumes)
2859
2860    outfile.createDimension('number_of_vertices', 3)
2861    outfile.createDimension('number_of_points', number_of_points)
2862
2863
    outfile.createDimension('number_of_timesteps', len(times))
2866
2867    # variable definitions
2868    outfile.createVariable('x', precision, ('number_of_points',))
2869    outfile.createVariable('y', precision, ('number_of_points',))
2870    outfile.createVariable('elevation', precision, ('number_of_points',))
2871
2872    #FIXME: Backwards compatibility
2873    outfile.createVariable('z', precision, ('number_of_points',))
2874    #################################
2875
2876    outfile.createVariable('volumes', Int, ('number_of_volumes',
2877                                            'number_of_vertices'))
2878
2879    outfile.createVariable('time', precision,
2880                           ('number_of_timesteps',))
2881
2882    outfile.createVariable('stage', precision,
2883                           ('number_of_timesteps',
2884                            'number_of_points'))
2885
2886    outfile.createVariable('xmomentum', precision,
2887                           ('number_of_timesteps',
2888                            'number_of_points'))
2889
2890    outfile.createVariable('ymomentum', precision,
2891                           ('number_of_timesteps',
2892                            'number_of_points'))
2893
2894
2895    #Store
2896    from anuga.coordinate_transforms.redfearn import redfearn
2897    x = zeros(number_of_points, Float)  #Easting
2898    y = zeros(number_of_points, Float)  #Northing
2899
2900
2901    if verbose: print 'Making triangular grid'
2902    #Check zone boundaries
2903    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
2904
2905    vertices = {}
2906    i = 0
2907    for k, lat in enumerate(latitudes):       #Y direction
2908        for l, lon in enumerate(longitudes):  #X direction
2909
2910            vertices[l,k] = i
2911
2912            zone, easting, northing = redfearn(lat,lon)
2913
            msg = 'Zone boundary crossed at longitude = %f' %lon
2915            #assert zone == refzone, msg
2916            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
2917            x[i] = easting
2918            y[i] = northing
2919            i += 1
2920
2921
2922    #Construct 2 triangles per 'rectangular' element
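    #Sketch of one grid cell (l increases to the right, k upwards):
    #
    #   v1 --- v3        upper element: [v1, v2, v3]
    #   |     / |
    #   |   /   |        lower element: [v4, v3, v2]
    #   v2 --- v4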
2923    volumes = []
2924    for l in range(number_of_longitudes-1):    #X direction
2925        for k in range(number_of_latitudes-1): #Y direction
2926            v1 = vertices[l,k+1]
2927            v2 = vertices[l,k]
2928            v3 = vertices[l+1,k+1]
2929            v4 = vertices[l+1,k]
2930
2931            volumes.append([v1,v2,v3]) #Upper element
2932            volumes.append([v4,v3,v2]) #Lower element
2933
2934    volumes = array(volumes)
2935
    if origin is None:
2937        zone = refzone
2938        xllcorner = min(x)
2939        yllcorner = min(y)
2940    else:
2941        zone = origin[0]
2942        xllcorner = origin[1]
2943        yllcorner = origin[2]
2944
2945
2946    outfile.xllcorner = xllcorner
2947    outfile.yllcorner = yllcorner
2948    outfile.zone = zone
2949
2950
2951    if elevation is not None:
2952        z = elevation
2953    else:
2954        if inverted_bathymetry:
2955            z = -1*elevations
2956        else:
2957            z = elevations
2958    #FIXME: z should be obtained from MOST and passed in here
2959
2960    from Numeric import resize
2961    z = resize(z,outfile.variables['z'][:].shape)
2962    outfile.variables['x'][:] = x - xllcorner
2963    outfile.variables['y'][:] = y - yllcorner
    outfile.variables['z'][:] = z             #FIXME HACK for backwards compat.
2965    outfile.variables['elevation'][:] = z
2966    outfile.variables['time'][:] = times   #Store time relative
2967    outfile.variables['volumes'][:] = volumes.astype(Int32) #For Opteron 64
2968
2969
2970
2971    #Time stepping
2972    stage = outfile.variables['stage']
2973    xmomentum = outfile.variables['xmomentum']
2974    ymomentum = outfile.variables['ymomentum']
2975
2976    if verbose: print 'Converting quantities'
2977    n = len(times)
2978    for j in range(n):
2979        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
2980        i = 0
2981        for k in range(number_of_latitudes):      #Y direction
2982            for l in range(number_of_longitudes): #X direction
2983                w = zscale*amplitudes[j,k,l]/100 + mean_stage
2984                stage[j,i] = w
2985                h = w - z[i]
2986                xmomentum[j,i] = uspeed[j,k,l]/100*h
2987                ymomentum[j,i] = vspeed[j,k,l]/100*h
2988                i += 1
2989
2991
2992    #FIXME: Refactor using code from file_function.statistics
2993    #Something like print swwstats(swwname)
2994    if verbose:
2995        x = outfile.variables['x'][:]
2996        y = outfile.variables['y'][:]
2997        times = outfile.variables['time'][:]
2998        print '------------------------------------------------'
2999        print 'Statistics of output file:'
3000        print '  Name: %s' %swwname
3001        print '  Reference:'
3002        print '    Lower left corner: [%f, %f]'\
3003              %(xllcorner, yllcorner)
3004        print '    Start time: %f' %starttime
3005        print '  Extent:'
3006        print '    x [m] in [%f, %f], len(x) == %d'\
3007              %(min(x.flat), max(x.flat), len(x.flat))
3008        print '    y [m] in [%f, %f], len(y) == %d'\
3009              %(min(y.flat), max(y.flat), len(y.flat))
3010        print '    t [s] in [%f, %f], len(t) == %d'\
3011              %(min(times), max(times), len(times))
3012        print '  Quantities [SI units]:'
3013        for name in ['stage', 'xmomentum', 'ymomentum', 'elevation']:
3014            q = outfile.variables[name][:].flat
3015            print '    %s in [%f, %f]' %(name, min(q), max(q))
3016
3017
3018
3019    outfile.close()
3020
3021
3022
3023
3024
3025def timefile2netcdf(filename, quantity_names = None):
3026    """Template for converting typical text files with time series to
3027    NetCDF tms file.
3028
3029
3030    The file format is assumed to be either two fields separated by a comma:
3031
3032        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...
3033
3034    E.g
3035
3036      31/08/04 00:00:00, 1.328223 0 0
3037      31/08/04 00:15:00, 1.292912 0 0
3038
3039    will provide a time dependent function f(t) with three attributes
3040
    filename is assumed to be the rootname with extensions .txt and .tms
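
    Example usage (rootname is hypothetical; reads gauge.txt and
    writes gauge.tms):

        timefile2netcdf('gauge', quantity_names=['stage'])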
3042    """
3043
3044    import time, calendar
3045    from Numeric import array
3046    from anuga.config import time_format
3047    from anuga.utilities.numerical_tools import ensure_numeric
3048
3049
3050
3051    fid = open(filename + '.txt')
3052    line = fid.readline()
3053    fid.close()
3054
3055    fields = line.split(',')
    msg = 'File %s must have the format date, value0 value1 value2 ...' %filename
3057    assert len(fields) == 2, msg
3058
3059    try:
3060        starttime = calendar.timegm(time.strptime(fields[0], time_format))
3061    except ValueError:
3062        msg = 'First field in file %s must be' %filename
3063        msg += ' date-time with format %s.\n' %time_format
3064        msg += 'I got %s instead.' %fields[0]
3065        raise DataTimeError, msg
3066
3067
3068    #Split values
3069    values = []
3070    for value in fields[1].split():
3071        values.append(float(value))
3072
3073    q = ensure_numeric(values)
3074
3075    msg = 'ERROR: File must contain at least one independent value'
3076    assert len(q.shape) == 1, msg
3077
3078
3079
3080    #Read times proper
3081    from Numeric import zeros, Float, alltrue
3082    from anuga.config import time_format
3083    import time, calendar
3084
3085    fid = open(filename + '.txt')
3086    lines = fid.readlines()
3087    fid.close()
3088
3089    N = len(lines)
3090    d = len(q)
3091
3092    T = zeros(N, Float)       #Time
3093    Q = zeros((N, d), Float)  #Values
3094
3095    for i, line in enumerate(lines):
3096        fields = line.split(',')
3097        realtime = calendar.timegm(time.strptime(fields[0], time_format))
3098
3099        T[i] = realtime - starttime
3100
3101        for j, value in enumerate(fields[1].split()):
3102            Q[i, j] = float(value)
3103
    msg = 'File %s must list time as a monotonically ' %filename
    msg += 'increasing sequence'
3106    assert alltrue( T[1:] - T[:-1] > 0 ), msg
3107
3108
3109    #Create NetCDF file
3110    from Scientific.IO.NetCDF import NetCDFFile
3111
3112    fid = NetCDFFile(filename + '.tms', 'w')
3113
3114
3115    fid.institution = 'Geoscience Australia'
3116    fid.description = 'Time series'
3117
3118
3119    #Reference point
3120    #Start time in seconds since the epoch (midnight 1/1/1970)
3121    #FIXME: Use Georef
3122    fid.starttime = starttime
3123
3124
3125    # dimension definitions
3126    #fid.createDimension('number_of_volumes', self.number_of_volumes)
3127    #fid.createDimension('number_of_vertices', 3)
3128
3129
3130    fid.createDimension('number_of_timesteps', len(T))
3131
3132    fid.createVariable('time', Float, ('number_of_timesteps',))
3133
3134    fid.variables['time'][:] = T
3135
3136    for i in range(Q.shape[1]):
        try:
            name = quantity_names[i]
        except (TypeError, IndexError):
            #quantity_names is None or too short
            name = 'Attribute%d' %i
3141
3142        fid.createVariable(name, Float, ('number_of_timesteps',))
3143        fid.variables[name][:] = Q[:,i]
3144
3145    fid.close()
3146
3147
3148def extent_sww(file_name):
3149    """
    Read in an sww file and return its extent.

    Input:
    file_name - the sww file

    Output:
    A list [min(x), max(x), min(y), max(y), min(stage), max(stage)]
    giving the spatial extent of the mesh and the range of stage values.
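
    Example usage (file name is hypothetical):

        xmin, xmax, ymin, ymax, stage_min, stage_max = extent_sww('runup.sww')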
3161    """
3162
3163
3164    from Scientific.IO.NetCDF import NetCDFFile
3165
3166    #Check contents
3167    #Get NetCDF
3168    fid = NetCDFFile(file_name, 'r')
3169
3170    # Get the variables
3171    x = fid.variables['x'][:]
3172    y = fid.variables['y'][:]
3173    stage = fid.variables['stage'][:]
3178
3179    fid.close()
3180
3181    return [min(x),max(x),min(y),max(y),min(stage.flat),max(stage.flat)]
3182
3183
def sww2domain(filename, boundary=None, t=None,
               fail_if_NaN=True, NaN_filler=0,
               verbose=False, very_verbose=False):
3187    """
3188    Usage: domain = sww2domain('file.sww',t=time (default = last time in file))
3189
3190    Boundary is not recommended if domain.smooth is not selected, as it
3191    uses unique coordinates, but not unique boundaries. This means that
    the boundary file will not be compatible with the coordinates, and will
3193    give a different final boundary, or crash.
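
    Example usage (file name is hypothetical):

        domain = sww2domain('runup.sww', verbose=True)  #Last timestep
        domain = sww2domain('runup.sww', t=600.0)       #At time 600 s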
3194    """
    NaN = 9.969209968386869e+036  #Initialise NaN
3197
3198    from Scientific.IO.NetCDF import NetCDFFile
3199    from shallow_water import Domain
3200    from Numeric import asarray, transpose, resize
3201
3202    if verbose: print 'Reading from ', filename
3203    fid = NetCDFFile(filename, 'r')    #Open existing file for read
3204    time = fid.variables['time']       #Timesteps
3205    if t is None:
3206        t = time[-1]
3207    time_interp = get_time_interp(time,t)
3208
3209    # Get the variables as Numeric arrays
3210    x = fid.variables['x'][:]             #x-coordinates of vertices
3211    y = fid.variables['y'][:]             #y-coordinates of vertices
3212    elevation = fid.variables['elevation']     #Elevation
3213    stage = fid.variables['stage']     #Water level
3214    xmomentum = fid.variables['xmomentum']   #Momentum in the x-direction
3215    ymomentum = fid.variables['ymomentum']   #Momentum in the y-direction
3216
3217    starttime = fid.starttime[0]
3218    volumes = fid.variables['volumes'][:] #Connectivity
3219    coordinates=transpose(asarray([x.tolist(),y.tolist()]))
3220
3221    conserved_quantities = []
3222    interpolated_quantities = {}
3223    other_quantities = []
3224
3225    # get geo_reference
3226    #sww files don't have to have a geo_ref
3227    try:
3228        geo_reference = Geo_reference(NetCDFObject=fid)
    except AttributeError:
3230        geo_reference = None
3231
3232    if verbose: print '    getting quantities'
3233    for quantity in fid.variables.keys():
3234        dimensions = fid.variables[quantity].dimensions
3235        if 'number_of_timesteps' in dimensions:
3236            conserved_quantities.append(quantity)
3237            interpolated_quantities[quantity]=\
3238                  interpolated_quantity(fid.variables[quantity][:],time_interp)
3239        else: other_quantities.append(quantity)
3240
3241    other_quantities.remove('x')
3242    other_quantities.remove('y')
3243    other_quantities.remove('z')
3244    other_quantities.remove('volumes')
3245
3246    conserved_quantities.remove('time')
3247
3248    if verbose: print '    building domain'
3249    #    From domain.Domain:
3250    #    domain = Domain(coordinates, volumes,\
3251    #                    conserved_quantities = conserved_quantities,\
3252    #                    other_quantities = other_quantities,zone=zone,\
3253    #                    xllcorner=xllcorner, yllcorner=yllcorner)
3254
3255    #   From shallow_water.Domain:
3256    coordinates=coordinates.tolist()
3257    volumes=volumes.tolist()
3258    #FIXME:should this be in mesh?(peter row)
3259    if fid.smoothing == 'Yes': unique = False
3260    else: unique = True
3261    if unique:
3262        coordinates,volumes,boundary=weed(coordinates,volumes,boundary)
3263
3264
3265    try:
3266        domain = Domain(coordinates, volumes, boundary)
3267    except AssertionError, e:
3268        fid.close()
3269        msg = 'Domain could not be created: %s. Perhaps use "fail_if_NaN=False and NaN_filler = ..."' %e
3270        raise DataDomainError, msg
3271
    if boundary is not None:
3273        domain.boundary = boundary
3274
3275    domain.geo_reference = geo_reference
3276
3277    domain.starttime=float(starttime)+float(t)
3278    domain.time=0.0
3279
3280    for quantity in other_quantities:
3281        try:
3282            NaN = fid.variables[quantity].missing_value
3283        except:
3284            pass #quantity has no missing_value number
3285        X = fid.variables[quantity][:]
3286        if very_verbose:
3287            print '       ',quantity
3288            print '        NaN =',NaN
3289            print '        max(X)'
3290            print '       ',max(X)
3291            print '        max(X)==NaN'
3292            print '       ',max(X)==NaN
3293            print ''
3294        if (max(X)==NaN) or (min(X)==NaN):
3295            if fail_if_NaN:
3296                msg = 'quantity "%s" contains no_data entry'%quantity
3297                raise DataMissingValuesError, msg
3298            else:
                data = (X != NaN)
3300                X = (X*data)+(data==0)*NaN_filler
3301        if unique:
3302            X = resize(X,(len(X)/3,3))
3303        domain.set_quantity(quantity,X)
3304    #
3305    for quantity in conserved_quantities:
3306        try:
3307            NaN = fid.variables[quantity].missing_value
3308        except:
3309            pass #quantity has no missing_value number
3310        X = interpolated_quantities[quantity]
3311        if very_verbose:
3312            print '       ',quantity
3313            print '        NaN =',NaN
3314            print '        max(X)'
3315            print '       ',max(X)
3316            print '        max(X)==NaN'
3317            print '       ',max(X)==NaN
3318            print ''
3319        if (max(X)==NaN) or (min(X)==NaN):
3320            if fail_if_NaN:
3321                msg = 'quantity "%s" contains no_data entry'%quantity
3322                raise DataMissingValuesError, msg
3323            else:
3324                data = (X<>NaN)
3325                X = (X*data)+(data==0)*NaN_filler
3326        if unique:
3327            X = resize(X,(X.shape[0]/3,3))
3328        domain.set_quantity(quantity,X)
3329
3330    fid.close()
3331    return domain
3332
def interpolated_quantity(saved_quantity, time_interp):
    """Interpolate a saved quantity in time, given the (index, ratio)
    pair produced by get_time_interp.
    """
    index, ratio = time_interp
    Q = saved_quantity
    if ratio > 0:
        q = (1 - ratio)*Q[index] + ratio*Q[index+1]
    else:
        q = Q[index]
    # Return vector of interpolated values
    return q
3344
def get_time_interp(time, t=None):
    """Find the index and ratio for time interpolation.
    Borrowed from previous abstract_2d_finite_volumes code.
    """
    if t is None:
        t = time[-1]
        index = -1
        ratio = 0.
    else:
        T = time
        tau = t
        index = 0
        msg = 'Time interval derived from file %s [%s:%s]'\
              %('FIXMEfilename', T[0], T[-1])
        msg += ' does not match model time: %s' %tau
        if tau < time[0]: raise DataTimeError, msg
        if tau > time[-1]: raise DataTimeError, msg
        while tau > time[index]: index += 1
        while tau < time[index]: index -= 1
        if tau == time[index]:
            # Protect against case where tau == time[-1] (last time)
            # - also works in general when tau == time[i]
            ratio = 0
        else:
            # t is now between index and index+1
            ratio = (tau - time[index])/(time[index+1] - time[index])
    return (index, ratio)
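
# A minimal sketch of how the two helpers above combine (the times and
# quantity values are made up for illustration):
#
#     from Numeric import array, Float
#     times = array([0.0, 10.0, 20.0], Float)
#     stage = array([[0.0, 0.0], [1.0, 1.0], [2.0, 2.0]], Float)
#     index, ratio = get_time_interp(times, t=15.0)      # -> (1, 0.5)
#     q = interpolated_quantity(stage, (index, ratio))   # -> [1.5, 1.5]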
3371
3372
def weed(coordinates, volumes, boundary=None):
    """Remove duplicated points, remapping volume and boundary
    references to the retained copy of each point.
    """
    if type(coordinates).__name__ == 'array':
        coordinates = coordinates.tolist()
    if type(volumes).__name__ == 'array':
        volumes = volumes.tolist()

    point_dict = {}
    same_point = {}
    for i in range(len(coordinates)):
        point = tuple(coordinates[i])
        if not point_dict.has_key(point):
            point_dict[point] = i
        same_point[i] = point
        # used below to change all references to point i
        # into references to its first occurrence

    coordinates = []
    i = 0
    for point in point_dict.keys():
        point = tuple(point)
        coordinates.append(list(point))
        point_dict[point] = i
        i += 1

    for volume in volumes:
        for i in range(len(volume)):
            index = volume[i]
            if index > -1:
                volume[i] = point_dict[same_point[index]]

    new_boundary = {}
    if boundary is not None:
        for segment in boundary.keys():
            point0 = point_dict[same_point[segment[0]]]
            point1 = point_dict[same_point[segment[1]]]
            label = boundary[segment]
            # FIXME: should the boundary attributes be concatenated
            # ('exterior, pond') or replaced ('pond')? (Peter Row)

            if new_boundary.has_key((point0, point1)):
                new_boundary[(point0, point1)] = new_boundary[(point0, point1)]  # + ',' + label
            elif new_boundary.has_key((point1, point0)):
                new_boundary[(point1, point0)] = new_boundary[(point1, point0)]  # + ',' + label
            else:
                new_boundary[(point0, point1)] = label

        boundary = new_boundary

    return coordinates, volumes, boundary
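
# A minimal sketch of weed() on a mesh where the last point duplicates
# the first (values made up for illustration):
#
#     coords = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [0.0, 0.0]]
#     vols = [[0, 1, 2], [3, 1, 2]]
#     coords, vols, bnd = weed(coords, vols)
#     # len(coords) is now 3 and both triangles reference the
#     # single retained copy of (0.0, 0.0)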
3428
3429
3430def decimate_dem(basename_in, stencil, cellsize_new, basename_out=None,
3431                 verbose=False):
    """Read Digital Elevation Model from the following NetCDF format (.dem)
3433
3434    Example:
3435
3436    ncols         3121
3437    nrows         1800
3438    xllcorner     722000
3439    yllcorner     5893000
3440    cellsize      25
3441    NODATA_value  -9999
3442    138.3698 137.4194 136.5062 135.5558 ..........
3443
3444    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
3445    """
3446
3447    import os
3448    from Scientific.IO.NetCDF import NetCDFFile
3449    from Numeric import Float, zeros, sum, reshape, equal
3450
3451    root = basename_in
3452    inname = root + '.dem'
3453
3454    #Open existing netcdf file to read
3455    infile = NetCDFFile(inname, 'r')
3456    if verbose: print 'Reading DEM from %s' %inname
3457
3458    #Read metadata
3459    ncols = infile.ncols[0]
3460    nrows = infile.nrows[0]
3461    xllcorner = infile.xllcorner[0]
3462    yllcorner = infile.yllcorner[0]
3463    cellsize = infile.cellsize[0]
3464    NODATA_value = infile.NODATA_value[0]
3465    zone = infile.zone[0]
3466    false_easting = infile.false_easting[0]
3467    false_northing = infile.false_northing[0]
3468    projection = infile.projection
3469    datum = infile.datum
3470    units = infile.units
3471
3472    dem_elevation = infile.variables['elevation']
3473
3474    #Get output file name
    if basename_out is None:
3476        outname = root + '_' + repr(cellsize_new) + '.dem'
3477    else:
3478        outname = basename_out + '.dem'
3479
3480    if verbose: print 'Write decimated NetCDF file to %s' %outname
3481
3482    #Determine some dimensions for decimated grid
3483    (nrows_stencil, ncols_stencil) = stencil.shape
3484    x_offset = ncols_stencil / 2
3485    y_offset = nrows_stencil / 2
3486    cellsize_ratio = int(cellsize_new / cellsize)
3487    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
3488    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio
3489
3490    #Open netcdf file for output
3491    outfile = NetCDFFile(outname, 'w')
3492
3493    #Create new file
3494    outfile.institution = 'Geoscience Australia'
3495    outfile.description = 'NetCDF DEM format for compact and portable storage ' +\
3496                           'of spatial point data'
3497    #Georeferencing
3498    outfile.zone = zone
3499    outfile.projection = projection
3500    outfile.datum = datum
3501    outfile.units = units
3502
3503    outfile.cellsize = cellsize_new
3504    outfile.NODATA_value = NODATA_value
3505    outfile.false_easting = false_easting
3506    outfile.false_northing = false_northing
3507
3508    outfile.xllcorner = xllcorner + (x_offset * cellsize)
3509    outfile.yllcorner = yllcorner + (y_offset * cellsize)
3510    outfile.ncols = ncols_new
3511    outfile.nrows = nrows_new
3512
3513    # dimension definition
3514    outfile.createDimension('number_of_points', nrows_new*ncols_new)
3515
3516    # variable definition
3517    outfile.createVariable('elevation', Float, ('number_of_points',))
3518
3519    # Get handle to the variable
3520    elevation = outfile.variables['elevation']
3521
3522    dem_elevation_r = reshape(dem_elevation, (nrows, ncols))
3523
3524    #Store data
3525    global_index = 0
3526    for i in range(nrows_new):
3527        if verbose: print 'Processing row %d of %d' %(i, nrows_new)
3528        lower_index = global_index
3529        telev =  zeros(ncols_new, Float)
3530        local_index = 0
3531        trow = i * cellsize_ratio
3532
3533        for j in range(ncols_new):
3534            tcol = j * cellsize_ratio
3535            tmp = dem_elevation_r[trow:trow+nrows_stencil, tcol:tcol+ncols_stencil]
3536
3537            #if dem contains 1 or more NODATA_values set value in
3538            #decimated dem to NODATA_value, else compute decimated
3539            #value using stencil
3540            if sum(sum(equal(tmp, NODATA_value))) > 0:
3541                telev[local_index] = NODATA_value
3542            else:
3543                telev[local_index] = sum(sum(tmp * stencil))
3544
3545            global_index += 1
3546            local_index += 1
3547
3548        upper_index = global_index
3549
3550        elevation[lower_index:upper_index] = telev
3551
3552    assert global_index == nrows_new*ncols_new, 'index not equal to number of points'
3553
3554    infile.close()
3555    outfile.close()
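
# Example (a sketch): decimate a 25 m DEM to 100 m cells using a 4x4
# averaging stencil.  'topo' is a hypothetical basename; topo.dem must
# already exist as a NetCDF DEM:
#
#     from Numeric import ones, Float
#     stencil = ones((4, 4), Float) / 16.0   # equal-weight average
#     decimate_dem('topo', stencil, 100, basename_out='topo_100m')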
3556
3557
3558
3559
def tsh2sww(filename, verbose=False): #test_tsh2sww
    """Convert a tsh/msh mesh file to an sww file.

    Useful to check whether a tsh/msh file 'looks' good.
    """

    from shallow_water import Domain
    from anuga.abstract_2d_finite_volumes.pmesh2domain import pmesh_to_domain_instance
    from os import sep, path
    from anuga.utilities.numerical_tools import mean
    # get_dataobject is defined elsewhere in this module

    if verbose: print 'Creating domain from', filename
    domain = pmesh_to_domain_instance(filename, Domain)
    if verbose: print 'Number of triangles =', len(domain)

    domain.smooth = True
    domain.format = 'sww'   # Native netcdf visualisation format
    file_path, filename = path.split(filename)
    filename, ext = path.splitext(filename)
    domain.set_name(filename)
    domain.reduction = mean
    if verbose: print 'file_path', file_path
    if file_path == '': file_path = '.'
    domain.set_datadir(file_path)

    if verbose:
        print 'Output written to ' + domain.get_datadir() + sep + \
              domain.get_name() + '.' + domain.format
    sww = get_dataobject(domain)
    sww.store_connectivity()
    sww.store_timestep('stage')
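
# Example usage (a sketch; 'mesh.tsh' is a hypothetical mesh file):
#
#     tsh2sww('mesh.tsh', verbose=True)
#     # writes mesh.sww next to the input file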
3592
3593
3594def asc_csiro2sww(bath_dir,
3595                  elevation_dir,
3596                  ucur_dir,
3597                  vcur_dir,
3598                  sww_file,
3599                  minlat = None, maxlat = None,
3600                  minlon = None, maxlon = None,
3601                  zscale=1,
3602                  mean_stage = 0,
3603                  fail_on_NaN = True,
3604                  elevation_NaN_filler = 0,
3605                  bath_prefix='ba',
3606                  elevation_prefix='el',
3607                  verbose=False):
    """
    Produce an sww boundary file from ESRI ASCII data from CSIRO.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum.

    Assume:
    All files are in ESRI ASCII format.

    4 types of information:
    bathymetry
    elevation
    u velocity
    v velocity

    Assumptions:
    The metadata of all the files is the same.
    Each type is in a separate directory.
    One bath file with extension .000.
    The time period is less than 24 hrs and uniform.
    """
3629    from Scientific.IO.NetCDF import NetCDFFile
3630
3631    from anuga.coordinate_transforms.redfearn import redfearn
3632
    precision = Float  # So if we want to change the precision it's done here
3634
    # go into the bath dir and load the only file
3636    bath_files = os.listdir(bath_dir)
3637    #print "bath_files",bath_files
3638
3639    #fixme: make more general?
3640    bath_file = bath_files[0]
3641    bath_dir_file =  bath_dir + os.sep + bath_file
3642    bath_metadata,bath_grid =  _read_asc(bath_dir_file)
3643    #print "bath_metadata",bath_metadata
3644    #print "bath_grid",bath_grid
3645
3646    #Use the date.time of the bath file as a basis for
3647    #the start time for other files
3648    base_start = bath_file[-12:]
3649
3650    #go into the elevation dir and load the 000 file
3651    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
3652                         + base_start
3653    #elevation_metadata,elevation_grid =  _read_asc(elevation_dir_file)
3654    #print "elevation_dir_file",elevation_dir_file
3655    #print "elevation_grid", elevation_grid
3656
3657    elevation_files = os.listdir(elevation_dir)
3658    ucur_files = os.listdir(ucur_dir)
3659    vcur_files = os.listdir(vcur_dir)
3660    elevation_files.sort()
3661    # the first elevation file should be the
3662    # file with the same base name as the bath data
3663    #print "elevation_files",elevation_files
3664    #print "'el' + base_start",'el' + base_start
3665    assert elevation_files[0] == 'el' + base_start
3666
3667    #assert bath_metadata == elevation_metadata
3668
3669
3670
3671    number_of_latitudes = bath_grid.shape[0]
3672    number_of_longitudes = bath_grid.shape[1]
3673    #number_of_times = len(os.listdir(elevation_dir))
3674    #number_of_points = number_of_latitudes*number_of_longitudes
3675    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3676
3677    longitudes = [bath_metadata['xllcorner']+x*bath_metadata['cellsize'] \
3678                  for x in range(number_of_longitudes)]
3679    latitudes = [bath_metadata['yllcorner']+y*bath_metadata['cellsize'] \
3680                 for y in range(number_of_latitudes)]
3681
    # reverse order of lat, so the first lat represents the first grid row
3683    latitudes.reverse()
3684
3685    #print "latitudes - before _get_min_max_indexes",latitudes
3686    kmin, kmax, lmin, lmax = _get_min_max_indexes(latitudes[:],longitudes[:],
3687                                                 minlat=minlat, maxlat=maxlat,
3688                                                 minlon=minlon, maxlon=maxlon)
3689
3690
3691    bath_grid = bath_grid[kmin:kmax,lmin:lmax]
3692    #print "bath_grid",bath_grid
3693    latitudes = latitudes[kmin:kmax]
3694    longitudes = longitudes[lmin:lmax]
3695    number_of_latitudes = len(latitudes)
3696    number_of_longitudes = len(longitudes)
3697    number_of_times = len(os.listdir(elevation_dir))
3698    number_of_points = number_of_latitudes*number_of_longitudes
3699    number_of_volumes = (number_of_latitudes-1)*(number_of_longitudes-1)*2
3700    #print "number_of_points",number_of_points
3701
3702    #Work out the times
3703    if len(elevation_files) > 1:
3704        # Assume: The time period is less than 24hrs.
3705        time_period = (int(elevation_files[1][-3:]) - \
3706                      int(elevation_files[0][-3:]))*60*60
3707        times = [x*time_period for x in range(len(elevation_files))]
3708    else:
3709        times = [0.0]
3710    #print "times", times
3711    #print "number_of_latitudes", number_of_latitudes
3712    #print "number_of_longitudes", number_of_longitudes
3713    #print "number_of_times", number_of_times
3714    #print "latitudes", latitudes
3715    #print "longitudes", longitudes
3716
3717
3718    if verbose:
3719        print '------------------------------------------------'
3720        print 'Statistics:'
3721        print '  Extent (lat/lon):'
3722        print '    lat in [%f, %f], len(lat) == %d'\
3723              %(min(latitudes), max(latitudes),
3724                len(latitudes))
3725        print '    lon in [%f, %f], len(lon) == %d'\
3726              %(min(longitudes), max(longitudes),
3727                len(longitudes))
3728        print '    t in [%f, %f], len(t) == %d'\
3729              %(min(times), max(times), len(times))
3730
3731    ######### WRITE THE SWW FILE #############
3732    # NetCDF file definition
3733    outfile = NetCDFFile(sww_file, 'w')
3734
3735    #Create new file
3736    outfile.institution = 'Geoscience Australia'
3737    outfile.description = 'Converted from XXX'
3738
3739
3740    #For sww compatibility
3741    outfile.smoothing = 'Yes'
3742    outfile.order = 1
3743
3744    #Start time in seconds since the epoch (midnight 1/1/1970)
3745    outfile.starttime = starttime = times[0]
3746
3747
3748    # dimension definitions
3749    outfile.createDimension('number_of_volumes', number_of_volumes)
3750
3751    outfile.createDimension('number_of_vertices', 3)
3752    outfile.createDimension('number_of_points', number_of_points)
3753    outfile.createDimension('number_of_timesteps', number_of_times)
3754
3755    # variable definitions
3756    outfile.createVariable('x', precision, ('number_of_points',))
3757    outfile.createVariable('y', precision, ('number_of_points',))
3758    outfile.createVariable('elevation', precision, ('number_of_points',))
3759
3760    #FIXME: Backwards compatibility
3761    outfile.createVariable('z', precision, ('number_of_points',))
3762    #################################
3763
3764    outfile.createVariable('volumes', Int, ('number_of_volumes',
3765                                            'number_of_vertices'))
3766
3767    outfile.createVariable('time', precision,
3768                           ('number_of_timesteps',))
3769
3770    outfile.createVariable('stage', precision,
3771                           ('number_of_timesteps',
3772                            'number_of_points'))
3773
3774    outfile.createVariable('xmomentum', precision,
3775                           ('number_of_timesteps',
3776                            'number_of_points'))
3777
3778    outfile.createVariable('ymomentum', precision,
3779                           ('number_of_timesteps',
3780                            'number_of_points'))
3781
3782    #Store
3783    from anuga.coordinate_transforms.redfearn import redfearn
3784    x = zeros(number_of_points, Float)  #Easting
3785    y = zeros(number_of_points, Float)  #Northing
3786
3787    if verbose: print 'Making triangular grid'
3788    #Get zone of 1st point.
3789    refzone, _, _ = redfearn(latitudes[0],longitudes[0])
3790
3791    vertices = {}
3792    i = 0
3793    for k, lat in enumerate(latitudes):
3794        for l, lon in enumerate(longitudes):
3795
3796            vertices[l,k] = i
3797
3798            zone, easting, northing = redfearn(lat,lon)
3799
            msg = 'Zone boundary crossed at longitude = %s' % lon
3801            #assert zone == refzone, msg
3802            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
3803            x[i] = easting
3804            y[i] = northing
3805            i += 1
3806
3807
3808    #Construct 2 triangles per 'rectangular' element
3809    volumes = []
3810    for l in range(number_of_longitudes-1):    #X direction
3811        for k in range(number_of_latitudes-1): #Y direction
3812            v1 = vertices[l,k+1]
3813            v2 = vertices[l,k]
3814            v3 = vertices[l+1,k+1]
3815            v4 = vertices[l+1,k]
3816
            # Note, this is different to the ferret2sww code,
            # since the order of the lats is reversed.
3819            volumes.append([v1,v3,v2]) #Upper element
3820            volumes.append([v4,v2,v3]) #Lower element
3821
3822    volumes = array(volumes)
3823
3824    geo_ref = Geo_reference(refzone,min(x),min(y))
3825    geo_ref.write_NetCDF(outfile)
3826
3827    # This will put the geo ref in the middle
3828    #geo_ref = Geo_reference(refzone,(max(x)+min(x))/2.0,(max(x)+min(y))/2.)
3829
3830
3831    if verbose:
3832        print '------------------------------------------------'
3833        print 'More Statistics:'
        print '  Extent (x/y):'
        print '    x in [%f, %f], len(x) == %d'\
              %(min(x), max(x), len(x))
        print '    y in [%f, %f], len(y) == %d'\
              %(min(y), max(y), len(y))
3841        print 'geo_ref: ',geo_ref
3842
3843    z = resize(bath_grid,outfile.variables['z'][:].shape)
3844    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
3845    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
3846    outfile.variables['z'][:] = z
3847    outfile.variables['elevation'][:] = z  #FIXME HACK
3848    outfile.variables['volumes'][:] = volumes.astype(Int32) #On Opteron 64
3849
3850    # do this to create an ok sww file.
3851    #outfile.variables['time'][:] = [0]   #Store time relative
3852    #outfile.variables['stage'] = z
3853    # put the next line up in the code after outfile.order = 1
3854    #number_of_times = 1
3855
3856    stage = outfile.variables['stage']
3857    xmomentum = outfile.variables['xmomentum']
3858    ymomentum = outfile.variables['ymomentum']
3859
3860    outfile.variables['time'][:] = times   #Store time relative
3861
3862    if verbose: print 'Converting quantities'
3863    n = number_of_times
3864    for j in range(number_of_times):
3865        # load in files
3866        elevation_meta, elevation_grid = \
3867           _read_asc(elevation_dir + os.sep + elevation_files[j])
3868
3869        _, u_momentum_grid =  _read_asc(ucur_dir + os.sep + ucur_files[j])
3870        _, v_momentum_grid =  _read_asc(vcur_dir + os.sep + vcur_files[j])
3871
3872        #print "elevation_grid",elevation_grid
3873        #cut matrix to desired size
3874        elevation_grid = elevation_grid[kmin:kmax,lmin:lmax]
3875        u_momentum_grid = u_momentum_grid[kmin:kmax,lmin:lmax]
3876        v_momentum_grid = v_momentum_grid[kmin:kmax,lmin:lmax]
3877        #print "elevation_grid",elevation_grid
3878        # handle missing values
3879        missing = (elevation_grid == elevation_meta['NODATA_value'])
3880        if sometrue (missing):
3881            if fail_on_NaN:
3882                msg = 'File %s contains missing values'\
3883                      %(elevation_files[j])
3884                raise DataMissingValuesError, msg
3885            else:
3886                elevation_grid = elevation_grid*(missing==0) + \
3887                                 missing*elevation_NaN_filler
3888
3889
3890        if verbose and j%((n+10)/10)==0: print '  Doing %d of %d' %(j, n)
3891        i = 0
3892        for k in range(number_of_latitudes):      #Y direction
3893            for l in range(number_of_longitudes): #X direction
3894                w = zscale*elevation_grid[k,l] + mean_stage
3895                stage[j,i] = w
3896                h = w - z[i]
3897                xmomentum[j,i] = u_momentum_grid[k,l]*h
3898                ymomentum[j,i] = v_momentum_grid[k,l]*h
3899                i += 1
3900    outfile.close()
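
# Example usage (a sketch -- all directory and file names here are
# hypothetical):
#
#     asc_csiro2sww('bath_dir', 'elevation_dir', 'ucur_dir', 'vcur_dir',
#                   'boundary.sww',
#                   minlat=-21.0, maxlat=-20.0,
#                   minlon=114.0, maxlon=115.0,
#                   verbose=True)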
3901
3902def _get_min_max_indexes(latitudes_ref,longitudes_ref,
3903                        minlat=None, maxlat=None,
3904                        minlon=None, maxlon=None):
    """
    Return min/max indexes (for slicing) of the lat and long arrays,
    covering the area specified with min/max lat/long.

    Think of the latitudes and longitudes describing a 2d surface.
    The area returned is, if possible, just big enough to cover the
    input max/min area. (This will not be possible if the max/min area
    has a section outside of the latitudes/longitudes area.)

    Assert longitudes are sorted from low to high
    (west to east, eg 148 - 151).
    Assert latitudes are sorted, ascending or descending.
    """
3918    latitudes = latitudes_ref[:]
3919    longitudes = longitudes_ref[:]
3920
3921    latitudes = ensure_numeric(latitudes)
3922    longitudes = ensure_numeric(longitudes)
3923   
3924    assert allclose(sort(longitudes), longitudes)
3925   
3926    lat_ascending = True
3927    if not allclose(sort(latitudes), latitudes):
3928        lat_ascending = False
3929        # reverse order of lat, so it's in ascending order         
3930        latitudes = latitudes[::-1]
3931        assert allclose(sort(latitudes), latitudes)
3932    #print "latitudes  in funct", latitudes
3933   
    largest_lat_index = len(latitudes) - 1
    # Cut out a smaller extent.
    if minlat is None:
        lat_min_index = 0
    else:
        lat_min_index = searchsorted(latitudes, minlat) - 1
        if lat_min_index < 0:
            lat_min_index = 0

    if maxlat is None:
        lat_max_index = largest_lat_index  # len(latitudes)
    else:
        lat_max_index = searchsorted(latitudes, maxlat)
        if lat_max_index > largest_lat_index:
            lat_max_index = largest_lat_index

    if minlon is None:
        lon_min_index = 0
    else:
        lon_min_index = searchsorted(longitudes, minlon) - 1
        if lon_min_index < 0:
            lon_min_index = 0

    if maxlon is None:
        lon_max_index = len(longitudes)
    else:
        lon_max_index = searchsorted(longitudes, maxlon)
3962
    # Reversing the indexes, if the lat array is descending
3964    if lat_ascending is False:
3965        lat_min_index, lat_max_index = largest_lat_index - lat_max_index , \
3966                                       largest_lat_index - lat_min_index
3967    lat_max_index = lat_max_index + 1 # taking into account how slicing works
3968    lon_max_index = lon_max_index + 1 # taking into account how slicing works
3969
3970    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
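
# A minimal sketch of the index helper (values made up; note the +1 on
# the max indexes so they can be used directly as slice bounds):
#
#     lats = [-21.0, -20.5, -20.0, -19.5]   # ascending or descending
#     lons = [114.0, 114.5, 115.0]
#     kmin, kmax, lmin, lmax = _get_min_max_indexes(
#         lats, lons, minlat=-20.6, maxlat=-20.0, minlon=114.2, maxlon=115.0)
#     # lats[kmin:kmax] and lons[lmin:lmax] just cover the requested area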
3971
3972
3973def _read_asc(filename, verbose=False):
    """Read an ESRI ASCII file of the following format (.asc)
3975
3976    Example:
3977
3978    ncols         3121
3979    nrows         1800
3980    xllcorner     722000
3981    yllcorner     5893000
3982    cellsize      25
3983    NODATA_value  -9999
3984    138.3698 137.4194 136.5062 135.5558 ..........
3985
3986    """
3987
3988    datafile = open(filename)
3989
3990    if verbose: print 'Reading DEM from %s' %(filename)
3991    lines = datafile.readlines()
3992    datafile.close()
3993
3994    if verbose: print 'Got', len(lines), ' lines'
3995
3996    ncols = int(lines.pop(0).split()[1].strip())
3997    nrows = int(lines.pop(0).split()[1].strip())
3998    xllcorner = float(lines.pop(0).split()[1].strip())
3999    yllcorner = float(lines.pop(0).split()[1].strip())
4000    cellsize = float(lines.pop(0).split()[1].strip())
4001    NODATA_value = float(lines.pop(0).split()[1].strip())
4002
4003    assert len(lines) == nrows
4004
4005    #Store data
4006    grid = []
4007
    for i, line in enumerate(lines):
        cells = line.split()
        assert len(cells) == ncols
        grid.append(array([float(x) for x in cells]))
    grid = array(grid)
4014
4015    return {'xllcorner':xllcorner,
4016            'yllcorner':yllcorner,
4017            'cellsize':cellsize,
4018            'NODATA_value':NODATA_value}, grid
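
# Example (a sketch; 'grid.asc' is a hypothetical file in the format
# shown in the docstring above):
#
#     metadata, grid = _read_asc('grid.asc')
#     # metadata holds xllcorner, yllcorner, cellsize and NODATA_value;
#     # grid is a (nrows, ncols) Numeric array of floats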
4019
4020
4021
4022    ####  URS 2 SWW  ###
4023
4024lon_name = 'LON'
4025lat_name = 'LAT'
4026time_name = 'TIME'
precision = Float  # So if we want to change the precision it's done here
4028class Write_nc:
4029    """
4030    Write an nc file.
4031   
4032    Note, this should be checked to meet cdc netcdf conventions for gridded
4033    data. http://www.cdc.noaa.gov/cdc/conventions/cdc_netcdf_standard.shtml
4034   
4035    """
4036    def __init__(self,
4037                 quantity_name,
4038                 file_name,
4039                 time_step_count,
4040                 time_step,
4041                 lon,
4042                 lat):
        """
        time_step_count is the number of time steps.
        time_step is the time step size.

        pre-condition: quantity_name must be 'HA', 'UA' or 'VA'.
        """
4049        self.quantity_name = quantity_name
        quantity_units = {'HA': 'METERS',
                          'UA': 'METERS/SECOND',
                          'VA': 'METERS/SECOND'}
4053       
4054        #self.file_name = file_name
4055        self.time_step_count = time_step_count
4056        self.time_step = time_step
4057
4058        # NetCDF file definition
4059        self.outfile = NetCDFFile(file_name, 'w')
4060        outfile = self.outfile       
4061
4062        #Create new file
4063        nc_lon_lat_header(outfile, lon, lat)
4064   
4065        # TIME
4066        outfile.createDimension(time_name, None)
4067        outfile.createVariable(time_name, precision, (time_name,))
4068
4069        #QUANTITY
4070        outfile.createVariable(self.quantity_name, precision,
4071                               (time_name, lat_name, lon_name))
4072        outfile.variables[self.quantity_name].missing_value=-1.e+034
4073        outfile.variables[self.quantity_name].units= \
4074                                 quantity_units[self.quantity_name]
4075        outfile.variables[lon_name][:]= ensure_numeric(lon)
4076        outfile.variables[lat_name][:]= ensure_numeric(lat)
4077
4078        #Assume no one will be wanting to read this, while we are writing
4079        #outfile.close()
4080       
4081    def store_timestep(self, quantity_slice):
4082        """
4083        quantity_slice is the data to be stored at this time step
4084        """
4085       
4086        outfile = self.outfile
4087       
4088        # Get the variables
4089        time = outfile.variables[time_name]
4090        quantity = outfile.variables[self.quantity_name]
4091           
4092        i = len(time)
4093
4094        #Store time
4095        time[i] = i*self.time_step #self.domain.time
4096        quantity[i,:] = quantity_slice*100 # To convert from m to cm
4097                                           # And m/s to cm/sec
4098       
4099    def close(self):
4100        self.outfile.close()
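
# A minimal sketch of the Write_nc workflow (file name and values made
# up for illustration):
#
#     from Numeric import zeros, Float
#     lon = [150.0, 150.5, 151.0]
#     lat = [-35.0, -34.5]
#     nc = Write_nc('HA', 'example_ha.nc', time_step_count=2,
#                   time_step=60.0, lon=lon, lat=lat)
#     for i in range(2):
#         nc.store_timestep(zeros((len(lat), len(lon)), Float) + i)
#     nc.close()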
4101
4102def urs2sww(basename_in='o', basename_out=None, verbose=False,
4103            remove_nc_files=True,
4104            minlat=None, maxlat=None,
4105            minlon= None, maxlon=None,
4106            mint=None, maxt=None,
4107            mean_stage=0,
4108            origin = None,
4109            zscale=1,
4110            fail_on_NaN=True,
4111            NaN_filler=0,
4112            elevation=None):
    """
    Convert URS C binary format for wave propagation to
    sww format native to abstract_2d_finite_volumes.

    Specify only basename_in and read files of the form
    basefilename-z-mux, basefilename-e-mux and basefilename-n-mux containing
    relative height, x-velocity and y-velocity, respectively.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum. The latitude and longitude
    information is for a grid.

    min's and max's: If omitted - the full extent is used.
    To include a value min may equal it, while max must exceed it.
    Lat and lon are assumed to be in decimal degrees.
    NOTE: minlon is the western-most boundary.

    origin is a 3-tuple with geo referenced
    UTM coordinates (zone, easting, northing).
    It will be the origin of the sww file. This shouldn't be used,
    since all of anuga should be able to handle an arbitrary origin.

    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE,
    which means that latitude is the fastest
    varying dimension (row major order, so to speak).

    In URS C binary the latitudes and longitudes are in ascending order.
    """
4142   
    if basename_out is None:
4144        basename_out = basename_in
4145    files_out = urs2nc(basename_in, basename_out)
4146    ferret2sww(basename_out,
4147               minlat=minlat,
4148               maxlat=maxlat,
4149               minlon=minlon,
4150               maxlon=maxlon,
4151               mint=mint,
4152               maxt=maxt,
4153               mean_stage=mean_stage,
4154               origin=origin,
4155               zscale=zscale,
4156               fail_on_NaN=fail_on_NaN,
4157               NaN_filler=NaN_filler,
4158               inverted_bathymetry=True,
4159               verbose=verbose)
4160    #print "files_out",files_out
4161    if remove_nc_files:
4162        for file_out in files_out:
4163            os.remove(file_out)
4164   
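# Example usage (a sketch; o-z-mux, o-e-mux and o-n-mux must exist):
#
#     urs2sww(basename_in='o', basename_out='event', verbose=True)
#     # converts via intermediate nc files and, unless remove_nc_files
#     # is set False, deletes them afterwards
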
def urs2nc(basename_in='o', basename_out='urs'):
    """
    Convert the 3 urs files to 4 nc files.

    The urs input file names must be:
    [basename_in]-z-mux
    [basename_in]-e-mux
    [basename_in]-n-mux
    """
4175    files_in = [basename_in+'-z-mux',
4176                basename_in+'-e-mux',
4177                basename_in+'-n-mux']
4178    files_out = [basename_out+'_ha.nc',
4179                 basename_out+'_ua.nc',
4180                 basename_out+'_va.nc']
4181    quantities = ['HA','UA','VA']
4182
4183    for file_name in files_in:
        if not os.access(file_name, os.F_OK):
            msg = 'File %s does not exist or is not accessible' % file_name
4186            raise IOError, msg
4187       
4188    hashed_elevation = None
4189    for file_in, file_out, quantity in map(None, files_in,
4190                                           files_out,
4191                                           quantities):
4192        lonlatdep, lon, lat, depth = _binary_c2nc(file_in,
4193                                         file_out,
4194                                         quantity)
4195        #print "lonlatdep", lonlatdep
        if hashed_elevation is None:
4197            elevation_file = basename_out+'_e.nc'
4198            write_elevation_nc(elevation_file,
4199                                lon,
4200                                lat,
4201                                depth)
4202            hashed_elevation = myhash(lonlatdep)
4203        else:
4204            msg = "The elevation information in the mux files is inconsistent"
4205            assert hashed_elevation == myhash(lonlatdep), msg
4206    files_out.append(elevation_file)
4207    return files_out
4208   
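# For basename_out='urs' the files written are urs_ha.nc, urs_ua.nc,
# urs_va.nc and urs_e.nc, and all four names are returned:
#
#     files = urs2nc(basename_in='o', basename_out='urs')
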
4209def _binary_c2nc(file_in, file_out, quantity):
    """
    Read in a quantity urs file and write a quantity nc file.
    Additionally, return the depth and lat/long info,
    so it can be written to a file.
    """
    columns = 3  # long, lat, depth
4216    mux_file = open(file_in, 'rb')
4217   
4218    # Number of points/stations
4219    (points_num,)= unpack('i',mux_file.read(4))
4220
4221    # nt, int - Number of time steps
4222    (time_step_count,)= unpack('i',mux_file.read(4))
4223
4224    #dt, float - time step, seconds
4225    (time_step,) = unpack('f', mux_file.read(4))
4226   
    msg = "Bad data in the mux file."
    if points_num < 0 or time_step_count < 0 or time_step < 0:
        mux_file.close()
        raise ANUGAError, msg
4237   
4238    lonlatdep = p_array.array('f')
4239    lonlatdep.read(mux_file, columns * points_num)
4240    lonlatdep = array(lonlatdep, typecode=Float)   
4241    lonlatdep = reshape(lonlatdep, (points_num, columns))
4242   
4243    lon, lat, depth = lon_lat2grid(lonlatdep)
    lon_sorted = list(lon)
    lon_sorted.sort()

    if list(lon) != lon_sorted:
        msg = "Longitudes in mux file are not in ascending order"
        raise IOError, msg

    lat_sorted = list(lat)
    lat_sorted.sort()

    if list(lat) != lat_sorted:
        msg = "Latitudes in mux file are not in ascending order"
        raise IOError, msg
4255   
4256    nc_file = Write_nc(quantity,
4257                       file_out,
4258                       time_step_count,
4259                       time_step,
4260                       lon,
4261                       lat)
4262
4263    for i in range(time_step_count):
4264        #Read in a time slice  from mux file 
4265        hz_p_array = p_array.array('f')
4266        hz_p_array.read(mux_file, points_num)
4267        hz_p = array(hz_p_array, typecode=Float)
4268        hz_p = reshape(hz_p, (len(lon), len(lat)))
4269        hz_p = transpose(hz_p) #mux has lat varying fastest, nc has long v.f.
4270
4271        #write time slice to nc file
4272        nc_file.store_timestep(hz_p)
4273    mux_file.close()
4274    nc_file.close()
4275
4276    return lonlatdep, lon, lat, depth
4277   
4278
4279def write_elevation_nc(file_out, lon, lat, depth_vector):
4280    """
4281    Write an nc elevation file.
4282    """
4283   
4284    # NetCDF file definition
4285    outfile = NetCDFFile(file_out, 'w')
4286
4287    #Create new file
4288    nc_lon_lat_header(outfile, lon, lat)
4289   
4290    # ELEVATION
4291    zname = 'ELEVATION'
4292    outfile.createVariable(zname, precision, (lat_name, lon_name))
4293    outfile.variables[zname].units='CENTIMETERS'
4294    outfile.variables[zname].missing_value=-1.e+034
4295
4296    outfile.variables[lon_name][:]= ensure_numeric(lon)
4297    outfile.variables[lat_name][:]= ensure_numeric(lat)
4298
4299    depth = reshape(depth_vector, ( len(lat), len(lon)))
4300    outfile.variables[zname][:]= depth
4301   
4302    outfile.close()
4303   
4304def nc_lon_lat_header(outfile, lon, lat):
4305    """
4306    outfile is the netcdf file handle.
4307    lon - a list/array of the longitudes
4308    lat - a list/array of the latitudes
4309    """
4310   
4311    outfile.institution = 'Geoscience Australia'
4312    outfile.description = 'Converted from URS binary C'
4313   
4314    # Longitude
4315    outfile.createDimension(lon_name, len(lon))
4316    outfile.createVariable(lon_name, precision, (lon_name,))
4317    outfile.variables[lon_name].point_spacing='uneven'
4318    outfile.variables[lon_name].units='degrees_east'
4319    outfile.variables[lon_name].assignValue(lon)
4320
4321
4322    # Latitude
4323    outfile.createDimension(lat_name, len(lat))
4324    outfile.createVariable(lat_name, precision, (lat_name,))
4325    outfile.variables[lat_name].point_spacing='uneven'
4326    outfile.variables[lat_name].units='degrees_north'
4327    outfile.variables[lat_name].assignValue(lat)
4328
4329
4330   
def lon_lat2grid(long_lat_dep):
    """
    Given a list of points that are assumed to lie on a grid,
    return the longs and lats of the grid.
    long_lat_dep is an array where each row is a position.
    The first column is longitudes.
    The second column is latitudes.

    The latitude is the fastest varying dimension - as in mux files.
    """
4341    LONG = 0
4342    LAT = 1
4343    QUANTITY = 2
4344
4345    long_lat_dep = ensure_numeric(long_lat_dep, Float)
4346   
4347    num_points = long_lat_dep.shape[0]
4348    this_rows_long = long_lat_dep[0,LONG]
4349
    # Count the number of unique latitudes
    i = 0
    while i < num_points and long_lat_dep[i,LONG] == this_rows_long:
        i += 1
    # determine the lats and longs from the grid
    lat = long_lat_dep[:i, LAT]
    long = long_lat_dep[::i, LONG]
4357   
4358    lenlong = len(long)
4359    lenlat = len(lat)
4360    #print 'len lat', lat, len(lat)
4361    #print 'len long', long, len(long)
4362         
4363    msg = 'Input data is not gridded'     
4364    assert num_points % lenlat == 0, msg
4365    assert num_points % lenlong == 0, msg
4366         
4367    # Test that data is gridded       
4368    for i in range(lenlong):
4369        msg = 'Data is not gridded.  It must be for this operation'
4370        first = i*lenlat
4371        last = first + lenlat
4372               
4373        assert allclose(long_lat_dep[first:last,LAT], lat), msg
4374        assert allclose(long_lat_dep[first:last,LONG], long[i]), msg
4375   
4376   
4377#    print 'range long', min(long), max(long)
4378#    print 'range lat', min(lat), max(lat)
4379#    print 'ref long', min(long_lat_dep[:,0]), max(long_lat_dep[:,0])
4380#    print 'ref lat', min(long_lat_dep[:,1]), max(long_lat_dep[:,1])
4381   
4382   
4383   
    msg = 'Out of range latitudes/longitudes'
    for l in lat: assert -90 < l < 90, msg
    for l in long: assert -180 < l < 180, msg
4387
4388    # Changing quantity from lat being the fastest varying dimension to
4389    # long being the fastest varying dimension
4390    # FIXME - make this faster/do this a better way
4391    # use numeric transpose, after reshaping the quantity vector
4392#    quantity = zeros(len(long_lat_dep), Float)
4393    quantity = zeros(num_points, Float)
4394   
4395#    print 'num',num_points
4396    for lat_i, _ in enumerate(lat):
4397        for long_i, _ in enumerate(long):
4398            q_index = lat_i*lenlong+long_i
4399            lld_index = long_i*lenlat+lat_i
4400#            print 'lat_i', lat_i, 'long_i',long_i, 'q_index', q_index, 'lld_index', lld_index
4401            temp = long_lat_dep[lld_index, QUANTITY]
4402            quantity[q_index] = temp
4403           
4404    return long, lat, quantity
4405
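# A minimal sketch of lon_lat2grid on a 2x2 grid (latitude varies
# fastest within the input rows, as in mux files; values made up):
#
#     from Numeric import array, Float
#     lld = array([[150.0, -35.0, 1.0],
#                  [150.0, -34.0, 2.0],
#                  [151.0, -35.0, 3.0],
#                  [151.0, -34.0, 4.0]], Float)
#     long, lat, quantity = lon_lat2grid(lld)
#     # long -> [150.0, 151.0], lat -> [-35.0, -34.0]
#     # quantity -> [1.0, 3.0, 2.0, 4.0]  (longitude now varies fastest)
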
4406    ####  END URS 2 SWW  ###     
4407#-------------------------------------------------------------
4408if __name__ == "__main__":
4409    pass
4410