source: trunk/anuga_core/source/anuga/utilities/plot_utils.py @ 9193

Last change on this file since 9193 was 9193, checked in by steve, 10 years ago

Test Linear interpolation instead of nearest neighbours

1""" Random utilities for reading sww file data and for plotting
2(in ipython, or in scripts)
3
4    Functionality of note:
5
6    util.get_output -- read the data from a single sww file
7    into a single object
8   
9    util.combine_outputs -- read the data from a list of sww
10    files into a single object
11   
12    util.near_transect -- for finding the indices of points
13                          'near' to a given line, and
14                          assigning these points a
15                          coordinate along that line.
16
17    This is useful for plotting outputs which are 'almost' along a
18    transect (e.g. a channel cross-section) -- see example below
19
20    util.sort_sww_filenames -- match sww filenames by a wildcard, and order
21                               them according to their 'time'. This means that
22                               they can be stuck together using
23                               'combine_outputs' correctly
24
25    util.triangle_areas -- compute the areas of every triangle
26                           in a get_outputs object [ must be vertex-based]
27
28    util.water_volume -- compute the water volume at every
29                         time step in an sww file (needs both
30                         vertex and centroid value input).
31
32    util.Make_Geotif -- convert sww centroids to a georeferenced tiff
33 
34    Here is an example ipython session which uses some of these functions:
35
36    > import util
37    > from matplotlib import pyplot as pyplot
38    > p=util.get_output('myfile.sww',minimum_allowed_height=0.01)
39    > p2=util.get_centroids(p,velocity_extrapolation=True)
40    > xxx=util.near_transect(p,[95., 85.], [120.,68.],tol=2.) # Could equally well use p2
41    > pyplot.ion() # Interactive plotting
42    > pyplot.scatter(xxx[1],p.vel[140,xxx[0]],color='red') # Plot along the transect
43
44    FIXME: TODO -- Convert to a single function 'get_output', which can either take a
45          single filename, a list of filenames, or a wildcard defining a number of
46          filenames, and ensure that in each case, the output will be as desired.
47
48"""
49from anuga.file.netcdf import NetCDFFile
50import numpy
51import copy
52
53class combine_outputs:
54    """
55    Read in a list of filenames, and combine all their outputs into a single object.
56    e.g.:
57
58    p = util.combine_outputs(['file1.sww', 'file1_time_10000.sww', 'file1_time_20000.sww'], 0.01)
59   
60    will make an object p which has components p.x,p.y,p.time,p.stage, .... etc,
61    where the values of stage / momentum / velocity from the sww files are concatenated as appropriate.
62
63    This is nice for interactive interrogation of model outputs, or for sticking together outputs in scripts
64   
65    WARNING: It is easy to use lots of memory, if the sww files are large.
66
67    Note: If you want the centroid values, then you could subsequently use:
68
69    p2 = util.get_centroids(p,velocity_extrapolation=False)
70
71    which would make an object p2 that is like p, but holds information at centroids
72    """
73    def __init__(self, filename_list, minimum_allowed_height=1.0e-03, verbose=False):
74        #
75        # Go through the sww files in 'filename_list', and combine them into one object.
76        #
77
78        for i, filename in enumerate(filename_list):
79            if verbose: print i, filename
80            # Store output from filename
81            p_tmp = get_output(filename, minimum_allowed_height,verbose=verbose)
82            if(i==0):
83                # Create self
84                p1=p_tmp
85            else:
86                # Append extra data to self
87                # Note that p1.x, p1.y, p1.vols, p1.elev should not change
88                assert (p1.x == p_tmp.x).all()
89                assert (p1.y == p_tmp.y).all()
90                assert (p1.vols ==p_tmp.vols).all()
91                p1.time = numpy.append(p1.time, p_tmp.time)
92                p1.stage = numpy.append(p1.stage, p_tmp.stage, axis=0)
93                p1.height = numpy.append(p1.height, p_tmp.height, axis=0)
94                p1.xmom = numpy.append(p1.xmom, p_tmp.xmom, axis=0)
95                p1.ymom = numpy.append(p1.ymom, p_tmp.ymom, axis=0)
96                p1.xvel = numpy.append(p1.xvel, p_tmp.xvel, axis=0)
97                p1.yvel = numpy.append(p1.yvel, p_tmp.yvel, axis=0)
98                p1.vel = numpy.append(p1.vel, p_tmp.vel, axis=0)
99       
100        self.x, self.y, self.time, self.vols, self.stage, \
101                self.height, self.elev, self.friction, self.xmom, self.ymom, \
102                self.xvel, self.yvel, self.vel, self.minimum_allowed_height,\
103                self.xllcorner, self.yllcorner, self.timeSlices =\
104                p1.x, p1.y, p1.time, p1.vols, p1.stage, \
105                p1.height, p1.elev, p1.friction, p1.xmom, p1.ymom, \
106                p1.xvel, p1.yvel, p1.vel, p1.minimum_allowed_height,\
107                p1.xllcorner, p1.yllcorner, p1.timeSlices
108
109        self.filename = p1.filename
110        self.verbose = p1.verbose
111
112
113####################
114
115def sort_sww_filenames(sww_wildcard):
116    # Function to take a 'wildcard' sww filename,
117    # and return a list of all filenames of this type,
118    # sorted by their time.
119    # This can then be used efficiently in 'combine_outputs'
120    # if you have many filenames starting with the same pattern
121    import glob
122    filenames=glob.glob(sww_wildcard)
123   
124    # Extract time from filenames
125    file_time=range(len(filenames)) # Predefine
126     
127    for i,filename in enumerate(filenames):
128        filesplit=filename.rsplit('_time_')
129        if(len(filesplit)>1):
130            file_time[i]=int(filesplit[1].split('_0.sww')[0])
131        else:
132            file_time[i]=0         
133   
134    name_and_time=zip(file_time,filenames)
135    name_and_time.sort() # Sort by file_time
136   
137    output_times, output_names = zip(*name_and_time)
138   
139    return list(output_names)
140
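# Example sketch (the wildcard and filenames below are hypothetical): sort the
# outputs of a run that was split over several sww files, then join them:
#   ordered_files = sort_sww_filenames('myrun*.sww')
#   p = combine_outputs(ordered_files, minimum_allowed_height=0.01)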
141#####################################################################
142class get_output:
143    """Read in data from an .sww file in a convenient form
144       e.g.
145        p = util.get_output('channel3.sww', minimum_allowed_height=0.01)
146       
147       p then contains most relevant information as e.g., p.stage, p.elev, p.xmom, etc
148    """
149    def __init__(self, filename, minimum_allowed_height=1.0e-03, timeSlices='all', verbose=False):
150                # FIXME: verbose is not used
151        self.x, self.y, self.time, self.vols, self.stage, \
152                self.height, self.elev, self.friction, self.xmom, self.ymom, \
153                self.xvel, self.yvel, self.vel, self.minimum_allowed_height,\
154                self.xllcorner, self.yllcorner, self.timeSlices = \
155                read_output(filename, minimum_allowed_height,copy.copy(timeSlices))
156        self.filename = filename
157        self.verbose = verbose
158
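# Example sketch (using the hypothetical file above): read only the last time
# slice, or the maxima over time, instead of every time step:
#   p_last = get_output('channel3.sww', timeSlices='last')
#   p_max = get_output('channel3.sww', timeSlices='max')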
159####################################################################
160def getInds(varIn, timeSlices, absMax=False):
161    """
162     Convenience function to extract the requested time slices from an array.
163     There are a number of special cases that make it worthwhile
164     having in its own function
165   
166     INPUT: varIn -- numpy array, either 1D (variables in space) or 2D
167            (variables in time+space)
168            timeSlices -- time indices at which we want the variable, see read_output or get_output
169            absMax -- if True and timeSlices is 'max', then get max-absolute-values
170     OUTPUT:
171            var -- the input array restricted to the requested time slices (kept as a 2D array with one row when timeSlices is 'max')
172    """
173    var=copy.copy(varIn) # avoid python pointer issues
174    if (len(varIn.shape)==2):
175        # Get particular time-slices, unless the variable is constant
176        # (e.g. elevation is often constant)
177        if timeSlices == 'max':
178            # Extract the maxima over time, assuming there are multiple
179            # time-slices, and ensure the var is still a 2D array
180            if( not absMax):
181                var=var.max(axis=0)
182            else:
183                # For variables xmom,ymom,xvel,yvel we want the 'maximum-absolute-value'
184                # We could do this everywhere, but I assume the loop is a bit slower
185                varInds=abs(var).argmax(axis=0)
186                varNew=varInds*0.
187                for i in range(len(varInds)):
188                    varNew[i] = var[varInds[i],i]
189                #var=[var[varInds[i],i] for i in varInds]
190                var=varNew
191            var=var.reshape((1,len(var)))
192        else:
193            var=var[timeSlices,:]
194   
195    return var
196
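# Example sketch of getInds behaviour, on a made-up array:
#   arr = numpy.arange(12.).reshape((3, 4))        # 3 time slices, 4 points
#   getInds(arr, timeSlices=[0, 2])                # rows 0 and 2, shape (2, 4)
#   getInds(arr, timeSlices='max')                 # max over time, shape (1, 4)
#   getInds(arr, timeSlices='max', absMax=True)    # max-magnitude values, shape (1, 4)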
197############################################################################
198
199def read_output(filename, minimum_allowed_height, timeSlices):
200    """
201     Purpose: To read the sww file, and output a number of variables as arrays that
202              we can then e.g. plot, interrogate
203
204              See get_output for the typical interface, and get_centroids for
205                working with centroids directly
206   
207     Input: filename -- The name of an .sww file to read data from,
208                        e.g. read_sww('channel3.sww')
209            minimum_allowed_height -- zero velocity when height < this
210            timeSlices -- List of time indices to read (e.g. [100] or [0, 10, 21]), or 'all' or 'last' or 'max'
211                          If 'max', the time-max of each variable will be computed. For xmom/ymom/xvel/yvel, the
212                           one with maximum magnitude is reported
213   
214   
215     Output: x, y, time, vols, stage, height, elev, friction, xmom, ymom, xvel, yvel, vel, minimum_allowed_height, xllcorner, yllcorner, inds
216             x,y are only stored at one time
217             elevation may be stored at one or multiple times
218             everything else is stored every time step for vertices
219    """
220
221    # Import modules
222
223
224
225    # Open ncdf connection
226    fid=NetCDFFile(filename)
227   
228    time=fid.variables['time'][:]
229
230    # Treat specification of timeSlices
231    if(timeSlices=='all'):
232        inds=range(len(time))
233    elif(timeSlices=='last'):
234        inds=[len(time)-1]
235    elif(timeSlices=='max'):
236        inds='max' #
237    else:
238        try:
239            inds=list(timeSlices)
240        except:
241            inds=[timeSlices]
242   
243    if(inds != 'max'):
244        time=time[inds]
245    else:
246        # We can't really assign a time to 'max', but I guess max(time) is
247        # technically the right thing -- if not misleading
248        time=time.max()
249
250   
251    # Get lower-left
252    xllcorner=fid.xllcorner
253    yllcorner=fid.yllcorner
254
255    # Read variables
256    x=fid.variables['x'][:]
257    y=fid.variables['y'][:]
258
259    stage=getInds(fid.variables['stage'][:], timeSlices=inds)
260    elev=getInds(fid.variables['elevation'][:], timeSlices=inds)
261
262    # Simple approach for volumes
263    vols=fid.variables['volumes'][:]
264
265    # Friction if it exists
266    if(fid.variables.has_key('friction')):
267        friction=getInds(fid.variables['friction'][:],timeSlices=inds) 
268    else:
269        # Set friction to nan if it is not stored
270        friction=elev*0.+numpy.nan
271   
272    #@ Here we get 'all' of height / xmom /ymom
273    #@ This could be done using less memory/computation in
274    #@  the case of multiple time-slices
275
276    if(fid.variables.has_key('height')):
277        heightAll=fid.variables['height'][:]
278    else:
279        # Back calculate height if it is not stored
280        heightAll=fid.variables['stage'][:]
281        if(len(heightAll.shape)==len(elev.shape)):
282            heightAll=heightAll-elev
283        else:
284            for i in range(heightAll.shape[0]):
285                heightAll[i,:]=heightAll[i,:]-elev
286    heightAll=heightAll*(heightAll>0.) # Height could be negative for tsunami algorithm
287    # Need xmom,ymom for all timesteps
288    xmomAll=fid.variables['xmomentum'][:]
289    ymomAll=fid.variables['ymomentum'][:]
290
291    height=getInds(heightAll, timeSlices=inds) 
292    # For momenta, we want maximum-absolute-value events
293    xmom=getInds(xmomAll, timeSlices=inds, absMax=True)
294    ymom=getInds(ymomAll, timeSlices=inds, absMax=True)
295
296    # velocity requires some intermediate calculation in general
297    tmp = xmomAll/(heightAll+1.0e-12)*(heightAll>minimum_allowed_height)
298    xvel=getInds(tmp,timeSlices=inds, absMax=True)
299    tmp = ymomAll/(heightAll+1.0e-12)*(heightAll>minimum_allowed_height)
300    yvel=getInds(tmp,timeSlices=inds, absMax=True)
301    tmp = (xmomAll**2+ymomAll**2)**0.5/(heightAll+1.0e-12)*(heightAll>minimum_allowed_height)
302    vel=getInds(tmp, timeSlices=inds) # Vel is always >= 0.
303
304    fid.close()
305
306    return x, y, time, vols, stage, height, elev, friction, xmom, ymom,\
307           xvel, yvel, vel, minimum_allowed_height, xllcorner,yllcorner, inds
308
309######################################################################################
310
311class get_centroids:
312    """
313    Extract centroid values from the output of get_output, OR from a
314        filename 
315    See read_output or get_centroid_values for further explanation of
316        arguments
317    e.g.
318        # Case 1 -- get vertex values first, then centroids
319        p = util.get_output('my_sww.sww', minimum_allowed_height=0.01)
320        pc=util.get_centroids(p, velocity_extrapolation=True)
321
322        # Case 2 -- get centroids directly
323        pc=util.get_centroids('my_sww.sww', velocity_extrapolation=True)
324
325    NOTE: elevation is only stored once in the output, even if it was
326          stored every timestep.
327           This is done because presently centroid elevations in ANUGA
328           do not change over time. 
329           Also lots of existing plotting code assumes elevation is a 1D
330           array
331    """
332    def __init__(self,p, velocity_extrapolation=False, verbose=False,
333                 timeSlices=None, minimum_allowed_height=1.0e-03):
334       
335        self.time, self.x, self.y, self.stage, self.xmom,\
336             self.ymom, self.height, self.elev, self.friction, self.xvel,\
337             self.yvel, self.vel, self.xllcorner, self.yllcorner, self.timeSlices= \
338             get_centroid_values(p, velocity_extrapolation,\
339                         timeSlices=copy.copy(timeSlices),\
340                         minimum_allowed_height=minimum_allowed_height,\
341                         verbose=verbose)
342                                 
343
344def get_centroid_values(p, velocity_extrapolation, verbose, timeSlices, 
345                        minimum_allowed_height):
346    """
347    Function to get centroid information -- main interface is through
348        get_centroids.
349        See get_centroids for usage examples, and read_output or get_output for further relevant info
350     Input:
351           p --  EITHER:
352                  The result of e.g. p=util.get_output('mysww.sww').
353                  See the get_output class defined above.
354                 OR:
355                  Alternatively, the name of an sww file
356   
357           velocity_extrapolation -- If true, and centroid values are not
358            in the file, then compute centroid velocities from vertex velocities, and
359            centroid momenta from centroid velocities. If false, and centroid values
360            are not in the file, then compute centroid momenta from vertex momenta,
361            and centroid velocities from centroid momenta
362   
363           timeSlices = list of integer time indices for which we want output, or
364                        'all' or 'last' or 'max'. See read_output
365   
366           minimum_allowed_height = height at which velocities are zeroed. See read_output
367   
368     Output: Values of x, y, Stage, xmom, ymom, elev, xvel, yvel, vel etc at centroids
369    """
370
371    #@ Figure out if p is a string (filename) or the output of get_output
372    pIsFile=(type(p) is str)
373 
374    if(pIsFile): 
375        fid=NetCDFFile(p) 
376    else:
377        fid=NetCDFFile(p.filename)
378
379    # UPDATE: 15/06/2014 -- below, we now get all variables directly from the file
380    #         This is more flexible, and allows us to get 'max' as well
381    #         However, it could potentially have performance penalties vs the old approach (?)
382
383    # Make 3 arrays, each containing one index of a vertex of every triangle.
384    vols=fid.variables['volumes'][:]
385    vols0=vols[:,0]
386    vols1=vols[:,1]
387    vols2=vols[:,2]
388   
389    # Get lower-left offset
390    xllcorner=fid.xllcorner
391    yllcorner=fid.yllcorner
392   
393    #@ Get timeSlices
394    # It will be either a list of integers, or 'max'
395    l=len(vols)
396    time=fid.variables['time'][:]
397    nts=len(time) # number of time slices in the file
398    if(timeSlices is None):
399        if(pIsFile):
400            # Assume all timeSlices
401            timeSlices=range(nts)
402        else:
403            timeSlices=copy.copy(p.timeSlices)
404    else:
405        # Treat word-based special cases
406        if(timeSlices == 'all'):
407            timeSlices=range(nts)
408        if(timeSlices == 'last'):
409            timeSlices=[nts-1]
410
411    #@ Get minimum_allowed_height
412    if(minimum_allowed_height is None):
413        if(pIsFile):
414            minimum_allowed_height=0.
415        else:
416            minimum_allowed_height=copy.copy(p.minimum_allowed_height)
417
418    # Treat specification of timeSlices
419    if(timeSlices=='all'):
420        inds=range(len(time))
421    elif(timeSlices=='last'):
422        inds=[len(time)-1]
423    elif(timeSlices=='max'):
424        inds='max' #
425    else:
426        try:
427            inds=list(timeSlices)
428        except:
429            inds=[timeSlices]
430   
431    if(inds != 'max'):
432        time=time[inds]
433    else:
434        # We can't really assign a time to 'max', but I guess max(time) is
435        # technically the right thing -- if not misleading
436        time=time.max()
437
438    # Get coordinates
439    x=fid.variables['x'][:]
440    y=fid.variables['y'][:]
441    x_cent=(x[vols0]+x[vols1]+x[vols2])/3.0
442    y_cent=(y[vols0]+y[vols1]+y[vols2])/3.0
443
444    def getCentVar(varkey_c, timeSlices=inds, absMax=False):
445        """
446            Convenience function, assumes knowledge of 'timeSlices' and vols0,1,2
447        """
448        if(fid.variables.has_key(varkey_c)==False):
449            # It looks like centroid values are not stored
450            # In this case, compute centroid values from vertex values
451           
452            newkey=varkey_c.replace('_c','')
453            tmp = fid.variables[newkey][:]
454            try: # array contains time slices
455                tmp=(tmp[:,vols0]+tmp[:,vols1]+tmp[:,vols2])/3.0
456            except:
457                tmp=(tmp[vols0]+tmp[vols1]+tmp[vols2])/3.0
458            var_cent=getInds(tmp, timeSlices=timeSlices, absMax=absMax)
459        else:
460            var_cent=getInds(fid.variables[varkey_c][:], timeSlices=timeSlices, absMax=absMax)
461        return var_cent
462
463    # Stage and height and elevation
464    stage_cent=getCentVar('stage_c', timeSlices=inds)
465    elev_cent=getCentVar('elevation_c', timeSlices=inds)
466
467    if(len(elev_cent.shape)==2):
468        # Coerce to 1D array, since lots of our code assumes it is
469        elev_cent=elev_cent[0,:]
470
471    height_cent=stage_cent*0.
472    for i in range(stage_cent.shape[0]):
473        height_cent[i,:]=stage_cent[i,:]-elev_cent
474
475    # Friction might not be stored at all
476    try:
477        friction_cent=getCentVar('friction_c')
478    except:
479        friction_cent=elev_cent*0.+numpy.nan
480
481    if(fid.variables.has_key('xmomentum_c')):
482        # Assume that both xmomentum,ymomentum are stored at centroids
483        # Because velocity is back computed, and we might want maxima,
484        # we get all data for convenience
485        xmomC=getCentVar('xmomentum_c', timeSlices=range(nts))
486        ymomC=getCentVar('ymomentum_c', timeSlices=range(nts))
487
488        # height might not be stored
489        try:
490            hC = getCentVar('height_c', timeSlices=range(nts))
491        except:
492            # Compute from stage
493            hC = getCentVar('stage_c', timeSlices=range(nts))
494            for i in range(hC.shape[0]):
495                hC[i,:]=hC[i,:]-elev_cent
496           
497        xmom_cent = getInds(xmomC*(hC>minimum_allowed_height), timeSlices=inds,absMax=True)
498        xvel_cent = getInds(xmomC/(hC+1.0e-06)*(hC>minimum_allowed_height), timeSlices=inds, absMax=True)
499
500        ymom_cent = getInds(ymomC*(hC>minimum_allowed_height), timeSlices=inds,absMax=True)
501        yvel_cent = getInds(ymomC/(hC+1.0e-06)*(hC>minimum_allowed_height), timeSlices=inds, absMax=True)
502
503        tmp = (xmomC**2 + ymomC**2)**0.5/(hC+1.0e-06)*(hC>minimum_allowed_height)
504        vel_cent=getInds(tmp, timeSlices=inds)
505       
506    else:
507        #@ COMPUTE CENTROIDS FROM VERTEX VALUES
508        #@
509        #@ Here we get 'all' of height / xmom /ymom
510        #@ This could be done using less memory/computation in
511        #@  the case of multiple time-slices
512        if(fid.variables.has_key('height')):
513            heightAll=fid.variables['height'][:]
514        else:
515            # Back calculate height if it is not stored
516            heightAll=fid.variables['stage'][:]
517            elev = fid.variables['elevation'][:]
518            if(len(heightAll.shape)==len(elev.shape)):
519                heightAll=heightAll-elev
520            else:
521                for i in range(heightAll.shape[0]):
522                    heightAll[i,:]=heightAll[i,:]-elev
523        heightAll=heightAll*(heightAll>0.) # Height could be negative for tsunami algorithm
524        # Need xmom,ymom for all timesteps
525        xmomAll=fid.variables['xmomentum'][:]
526        ymomAll=fid.variables['ymomentum'][:]
527       
528        if velocity_extrapolation:
529            # Compute velocity from vertex velocities, then back-compute
530            # momentum from that
531            tmp = xmomAll/(heightAll+1.0e-06)*(heightAll>minimum_allowed_height)
532            xvel=(tmp[:,vols0]+tmp[:,vols1]+tmp[:,vols2])/3.0
533            htc = (heightAll[:,vols0] + heightAll[:,vols1] + heightAll[:,vols2])/3.0
534            xvel_cent=getInds(xvel, timeSlices=inds, absMax=True)
535            xmom_cent=getInds(xvel*htc, timeSlices=inds, absMax=True)
536
537            tmp = ymomAll/(heightAll+1.0e-06)*(heightAll>minimum_allowed_height)
538            yvel=(tmp[:,vols0]+tmp[:,vols1]+tmp[:,vols2])/3.0
539            yvel_cent=getInds(yvel, timeSlices=inds, absMax=True)
540            ymom_cent=getInds(yvel*htc, timeSlices=inds, absMax=True)
541           
542            vel_cent=getInds( (xvel**2+yvel**2)**0.5, timeSlices=inds)
543           
544        else:
545            # Compute momenta from vertex momenta, then back compute velocity from that
546            tmp=xmomAll*(heightAll>minimum_allowed_height)
547            htc = (heightAll[:,vols0] + heightAll[:,vols1] + heightAll[:,vols2])/3.0
548            xmom=(tmp[:,vols0]+tmp[:,vols1]+tmp[:,vols2])/3.0
549            xmom_cent=getInds(xmom,  timeSlices=inds, absMax=True)
550            xvel_cent=getInds(xmom/(htc+1.0e-06), timeSlices=inds, absMax=True)
551
552            tmp=ymomAll*(heightAll>minimum_allowed_height)
553            ymom=(tmp[:,vols0]+tmp[:,vols1]+tmp[:,vols2])/3.0
554            ymom_cent=getInds(ymom,  timeSlices=inds, absMax=True)
555            yvel_cent=getInds(ymom/(htc+1.0e-06), timeSlices=inds, absMax=True)
556            vel_cent = getInds( (xmom**2+ymom**2)**0.5/(htc+1.0e-06), timeSlices=inds)
557
558    fid.close()
559   
560    return time, x_cent, y_cent, stage_cent, xmom_cent,\
561             ymom_cent, height_cent, elev_cent, friction_cent,\
562             xvel_cent, yvel_cent, vel_cent, xllcorner, yllcorner, inds
563
564
565def animate_1D(time, var, x, ylab=' '): #, x=range(var.shape[1]), vmin=var.min(), vmax=var.max()):
566    # Input: time = one-dimensional time vector;
567    #        var =  array with first dimension = len(time) ;
568    #        x = vector with dimension equal to var.shape[1];
569   
570    import pylab
571    import numpy
572   
573   
574
575    pylab.close()
576    pylab.ion()
577
578    # Initial plot
579    vmin=var.min()
580    vmax=var.max()
581    line, = pylab.plot( (x.min(), x.max()), (vmin, vmax), 'o')
582
583    # Lots of plots
584    for i in range(len(time)):
585        line.set_xdata(x)
586        line.set_ydata(var[i,:])
587        pylab.draw()
588        pylab.xlabel('x')
589        pylab.ylabel(ylab)
590        pylab.title('time = ' + str(time[i]))
591   
592    return
593
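# Example sketch (assuming p comes from get_output): animate the stage at the
# mesh vertices, ordered by their x coordinate:
#   order = p.x.argsort()
#   animate_1D(p.time, p.stage[:, order], p.x[order], ylab='stage')
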
594def near_transect(p, point1, point2, tol=1.):
595    # Function to get the indices of points in p within a distance 'tol' of the line
596    # joining (x1,y1) and (x2,y2)
597    # p comes from util.get_output('mysww.sww')
598    #
599    # e.g.
600    # import util
601    # from matplotlib import pyplot
602    # p=util.get_output('merewether_1m.sww',0.01)
603    # p2=util.get_centroids(p,velocity_extrapolation=True)
604    # #xxx=transect_interpolate.near_transect(p,[95., 85.], [120.,68.],tol=2.)
605    # xxx=util.near_transect(p,[95., 85.], [120.,68.],tol=2.)
606    # pyplot.scatter(xxx[1],p.vel[140,xxx[0]],color='red')
607   
608    x1=point1[0]
609    y1=point1[1]
610   
611    x2=point2[0]
612    y2=point2[1]
613   
614    # Find line equation a*x + b*y + c = 0
615    # based on y=gradient*x +intercept
616    if x1!=x2:
617        gradient= (y2-y1)/(x2-x1)
618        intercept = y1 - gradient*x1
619        #
620        a = -gradient
621        b = 1.
622        c = -intercept
623    else:
624        a=1.
625        b=0.
626        c=-x2
627   
628    # Distance formula
629    inv_denom = 1./(a**2 + b**2)**0.5
630    distp = abs(p.x*a + p.y*b + c)*inv_denom
631   
632    near_points = (distp<tol).nonzero()[0]
633   
634    # Now find a 'local' coordinate for the point, projected onto the line
635    # g1 = unit vector parallel to the line
636    # g2 = vector joining (x1,y1) and (p.x,p.y)
637    g1x = x2-x1
638    g1y = y2-y1
639    g1_norm = (g1x**2 + g1y**2)**0.5
640    g1x=g1x/g1_norm
641    g1y=g1y/g1_norm
642   
643    g2x = p.x[near_points] - x1
644    g2y = p.y[near_points] - y1
645   
646    # Dot product = projected distance == a local coordinate
647    local_coord = g1x*g2x + g1y*g2y
648   
649    # only keep coordinates between zero and the distance along the line
650    dl=((x1-x2)**2+(y1-y2)**2)**0.5
651    keepers=(local_coord<=dl)*(local_coord>=0.)
652    keepers=keepers.nonzero()
653   
654    return near_points[keepers], local_coord[keepers]
655
656########################
657# TRIANGLE AREAS, WATER VOLUME
658def triangle_areas(p, subset=None):
659    # Compute areas of triangles in p -- assumes p contains vertex information
660    # subset = vector of centroid indices to include in the computation.
661
662    if(subset is None):
663        subset=range(len(p.vols[:,0]))
664   
665    x0=p.x[p.vols[subset,0]]
666    x1=p.x[p.vols[subset,1]]
667    x2=p.x[p.vols[subset,2]]
668   
669    y0=p.y[p.vols[subset,0]]
670    y1=p.y[p.vols[subset,1]]
671    y2=p.y[p.vols[subset,2]]
672   
673    # Vectors for cross-product
674    v1_x=x0-x1
675    v1_y=y0-y1
676    #
677    v2_x=x2-x1
678    v2_y=y2-y1
679    # Area
680    area=(v1_x*v2_y-v1_y*v2_x)*0.5
681    area=abs(area)
682    return area
683
684###
685
686def water_volume(p, p2, per_unit_area=False, subset=None):
687    # Compute the water volume from p(vertex values) and p2(centroid values)
688
689    if(subset is None):
690        subset=range(len(p2.x))
691
692    l=len(p2.time)
693    area=triangle_areas(p, subset=subset)
694   
695    total_area=area.sum()
696    volume=p2.time*0.
697   
698    # This accounts for how volume is measured in ANUGA
699    # Compute in 2 steps to reduce precision error (important sometimes)
700    # Is this really needed?
701    for i in range(l):
702        #volume[i]=((p2.stage[i,subset]-p2.elev[subset])*(p2.stage[i,subset]>p2.elev[subset])*area).sum()
703        volume[i]=((p2.stage[i,subset])*(p2.stage[i,subset]>p2.elev[subset])*area).sum()
704        volume[i]=volume[i]+((-p2.elev[subset])*(p2.stage[i,subset]>p2.elev[subset])*area).sum()
705   
706    if(per_unit_area):
707        volume=volume/total_area
708   
709    return volume
710
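# Example sketch (assuming p = get_output(...) and p2 = get_centroids(p)):
#   volume = water_volume(p, p2)                          # volume at every time step
#   volume_per_area = water_volume(p, p2, per_unit_area=True)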
711
712def get_triangle_containing_point(p,point):
713
714    V = p.vols
715
716    x = p.x
717    y = p.y
718
719    l = len(x)
720
721    from anuga.geometry.polygon import is_outside_polygon,is_inside_polygon
722
723    # FIXME: Horrible brute force
724    for i in xrange(l):
725        i0 = V[i,0]
726        i1 = V[i,1]
727        i2 = V[i,2]
728        poly = [ [x[i0], y[i0]], [x[i1], y[i1]], [x[i2], y[i2]] ]
729
730        if is_inside_polygon(point, poly, closed=True):
731            return i
732
733    msg = 'Point %s not found within a triangle' %str(point)
734    raise Exception(msg)
735
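# Example sketch (assuming p is from get_output and p2 from get_centroids; the
# point is in the sww file's local coordinates, before adding xllcorner/yllcorner):
#   tri = get_triangle_containing_point(p, [95., 85.])
#   print p2.stage[-1, tri]    # final-time stage at that triangle's centroid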
736
737def get_extent(p):
738
739    import numpy
740
741    x_min = numpy.min(p.x)
742    x_max = numpy.max(p.x)
743
744    y_min = numpy.min(p.y)
745    y_max = numpy.max(p.y)
746
747    return x_min, x_max, y_min, y_max
748
749
750
751def make_grid(data, lats, lons, fileName, EPSG_CODE=None, proj4string=None):
752    """
753        Convert data,lats,lons to a georeferenced raster tif
754        INPUT: data -- array with desired raster cell values
755               lats -- 1d array with 'latitude' or 'y' range
756               lons -- 1D array with 'longitude' or 'x' range
757               fileName -- name of file to write to
758               EPSG_CODE -- Integer code with projection information in EPSG format
759               proj4string -- proj4string with projection information
760
761        NOTE: proj4string is used in preference to EPSG_CODE if available
762    """
763    try:
764        import gdal
765        import osr
766    except:
767        raise Exception, 'Cannot find gdal and/or osr python modules'
768
769    xres = lons[1] - lons[0]
770    yres = lats[1] - lats[0]
771
772    ysize = len(lats)
773    xsize = len(lons)
774
775    # Assume data/lats/longs refer to cell centres, and compute upper left coordinate
776    ulx = lons[0] - (xres / 2.)
777    uly = lats[lats.shape[0]-1] + (yres / 2.)
778
779    # GDAL magic to make the tif
780    driver = gdal.GetDriverByName('GTiff')
781    ds = driver.Create(fileName, xsize, ysize, 1, gdal.GDT_Float32)
782
783    srs = osr.SpatialReference()
784    if(proj4string is not None):
785        srs.ImportFromProj4(proj4string)
786    elif(EPSG_CODE is not None):
787        srs.ImportFromEPSG(EPSG_CODE)
788    else:
789        raise Exception, 'No spatial reference information given'
790
791    ds.SetProjection(srs.ExportToWkt())
792
793    gt = [ulx, xres, 0, uly, 0, -yres ]
794    #gt = [llx, xres, 0, lly, yres,0 ]
795    ds.SetGeoTransform(gt)
796
797    #import pdb
798    #pdb.set_trace()
799
800    outband = ds.GetRasterBand(1)
801    outband.WriteArray(data)
802
803    ds = None
804    return
805
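# Example sketch (made-up 1 degree x 1 degree grid, written in WGS84 / EPSG:4326):
#   lons = numpy.linspace(150., 151., 101)
#   lats = numpy.linspace(-34., -33., 101)
#   vals = numpy.zeros((len(lats), len(lons)))
#   make_grid(vals, lats, lons, 'example.tif', EPSG_CODE=4326)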
806##################################################################################
807
808def Make_Geotif(swwFile=None, 
809             output_quantities=['depth'],
810             myTimeStep=0, CellSize=5.0, 
811             lower_left=None, upper_right=None,
812             EPSG_CODE=None, 
813             proj4string=None,
814             velocity_extrapolation=True,
815             min_allowed_height=1.0e-05,
816             output_dir='TIFS',
817             bounding_polygon=None,
818             verbose=False):
819    """
820        Make a georeferenced tif by interpolating sww file outputs onto a regular grid (or from a 3-column array with xyz points)
821
822        You must supply projection information as either a proj4string or an integer EPSG_CODE (but not both!)
823
824        INPUTS: swwFile -- name of sww file, OR a 3-column array with x/y/z
825                    points. In the latter case x and y are assumed to be in georeferenced
826                    coordinates.  The output raster will contain 'z', and will have a name-tag
827                    based on the name in 'output_quantities'.
828                output_quantities -- list of quantities to plot, e.g.
829                                ['depth', 'velocity', 'stage','elevation','depthIntegratedVelocity','friction']
830                myTimeStep -- list containing time-index of swwFile to plot (e.g. [0, 10, 32] ) or 'last', or 'max', or 'all'
831                CellSize -- approximate pixel size for output raster [adapted to fit lower_left / upper_right]
832                lower_left -- [x0,y0] of lower left corner. If None, use extent of swwFile.
833                upper_right -- [x1,y1] of upper right corner. If None, use extent of swwFile.
834                EPSG_CODE -- Projection information as an integer EPSG code (e.g. 3123 for PRS92 Zone 3, 32756 for UTM Zone 56 S, etc).
835                             Google for info on EPSG Codes
836                proj4string -- Projection information as a proj4string (e.g. '+init=epsg:3123')
837                             Google for info on proj4strings.
838                velocity_extrapolation -- Compute velocity assuming the code extrapolates with velocity (instead of momentum)?
839                min_allowed_height -- Minimum allowed height from ANUGA
840                output_dir -- Write outputs to this directory
841                bounding_polygon -- polygon (e.g. from read_polygon) If present, only set values of raster cells inside the bounding_polygon
842               
843    """
844
845    #import pdb
846    #pdb.set_trace()
847
848    try:
849        import gdal
850        import osr
851        import scipy.io
852        import scipy.interpolate
853        import anuga
854        from anuga.utilities import plot_utils as util
855        import os
856        from matplotlib import nxutils
857    except:
858        raise Exception, 'Required modules not installed for Make_Geotif'
859
860
861    # Check whether swwFile is an array, and if so, redefine various inputs to
862    # make the code work
863    if(type(swwFile)==scipy.ndarray):
864        import copy
865        xyzPoints=copy.copy(swwFile)
866        swwFile=None
867
868    if(((EPSG_CODE is None) & (proj4string is None) )|
869       ((EPSG_CODE is not None) & (proj4string is not None))):
870        raise Exception, 'Must specify EITHER an integer EPSG_CODE describing the file projection, OR a proj4string'
871
872
873    # Make output_dir
874    try:
875        os.mkdir(output_dir)
876    except:
877        pass
878
879    if(swwFile is not None):
880        # Read in ANUGA outputs
881       
882
883           
884        if(verbose):
885            print 'Reading sww File ...'
886        p2=util.get_centroids(swwFile, velocity_extrapolation, timeSlices=myTimeStep,
887                              minimum_allowed_height=min_allowed_height)
888        xllcorner=p2.xllcorner
889        yllcorner=p2.yllcorner
890
891        #if(myTimeStep=='all'):
892        #    myTimeStep=range(len(p2.time))
893        #elif(myTimeStep=='last'):
894        #    # This is [0]!
895        #    myTimeStep=[len(p2.time)-1]
896
897        # Now, myTimeStep just holds indices we want to plot in p2
898        if(myTimeStep!='max'):
899            myTimeStep=range(len(p2.time))
900
901        # Ensure myTimeStep is a list
902        if type(myTimeStep)!=list:
903            myTimeStep=[myTimeStep]
904
905        if(verbose):
906            print 'Extracting required data ...'
907        # Get ANUGA points
908        swwX=p2.x+xllcorner
909        swwY=p2.y+yllcorner
910    else:
911        # Get the point data from the 3-column array
912        if(xyzPoints.shape[1]!=3):
913            raise Exception, 'If an array is passed, it must have exactly 3 columns'
914        if(len(output_quantities)!=1):
915            raise Exception, 'Can only have 1 output quantity when passing an array'
916        swwX=xyzPoints[:,0]
917        swwY=xyzPoints[:,1]
918        myTimeStep=['pointData']
919
920    # Grid for meshing
921    if(verbose):
922        print 'Computing grid of output locations...'
923    # Get points where we want raster cells
924    if(lower_left is None):
925        lower_left=[swwX.min(),swwY.min()]
926    if(upper_right is None):
927        upper_right=[swwX.max(),swwY.max()]
928    nx=round((upper_right[0]-lower_left[0])*1.0/(1.0*CellSize)) + 1
929    xres=(upper_right[0]-lower_left[0])*1.0/(1.0*(nx-1))
930    desiredX=scipy.arange(lower_left[0], upper_right[0],xres )
931    ny=round((upper_right[1]-lower_left[1])*1.0/(1.0*CellSize)) + 1
932    yres=(upper_right[1]-lower_left[1])*1.0/(1.0*(ny-1))
933    desiredY=scipy.arange(lower_left[1], upper_right[1], yres)
934
935    gridX, gridY=scipy.meshgrid(desiredX,desiredY)
936
937    if(verbose):
938        print 'Making interpolation functions...'
939    swwXY=scipy.array([swwX[:],swwY[:]]).transpose()
940    # Get the index of a nearby point (currently testing linear interpolation of the point index; nearest-neighbour version is commented out below)
941    #index_qFun=scipy.interpolate.NearestNDInterpolator(swwXY,scipy.arange(len(swwX),dtype='int64').transpose())
942    index_qFun=scipy.interpolate.LinearNDInterpolator(swwXY,scipy.arange(len(swwX),dtype='int64').transpose())
943
944    gridXY_array=scipy.array([scipy.concatenate(gridX),scipy.concatenate(gridY)]).transpose()
945    gridqInd=index_qFun(gridXY_array)
946
947    if(bounding_polygon is not None):
948        # Find points to exclude (i.e. outside the bounding polygon)
949        cut_points=(nxutils.points_inside_poly(gridXY_array, bounding_polygon)==False).nonzero()[0]
950       
951    # Loop over all output quantities and produce the output
952    for myTSi in myTimeStep:
953        if(verbose):
954            print myTSi
955        for output_quantity in output_quantities:
956            if (verbose): print output_quantity
957
958            if(myTSi != 'max'):
959                myTS=myTSi
960            else:
961                # We have already extracted the max, and e.g.
962                # p2.stage is an array of dimension (1, number_of_points).
963                myTS=0
964
965            if(type(myTS)==int):
966                if(output_quantity=='stage'):
967                    gridq=p2.stage[myTS,:][gridqInd]
968                if(output_quantity=='depth'):
969                    gridq=p2.height[myTS,:][gridqInd]
970                    gridq=gridq*(gridq>=0.) # Force positive depth (tsunami alg)
971                if(output_quantity=='velocity'):
972                    gridq=p2.vel[myTS,:][gridqInd]
973                if(output_quantity=='friction'):
974                    gridq=p2.friction[gridqInd]
975                if(output_quantity=='depthIntegratedVelocity'):
976                    swwDIVel=(p2.xmom[myTS,:]**2+p2.ymom[myTS,:]**2)**0.5
977                    gridq=swwDIVel[gridqInd]
978                if(output_quantity=='elevation'):
979                    gridq=p2.elev[gridqInd]
980   
981                if(myTSi == 'max'):
982                    timestepString='max'
983                else:
984                    timestepString=str(round(p2.time[myTS]))
985            elif(myTS=='pointData'):
986                gridq=xyzPoints[:,2][gridqInd]
987
988
989            if(bounding_polygon is not None):
990                # Cut the points outside the bounding polygon
991                gridq[cut_points]=scipy.nan
992
993            # Make name for output file
994            if(myTS!='pointData'):
995                output_name=output_dir+'/'+os.path.splitext(os.path.basename(swwFile))[0] + '_'+\
996                            output_quantity+'_'+timestepString+\
997                            '.tif'
998                            #'_'+str(myTS)+'.tif'
999            else:
1000                output_name=output_dir+'/'+'PointData_'+output_quantity+'.tif'
1001
1002            if(verbose):
1003                print 'Making raster ...'
1004            gridq.shape=(len(desiredY),len(desiredX))
1005            make_grid(scipy.flipud(gridq),desiredY,desiredX, output_name,EPSG_CODE=EPSG_CODE, proj4string=proj4string)
1006
1007    return
1008
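# Example sketch (hypothetical sww file; EPSG 32756 = UTM Zone 56 S, as in the
# docstring above):
#   Make_Geotif('myrun.sww', output_quantities=['depth', 'velocity'],
#               myTimeStep='last', CellSize=2.0, EPSG_CODE=32756,
#               output_dir='TIFS')
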
1009def plot_triangles(p, adjustLowerLeft=False):
1010    """ Add mesh triangles to a pyplot plot
1011    """
1012    from matplotlib import pyplot as pyplot
1013    #
1014    x0=p.xllcorner
1015    x1=p.yllcorner
1016    #
1017    for i in range(len(p.vols)):
1018        k1=p.vols[i][0]
1019        k2=p.vols[i][1]
1020        k3=p.vols[i][2]
1021        if(not adjustLowerLeft):
1022            pyplot.plot([p.x[k1], p.x[k2], p.x[k3], p.x[k1]], [p.y[k1], p.y[k2], p.y[k3], p.y[k1]],'-',color='black')
1023        else:
1024            pyplot.plot([p.x[k1]+x0, p.x[k2]+x0, p.x[k3]+x0, p.x[k1]+x0], [p.y[k1]+x1, p.y[k2]+x1, p.y[k3]+x1, p.y[k1]+x1],'-',color='black')
1025        #pyplot.plot([p.x[k3], p.x[k2]], [p.y[k3], p.y[k2]],'-',color='black')
1026        #pyplot.plot([p.x[k3], p.x[k1]], [p.y[k3], p.y[k1]],'-',color='black')
1027
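# Example sketch (assuming p from get_output and p2 from get_centroids):
#   from matplotlib import pyplot
#   pyplot.scatter(p2.x, p2.y, c=p2.vel[-1,:], s=3)
#   plot_triangles(p)    # overlay the mesh
#   pyplot.colorbar()
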
1028def find_neighbours(p,ind):
1029    """
1030        Find the triangles neighbouring triangle 'ind'
1031        p is an object from get_output containing mesh vertices
1032    """
1033    ind_nei=p.vols[ind]
1034   
1035    shared_nei0=p.vols[:,1]*0.0
1036    shared_nei1=p.vols[:,1]*0.0
1037    shared_nei2=p.vols[:,1]*0.0
1038    # Compute indices that match one of the vertices of triangle ind
1039    # Note: each triangle can match a given vertex at most once
1040    for i in range(3):
1041        shared_nei0+=1*(p.x[p.vols[:,i]]==p.x[ind_nei[0]])*\
1042            1*(p.y[p.vols[:,i]]==p.y[ind_nei[0]])
1043       
1044        shared_nei1+=1*(p.x[p.vols[:,i]]==p.x[ind_nei[1]])*\
1045            1*(p.y[p.vols[:,i]]==p.y[ind_nei[1]])
1046       
1047        shared_nei2+=1*(p.x[p.vols[:,i]]==p.x[ind_nei[2]])*\
1048            1*(p.y[p.vols[:,i]]==p.y[ind_nei[2]])
1049   
1050    out=(shared_nei2 + shared_nei1 + shared_nei0)
1051    return((out==2).nonzero())
1052
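# Example sketch (assuming p comes from get_output): indices of the triangles
# that share an edge (two vertices) with triangle 0:
#   neighbour_inds = find_neighbours(p, 0)[0]
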
1053def calc_edge_elevations(p):
1054    """
1055        Compute the x, y and elevation at the midpoint of each triangle edge
1056        Return [x, y, elev] for the edge midpoints (stored in vertex-indexed arrays; the value at a vertex's index is for the edge opposite that vertex)
1057    """
1058    pe_x=p.x*0.
1059    pe_y=p.y*0.
1060    pe_el=p.elev*0.
1061
1062   
1063    # Compute coordinates + elevations
1064    pe_x[p.vols[:,0]] = 0.5*(p.x[p.vols[:,1]] + p.x[p.vols[:,2]])
1065    pe_y[p.vols[:,0]] = 0.5*(p.y[p.vols[:,1]] + p.y[p.vols[:,2]])
1066    pe_el[p.vols[:,0]] = 0.5*(p.elev[p.vols[:,1]] + p.elev[p.vols[:,2]])
1067   
1068    pe_x[p.vols[:,1]] = 0.5*(p.x[p.vols[:,0]] + p.x[p.vols[:,2]])
1069    pe_y[p.vols[:,1]] = 0.5*(p.y[p.vols[:,0]] + p.y[p.vols[:,2]])
1070    pe_el[p.vols[:,1]] = 0.5*(p.elev[p.vols[:,0]] + p.elev[p.vols[:,2]])
1071
1072    pe_x[p.vols[:,2]] = 0.5*(p.x[p.vols[:,0]] + p.x[p.vols[:,1]])
1073    pe_y[p.vols[:,2]] = 0.5*(p.y[p.vols[:,0]] + p.y[p.vols[:,1]])
1074    pe_el[p.vols[:,2]] = 0.5*(p.elev[p.vols[:,0]] + p.elev[p.vols[:,1]])
1075
1076    return [pe_x, pe_y, pe_el]
1077
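# Example sketch (assuming p is vertex-based, from get_output):
#   pe_x, pe_y, pe_el = calc_edge_elevations(p)
#   from matplotlib import pyplot
#   pyplot.scatter(pe_x, pe_y, c=pe_el, s=3)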
1078