source: trunk/anuga_core/source/anuga/utilities/plot_utils.py @ 9072

Last change on this file since 9072 was 9072, checked in by davies, 11 years ago

Adding option to plotTriangles for use of spatial coordinates

File size: 30.4 KB
1""" Random utilities for reading sww file data and for plotting
2(in ipython, or in scripts)
3
4    Functionality of note:
5
6    util.get_output -- read the data from a single sww file
7    into a single object
8   
9    util.combine_outputs -- read the data from a list of sww
10    files into a single object
11   
12    util.near_transect -- for finding the indices of points
13                          'near' to a given line, and
14                          assigning these points a
15                          coordinate along that line.
16
17    This is useful for plotting outputs which are 'almost' along a
18    transect (e.g. a channel cross-section) -- see example below
19
20    util.sort_sww_filenames -- match sww filenames by a wildcard, and order
21                               them according to their 'time'. This means that
22                               they can be stuck together using
23                               'combine_outputs' correctly
24
25    util.triangle_areas -- compute the areas of every triangle
26                           in a get_output object [must be vertex-based]
27
28    util.water_volume -- compute the water volume at every
29                         time step in an sww file (needs both
30                         vertex and centroid value input).
31
32    util.Make_Geotif -- convert sww centroids to a georeferenced tiff
33 
34    Here is an example ipython session which uses some of these functions:
35
36    > from anuga.utilities import plot_utils as util
37    > from matplotlib import pyplot
38    > p=util.get_output('myfile.sww',minimum_allowed_height=0.01)
39    > p2=util.get_centroids(p,velocity_extrapolation=True)
40    > xxx=util.near_transect(p,[95., 85.], [120.,68.],tol=2.) # Could equally well use p2
41    > pyplot.ion() # Interactive plotting
42    > pyplot.scatter(xxx[1],p.vel[140,xxx[0]],color='red') # Plot along the transect
43
44    FIXME: TODO -- Convert to a single function 'get_output', which can take either a
45          single filename, a list of filenames, or a wildcard defining a number of
46          filenames, and ensure that in each case the output is as desired.
47
48"""
49from anuga.file.netcdf import NetCDFFile
50import numpy
51
52class combine_outputs:
53    """
54    Read in a list of filenames, and combine all their outputs into a single object.
55    e.g.:
56
57    p = util.combine_outputs(['file1.sww', 'file1_time_10000.sww', 'file1_time_20000.sww'], 0.01)
58   
59    will make an object p which has components p.x,p.y,p.time,p.stage, .... etc,
60    where the values of stage / momentum / velocity from the sww files are concatenated as appropriate.
61
62    This is nice for interactive interrogation of model outputs, or for sticking together outputs in scripts
63   
64    WARNING: This can use a lot of memory if the sww files are large.
65
66    Note: If you want the centroid values, then you could subsequently use:
67
68    p2 = util.get_centroids(p,velocity_extrapolation=False)
69
70    which would make an object p2 that is like p, but holds information at centroids
71    """
72    def __init__(self, filename_list, minimum_allowed_height=1.0e-03):
73        #
74        # Go through the sww files in 'filename_list', and combine them into one object.
75        #
76
77        for i, filename in enumerate(filename_list):
78            print i, filename
79            # Store output from filename
80            p_tmp = get_output(filename, minimum_allowed_height)
81            if(i==0):
82                # Create self
83                p1=p_tmp
84            else:
85                # Append extra data to self
86                # Note that p1.x, p1.y, p1.vols, p1.elev should not change
87                assert (p1.x == p_tmp.x).all()
88                assert (p1.y == p_tmp.y).all()
89                assert (p1.vols ==p_tmp.vols).all()
90                p1.time = numpy.append(p1.time, p_tmp.time)
91                p1.stage = numpy.append(p1.stage, p_tmp.stage, axis=0)
92                p1.height = numpy.append(p1.height, p_tmp.height, axis=0)
93                p1.xmom = numpy.append(p1.xmom, p_tmp.xmom, axis=0)
94                p1.ymom = numpy.append(p1.ymom, p_tmp.ymom, axis=0)
95                p1.xvel = numpy.append(p1.xvel, p_tmp.xvel, axis=0)
96                p1.yvel = numpy.append(p1.yvel, p_tmp.yvel, axis=0)
97                p1.vel = numpy.append(p1.vel, p_tmp.vel, axis=0)
98
99        self.x, self.y, self.time, self.vols, self.elev, self.stage, self.height, \
100                self.xmom, self.ymom, self.xvel, self.yvel, self.vel, self.minimum_allowed_height = \
101                p1.x, p1.y, p1.time, p1.vols, p1.elev, p1.stage, p1.height, p1.xmom, p1.ymom, \
102                p1.xvel, p1.yvel, p1.vel, p1.minimum_allowed_height
103
104####################
105
106def sort_sww_filenames(sww_wildcard):
107    # Function to take a 'wildcard' sww filename,
108    # and return a list of all filenames of this type,
109    # sorted by their time.
110    # This can then be used efficiently in 'combine_outputs'
111    # if you have many filenames starting with the same pattern
112    import glob
113    filenames=glob.glob(sww_wildcard)
114   
115    # Extract time from filenames
116    file_time=range(len(filenames)) # Predefine
117     
118    for i,filename in enumerate(filenames):
119        filesplit=filename.rsplit('_time_')
120        if(len(filesplit)>1):
121            file_time[i]=int(filesplit[1].split('_0.sww')[0])
122        else:
123            file_time[i]=0         
124   
125    name_and_time=zip(file_time,filenames)
126    name_and_time.sort() # Sort by file_time
127   
128    output_times, output_names = zip(*name_and_time)
129   
130    return list(output_names)
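# Illustrative use of sort_sww_filenames together with combine_outputs (the wildcard and
# filenames are hypothetical; files are assumed to follow the 'name_time_XXXXX_0.sww'
# naming convention parsed above):
#
#   sorted_files = sort_sww_filenames('run*.sww')
#   p = combine_outputs(sorted_files, minimum_allowed_height=0.01)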
131
132##############
133
134class get_output:
135    """Read in data from an .sww file in a convenient form
136       e.g.
137        p = util.get_output('channel3.sww', minimum_allowed_height=0.01)
138       
139       p then contains most of the relevant information, e.g. p.stage, p.elev, p.xmom, etc.
140    """
141    def __init__(self, filename, minimum_allowed_height=1.0e-03):
142        self.x, self.y, self.time, self.vols, self.stage, \
143                self.height, self.elev, self.xmom, self.ymom, \
144                self.xvel, self.yvel, self.vel, self.minimum_allowed_height,\
145                self.xllcorner, self.yllcorner = \
146                read_output(filename, minimum_allowed_height)
147        self.filename=filename
148
149
150def read_output(filename, minimum_allowed_height):
151    # Input: The name of an .sww file to read data from,
152    #                    e.g. read_output('channel3.sww', 0.01)
153    #
154    # Purpose: To read the sww file, and output a number of variables as arrays that
155    #          we can then manipulate (e.g. plot, interrogate)
156    #
157    # Output: x, y, time, stage, height, elev, xmom, ymom, xvel, yvel, vel
158    #         x, y are only stored once (they do not vary with time)
159    #         elevation may be stored at one or multiple times
160    #         everything else is stored at every time step, at the vertices
161
162    # Import modules
163
164
165
166    # Open ncdf connection
167    fid=NetCDFFile(filename)
168   
169    # Get lower-left
170    xllcorner=fid.xllcorner
171    yllcorner=fid.yllcorner
172
173
174    # Read variables
175    x=fid.variables['x'][:]
176    #x=x.getValue()
177    y=fid.variables['y'][:]
178    #y=y.getValue()
179
180    stage=fid.variables['stage'][:]
181    #stage=stage.getValue()
182
183    elev=fid.variables['elevation'][:]
184
185    if(fid.variables.has_key('height')):
186        height=fid.variables['height'][:]
187    else:
188        # Back calculate height if it is not stored
189        height=fid.variables['stage'][:]
190        if(len(stage.shape)==len(elev.shape)):
191            for i in range(stage.shape[0]):
192                height[i,:]=stage[i,:]-elev[i,:]
193        else:
194            for i in range(stage.shape[0]):
195                height[i,:]=stage[i,:]-elev
196
197
198    xmom=fid.variables['xmomentum'][:]
199    #xmom=xmom.getValue()
200
201    ymom=fid.variables['ymomentum'][:]
202    #ymom=ymom.getValue()
203
204    time=fid.variables['time'][:]
205    #time=time.getValue()
206
207    vols=fid.variables['volumes'][:]
208    #vols=vols.getValue()
209
210
211    # Calculate velocity = momentum/depth
212    xvel=xmom*0.0
213    yvel=ymom*0.0
214
215    for i in range(xmom.shape[0]):
216        xvel[i,:]=xmom[i,:]/(height[i,:]+1.0e-06)*(height[i,:]>minimum_allowed_height)
217        yvel[i,:]=ymom[i,:]/(height[i,:]+1.0e-06)*(height[i,:]>minimum_allowed_height)
218
219    vel = (xvel**2+yvel**2)**0.5
220
221    return x, y, time, vols, stage, height, elev, xmom, ymom, xvel, yvel, vel, minimum_allowed_height, xllcorner,yllcorner
222
223##############
224
225
226
227class get_centroids:
228    """
229    Extract centroid values from the output of get_output. 
230    e.g.
231        p = util.get_output('my_sww.sww', minimum_allowed_height=0.01) # vertex values
232        pc=util.get_centroids(p, velocity_extrapolation=True) # centroid values
233
234    NOTE: elevation is only stored once in the output, even if it was stored at every timestep.
235          This is done because presently centroid elevations do not change over time.
236    """
237    def __init__(self,p, velocity_extrapolation=False):
238       
239        self.time, self.x, self.y, self.stage, self.xmom,\
240             self.ymom, self.height, self.elev, self.xvel, \
241             self.yvel, self.vel= \
242             get_centroid_values(p, velocity_extrapolation)
243                                 
244
245def get_centroid_values(p, velocity_extrapolation):
246    # Input: p is the result of e.g. p=util.get_output('mysww.sww'). See the get_output class defined above
247    # Output: Values of x, y, Stage, xmom, ymom, elev, xvel, yvel, vel at centroids
248    #import numpy
249   
250    # Make 3 arrays, each containing one index of a vertex of every triangle.
251    l=len(p.vols)
252    #vols0=numpy.zeros(l, dtype='int')
253    #vols1=numpy.zeros(l, dtype='int')
254    #vols2=numpy.zeros(l, dtype='int')
255
256    # FIXME: 22/2/12/ - I think this loop is slow, should be able to do this
257    # another way
258#    for i in range(l):
259#        vols0[i]=p.vols[i][0]
260#        vols1[i]=p.vols[i][1]
261#        vols2[i]=p.vols[i][2]
262
263
264
265    vols0=p.vols[:,0]
266    vols1=p.vols[:,1]
267    vols2=p.vols[:,2]
268
269    #print vols0.shape
270    #print p.vols.shape
271
272    # Then use these to compute centroid averages
273    x_cent=(p.x[vols0]+p.x[vols1]+p.x[vols2])/3.0
274    y_cent=(p.y[vols0]+p.y[vols1]+p.y[vols2])/3.0
275       
276    fid=NetCDFFile(p.filename)
277    if(fid.variables.has_key('stage_c')==False):
278
279        stage_cent=(p.stage[:,vols0]+p.stage[:,vols1]+p.stage[:,vols2])/3.0
280        height_cent=(p.height[:,vols0]+p.height[:,vols1]+p.height[:,vols2])/3.0
281        #elev_cent=(p.elev[:,vols0]+p.elev[:,vols1]+p.elev[:,vols2])/3.0
282        # Only store elevation centroid once (since it doesn't change)
283        if(len(p.elev.shape)==2):
284            elev_cent=(p.elev[0,vols0]+p.elev[0,vols1]+p.elev[0,vols2])/3.0
285        else:
286            elev_cent=(p.elev[vols0]+p.elev[vols1]+p.elev[vols2])/3.0
287
288        # Here we need to treat the cases of momentum extrapolation and
289        # velocity extrapolation differently
290        if velocity_extrapolation:
291            xvel_cent=(p.xvel[:,vols0]+p.xvel[:,vols1]+p.xvel[:,vols2])/3.0
292            yvel_cent=(p.yvel[:,vols0]+p.yvel[:,vols1]+p.yvel[:,vols2])/3.0
293           
294            # Now compute momenta
295            xmom_cent=stage_cent*0.0
296            ymom_cent=stage_cent*0.0
297
298            t=len(p.time)
299
300            for i in range(t):
301                xmom_cent[i,:]=xvel_cent[i,:]*(height_cent[i,:]+1e-06)*\
302                                                (height_cent[i,:]>p.minimum_allowed_height)
303                ymom_cent[i,:]=yvel_cent[i,:]*(height_cent[i,:]+1e-06)*\
304                                                (height_cent[i,:]>p.minimum_allowed_height)
305        else:
306            xmom_cent=(p.xmom[:,vols0]+p.xmom[:,vols1]+p.xmom[:,vols2])/3.0
307            ymom_cent=(p.ymom[:,vols0]+p.ymom[:,vols1]+p.ymom[:,vols2])/3.0
308
309            # Now compute velocities
310            xvel_cent=stage_cent*0.0
311            yvel_cent=stage_cent*0.0
312
313            t=len(p.time)
314
315            for i in range(t):
316                xvel_cent[i,:]=xmom_cent[i,:]/(height_cent[i,:]+1.0e-06)*(height_cent[i,:]>p.minimum_allowed_height)
317                yvel_cent[i,:]=ymom_cent[i,:]/(height_cent[i,:]+1.0e-06)*(height_cent[i,:]>p.minimum_allowed_height)
318   
319    else:
320        # Get centroid values from file
321        print 'Reading centroids from file'
322        stage_cent=fid.variables['stage_c'][:]
323        elev_cent=fid.variables['elevation_c'][:]
324        if(len(elev_cent.shape)==2):
325            elev_cent=elev_cent[0,:] # Only store elevation centroid once -- since it is constant
326        if(fid.variables.has_key('height_c')==True):
327            height_cent=fid.variables['height_c'][:]
328        else:
329            height_cent=1.0*stage_cent
330            for i in range(len(p.time)):
331                height_cent[i,:]=stage_cent[i,:]-elev_cent
332       
333        xmom_cent=fid.variables['xmomentum_c'][:]*(height_cent>p.minimum_allowed_height)
334        ymom_cent=fid.variables['ymomentum_c'][:]*(height_cent>p.minimum_allowed_height)
335        xvel_cent=xmom_cent/(height_cent+1.0e-06)
336        yvel_cent=ymom_cent/(height_cent+1.0e-06)
337       
338   
339    # Compute velocity
340    vel_cent=(xvel_cent**2 + yvel_cent**2)**0.5
341
342    return p.time, x_cent, y_cent, stage_cent, xmom_cent,\
343             ymom_cent, height_cent, elev_cent, xvel_cent, yvel_cent, vel_cent
344
345
346def animate_1D(time, var, x, ylab=' '): #, x=range(var.shape[1]), vmin=var.min(), vmax=var.max()):
347    # Input: time = one-dimensional time vector;
348    #        var = array with first dimension = len(time);
349    #        x = vector with length equal to var.shape[1];
350   
351    import pylab
352    import numpy
353   
354   
355
356    pylab.close()
357    pylab.ion()
358
359    # Initial plot
360    vmin=var.min()
361    vmax=var.max()
362    line, = pylab.plot( (x.min(), x.max()), (vmin, vmax), 'o')
363
364    # Lots of plots
365    for i in range(len(time)):
366        line.set_xdata(x)
367        line.set_ydata(var[i,:])
368        pylab.draw()
369        pylab.xlabel('x')
370        pylab.ylabel(ylab)
371        pylab.title('time = ' + str(time[i]))
372   
373    return
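# Illustrative use of animate_1D (hypothetical names; assumes p2 comes from get_centroids
# and xxx from near_transect, as in the example session at the top of this file):
#
#   xxx = near_transect(p2, [95., 85.], [120., 68.], tol=2.)
#   animate_1D(p2.time, p2.stage[:, xxx[0]], xxx[1], ylab='stage')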
374
375def near_transect(p, point1, point2, tol=1.):
376    # Function to get the indices of points in p less than 'tol' from the line
377    # joining (x1,y1), and (x2,y2)
378    # p comes from util.get_output('mysww.sww')
379    #
380    # e.g.
381    # import util
382    # from matplotlib import pyplot
383    # p=util.get_output('merewether_1m.sww',0.01)
384    # p2=util.get_centroids(p,velocity_extrapolation=True)
385    # #xxx=transect_interpolate.near_transect(p,[95., 85.], [120.,68.],tol=2.)
386    # xxx=util.near_transect(p,[95., 85.], [120.,68.],tol=2.)
387    # pyplot.scatter(xxx[1],p.vel[140,xxx[0]],color='red')
388
389    x1=point1[0]
390    y1=point1[1]
391
392    x2=point2[0]
393    y2=point2[1]
394
395    # Find line equation a*x + b*y + c = 0
396    # based on y=gradient*x +intercept
397    if x1!=x2:
398        gradient= (y2-y1)/(x2-x1)
399        intercept = y1 - gradient*x1
400
401        a = -gradient
402        b = 1.
403        c = -intercept
404    else:
405        #print 'FIXME: Still need to treat 0 and infinite gradients'
406        #assert 0==1
407        a=1.
408        b=0.
409        c=-x2
410
411    # Distance formula
412    inv_denom = 1./(a**2 + b**2)**0.5
413    distp = abs(p.x*a + p.y*b + c)*inv_denom
414
415    near_points = (distp<tol).nonzero()[0]
416   
417    # Now find a 'local' coordinate for the point, projected onto the line
418    # g1 = unit vector parallel to the line
419    # g2 = vector joining (x1,y1) and (p.x,p.y)
420    g1x = x2-x1
421    g1y = y2-y1
422    g1_norm = (g1x**2 + g1y**2)**0.5
423    g1x=g1x/g1_norm
424    g1y=g1y/g1_norm
425
426    g2x = p.x[near_points] - x1
427    g2y = p.y[near_points] - y1
428   
429    # Dot product = projected distance == a local coordinate
430    local_coord = g1x*g2x + g1y*g2y
431
432    return near_points, local_coord
433
434########################
435# TRIANGLE AREAS, WATER VOLUME
436def triangle_areas(p, subset=None):
437    # Compute areas of triangles in p -- assumes p contains vertex information
438    # subset = vector of centroid indices to include in the computation.
439
440    if(subset is None):
441        subset=range(len(p.vols[:,0]))
442   
443    x0=p.x[p.vols[subset,0]]
444    x1=p.x[p.vols[subset,1]]
445    x2=p.x[p.vols[subset,2]]
446   
447    y0=p.y[p.vols[subset,0]]
448    y1=p.y[p.vols[subset,1]]
449    y2=p.y[p.vols[subset,2]]
450   
451    # Vectors for cross-product
452    v1_x=x0-x1
453    v1_y=y0-y1
454    #
455    v2_x=x2-x1
456    v2_y=y2-y1
457    # Area
458    area=(v1_x*v2_y-v1_y*v2_x)*0.5
459    area=abs(area)
460    return area
461
462###
463
464def water_volume(p, p2, per_unit_area=False, subset=None):
465    # Compute the water volume from p(vertex values) and p2(centroid values)
466
467    if(subset is None):
468        subset=range(len(p2.x))
469
470    l=len(p2.time)
471    area=triangle_areas(p, subset=subset)
472   
473    total_area=area.sum()
474    volume=p2.time*0.
475   
476    # This accounts for how volume is measured in ANUGA
477    # Compute in 2 steps to reduce precision error (important sometimes)
478    for i in range(l):
479        #volume[i]=((p2.stage[i,subset]-p2.elev[subset])*(p2.stage[i,subset]>p2.elev[subset])*area).sum()
480        volume[i]=((p2.stage[i,subset])*(p2.stage[i,subset]>p2.elev[subset])*area).sum()
481        volume[i]=volume[i]+((-p2.elev[subset])*(p2.stage[i,subset]>p2.elev[subset])*area).sum()
482   
483    if(per_unit_area):
484        volume=volume/total_area
485   
486    return volume
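# Illustrative use of triangle_areas / water_volume (the filename is hypothetical; note
# that water_volume needs both vertex values (p) and centroid values (p2)):
#
#   p = get_output('myfile.sww', minimum_allowed_height=0.01)
#   p2 = get_centroids(p, velocity_extrapolation=True)
#   areas = triangle_areas(p)                          # area of every triangle
#   volume = water_volume(p, p2)                       # total water volume at every time step
#   volume_per_area = water_volume(p, p2, per_unit_area=True)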
487
488
489def get_triangle_containing_point(p,point):
490
491    V = p.vols
492
493    x = p.x
494    y = p.y
495
496    l = len(x)
497
498    from anuga.geometry.polygon import is_inside_polygon
499
500    # FIXME: Horrible brute force
501    for i in xrange(l):
502        i0 = V[i,0]
503        i1 = V[i,1]
504        i2 = V[i,2]
505        poly = [ [x[i0], y[i0]], [x[i1], y[i1]], [x[i2], y[i2]] ]
506
507        if is_inside_polygon(point, poly, closed=True):
508            return i
509
510    msg = 'Point %s not found within a triangle' %str(point)
511    raise Exception(msg)
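# Illustrative use (the point is arbitrary and assumed to lie inside the mesh):
#
#   tri = get_triangle_containing_point(p, [95., 85.])
#   print p.stage[-1, p.vols[tri]]    # final-time stage at that triangle's vertices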
512
513
514def get_extent(p):
515
516    import numpy
517
518    x_min = numpy.min(p.x)
519    x_max = numpy.max(p.x)
520
521    y_min = numpy.min(p.y)
522    y_max = numpy.max(p.y)
523
524    return x_min, x_max, y_min, y_max
525
526
527
528def make_grid(data, lats, lons, fileName, EPSG_CODE=None, proj4string=None):
529    """
530        Convert data,lats,lons to a georeferenced raster tif
531        INPUT: data -- array with desired raster cell values (written top-down: data[0,:] is the row with the largest lat)
532               lats -- 1D array of 'latitude' or 'y' cell-centre values (assumed increasing)
533               lons -- 1D array of 'longitude' or 'x' cell-centre values (assumed increasing)
534               fileName -- name of file to write to
535               EPSG_CODE -- Integer code with projection information in EPSG format
536               proj4string -- proj4string with projection information
537
538        NOTE: proj4string is used in preference to EPSG_CODE if both are given
539    """
540    try:
541        import gdal
542        import osr
543    except:
544        raise Exception, 'Cannot find gdal and/or osr python modules'
545
546    xres = lons[1] - lons[0]
547    yres = lats[1] - lats[0]
548
549    ysize = len(lats)
550    xsize = len(lons)
551
552    # Assume data/lats/longs refer to cell centres, and compute upper left coordinate
553    ulx = lons[0] - (xres / 2.)
554    uly = lats[lats.shape[0]-1] + (yres / 2.)
555
556    # GDAL magic to make the tif
557    driver = gdal.GetDriverByName('GTiff')
558    ds = driver.Create(fileName, xsize, ysize, 1, gdal.GDT_Float32)
559
560    srs = osr.SpatialReference()
561    if(proj4string is not None):
562        srs.ImportFromProj4(proj4string)
563    elif(EPSG_CODE is not None):
564        srs.ImportFromEPSG(EPSG_CODE)
565    else:
566        raise Exception, 'No spatial reference information given'
567
568    ds.SetProjection(srs.ExportToWkt())
569
570    gt = [ulx, xres, 0, uly, 0, -yres ]
571    #gt = [llx, xres, 0, lly, yres,0 ]
572    ds.SetGeoTransform(gt)
573
574    #import pdb
575    #pdb.set_trace()
576
577    outband = ds.GetRasterBand(1)
578    outband.WriteArray(data)
579
580    ds = None
581    return
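# Minimal sketch of a direct make_grid call (normally it is called via Make_Geotif; the
# filename and projection below are examples only, and the gdal/osr modules must be installed).
# This mirrors the internal call, where rows ordered by increasing lat are flipped before writing:
#
#   import numpy
#   lons = numpy.linspace(0., 1000., 101)        # cell-centre x coordinates (increasing)
#   lats = numpy.linspace(0., 500., 51)          # cell-centre y coordinates (increasing)
#   data = numpy.zeros((len(lats), len(lons)))   # rows ordered by increasing lat
#   make_grid(numpy.flipud(data), lats, lons, 'zeros.tif', proj4string='+init=epsg:3123')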
582
583##################################################################################
584
585def Make_Geotif(swwFile=None, 
586             output_quantities=['depth'],
587             myTimeStep=1, CellSize=5.0, 
588             lower_left=None, upper_right=None,
589             EPSG_CODE=None, 
590             proj4string=None,
591             velocity_extrapolation=True,
592             min_allowed_height=1.0e-05,
593             output_dir='TIFS',
594             bounding_polygon=None,
595             verbose=False):
596    """
597        Make a georeferenced tif by nearest-neighbour interpolation of sww file outputs (or a 3-column array with xyz Points)
598
599        You must supply projection information as either a proj4string or an integer EPSG_CODE (but not both!)
600
601        INPUTS: swwFile -- name of sww file, OR a 3-column array with x/y/z
602                    points. In the latter case x and y are assumed to be in georeferenced
603                    coordinates.  The output raster will contain 'z', and will have a name-tag
604                    based on the name in 'output_quantities'.
605                output_quantities -- list of quantities to plot, e.g. ['depth', 'velocity', 'stage','elevation','depthIntegratedVelocity']
606                myTimeStep -- list containing time-index of swwFile to plot (e.g. [1, 10, 32] ) or 'last', or 'max', or 'all'
607                CellSize -- approximate pixel size for output raster [adapted to fit lower_left / upper_right]
608                lower_left -- [x0,y0] of lower left corner. If None, use extent of swwFile.
609                upper_right -- [x1,y1] of upper right corner. If None, use extent of swwFile.
610                EPSG_CODE -- Projection information as an integer EPSG code (e.g. 3123 for PRS92 Zone 3, 32756 for UTM Zone 56 S, etc).
611                             Google for info on EPSG Codes
612                proj4string -- Projection information as a proj4string (e.g. '+init=epsg:3123')
613                             Google for info on proj4strings.
614                velocity_extrapolation -- Compute velocity assuming the code extrapolates with velocity (instead of momentum)?
615                min_allowed_height -- Minimum allowed height from ANUGA
616                output_dir -- Write outputs to this directory
617                bounding_polygon -- polygon (e.g. from read_polygon). If present, only set values of raster cells inside the bounding_polygon
618               
619    """
620
621    #import pdb
622    #pdb.set_trace()
623
624    try:
625        import gdal
626        import osr
627        import scipy.io
628        import scipy.interpolate
629        import anuga
630        from anuga.utilities import plot_utils as util
631        import os
632        from matplotlib import nxutils
633    except:
634        raise Exception, 'Required modules not installed for Make_Geotif'
635
636
637    # Check whether swwFile is an array, and if so, redefine various inputs to
638    # make the code work
639    if(type(swwFile)==scipy.ndarray):
640        import copy
641        xyzPoints=copy.copy(swwFile)
642        swwFile=None
643
644    if(((EPSG_CODE is None) & (proj4string is None) )|
645       ((EPSG_CODE is not None) & (proj4string is not None))):
646        raise Exception, 'Must specify EITHER an integer EPSG_CODE describing the file projection, OR a proj4string'
647
648
649    # Make output_dir
650    try:
651        os.mkdir(output_dir)
652    except:
653        pass
654
655    if(swwFile is not None):
656        # Read in ANUGA outputs
657        # FIXME: It would be good to support reading of data subsets
658        if(verbose):
659            print 'Reading sww File ...'
660        p=util.get_output(swwFile,min_allowed_height)
661        p2=util.get_centroids(p,velocity_extrapolation)
662        swwIn=scipy.io.netcdf_file(swwFile)
663        xllcorner=swwIn.xllcorner
664        yllcorner=swwIn.yllcorner
665
666        if(myTimeStep=='all'):
667            myTimeStep=range(len(p2.time))
668        # Ensure myTimeStep is a list
669        if type(myTimeStep)!=list:
670            myTimeStep=[myTimeStep]
671
672        if(verbose):
673            print 'Extracting required data ...'
674        # Get ANUGA points
675        swwX=p2.x+xllcorner
676        swwY=p2.y+yllcorner
677    else:
678        # Get the point data from the 3-column array
679        if(xyzPoints.shape[1]!=3):
680            raise Exception, 'If an array is passed, it must have exactly 3 columns'
681        if(len(output_quantities)!=1):
682            raise Exception, 'Can only have 1 output quantity when passing an array'
683        swwX=xyzPoints[:,0]
684        swwY=xyzPoints[:,1]
685        myTimeStep=['pointData']
686
687    # Grid for meshing
688    if(verbose):
689        print 'Computing grid of output locations...'
690    # Get points where we want raster cells
691    if(lower_left is None):
692        lower_left=[swwX.min(),swwY.min()]
693    if(upper_right is None):
694        upper_right=[swwX.max(),swwY.max()]
695    nx=round((upper_right[0]-lower_left[0])*1.0/(1.0*CellSize)) + 1
696    xres=(upper_right[0]-lower_left[0])*1.0/(1.0*(nx-1))
697    desiredX=scipy.arange(lower_left[0], upper_right[0],xres )
698    ny=round((upper_right[1]-lower_left[1])*1.0/(1.0*CellSize)) + 1
699    yres=(upper_right[1]-lower_left[1])*1.0/(1.0*(ny-1))
700    desiredY=scipy.arange(lower_left[1], upper_right[1], yres)
701
702    gridX, gridY=scipy.meshgrid(desiredX,desiredY)
703
704    if(verbose):
705        print 'Making interpolation functions...'
706    swwXY=scipy.array([swwX[:],swwY[:]]).transpose()
707    # Get index of nearest point
708    index_qFun=scipy.interpolate.NearestNDInterpolator(swwXY,scipy.arange(len(swwX),dtype='int64').transpose())
709    gridXY_array=scipy.array([scipy.concatenate(gridX),scipy.concatenate(gridY)]).transpose()
710    gridqInd=index_qFun(gridXY_array)
711
712    if(bounding_polygon is not None):
713        # Find points to exclude (i.e. outside the bounding polygon)
714        cut_points=(nxutils.points_inside_poly(gridXY_array, bounding_polygon)==False).nonzero()[0]
715       
716    #import pdb
717    #pdb.set_trace()
718
719    # Loop over all output quantities and produce the output
720    for myTS in myTimeStep:
721        if(verbose):
722            print myTS
723        for output_quantity in output_quantities:
724
725            if(myTS=='last'):
726                myTS=len(p.time)-1
727       
728
729            #if(myTS!='max'):
730            if(type(myTS)==int):
731                if(output_quantity=='stage'):
732                    gridq=p2.stage[myTS,:][gridqInd]
733                if(output_quantity=='depth'):
734                    gridq=p2.height[myTS,:][gridqInd]
735                if(output_quantity=='velocity'):
736                    gridq=p2.vel[myTS,:][gridqInd]
737                if(output_quantity=='depthIntegratedVelocity'):
738                    swwDIVel=(p2.xmom[myTS,:]**2+p2.ymom[myTS,:]**2)**0.5
739                    gridq=swwDIVel[gridqInd]
740                if(output_quantity=='elevation'):
741                    gridq=p2.elev[gridqInd]
742                timestepString=str(round(p2.time[myTS]))
743            elif (myTS=='max'):
744                if(output_quantity=='stage'):
745                    gridq=p2.stage.max(axis=0)[gridqInd]
746                if(output_quantity=='depth'):
747                    gridq=p2.height.max(axis=0)[gridqInd]
748                if(output_quantity=='velocity'):
749                    gridq=p2.vel.max(axis=0)[gridqInd]
750                if(output_quantity=='depthIntegratedVelocity'):
751                    swwDIVel=((p2.xmom**2+p2.ymom**2).max(axis=0))**0.5
752                    gridq=swwDIVel[gridqInd]
753                if(output_quantity=='elevation'):
754                    gridq=p2.elev[gridqInd]
755                timestepString='max'
756            elif(myTS=='pointData'):
757                gridq=xyzPoints[:,2][gridqInd]
758
759
760            if(bounding_polygon is not None):
761                # Cut the points outside the bounding polygon
762                gridq[cut_points]=scipy.nan
763
764            # Make name for output file
765            if(myTS!='pointData'):
766                output_name=output_dir+'/'+os.path.splitext(os.path.basename(swwFile))[0] + '_'+\
767                            output_quantity+'_'+timestepString+\
768                            '_'+str(myTS)+'.tif'
769            else:
770                output_name=output_dir+'/'+'PointData_'+output_quantity+'.tif'
771
772            if(verbose):
773                print 'Making raster ...'
774            gridq.shape=(len(desiredY),len(desiredX))
775            make_grid(scipy.flipud(gridq),desiredY,desiredX, output_name,EPSG_CODE=EPSG_CODE, proj4string=proj4string)
776
777    return
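# Illustrative calls to Make_Geotif (file names, EPSG code and cell size are examples only):
#
#   # Rasters of depth and velocity, taking the 'max' over all time steps
#   Make_Geotif('myrun.sww', output_quantities=['depth', 'velocity'],
#               myTimeStep='max', CellSize=2.0, EPSG_CODE=32756, output_dir='TIFS')
#
#   # Raster from a 3-column x/y/z array (only one output quantity is allowed in this case;
#   # 'xyz_array' is a hypothetical numpy array with columns x, y, z)
#   Make_Geotif(xyz_array, output_quantities=['elevation'], CellSize=2.0,
#               proj4string='+init=epsg:3123')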
778
779def plot_triangles(p, adjustLowerLeft=False):
780    """ Add mesh triangles to a pyplot plot. If adjustLowerLeft is True, add
781        p.xllcorner / p.yllcorner to the coordinates (i.e. plot in spatial coordinates) """
782    from matplotlib import pyplot as pyplot
783    #
784    x0=p.xllcorner
785    x1=p.yllcorner
786    #
787    for i in range(len(p.vols)):
788        k1=p.vols[i][0]
789        k2=p.vols[i][1]
790        k3=p.vols[i][2]
791        if(not adjustLowerLeft):
792            pyplot.plot([p.x[k1], p.x[k2], p.x[k3], p.x[k1]], [p.y[k1], p.y[k2], p.y[k3], p.y[k1]],'-',color='black')
793        else:
794            pyplot.plot([p.x[k1]+x0, p.x[k2]+x0, p.x[k3]+x0, p.x[k1]+x0], [p.y[k1]+x1, p.y[k2]+x1, p.y[k3]+x1, p.y[k1]+x1],'-',color='black')
795        #pyplot.plot([p.x[k3], p.x[k2]], [p.y[k3], p.y[k2]],'-',color='black')
796        #pyplot.plot([p.x[k3], p.x[k1]], [p.y[k3], p.y[k1]],'-',color='black')
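# Illustrative use (hypothetical filename):
#
#   from matplotlib import pyplot
#   p = get_output('myfile.sww')
#   plot_triangles(p, adjustLowerLeft=True)   # mesh in georeferenced coordinates
#   pyplot.gca().set_aspect('equal')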
797
798def find_neighbours(p,ind):
799    """
800        Find the triangles neighbouring triangle 'ind'
801        p is an object from get_output containing mesh vertices
802    """
803    ind_nei=p.vols[ind]
804   
805    shared_nei0=p.vols[:,1]*0.0
806    shared_nei1=p.vols[:,1]*0.0
807    shared_nei2=p.vols[:,1]*0.0
808    # Compute indices that match one of the vertices of triangle ind
809    # Note: each triangle can match a given vertex at most once
810    for i in range(3):
811        shared_nei0+=1*(p.x[p.vols[:,i]]==p.x[ind_nei[0]])*\
812            1*(p.y[p.vols[:,i]]==p.y[ind_nei[0]])
813       
814        shared_nei1+=1*(p.x[p.vols[:,i]]==p.x[ind_nei[1]])*\
815            1*(p.y[p.vols[:,i]]==p.y[ind_nei[1]])
816       
817        shared_nei2+=1*(p.x[p.vols[:,i]]==p.x[ind_nei[2]])*\
818            1*(p.y[p.vols[:,i]]==p.y[ind_nei[2]])
819   
820    out=(shared_nei2 + shared_nei1 + shared_nei0)
821    return((out==2).nonzero())
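# Illustrative use (triangle index 0 is arbitrary):
#
#   nbrs = find_neighbours(p, 0)[0]   # indices of triangles sharing an edge with triangle 0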
822
823def calc_edge_elevations(p):
824    """
825        Compute the triangle edge elevations on p
826        Return x,y,elev for edges
827    """
828    pe_x=p.x*0.
829    pe_y=p.y*0.
830    pe_el=p.elev*0.
831
832   
833    # Compute coordinates + elevations
834    pe_x[p.vols[:,0]] = 0.5*(p.x[p.vols[:,1]] + p.x[p.vols[:,2]])
835    pe_y[p.vols[:,0]] = 0.5*(p.y[p.vols[:,1]] + p.y[p.vols[:,2]])
836    pe_el[p.vols[:,0]] = 0.5*(p.elev[p.vols[:,1]] + p.elev[p.vols[:,2]])
837   
838    pe_x[p.vols[:,1]] = 0.5*(p.x[p.vols[:,0]] + p.x[p.vols[:,2]])
839    pe_y[p.vols[:,1]] = 0.5*(p.y[p.vols[:,0]] + p.y[p.vols[:,2]])
840    pe_el[p.vols[:,1]] = 0.5*(p.elev[p.vols[:,0]] + p.elev[p.vols[:,2]])
841
842    pe_x[p.vols[:,2]] = 0.5*(p.x[p.vols[:,0]] + p.x[p.vols[:,1]])
843    pe_y[p.vols[:,2]] = 0.5*(p.y[p.vols[:,0]] + p.y[p.vols[:,1]])
844    pe_el[p.vols[:,2]] = 0.5*(p.elev[p.vols[:,0]] + p.elev[p.vols[:,1]])
845
846    return [pe_x, pe_y, pe_el]
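# Illustrative use (p from get_output, i.e. vertex-based):
#
#   edge_x, edge_y, edge_elev = calc_edge_elevations(p)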
847
848