source: anuga_core/source/anuga/fit_interpolate/fit.py @ 7693

Last change on this file since 7693 was 7317, checked in by rwilson, 15 years ago

Replaced 'print' statements with log.critical() calls.

File size: 25.7 KB
Line 
1"""Least squares fitting.
2
   Implements a penalised least-squares fit,
   putting point data onto the mesh.
5
6   The penalty term (or smoothing term) is controlled by the smoothing
7   parameter alpha.
8   With a value of alpha=0, the fit function will attempt
9   to interpolate as closely as possible in the least-squares sense.
10   With values alpha > 0, a certain amount of smoothing will be applied.
11   A positive alpha is essential in cases where there are too few
12   data points.
13   A negative alpha is not allowed.
14   A typical value of alpha is 1.0e-6
15
16
17   Ole Nielsen, Stephen Roberts, Duncan Gray, Christopher Zoppou
18   Geoscience Australia, 2004.
19
20   TO DO
21   * test geo_ref, geo_spatial
22
23   IDEAS
24   * (DSG-) Change the interface of fit, so a domain object can
      be passed in. (I don't know if this is feasible.) It could
      save time/memory.
27"""
28import types
29
30from anuga.abstract_2d_finite_volumes.neighbour_mesh import Mesh
31from anuga.caching import cache           
32from anuga.geospatial_data.geospatial_data import Geospatial_data, \
33     ensure_absolute
34from anuga.fit_interpolate.general_fit_interpolate import FitInterpolate
35from anuga.utilities.sparse import Sparse, Sparse_CSR
36from anuga.utilities.polygon import inside_polygon, is_inside_polygon
37from anuga.fit_interpolate.search_functions import search_tree_of_vertices
38
39from anuga.utilities.cg_solve import conjugate_gradient
40from anuga.utilities.numerical_tools import ensure_numeric, gradient
41from anuga.config import default_smoothing_parameter as DEFAULT_ALPHA
42import anuga.utilities.log as log
43
44import exceptions
45class TooFewPointsError(exceptions.Exception): pass
46class VertsWithNoTrianglesError(exceptions.Exception): pass
47
48import numpy as num
49
50
class Fit(FitInterpolate):
    """Fit data at scattered points to the vertices of a triangular mesh.

    Implements the penalised least-squares fit described in the module
    docstring.  The fit can be built incrementally (blocking) by calling
    build_fit_subset() repeatedly with subsets of the data, followed by
    a final call to fit() with no point data.
    """

    def __init__(self,
                 vertex_coordinates=None,
                 triangles=None,
                 mesh=None,
                 mesh_origin=None,
                 alpha=None,
                 verbose=False,
                 max_vertices_per_cell=None):
        """Fit data at points to the vertices of a mesh.

        Inputs:

          vertex_coordinates: List of coordinate pairs [xi, eta] of
              points constituting a mesh (or an m x 2 numeric array or
              a geospatial object).
              Points may appear multiple times
              (e.g. if vertices have discontinuities).

          triangles: List of 3-tuples (or a numeric array) of
              integers representing indices of all vertices in the mesh.

          mesh: A mesh instance; if supplied the vertex/triangle inputs
              are not needed.

          mesh_origin: A geo_reference object or 3-tuple consisting of
              UTM zone, easting and northing.
              If specified vertex coordinates are assumed to be
              relative to their respective origins.

          alpha: Smoothing parameter; defaults to DEFAULT_ALPHA.

          max_vertices_per_cell: Number of vertices in a quad tree cell
              at which the cell is split into 4.

          Note: Don't supply vertex coords as a geospatial object and
              a mesh origin, since geospatial has its own mesh origin.

        Usage:
        To use this in a blocking way, call build_fit_subset, with z info,
        and then fit, with no point coord, z info.
        """
        # Initialise variables
        if alpha is None:
            self.alpha = DEFAULT_ALPHA
        else:
            self.alpha = alpha

        FitInterpolate.__init__(self,
                                vertex_coordinates,
                                triangles,
                                mesh,
                                mesh_origin,
                                verbose,
                                max_vertices_per_cell)

        # AtA and Atz are built lazily by _build_matrix_AtA_Atz()
        self.AtA = None
        self.Atz = None
        self.point_count = 0

        # The smoothing matrix D is only needed for non-zero alpha
        if self.alpha != 0:
            if verbose: log.critical('Building smoothing matrix')
            self._build_smoothing_matrix_D()

        bd_poly = self.mesh.get_boundary_polygon()
        self.mesh_boundary_polygon = ensure_numeric(bd_poly)

    def _build_coefficient_matrix_B(self, verbose=False):
        """Build final coefficient matrix B = AtA + alpha*D.

        Preconditions:
        If alpha is not zero, matrix D has been built.
        Matrix AtA has been built.
        """
        if self.alpha != 0:
            self.B = self.AtA + self.alpha*self.D
        else:
            self.B = self.AtA

        # Convert self.B matrix to CSR format for faster matrix-vector products
        self.B = Sparse_CSR(self.B)

    def _build_smoothing_matrix_D(self):
        """Build m x m smoothing matrix, where
        m is the number of basis functions phi_k (one per vertex)

        The smoothing matrix is defined as

        D = D1 + D2

        where

        [D1]_{k,l} = \int_\Omega
           \frac{\partial \phi_k}{\partial x}
           \frac{\partial \phi_l}{\partial x}\,
           dx dy

        [D2]_{k,l} = \int_\Omega
           \frac{\partial \phi_k}{\partial y}
           \frac{\partial \phi_l}{\partial y}\,
           dx dy


        The derivatives \frac{\partial \phi_k}{\partial x},
        \frac{\partial \phi_k}{\partial y} for a particular triangle
        are obtained by computing the gradient a_k, b_k for basis function k
        """
        # FIXME: algorithm might be optimised by computing local 9x9
        # "element stiffness matrices"

        m = self.mesh.number_of_nodes  # Nbr of basis functions (1/vertex)

        self.D = Sparse(m, m)

        # For each triangle compute contributions to D = D1+D2
        for i in range(len(self.mesh)):

            # Get area
            area = self.mesh.areas[i]

            # Get global vertex indices
            v0 = self.mesh.triangles[i, 0]
            v1 = self.mesh.triangles[i, 1]
            v2 = self.mesh.triangles[i, 2]

            # Get the three vertex points
            xi0 = self.mesh.get_vertex_coordinate(i, 0)
            xi1 = self.mesh.get_vertex_coordinate(i, 1)
            xi2 = self.mesh.get_vertex_coordinate(i, 2)

            # Compute gradients for each basis function on this triangle
            a0, b0 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
                              1, 0, 0)

            a1, b1 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
                              0, 1, 0)

            a2, b2 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
                              0, 0, 1)

            # Compute diagonal contributions
            self.D[v0, v0] += (a0*a0 + b0*b0)*area
            self.D[v1, v1] += (a1*a1 + b1*b1)*area
            self.D[v2, v2] += (a2*a2 + b2*b2)*area

            # Compute contributions for basis functions sharing edges
            e01 = (a0*a1 + b0*b1)*area
            self.D[v0, v1] += e01
            self.D[v1, v0] += e01

            e12 = (a1*a2 + b1*b2)*area
            self.D[v1, v2] += e12
            self.D[v2, v1] += e12

            e20 = (a2*a0 + b2*b0)*area
            self.D[v2, v0] += e20
            self.D[v0, v2] += e20

    def get_D(self):
        """Return the smoothing matrix D as a dense matrix."""
        return self.D.todense()

    def _build_matrix_AtA_Atz(self,
                              point_coordinates,
                              z,
                              verbose=False):
        """Build:
        AtA  m x m  interpolation matrix, and,
        Atz  m x a  interpolation matrix where,
        m is the number of basis functions phi_k (one per vertex)
        a is the number of data attributes

        This algorithm uses a quad tree data structure for fast binning of
        data points.

        If AtA is None, the matrices AtA and Atz are created.

        This function can be called again and again, with sub-sets of
        the point coordinates.  Call fit to get the results.

        Preconditions:
        z and points are numeric.
        Point_coordinates and mesh vertices have the same origin.
        The number of attributes of the data points does not change.
        """
        if self.AtA is None:
            # AtA and Atz need to be initialised
            m = self.mesh.number_of_nodes
            if len(z.shape) > 1:
                att_num = z.shape[1]
                self.Atz = num.zeros((m, att_num), float)
            else:
                att_num = 1
                self.Atz = num.zeros((m,), float)
            assert z.shape[0] == point_coordinates.shape[0]

            AtA = Sparse(m, m)
            # The memory damage has been done by now.
        else:
            AtA = self.AtA  # Did this for speed, did ~nothing
        self.point_count += point_coordinates.shape[0]

        inside_indices = inside_polygon(point_coordinates,
                                        self.mesh_boundary_polygon,
                                        closed=True,
                                        verbose=False)  # Suppress output

        # Compute matrix elements for points inside the mesh
        triangles = self.mesh.triangles  # Shorthand
        for i in inside_indices:
            # For each data_coordinate point
            x = point_coordinates[i]

            element_found, sigma0, sigma1, sigma2, k = \
                           search_tree_of_vertices(self.root, x)

            if element_found is True:
                j0 = triangles[k, 0]  # Global vertex id for sigma0
                j1 = triangles[k, 1]  # Global vertex id for sigma1
                j2 = triangles[k, 2]  # Global vertex id for sigma2

                sigmas = {j0: sigma0, j1: sigma1, j2: sigma2}
                js = [j0, j1, j2]

                for j in js:
                    self.Atz[j] += sigmas[j]*z[i]

                    # NOTE: inner index renamed from 'k' to 'jj' so it no
                    # longer shadows the triangle index returned above.
                    for jj in js:
                        AtA[j, jj] += sigmas[j]*sigmas[jj]
            else:
                flag = is_inside_polygon(x,
                                         self.mesh_boundary_polygon,
                                         closed=True,
                                         verbose=False)  # Suppress output
                msg = 'Point (%f, %f) is not inside mesh boundary' % tuple(x)
                assert flag is True, msg

                # FIXME(Ole): This is the message referred to in ticket:314
                minx = min(self.mesh_boundary_polygon[:, 0])
                maxx = max(self.mesh_boundary_polygon[:, 0])
                miny = min(self.mesh_boundary_polygon[:, 1])
                maxy = max(self.mesh_boundary_polygon[:, 1])
                msg = 'Could not find triangle for point %s. ' % str(x)
                msg += 'Mesh boundary extent is (%.f, %.f), (%.f, %.f)'\
                    % (minx, maxx, miny, maxy)
                raise RuntimeError(msg)

        self.AtA = AtA

    def fit(self, point_coordinates_or_filename=None, z=None,
            verbose=False,
            point_origin=None,
            attribute_name=None,
            max_read_lines=500):
        """Fit a smooth surface to given 1d array of data points z.

        The smooth surface is computed at each vertex in the underlying
        mesh using the formula given in the module doc string.

        Inputs:
        point_coordinates_or_filename: The co-ordinates of the data points.
              List of coordinate pairs [x, y] of
              data points or an nx2 numeric array or a Geospatial_data object
              or points file filename.
        z: Single 1d vector or array of data at the point_coordinates.

        Returns the fitted vertex attributes.

        Raises TooFewPointsError if alpha == 0 and there are fewer data
        points than mesh vertices.
        """
        # Use blocking to load in the point info
        if isinstance(point_coordinates_or_filename, str):
            msg = "Don't set a point origin when reading from a file"
            assert point_origin is None, msg
            filename = point_coordinates_or_filename

            G_data = Geospatial_data(filename,
                                     max_read_lines=max_read_lines,
                                     load_file_now=False,
                                     verbose=verbose)

            for i, geo_block in enumerate(G_data):
                if verbose and i % 200 == 0:
                    # The time this will take
                    # is dependent on the # of triangles
                    log.critical('Processing Block %d' % i)
                    # FIXME (Ole): It would be good to say how many blocks
                    # there are here. But this is no longer necessary
                    # for pts files as they are reported in geospatial_data
                    # I suggest deleting this verbose output and make
                    # Geospatial_data more informative for txt files.

                # Build the array
                points = geo_block.get_data_points(absolute=True)
                z = geo_block.get_attributes(attribute_name=attribute_name)
                self.build_fit_subset(points, z, verbose=verbose)

                # FIXME(Ole): I thought this test would make sense here
                # See test_fitting_example_that_crashed_2 in
                # test_shallow_water_domain.py
                msg = 'Matrix AtA was not built'
                assert self.AtA is not None, msg

            point_coordinates = None
        else:
            point_coordinates = point_coordinates_or_filename

        if point_coordinates is None:
            if verbose: log.critical('Warning: no data points in fit')
            msg = 'No interpolation matrix.'
            assert self.AtA is not None, msg
            assert self.Atz is not None
        else:
            point_coordinates = ensure_absolute(point_coordinates,
                                                geo_reference=point_origin)
            # NOTE: verbose must be passed by keyword; the third positional
            # parameter of build_fit_subset is attribute_name, so the
            # original positional call silently dropped the verbose flag.
            self.build_fit_subset(point_coordinates, z, verbose=verbose)

        # Check sanity
        m = self.mesh.number_of_nodes  # Nbr of basis functions (1/vertex)
        n = self.point_count
        if n < m and self.alpha == 0.0:
            msg = 'ERROR (least_squares): Too few data points\n'
            msg += 'There are only %d data points and alpha == 0. ' % n
            msg += 'Need at least %d\n' % m
            msg += 'Alternatively, set smoothing parameter alpha to a small '
            msg += 'positive value,\ne.g. 1.0e-3.'
            raise TooFewPointsError(msg)

        self._build_coefficient_matrix_B(verbose)
        loners = self.mesh.get_lone_vertices()
        # FIXME - make this an error message.
        # test with
        # Not_yet_test_smooth_att_to_mesh_with_excess_verts.
        if len(loners) > 0:
            msg = 'WARNING: (least_squares): \nVertices with no triangles\n'
            msg += 'All vertices should be part of a triangle.\n'
            msg += 'In the future this will be enforced.\n'
            msg += 'The following vertices are not part of a triangle;\n'
            msg += str(loners)
            log.critical(msg)
            #raise VertsWithNoTrianglesError(msg)

        return conjugate_gradient(self.B, self.Atz, self.Atz,
                                  imax=2*len(self.Atz))

    def build_fit_subset(self, point_coordinates, z=None, attribute_name=None,
                         verbose=False):
        """Accumulate a subset of data points into the fit matrices.

        The smooth surface is computed at each vertex in the underlying
        mesh using the formula given in the module doc string.

        Inputs:
        point_coordinates: The co-ordinates of the data points.
              List of coordinate pairs [x, y] of
              data points or an nx2 numeric array or a Geospatial_data object.
        z: Single 1d vector or array of data at the point_coordinates.
        attribute_name: Used to get the z values from the
              geospatial object if no attribute_name is specified,
              it's a bit of a lucky dip as to what attributes you get.
              If there is only one attribute it will be that one.
        """
        # FIXME(DSG-DSG): Check that the vert and point coords
        # have the same zone.
        if isinstance(point_coordinates, Geospatial_data):
            point_coordinates = point_coordinates.get_data_points(
                absolute=True)

        # Convert input to numeric arrays
        if z is not None:
            z = ensure_numeric(z, float)
        else:
            msg = 'z not specified'
            assert isinstance(point_coordinates, Geospatial_data), msg
            z = point_coordinates.get_attributes(attribute_name)

        point_coordinates = ensure_numeric(point_coordinates, float)
        self._build_matrix_AtA_Atz(point_coordinates, z, verbose)
461
462
463############################################################################
464
def fit_to_mesh(point_coordinates,  # this can also be a points file name
                vertex_coordinates=None,
                triangles=None,
                mesh=None,
                point_attributes=None,
                alpha=DEFAULT_ALPHA,
                verbose=False,
                mesh_origin=None,
                data_origin=None,
                max_read_lines=None,
                attribute_name=None,
                use_cache=False):
    """Wrapper around internal function _fit_to_mesh for use with caching.

    See _fit_to_mesh for a description of the parameters.
    If use_cache is True the result of _fit_to_mesh is cached.  When
    point_coordinates is a filename it is registered as a cache
    dependency so the cache is invalidated if the file changes on disk.
    """
    args = (point_coordinates,)
    kwargs = {'vertex_coordinates': vertex_coordinates,
              'triangles': triangles,
              'mesh': mesh,
              'point_attributes': point_attributes,
              'alpha': alpha,
              'verbose': verbose,
              'mesh_origin': mesh_origin,
              'data_origin': data_origin,
              'max_read_lines': max_read_lines,
              'attribute_name': attribute_name}

    if use_cache is True:
        if isinstance(point_coordinates, basestring):
            # We assume that point_coordinates is the name of a .csv/.txt
            # file which must be passed onto caching as a dependency
            # (in case it has changed on disk)
            dep = [point_coordinates]
        else:
            dep = None

        return cache(_fit_to_mesh,
                     args, kwargs,
                     verbose=verbose,
                     compression=False,
                     dependencies=dep)
    else:
        # apply() is deprecated; call the function directly
        return _fit_to_mesh(*args, **kwargs)
535
def _fit_to_mesh(point_coordinates,  # this can also be a points file name
                 vertex_coordinates=None,
                 triangles=None,
                 mesh=None,
                 point_attributes=None,
                 alpha=DEFAULT_ALPHA,
                 verbose=False,
                 mesh_origin=None,
                 data_origin=None,
                 max_read_lines=None,
                 attribute_name=None):
    """Fit a smooth surface to a triangulation, given data points with
    attributes.

    Inputs:
        point_coordinates: List of coordinate pairs [x, y] of data points
            (or an nx2 numeric array). This can also be a .csv/.txt/.pts
            file name.

        vertex_coordinates: List of coordinate pairs [xi, eta] of
            points constituting a mesh (or an m x 2 numeric array or
            a geospatial object).
            Points may appear multiple times
            (e.g. if vertices have discontinuities).

        triangles: List of 3-tuples (or a numeric array) of
            integers representing indices of all vertices in the mesh.

        mesh: A mesh instance; if supplied, vertex_coordinates,
            triangles and mesh_origin are ignored.

        alpha: Smoothing parameter.

        mesh_origin: A geo_reference object or 3-tuple consisting of
            UTM zone, easting and northing.
            If specified vertex coordinates are assumed to be
            relative to their respective origins.

        point_attributes: Vector or array of data at the
            point_coordinates.

    Returns the fitted vertex attributes.
    """
    if mesh is None:
        # FIXME(DSG): Throw errors if triangles or vertex_coordinates
        # are None

        # Convert input to numeric arrays
        triangles = ensure_numeric(triangles, int)
        vertex_coordinates = ensure_absolute(vertex_coordinates,
                                             geo_reference=mesh_origin)

        if verbose: log.critical('FitInterpolate: Building mesh')
        mesh = Mesh(vertex_coordinates, triangles)
        mesh.check_integrity()

    interp = Fit(mesh=mesh,
                 verbose=verbose,
                 alpha=alpha)

    vertex_attributes = interp.fit(point_coordinates,
                                   point_attributes,
                                   point_origin=data_origin,
                                   max_read_lines=max_read_lines,
                                   attribute_name=attribute_name,
                                   verbose=verbose)

    # FIXME: Add the value checking stuff that's in least squares.
    # Maybe this stuff should get pushed down into Fit -
    # at least be a method of Fit, or integrated into the fit method,
    # saving the max and min's as attributes.

    return vertex_attributes
611
612
613#def _fit(*args, **kwargs):
614#    """Private function for use with caching. Reason is that classes
615#    may change their byte code between runs which is annoying.
616#    """
617#   
618#    return Fit(*args, **kwargs)
619
620
def fit_to_mesh_file(mesh_file, point_file, mesh_output_file,
                     alpha=DEFAULT_ALPHA, verbose=False,
                     expand_search=False,
                     precrop=False,
                     display_errors=True):
    """Fit point attributes to a mesh and write the result to file.

    Given a mesh file (tsh) and a point attribute file, fit
    point attributes to the mesh and write a mesh file with the
    results.

    Note: the points file needs titles.  If you want anuga to use the tsh
    file, make sure the title is elevation.

    NOTE: Throws IOErrors, for a variety of file problems.
    """
    from load_mesh.loadASCII import import_mesh_file, \
         export_mesh_file, concatinate_attributelist

    try:
        mesh_dict = import_mesh_file(mesh_file)
    except IOError as e:
        if display_errors:
            log.critical("Could not load bad file: %s" % str(e))
        raise IOError  # Could not load bad mesh file.

    vertex_coordinates = mesh_dict['vertices']
    triangles = mesh_dict['triangles']
    if isinstance(mesh_dict['vertex_attributes'], num.ndarray):
        old_point_attributes = mesh_dict['vertex_attributes'].tolist()
    else:
        old_point_attributes = mesh_dict['vertex_attributes']

    if isinstance(mesh_dict['vertex_attribute_titles'], num.ndarray):
        old_title_list = mesh_dict['vertex_attribute_titles'].tolist()
    else:
        old_title_list = mesh_dict['vertex_attribute_titles']

    if verbose: log.critical('tsh file %s loaded' % mesh_file)

    # Load in the points file
    try:
        geo = Geospatial_data(point_file, verbose=verbose)
    except IOError as e:
        if display_errors:
            log.critical("Could not load bad file: %s" % str(e))
        raise IOError  # Re-raise exception

    point_coordinates = geo.get_data_points(absolute=True)
    title_list, point_attributes = concatinate_attributelist(
        geo.get_all_attributes())

    # dict.has_key is deprecated; use the 'in' operator
    if 'geo_reference' in mesh_dict and \
           mesh_dict['geo_reference'] is not None:
        mesh_origin = mesh_dict['geo_reference'].get_origin()
    else:
        mesh_origin = None

    if verbose: log.critical("points file loaded")
    if verbose: log.critical("fitting to mesh")
    f = fit_to_mesh(point_coordinates,
                    vertex_coordinates,
                    triangles,
                    None,
                    point_attributes,
                    alpha=alpha,
                    verbose=verbose,
                    data_origin=None,
                    mesh_origin=mesh_origin)
    if verbose: log.critical("finished fitting to mesh")

    # Convert array to list of lists
    new_point_attributes = f.tolist()
    # FIXME have this overwrite attributes with the same title - DSG
    # Put the newer attributes last
    if old_title_list != []:
        old_title_list.extend(title_list)
        # FIXME can this be done a faster way? - DSG
        for i in range(len(old_point_attributes)):
            old_point_attributes[i].extend(new_point_attributes[i])
        mesh_dict['vertex_attributes'] = old_point_attributes
        mesh_dict['vertex_attribute_titles'] = old_title_list
    else:
        mesh_dict['vertex_attributes'] = new_point_attributes
        mesh_dict['vertex_attribute_titles'] = title_list

    if verbose: log.critical("exporting to file %s" % mesh_output_file)

    try:
        export_mesh_file(mesh_output_file, mesh_dict)
    except IOError as e:
        if display_errors:
            # Bug fix: original passed str(e) as a stray second argument;
            # the message must be %-formatted before being logged.
            log.critical("Could not write file %s" % str(e))
        raise IOError
Note: See TracBrowser for help on using the repository browser.