source: anuga_core/source/anuga/fit_interpolate/fit.py @ 6191

Last change on this file since 6191 was 6191, checked in by ole, 15 years ago

Refactored Mesh-Domain inheritance pattern into a composition pattern, thereby
paving the way for reuse of Mesh instance in fitting as per ticket:242.
Had to disable test for isinstance(domain, Domain) in quantity.py as it
failed for unknown reasons. All other tests and validation suite passes.

File size: 25.6 KB
1"""Least squares fitting.
2
3   Implements a penalised least-squares fit,
4   putting point data onto the mesh.
5
6   The penalty term (or smoothing term) is controlled by the smoothing
7   parameter alpha.
8   With a value of alpha=0, the fit function will attempt
9   to interpolate as closely as possible in the least-squares sense.
10   With values alpha > 0, a certain amount of smoothing will be applied.
11   A positive alpha is essential in cases where there are too few
12   data points.
13   A negative alpha is not allowed.
14   A typical value of alpha is 1.0e-6
15
16
17   Ole Nielsen, Stephen Roberts, Duncan Gray, Christopher Zoppou
18   Geoscience Australia, 2004.
19
20   TO DO
21   * test geo_ref, geo_spatial
22
23   IDEAS
24   * (DSG-) Change the interface of fit, so a domain object can
25      be passed in. (I don't know if this is feasible.) It could
26      save time/memory.
27"""
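# Usage sketch (added for illustration; the coordinates and values below are
# made up). fit_to_mesh, defined later in this module, fits the data values
# to the mesh vertices, with alpha controlling the amount of smoothing:
#
#   from anuga.fit_interpolate.fit import fit_to_mesh
#
#   vertex_coordinates = [[0.0, 0.0], [2.0, 0.0], [0.0, 2.0]]   # mesh vertices
#   triangles          = [[0, 1, 2]]                            # one triangle
#   point_coordinates  = [[0.5, 0.5], [1.0, 0.3], [0.3, 1.0]]   # data points
#   point_attributes   = [1.0, 2.0, 3.0]                        # data values z
#
#   vertex_values = fit_to_mesh(point_coordinates,
#                               vertex_coordinates,
#                               triangles,
#                               point_attributes=point_attributes,
#                               alpha=1.0e-6)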
28import types
29
30from anuga.abstract_2d_finite_volumes.neighbour_mesh import Mesh
31from anuga.caching import cache           
32from anuga.geospatial_data.geospatial_data import Geospatial_data, \
33     ensure_absolute
34from anuga.fit_interpolate.general_fit_interpolate import FitInterpolate
35from anuga.utilities.sparse import Sparse, Sparse_CSR
36from anuga.utilities.polygon import in_and_outside_polygon
37from anuga.fit_interpolate.search_functions import search_tree_of_vertices
38
39from anuga.utilities.cg_solve import conjugate_gradient
40from anuga.utilities.numerical_tools import ensure_numeric, gradient
41from anuga.config import default_smoothing_parameter as DEFAULT_ALPHA
42
43import exceptions
44class TooFewPointsError(exceptions.Exception): pass
45class VertsWithNoTrianglesError(exceptions.Exception): pass
46
47import Numeric as num
48
49
50#DEFAULT_ALPHA = 0.001
51
52
53class Fit(FitInterpolate):
54   
55    def __init__(self,
56                 vertex_coordinates=None,
57                 triangles=None,
58                 mesh=None,
59                 mesh_origin=None,
60                 alpha=None,
61                 verbose=False,
62                 max_vertices_per_cell=None):
63
64
65        """
66        Fit data at points to the vertices of a mesh.
67
68        Inputs:
69
70          vertex_coordinates: List of coordinate pairs [xi, eta] of
71              points constituting a mesh (or an m x 2 Numeric array or
72              a geospatial object)
73              Points may appear multiple times
74              (e.g. if vertices have discontinuities)
75
76          triangles: List of 3-tuples (or a Numeric array) of
77              integers representing indices of all vertices in the mesh.
78
79          mesh_origin: A geo_reference object or 3-tuples consisting of
80              UTM zone, easting and northing.
81              If specified, vertex coordinates are assumed to be
82              relative to their respective origins.
83
84          max_vertices_per_cell: Number of vertices in a quad tree cell
85          at which the cell is split into 4.
86
87          Note: Don't supply vertex coordinates as a geospatial object and
88              a mesh origin, since geospatial has its own mesh origin.
89
90
91        Usage:
92        To use this in a blocking way, call build_fit_subset with the point
93        and attribute (z) data, and then call fit with no point or z data.
94       
95        """
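        # Sketch of the blocking usage described above (illustrative only;
        # the coordinates and values are made up):
        #
        #   fitter = Fit(vertex_coordinates=[[0.0, 0.0], [2.0, 0.0], [0.0, 2.0]],
        #                triangles=[[0, 1, 2]],
        #                alpha=1.0e-6)
        #   fitter.build_fit_subset([[0.5, 0.5], [1.0, 0.3]], [1.0, 2.0])
        #   fitter.build_fit_subset([[0.3, 1.0]], [3.0])
        #   vertex_values = fitter.fit()   # no point coordinates or z here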
96        # Initialise variables
97        if alpha is None:
98            self.alpha = DEFAULT_ALPHA
99        else:   
100            self.alpha = alpha
101           
102        FitInterpolate.__init__(self,
103                 vertex_coordinates,
104                 triangles,
105                 mesh,
106                 mesh_origin,
107                 verbose,
108                 max_vertices_per_cell)
109       
110        m = self.mesh.number_of_nodes # Nbr of basis functions (vertices)
111       
112        self.AtA = None
113        self.Atz = None
114
115        self.point_count = 0
116        if self.alpha != 0:
117            if verbose: print 'Building smoothing matrix'
118            self._build_smoothing_matrix_D()
119           
120        self.mesh_boundary_polygon = self.mesh.get_boundary_polygon()   
121           
122    def _build_coefficient_matrix_B(self,
123                                  verbose = False):
124        """
125        Build final coefficient matrix
126
127        Preconditions:
128        If alpha is not zero, matrix D has been built.
129        Matrix AtA has been built.
130        """
131
132        if self.alpha != 0:
133            #if verbose: print 'Building smoothing matrix'
134            #self._build_smoothing_matrix_D()
135            self.B = self.AtA + self.alpha*self.D
136        else:
137            self.B = self.AtA
138
139        #Convert self.B matrix to CSR format for faster matrix-vector products
140        self.B = Sparse_CSR(self.B)
141
142    def _build_smoothing_matrix_D(self):
143        """Build m x m smoothing matrix, where
144        m is the number of basis functions phi_k (one per vertex)
145
146        The smoothing matrix is defined as
147
148        D = D1 + D2
149
150        where
151
152        [D1]_{k,l} = \int_\Omega
153           \frac{\partial \phi_k}{\partial x}
154           \frac{\partial \phi_l}{\partial x}\,
155           dx dy
156
157        [D2]_{k,l} = \int_\Omega
158           \frac{\partial \phi_k}{\partial y}
159           \frac{\partial \phi_l}{\partial y}\,
160           dx dy
161
162
163        The derivatives \frac{\partial \phi_k}{\partial x},
164        \frac{\partial \phi_k}{\partial y} for a particular triangle
165        are obtained by computing the gradient a_k, b_k for basis function k
166        """
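        # Worked check (added for illustration): on the unit right triangle
        # with vertices (0,0), (1,0) and (0,1), the basis function of the
        # first vertex is phi_0 = 1 - x - y, so a0 = b0 = -1 and this
        # triangle's contribution to D[v0,v0] is
        # (a0*a0 + b0*b0)*area = 2*0.5 = 1.0.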
167       
168        #FIXME: algorithm might be optimised by computing local 9x9
169        #"element stiffness matrices:
170
171        m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)
172
173        self.D = Sparse(m,m)
174
175        #For each triangle compute contributions to D = D1+D2
176        for i in range(len(self.mesh)):
177
178            #Get area
179            area = self.mesh.areas[i]
180
181            #Get global vertex indices
182            v0 = self.mesh.triangles[i,0]
183            v1 = self.mesh.triangles[i,1]
184            v2 = self.mesh.triangles[i,2]
185
186            #Get the three vertex_points
187            xi0 = self.mesh.get_vertex_coordinate(i, 0)
188            xi1 = self.mesh.get_vertex_coordinate(i, 1)
189            xi2 = self.mesh.get_vertex_coordinate(i, 2)
190
191            #Compute gradients for each vertex
192            a0, b0 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
193                              1, 0, 0)
194
195            a1, b1 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
196                              0, 1, 0)
197
198            a2, b2 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
199                              0, 0, 1)
200
201            #Compute diagonal contributions
202            self.D[v0,v0] += (a0*a0 + b0*b0)*area
203            self.D[v1,v1] += (a1*a1 + b1*b1)*area
204            self.D[v2,v2] += (a2*a2 + b2*b2)*area
205
206            #Compute contributions for basis functions sharing edges
207            e01 = (a0*a1 + b0*b1)*area
208            self.D[v0,v1] += e01
209            self.D[v1,v0] += e01
210
211            e12 = (a1*a2 + b1*b2)*area
212            self.D[v1,v2] += e12
213            self.D[v2,v1] += e12
214
215            e20 = (a2*a0 + b2*b0)*area
216            self.D[v2,v0] += e20
217            self.D[v0,v2] += e20
218
219    def get_D(self):
220        return self.D.todense()
221
222
223
224    def _build_matrix_AtA_Atz(self,
225                              point_coordinates,
226                              z,
227                              verbose = False):
228        """Build:
229        AtA  m x m  interpolation matrix, and,
230        Atz  m x a  interpolation matrix where,
231        m is the number of basis functions phi_k (one per vertex)
232        a is the number of data attributes
233
234        This algorithm uses a quad tree data structure for fast binning of
235        data points.
236
237        If AtA is None, the matrices AtA and Atz are created.
238
239        This function can be called again and again, with sub-sets of
240        the point coordinates.  Call fit to get the results.
241       
242        Preconditions
243        z and points are numeric
244        Point_coordinates and mesh vertices have the same origin.
245
246        The number of attributes of the data points does not change
247        """
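        # Worked check (added for illustration): a data point at the centroid
        # of a triangle has interpolation weights sigma0 = sigma1 = sigma2 = 1/3,
        # so it adds 1/9 to AtA[j,k] for each pair of that triangle's vertices
        # and z/3 to Atz[j] for each of its three vertices.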
248        #Build n x m interpolation matrix
249
250        if self.AtA is None:
251            # AtA and Atz need to be initialised.
252            m = self.mesh.number_of_nodes
253            if len(z.shape) > 1:
254                att_num = z.shape[1]
255                self.Atz = num.zeros((m,att_num), num.Float)
256            else:
257                att_num = 1
258                self.Atz = num.zeros((m,), num.Float)
259            assert z.shape[0] == point_coordinates.shape[0] 
260
261            AtA = Sparse(m,m)
262            # The memory damage has been done by now.
263        else:
264             AtA = self.AtA #Did this for speed, did ~nothing
265        self.point_count += point_coordinates.shape[0]
266
267        #if verbose: print 'Getting indices inside mesh boundary'
268
269        inside_poly_indices, outside_poly_indices  = \
270                     in_and_outside_polygon(point_coordinates,
271                                            self.mesh_boundary_polygon,
272                                            closed = True,
273                                            verbose = False) # There's too much output if True
274        #print "self.inside_poly_indices",self.inside_poly_indices
275        #print "self.outside_poly_indices",self.outside_poly_indices
276
277       
278        n = len(inside_poly_indices)
279        #if verbose: print 'Building fitting matrix from %d points' %n       
280
281        #Compute matrix elements for points inside the mesh
282        triangles = self.mesh.triangles #Did this for speed, did ~nothing
283        for d, i in enumerate(inside_poly_indices):
284            # For each data_coordinate point
285            # if verbose and d%((n+10)/10)==0: print 'Doing %d of %d' %(d, n)
286            x = point_coordinates[i]
287            element_found, sigma0, sigma1, sigma2, k = \
288                           search_tree_of_vertices(self.root, self.mesh, x)
289           
290            if element_found is True:
291                j0 = triangles[k,0] #Global vertex id for sigma0
292                j1 = triangles[k,1] #Global vertex id for sigma1
293                j2 = triangles[k,2] #Global vertex id for sigma2
294
295                sigmas = {j0:sigma0, j1:sigma1, j2:sigma2}
296                js     = [j0,j1,j2]
297
298                for j in js:
299                    self.Atz[j] +=  sigmas[j]*z[i]
300                    #print "self.Atz building", self.Atz
301                    #print "self.Atz[j]", self.Atz[j]
302                    #print " sigmas[j]", sigmas[j]
303                    #print "z[i]",z[i]
304                    #print "result", sigmas[j]*z[i]
305                   
306                    for k in js:
307                        AtA[j,k] += sigmas[j]*sigmas[k]
308            else:
309                msg = 'Could not find triangle for point %s' % str(x)
310                raise Exception(msg)
311        self.AtA = AtA # Store once after the loop so subsequent blocks accumulate
312       
313    def fit(self, point_coordinates_or_filename=None, z=None,
314            verbose=False,
315            point_origin=None,
316            attribute_name=None,
317            max_read_lines=500):
318        """Fit a smooth surface to given 1d array of data points z.
319
320        The smooth surface is computed at each vertex in the underlying
321        mesh using the formula given in the module doc string.
322
323        Inputs:
324        point_coordinates: The co-ordinates of the data points.
325              List of coordinate pairs [x, y] of
326              data points or an nx2 Numeric array or a Geospatial_data object
327              or a points file name
328          z: Single 1d vector or array of data at the point_coordinates.
329         
330        """
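        # Illustrative call on a Fit instance 'fitter' (the file name and
        # attribute name below are hypothetical):
        #
        #   vertex_values = fitter.fit('points.csv',
        #                              attribute_name='elevation',
        #                              max_read_lines=500)
        #
        # When given a file name, the points are read and fitted block by
        # block via build_fit_subset before the system is solved.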
331        # use blocking to load in the point info
332        if type(point_coordinates_or_filename) == types.StringType:
333            msg = "Don't set a point origin when reading from a file"
334            assert point_origin is None, msg
335            filename = point_coordinates_or_filename
336
337            G_data = Geospatial_data(filename,
338                                     max_read_lines=max_read_lines,
339                                     load_file_now=False,
340                                     verbose=verbose)
341
342            for i, geo_block in enumerate(G_data):
343                if verbose is True and 0 == i%200: 
344                    # The time this will take
345                    # is dependent on the number of triangles
346                       
347                    print 'Processing Block %d' %i
348                    # FIXME (Ole): It would be good to say how many blocks
349                    # there are here. But this is no longer necessary
350                    # for pts files as they are reported in geospatial_data
351                    # I suggest deleting this verbose output and make
352                    # Geospatial_data more informative for txt files.
353                    #
354                    # I still think so (12/12/7, Ole).
355           
356
357                   
358                # Build the array
359
360                points = geo_block.get_data_points(absolute=True)
361                #print "fit points", points
362                z = geo_block.get_attributes(attribute_name=attribute_name)
363                self.build_fit_subset(points, z, verbose=verbose)
364
365               
366            point_coordinates = None
367        else:
368            point_coordinates =  point_coordinates_or_filename
369           
370        if point_coordinates is None:
371            if verbose: print 'Warning: no data points in fit'
372            assert self.AtA is not None, 'no interpolation matrix'
373            assert self.Atz is not None, 'no interpolation vector'
374           
375            #FIXME (DSG) - do  a message
376        else:
377            point_coordinates = ensure_absolute(point_coordinates,
378                                                geo_reference=point_origin)
379            #if isinstance(point_coordinates,Geospatial_data) and z is None:
380            # z will come from the geo-ref
381            self.build_fit_subset(point_coordinates, z, verbose)
382
383        #Check sanity
384        m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)
385        n = self.point_count
386        if n<m and self.alpha == 0.0:
387            msg = 'ERROR (least_squares): Too few data points\n'
388            msg += 'There are only %d data points and alpha == 0. ' %n
389            msg += 'Need at least %d\n' %m
390            msg += 'Alternatively, set smoothing parameter alpha to a small '
391            msg += 'positive value,\ne.g. 1.0e-3.'
392            raise TooFewPointsError(msg)
393
394        self._build_coefficient_matrix_B(verbose)
395        loners = self.mesh.get_lone_vertices()
396        # FIXME - make this an error message.
397        # test with
398        # Not_yet_test_smooth_att_to_mesh_with_excess_verts.
399        if len(loners)>0:
400            msg = 'WARNING: (least_squares): \nVertices with no triangles\n'
401            msg += 'All vertices should be part of a triangle.\n'
402            msg += 'In the future this will be enforced.\n'
403            msg += 'The following vertices are not part of a triangle:\n'
404            msg += str(loners)
405            print msg
406            #raise VertsWithNoTrianglesError(msg)
407       
408       
409        return conjugate_gradient(self.B, self.Atz, self.Atz,
410                                  imax=2*len(self.Atz) )
411
412       
413    def build_fit_subset(self, point_coordinates, z=None, attribute_name=None,
414                              verbose=False):
415        """Fit a smooth surface to given 1d array of data points z.
416
417        The smooth surface is computed at each vertex in the underlying
418        mesh using the formula given in the module doc string.
419
420        Inputs:
421        point_coordinates: The co-ordinates of the data points.
422              List of coordinate pairs [x, y] of
423              data points or an nx2 Numeric array or a Geospatial_data object
424        z: Single 1d vector or array of data at the point_coordinates.
425        attribute_name: Used to get the z values from the
426              geospatial object. If no attribute_name is specified,
427              it's a bit of a lucky dip as to which attribute you get.
428              If there is only one attribute it will be that one.
429
430        """
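        # Illustrative call (added; 'geo_data' is a hypothetical
        # Geospatial_data object holding the scattered points):
        #
        #   fitter.build_fit_subset(geo_data, attribute_name='elevation')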
431
432        # FIXME(DSG-DSG): Check that the vert and point coords
433        # have the same zone.
434        if isinstance(point_coordinates,Geospatial_data):
435            point_coordinates = point_coordinates.get_data_points( \
436                absolute = True)
437       
438        # Convert input to Numeric arrays
439        if z is not None:
440            z = ensure_numeric(z, num.Float)
441        else:
442            msg = 'z not specified'
443            assert isinstance(point_coordinates,Geospatial_data), msg
444            z = point_coordinates.get_attributes(attribute_name)
445
446        point_coordinates = ensure_numeric(point_coordinates, num.Float)
447        self._build_matrix_AtA_Atz(point_coordinates, z, verbose)
448
449
450############################################################################
451
452def fit_to_mesh(point_coordinates, # this can also be a points file name
453                vertex_coordinates=None,
454                triangles=None,
455                mesh=None,
456                point_attributes=None,
457                alpha=DEFAULT_ALPHA,
458                verbose=False,
459                acceptable_overshoot=1.01,
460                mesh_origin=None,
461                data_origin=None,
462                max_read_lines=None,
463                attribute_name=None,
464                use_cache=False):
465    """Wrapper around internal function _fit_to_mesh for use with caching.
466   
467    """
468   
469    args = (point_coordinates, )
470    kwargs = {'vertex_coordinates': vertex_coordinates,
471              'triangles': triangles,
472              'mesh': mesh,
473              'point_attributes': point_attributes,
474              'alpha': alpha,
475              'verbose': verbose,
476              'acceptable_overshoot': acceptable_overshoot,
477              'mesh_origin': mesh_origin,
478              'data_origin': data_origin,
479              'max_read_lines': max_read_lines,
480              'attribute_name': attribute_name,
481              'use_cache': use_cache
482              }
483
484    if use_cache is True:
485        if isinstance(point_coordinates, basestring):
486            # We assume that point_coordinates is the name of a .csv/.txt
487            # file which must be passed onto caching as a dependency
488            # (in case it has changed on disk)
489            dep = [point_coordinates]
490        else:
491            dep = None
492
493           
494        #from caching import myhash
495        #import copy
496        #print kwargs
497        #print 'hashing:'
498        #print 'args', myhash( (args, kwargs) )
499        #print 'again', myhash( copy.deepcopy( (args, kwargs)) )       
500       
501        #print 'mesh hash', myhash( kwargs['mesh'] )       
502       
503        #print '-------------------------'
504        #print 'vertices hash', myhash( kwargs['mesh'].nodes )
505        #print 'triangles hash', myhash( kwargs['mesh'].triangles )
506        #print '-------------------------'       
507       
508        #for key in mesh.__dict__:
509        #    print key, myhash(mesh.__dict__[key])
510       
511        #for key in mesh.quantities.keys():
512        #    print key, myhash(mesh.quantities[key])
513       
514        #import sys; sys.exit()
515           
516        return cache(_fit_to_mesh,
517                     args, kwargs,
518                     verbose=verbose,
519                     compression=False,
520                     dependencies=dep)
521    else:
522        return _fit_to_mesh(*args,
523                            **kwargs)
524
525def _fit_to_mesh(point_coordinates, # this can also be a points file name
526                 vertex_coordinates=None,
527                 triangles=None,
528                 mesh=None,
529                 point_attributes=None,
530                 alpha=DEFAULT_ALPHA,
531                 verbose=False,
532                 acceptable_overshoot=1.01,
533                 mesh_origin=None,
534                 data_origin=None,
535                 max_read_lines=None,
536                 attribute_name=None,
537                 use_cache = False):
538    """
539    Fit a smooth surface to a triangulation,
540    given data points with attributes.
541
542
543        Inputs:
544          vertex_coordinates: List of coordinate pairs [xi, eta] of
545              points constituting a mesh (or an m x 2 Numeric array or
546              a geospatial object)
547              Points may appear multiple times
548              (e.g. if vertices have discontinuities)
549
550          triangles: List of 3-tuples (or a Numeric array) of
551          integers representing indices of all vertices in the mesh.
552
553          point_coordinates: List of coordinate pairs [x, y] of data points
554          (or an nx2 Numeric array). This can also be a .csv/.txt/.pts
555          file name.
556
557          alpha: Smoothing parameter.
558
559          acceptable_overshoot: NOT IMPLEMENTED.
560              Controls the allowed factor by which fitted values
561              may exceed the value of input data.
562              The lower limit is defined
563              as min(z) - acceptable_overshoot*delta z and the upper limit
564              as max(z) + acceptable_overshoot*delta z.
565         
566
567          mesh_origin: A geo_reference object or 3-tuples consisting of
568              UTM zone, easting and northing.
569              If specified, vertex coordinates are assumed to be
570              relative to their respective origins.
571         
572
573          point_attributes: Vector or array of data at the
574                            point_coordinates.
575
576    """
577
578    # Duncan and Ole think that this isn't worth caching.
579    # Caching happens at the higher level anyway.
580   
581
582    if mesh is None:
583        # FIXME(DSG): Throw errors if triangles or vertex_coordinates
584        # are None
585           
586        #Convert input to Numeric arrays
587        triangles = ensure_numeric(triangles, num.Int)
588        vertex_coordinates = ensure_absolute(vertex_coordinates,
589                                             geo_reference = mesh_origin)
590
591        if verbose: print 'FitInterpolate: Building mesh'       
592        mesh = Mesh(vertex_coordinates, triangles)
593        mesh.check_integrity()
594   
595    interp = Fit(mesh=mesh,
596                 verbose=verbose,
597                 alpha=alpha)
598
599    vertex_attributes = interp.fit(point_coordinates,
600                                   point_attributes,
601                                   point_origin=data_origin,
602                                   max_read_lines=max_read_lines,
603                                   attribute_name=attribute_name,
604                                   verbose=verbose)
605
606       
607    # Add the value checking stuff that's in least squares.
608    # Maybe this stuff should get pushed down into Fit,
609    # or at least be made a method of Fit.
610    # Or integrate it into the fit method, saving the max and min
611    # as attributes.
612   
613    return vertex_attributes
614
615
616#def _fit(*args, **kwargs):
617#    """Private function for use with caching. Reason is that classes
618#    may change their byte code between runs which is annoying.
619#    """
620#   
621#    return Fit(*args, **kwargs)
622
623
624def fit_to_mesh_file(mesh_file, point_file, mesh_output_file,
625                     alpha=DEFAULT_ALPHA, verbose= False,
626                     expand_search = False,
627                     precrop = False,
628                     display_errors = True):
629    """
630    Given a mesh file (tsh) and a point attribute file, fit
631    point attributes to the mesh and write a mesh file with the
632    results.
633
634    Note: the points file needs titles.  If you want anuga to use the tsh file,
635    make sure the title is elevation.
636
637    NOTE: Throws IOErrors, for a variety of file problems.
638   
639    """
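    # Illustrative call (added; all file names are hypothetical):
    #
    #   fit_to_mesh_file('terrain.tsh', 'survey_points.csv',
    #                    'terrain_fitted.tsh', alpha=1.0e-6)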
640
641    from load_mesh.loadASCII import import_mesh_file, \
642         export_mesh_file, concatinate_attributelist
643
644
645    try:
646        mesh_dict = import_mesh_file(mesh_file)
647    except IOError,e:
648        if display_errors:
649            print "Could not load mesh file. ", e
650        raise IOError  #Could not load bad mesh file.
651   
652    vertex_coordinates = mesh_dict['vertices']
653    triangles = mesh_dict['triangles']
654    if type(mesh_dict['vertex_attributes']) == num.ArrayType:
655        old_point_attributes = mesh_dict['vertex_attributes'].tolist()
656    else:
657        old_point_attributes = mesh_dict['vertex_attributes']
658
659    if type(mesh_dict['vertex_attribute_titles']) == num.ArrayType:
660        old_title_list = mesh_dict['vertex_attribute_titles'].tolist()
661    else:
662        old_title_list = mesh_dict['vertex_attribute_titles']
663
664    if verbose: print 'tsh file %s loaded' %mesh_file
665
666    # load in the points file
667    try:
668        geo = Geospatial_data(point_file, verbose=verbose)
669    except IOError,e:
670        if display_errors:
671            print "Could not load points file. ", e
672        raise IOError  #Re-raise exception 
673
674    point_coordinates = geo.get_data_points(absolute=True)
675    title_list,point_attributes = concatinate_attributelist( \
676        geo.get_all_attributes())
677
678    if mesh_dict.has_key('geo_reference') and \
679           not mesh_dict['geo_reference'] is None:
680        mesh_origin = mesh_dict['geo_reference'].get_origin()
681    else:
682        mesh_origin = None
683
684    if verbose: print "points file loaded"
685    if verbose: print "fitting to mesh"
686    f = fit_to_mesh(point_coordinates,
687                    vertex_coordinates,
688                    triangles,
689                    None,
690                    point_attributes,
691                    alpha = alpha,
692                    verbose = verbose,
693                    data_origin = None,
694                    mesh_origin = mesh_origin)
695    if verbose: print "finished fitting to mesh"
696
697    # convert array to list of lists
698    new_point_attributes = f.tolist()
699    #FIXME have this overwrite attributes with the same title - DSG
700    #Put the newer attributes last
701    if old_title_list != []:
702        old_title_list.extend(title_list)
703        #FIXME can this be done a faster way? - DSG
704        for i in range(len(old_point_attributes)):
705            old_point_attributes[i].extend(new_point_attributes[i])
706        mesh_dict['vertex_attributes'] = old_point_attributes
707        mesh_dict['vertex_attribute_titles'] = old_title_list
708    else:
709        mesh_dict['vertex_attributes'] = new_point_attributes
710        mesh_dict['vertex_attribute_titles'] = title_list
711
712    if verbose: print "exporting to file ", mesh_output_file
713
714    try:
715        export_mesh_file(mesh_output_file, mesh_dict)
716    except IOError,e:
717        if display_errors:
718            print "Could not write file. ", e
719        raise IOError