source: anuga_core/source/anuga/fit_interpolate/fit.py @ 6488

Last change on this file since 6488 was 6488, checked in by ole, 15 years ago

Added new test that revealed another fitting problem where
matrix AtA isn't being built. This test was derived from
the JJKelly example by Petar Milevsky.

The test has been disabled with a preceding X

See ticket:314 for more info

File size: 26.5 KB
1"""Least squares fitting.
2
3   Implements a penalised least-squares fit.
4   putting point data onto the mesh.
5
6   The penalty term (or smoothing term) is controlled by the smoothing
7   parameter alpha.
8   With a value of alpha=0, the fit function will attempt
9   to interpolate as closely as possible in the least-squares sense.
10   With values alpha > 0, a certain amount of smoothing will be applied.
11   A positive alpha is essential in cases where there are too few
12   data points.
13   A negative alpha is not allowed.
14   A typical value of alpha is 1.0e-6
15
16
17   Ole Nielsen, Stephen Roberts, Duncan Gray, Christopher Zoppou
18   Geoscience Australia, 2004.
19
20   TO DO
21   * test geo_ref, geo_spatial
22
23   IDEAS
24   * (DSG-) Change the interface of fit, so a domain object can
25      be passed in. (I don't know if this is feasible). If could
26      save time/memory.
27"""
import types

from anuga.abstract_2d_finite_volumes.neighbour_mesh import Mesh
from anuga.caching import cache
from anuga.geospatial_data.geospatial_data import Geospatial_data, \
     ensure_absolute
from anuga.fit_interpolate.general_fit_interpolate import FitInterpolate
from anuga.utilities.sparse import Sparse, Sparse_CSR
from anuga.utilities.polygon import inside_polygon, is_inside_polygon
from anuga.fit_interpolate.search_functions import search_tree_of_vertices

from anuga.utilities.cg_solve import conjugate_gradient
from anuga.utilities.numerical_tools import ensure_numeric, gradient
from anuga.config import default_smoothing_parameter as DEFAULT_ALPHA

import exceptions
class TooFewPointsError(exceptions.Exception): pass
class VertsWithNoTrianglesError(exceptions.Exception): pass

import Numeric as num


class Fit(FitInterpolate):

    def __init__(self,
                 vertex_coordinates=None,
                 triangles=None,
                 mesh=None,
                 mesh_origin=None,
                 alpha=None,
                 verbose=False,
                 max_vertices_per_cell=None):


        """
        Fit data at points to the vertices of a mesh.

        Inputs:

          vertex_coordinates: List of coordinate pairs [xi, eta] of
              points constituting a mesh (or an m x 2 Numeric array or
              a geospatial object)
              Points may appear multiple times
              (e.g. if vertices have discontinuities)

          triangles: List of 3-tuples (or a Numeric array) of
              integers representing indices of all vertices in the mesh.

          mesh_origin: A geo_reference object or 3-tuple consisting of
              UTM zone, easting and northing.
              If specified, vertex coordinates are assumed to be
              relative to their respective origins.

          max_vertices_per_cell: Number of vertices in a quad tree cell
              at which the cell is split into 4.

          Note: Don't supply vertex coordinates as a geospatial object
              together with a mesh origin, since the geospatial object
              has its own origin.


        Usage:
        To use this in a blocking way, call build_fit_subset with point and
        z data (this can be done repeatedly, block by block), and then call
        fit with no point coordinates or z data.  See the commented sketch
        below.
        """
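        # A sketch of the blocking usage described above (illustrative only;
        # the point blocks and values are assumed, not real data):
        #
        #     fit_object = Fit(vertex_coordinates=..., triangles=..., alpha=1.0e-6)
        #     for points_block, z_block in blocks:   # hypothetical iterable
        #         fit_object.build_fit_subset(points_block, z_block)
        #     vertex_values = fit_object.fit()       # no point data passed here
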
        # Initialise variables
        if alpha is None:
            self.alpha = DEFAULT_ALPHA
        else:
            self.alpha = alpha

        FitInterpolate.__init__(self,
                 vertex_coordinates,
                 triangles,
                 mesh,
                 mesh_origin,
                 verbose,
                 max_vertices_per_cell)

        m = self.mesh.number_of_nodes # Nbr of basis functions (vertices)

        self.AtA = None
        self.Atz = None

        self.point_count = 0
        if self.alpha != 0:
            if verbose: print 'Building smoothing matrix'
            self._build_smoothing_matrix_D()

        bd_poly = self.mesh.get_boundary_polygon()
        self.mesh_boundary_polygon = ensure_numeric(bd_poly)

    def _build_coefficient_matrix_B(self,
                                    verbose=False):
        """
        Build final coefficient matrix

        Preconditions:
        If alpha is not zero, matrix D has been built
        Matrix AtA has been built
        """

        if self.alpha != 0:
            #if verbose: print 'Building smoothing matrix'
            #self._build_smoothing_matrix_D()
            self.B = self.AtA + self.alpha*self.D
        else:
            self.B = self.AtA

        # Convert self.B matrix to CSR format for faster matrix-vector products
        self.B = Sparse_CSR(self.B)
    def _build_smoothing_matrix_D(self):
        """Build m x m smoothing matrix, where
        m is the number of basis functions phi_k (one per vertex)

        The smoothing matrix is defined as

        D = D1 + D2

        where

        [D1]_{k,l} = \int_\Omega
           \frac{\partial \phi_k}{\partial x}
           \frac{\partial \phi_l}{\partial x}\,
           dx dy

        [D2]_{k,l} = \int_\Omega
           \frac{\partial \phi_k}{\partial y}
           \frac{\partial \phi_l}{\partial y}\,
           dx dy


        The derivatives \frac{\partial \phi_k}{\partial x},
        \frac{\partial \phi_k}{\partial y} for a particular triangle
        are obtained by computing the gradient a_k, b_k for basis function k
        """
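        # Note added for clarity: for the linear basis functions used here the
        # gradients (a_k, b_k) are constant on each triangle, so each of the
        # integrals above reduces to a per-triangle contribution of
        #
        #     D[k,l] += (a_k*a_l + b_k*b_l) * area
        #
        # which is exactly what the loop below accumulates.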

        # FIXME: algorithm might be optimised by computing local 9x9
        # "element stiffness matrices"

        m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)

        self.D = Sparse(m,m)

        # For each triangle compute contributions to D = D1+D2
        for i in range(len(self.mesh)):

            # Get area
            area = self.mesh.areas[i]

            # Get global vertex indices
            v0 = self.mesh.triangles[i,0]
            v1 = self.mesh.triangles[i,1]
            v2 = self.mesh.triangles[i,2]

            # Get the three vertex_points
            xi0 = self.mesh.get_vertex_coordinate(i, 0)
            xi1 = self.mesh.get_vertex_coordinate(i, 1)
            xi2 = self.mesh.get_vertex_coordinate(i, 2)

            # Compute gradients for each vertex
            a0, b0 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
                              1, 0, 0)

            a1, b1 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
                              0, 1, 0)

            a2, b2 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
                              0, 0, 1)

            # Compute diagonal contributions
            self.D[v0,v0] += (a0*a0 + b0*b0)*area
            self.D[v1,v1] += (a1*a1 + b1*b1)*area
            self.D[v2,v2] += (a2*a2 + b2*b2)*area

            # Compute contributions for basis functions sharing edges
            e01 = (a0*a1 + b0*b1)*area
            self.D[v0,v1] += e01
            self.D[v1,v0] += e01

            e12 = (a1*a2 + b1*b2)*area
            self.D[v1,v2] += e12
            self.D[v2,v1] += e12

            e20 = (a2*a0 + b2*b0)*area
            self.D[v2,v0] += e20
            self.D[v0,v2] += e20

    def get_D(self):
        return self.D.todense()


    def _build_matrix_AtA_Atz(self,
                              point_coordinates,
                              z,
                              verbose=False):
        """Build:
        AtA: m x m  interpolation matrix, and
        Atz: m x a  interpolation matrix, where
        m is the number of basis functions phi_k (one per vertex)
        a is the number of data attributes

        This algorithm uses a quad tree data structure for fast binning of
        data points.

        If AtA is None, the matrices AtA and Atz are created.

        This function can be called again and again, with sub-sets of
        the point coordinates.  Call fit to get the results.

        Preconditions:
        z and points are numeric
        Point_coordinates and mesh vertices have the same origin.

        The number of attributes of the data points does not change
        """
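        # Background note (added for clarity): if A is the n x m matrix of
        # basis function values at the data points (A[i,j] = phi_j(x_i), i.e.
        # the barycentric coordinate sigma_j of point i in its triangle),
        # then this method accumulates the normal-equation terms
        #
        #     AtA = A^T A    (m x m)
        #     Atz = A^T z    (length m, or m x a for a attributes)
        #
        # and fit() later solves (AtA + alpha*D) w = Atz for the vertex
        # values w.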

        # Build n x m interpolation matrix
        if self.AtA is None:
            # AtA and Atz need to be initialised.
            m = self.mesh.number_of_nodes
            if len(z.shape) > 1:
                att_num = z.shape[1]
                self.Atz = num.zeros((m,att_num), num.Float)
            else:
                att_num = 1
                self.Atz = num.zeros((m,), num.Float)
            assert z.shape[0] == point_coordinates.shape[0]

            AtA = Sparse(m,m)
            # The memory damage has been done by now.
        else:
            AtA = self.AtA #Did this for speed, did ~nothing
        self.point_count += point_coordinates.shape[0]


        inside_indices = inside_polygon(point_coordinates,
                                        self.mesh_boundary_polygon,
                                        closed=True,
                                        verbose=False) # Suppress output

        n = len(inside_indices)

        # Compute matrix elements for points inside the mesh
        triangles = self.mesh.triangles # Shorthand
        for d, i in enumerate(inside_indices):
            # For each data_coordinate point
            # if verbose and d%((n+10)/10)==0: print 'Doing %d of %d' %(d, n)
            x = point_coordinates[i]

            element_found, sigma0, sigma1, sigma2, k = \
                           search_tree_of_vertices(self.root, self.mesh, x)

            if element_found is True:
                j0 = triangles[k,0] #Global vertex id for sigma0
                j1 = triangles[k,1] #Global vertex id for sigma1
                j2 = triangles[k,2] #Global vertex id for sigma2

                sigmas = {j0:sigma0, j1:sigma1, j2:sigma2}
                js     = [j0,j1,j2]

                for j in js:
                    self.Atz[j] += sigmas[j]*z[i]
                    #print "self.Atz building", self.Atz
                    #print "self.Atz[j]", self.Atz[j]
                    #print " sigmas[j]", sigmas[j]
                    #print "z[i]",z[i]
                    #print "result", sigmas[j]*z[i]

                    for k in js:
                        AtA[j,k] += sigmas[j]*sigmas[k]
            else:
                flag = is_inside_polygon(x,
                                         self.mesh_boundary_polygon,
                                         closed=True,
                                         verbose=False) # Suppress output
                msg = 'Point (%f, %f) is not inside mesh boundary' % tuple(x)
                assert flag is True, msg

                # FIXME(Ole): This is the message referred to in ticket:314
                minx = min(self.mesh_boundary_polygon[:,0])
                maxx = max(self.mesh_boundary_polygon[:,0])
                miny = min(self.mesh_boundary_polygon[:,1])
                maxy = max(self.mesh_boundary_polygon[:,1])
                msg = 'Could not find triangle for point %s. ' % str(x)
                msg += 'Mesh boundary extent is (%.f, %.f), (%.f, %.f)'\
                    % (minx, maxx, miny, maxy)
                raise RuntimeError(msg)

        # Store the accumulated matrix.  The assignment is outside the point
        # loop so AtA is recorded even if no points fell inside the mesh
        # (cf. ticket:314).
        self.AtA = AtA

    def fit(self, point_coordinates_or_filename=None, z=None,
            verbose=False,
            point_origin=None,
            attribute_name=None,
            max_read_lines=500):
        """Fit a smooth surface to a given 1d array of data points z.

        The smooth surface is computed at each vertex in the underlying
        mesh using the formula given in the module doc string.

        Inputs:
          point_coordinates_or_filename: The coordinates of the data points.
              List of coordinate pairs [x, y] of data points,
              an nx2 Numeric array, a Geospatial_data object
              or a points file name.
          z: Single 1d vector or array of data at the point_coordinates.

        """
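        # Illustrative only (the file name and attribute name below are
        # assumptions, not part of the original code): fitting directly from
        # a points file in one call, e.g.
        #
        #     fit_object = Fit(mesh=mesh, alpha=1.0e-6)
        #     vertex_values = fit_object.fit('points.csv',
        #                                    attribute_name='elevation',
        #                                    max_read_lines=500)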

        # Use blocking to load in the point info
        if type(point_coordinates_or_filename) == types.StringType:
            msg = "Don't set a point origin when reading from a file"
            assert point_origin is None, msg
            filename = point_coordinates_or_filename

            G_data = Geospatial_data(filename,
                                     max_read_lines=max_read_lines,
                                     load_file_now=False,
                                     verbose=verbose)

            for i, geo_block in enumerate(G_data):
                if verbose is True and 0 == i%200:
                    # The time this will take
                    # is dependent on the number of triangles

                    print 'Processing Block %d' %i
                    # FIXME (Ole): It would be good to say how many blocks
                    # there are here. But this is no longer necessary
                    # for pts files as they are reported in geospatial_data.
                    # I suggest deleting this verbose output and making
                    # Geospatial_data more informative for txt files.
                    #
                    # I still think so (12/12/7, Ole).



                # Build the array

                points = geo_block.get_data_points(absolute=True)
                z = geo_block.get_attributes(attribute_name=attribute_name)
                self.build_fit_subset(points, z, verbose=verbose)

                # FIXME(Ole): I thought this test would make sense here
                # See test_fitting_example_that_crashed_2 in test_shallow_water_domain.py
                # Committed 11 March 2009
                #msg = 'Matrix AtA was not built'
                #assert self.AtA is not None, msg

                #print 'Matrix was built OK'


            point_coordinates = None
        else:
            point_coordinates = point_coordinates_or_filename

        if point_coordinates is None:
            if verbose: print 'Warning: no data points in fit'
            msg = 'No interpolation matrix'
            assert self.AtA is not None, msg
            assert self.Atz is not None

            # FIXME (DSG) - do a message
        else:
            point_coordinates = ensure_absolute(point_coordinates,
                                                geo_reference=point_origin)
            # if isinstance(point_coordinates, Geospatial_data) and z is None:
            # z will come from the geo-ref
            self.build_fit_subset(point_coordinates, z, verbose)

        # Check sanity
        m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)
        n = self.point_count
        if n < m and self.alpha == 0.0:
            msg = 'ERROR (least_squares): Too few data points\n'
            msg += 'There are only %d data points and alpha == 0. ' %n
            msg += 'Need at least %d\n' %m
            msg += 'Alternatively, set smoothing parameter alpha to a small '
            msg += 'positive value,\ne.g. 1.0e-3.'
            raise TooFewPointsError(msg)

        self._build_coefficient_matrix_B(verbose)
        loners = self.mesh.get_lone_vertices()
        # FIXME - make this an error message.
        # test with
        # Not_yet_test_smooth_att_to_mesh_with_excess_verts.
        if len(loners) > 0:
            msg = 'WARNING: (least_squares): \nVertices with no triangles\n'
            msg += 'All vertices should be part of a triangle.\n'
            msg += 'In the future this will be enforced.\n'
            msg += 'The following vertices are not part of a triangle:\n'
            msg += str(loners)
            print msg
            #raise VertsWithNoTrianglesError(msg)


        return conjugate_gradient(self.B, self.Atz, self.Atz,
                                  imax=2*len(self.Atz))


    def build_fit_subset(self, point_coordinates, z=None, attribute_name=None,
                         verbose=False):
        """Add a subset of data points to be fitted as a smooth surface.

        The smooth surface is computed at each vertex in the underlying
        mesh, using the formula given in the module doc string, once fit
        is called.

        Inputs:
        point_coordinates: The coordinates of the data points.
              List of coordinate pairs [x, y] of data points,
              an nx2 Numeric array or a Geospatial_data object
        z: Single 1d vector or array of data at the point_coordinates.
        attribute_name: Used to get the z values from the
              geospatial object.  If no attribute_name is specified,
              it's a bit of a lucky dip as to which attribute you get.
              If there is only one attribute it will be that one.

        """
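        # Illustrative only (coordinates and values below are assumed):
        # several calls like this may precede a single call to fit(), e.g.
        #
        #     fit_object.build_fit_subset([[0.2, 0.2], [0.5, 0.1]], [1.0, 2.0])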

        # FIXME(DSG-DSG): Check that the vert and point coords
        # have the same zone.
        if isinstance(point_coordinates, Geospatial_data):
            point_coordinates = point_coordinates.get_data_points( \
                absolute = True)

        # Convert input to Numeric arrays
        if z is not None:
            z = ensure_numeric(z, num.Float)
        else:
            msg = 'z not specified'
            assert isinstance(point_coordinates, Geospatial_data), msg
            z = point_coordinates.get_attributes(attribute_name)

        point_coordinates = ensure_numeric(point_coordinates, num.Float)
        self._build_matrix_AtA_Atz(point_coordinates, z, verbose)


############################################################################

def fit_to_mesh(point_coordinates, # this can also be a points file name
                vertex_coordinates=None,
                triangles=None,
                mesh=None,
                point_attributes=None,
                alpha=DEFAULT_ALPHA,
                verbose=False,
                acceptable_overshoot=1.01,
                # FIXME: Move to config - this value is assumed in caching test
                # FIXME(Ole): Just realised that this was never implemented
                # (29 Jan 2009). I suggest removing it altogether.
                mesh_origin=None,
                data_origin=None,
                max_read_lines=None,
                attribute_name=None,
                use_cache=False):
    """Wrapper around internal function _fit_to_mesh for use with caching.

    """
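    # Typical call (illustrative only; 'bathymetry.pts' is an assumed
    # file name, not part of the original code):
    #
    #     vertex_values = fit_to_mesh('bathymetry.pts',
    #                                 mesh=mesh,
    #                                 alpha=1.0e-6,
    #                                 use_cache=True,
    #                                 verbose=True)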

    args = (point_coordinates, )
    kwargs = {'vertex_coordinates': vertex_coordinates,
              'triangles': triangles,
              'mesh': mesh,
              'point_attributes': point_attributes,
              'alpha': alpha,
              'verbose': verbose,
              'acceptable_overshoot': acceptable_overshoot,
              'mesh_origin': mesh_origin,
              'data_origin': data_origin,
              'max_read_lines': max_read_lines,
              'attribute_name': attribute_name
              }

    if use_cache is True:
        if isinstance(point_coordinates, basestring):
            # We assume that point_coordinates is the name of a .csv/.txt
            # file which must be passed onto caching as a dependency
            # (in case it has changed on disk)
            dep = [point_coordinates]
        else:
            dep = None


        #from caching import myhash
        #import copy
        #print args
        #print kwargs
        #print 'hashing:'
        #print 'args', myhash( (args, kwargs) )
        #print 'again', myhash( copy.deepcopy( (args, kwargs)) )

        #print 'mesh hash', myhash( kwargs['mesh'] )

        #print '-------------------------'
        #print 'vertices hash', myhash( kwargs['mesh'].nodes )
        #print 'triangles hash', myhash( kwargs['mesh'].triangles )
        #print '-------------------------'

        #for key in mesh.__dict__:
        #    print key, myhash(mesh.__dict__[key])

        #for key in mesh.quantities.keys():
        #    print key, myhash(mesh.quantities[key])

        #import sys; sys.exit()

        return cache(_fit_to_mesh,
                     args, kwargs,
                     verbose=verbose,
                     compression=False,
                     dependencies=dep)
    else:
        return _fit_to_mesh(*args, **kwargs)

def _fit_to_mesh(point_coordinates, # this can also be a points file name
                 vertex_coordinates=None,
                 triangles=None,
                 mesh=None,
                 point_attributes=None,
                 alpha=DEFAULT_ALPHA,
                 verbose=False,
                 acceptable_overshoot=1.01,
                 mesh_origin=None,
                 data_origin=None,
                 max_read_lines=None,
                 attribute_name=None):
    """
    Fit a smooth surface to a triangulation,
    given data points with attributes.

    Inputs:
          vertex_coordinates: List of coordinate pairs [xi, eta] of
              points constituting a mesh (or an m x 2 Numeric array or
              a geospatial object)
              Points may appear multiple times
              (e.g. if vertices have discontinuities)

          triangles: List of 3-tuples (or a Numeric array) of
              integers representing indices of all vertices in the mesh.

          point_coordinates: List of coordinate pairs [x, y] of data points
              (or an nx2 Numeric array). This can also be a .csv/.txt/.pts
              file name.

          alpha: Smoothing parameter.

          acceptable_overshoot: NOT IMPLEMENTED.
              Controls the allowed factor by which fitted values
              may exceed the value of input data. The lower limit is defined
              as min(z) - acceptable_overshoot*delta z and the upper limit
              as max(z) + acceptable_overshoot*delta z.

          mesh_origin: A geo_reference object or 3-tuple consisting of
              UTM zone, easting and northing.
              If specified, vertex coordinates are assumed to be
              relative to their respective origins.

          point_attributes: Vector or array of data at the
              point_coordinates.

    """

    if mesh is None:
        # FIXME(DSG): Throw errors if triangles or vertex_coordinates
        # are None

        # Convert input to Numeric arrays
        triangles = ensure_numeric(triangles, num.Int)
        vertex_coordinates = ensure_absolute(vertex_coordinates,
                                             geo_reference=mesh_origin)

        if verbose: print 'FitInterpolate: Building mesh'
        mesh = Mesh(vertex_coordinates, triangles)
        mesh.check_integrity()


    interp = Fit(mesh=mesh,
                 verbose=verbose,
                 alpha=alpha)

    vertex_attributes = interp.fit(point_coordinates,
                                   point_attributes,
                                   point_origin=data_origin,
                                   max_read_lines=max_read_lines,
                                   attribute_name=attribute_name,
                                   verbose=verbose)


    # Add the value checking stuff that's in least squares.
    # Maybe this stuff should get pushed down into Fit,
    # or at least be a method of Fit.
    # Or integrate it into the fit method, saving the max and min values
    # as attributes.

    return vertex_attributes


#def _fit(*args, **kwargs):
#    """Private function for use with caching. Reason is that classes
#    may change their byte code between runs which is annoying.
#    """
#
#    return Fit(*args, **kwargs)


def fit_to_mesh_file(mesh_file, point_file, mesh_output_file,
                     alpha=DEFAULT_ALPHA, verbose=False,
                     expand_search=False,
                     precrop=False,
                     display_errors=True):
    """
    Given a mesh file (tsh) and a point attribute file, fit
    point attributes to the mesh and write a mesh file with the
    results.

    Note: the points file needs attribute titles.  If you want ANUGA to use
    the resulting tsh file, make sure the title is 'elevation'.

    NOTE: Throws IOError for a variety of file problems.

    """
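    # Illustrative usage only (the file names below are assumptions):
    #
    #     fit_to_mesh_file('mesh.tsh', 'points.csv', 'mesh_fitted.tsh',
    #                      alpha=1.0e-6, verbose=True)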

    from load_mesh.loadASCII import import_mesh_file, \
         export_mesh_file, concatinate_attributelist


    try:
        mesh_dict = import_mesh_file(mesh_file)
    except IOError, e:
        if display_errors:
            print "Could not load mesh file. ", e
        raise IOError  # Could not load bad mesh file.

    vertex_coordinates = mesh_dict['vertices']
    triangles = mesh_dict['triangles']
    if type(mesh_dict['vertex_attributes']) == num.ArrayType:
        old_point_attributes = mesh_dict['vertex_attributes'].tolist()
    else:
        old_point_attributes = mesh_dict['vertex_attributes']

    if type(mesh_dict['vertex_attribute_titles']) == num.ArrayType:
        old_title_list = mesh_dict['vertex_attribute_titles'].tolist()
    else:
        old_title_list = mesh_dict['vertex_attribute_titles']

    if verbose: print 'tsh file %s loaded' %mesh_file

    # load in the points file
    try:
        geo = Geospatial_data(point_file, verbose=verbose)
    except IOError, e:
        if display_errors:
            print "Could not load points file. ", e
        raise IOError  # Re-raise exception

    point_coordinates = geo.get_data_points(absolute=True)
    title_list, point_attributes = concatinate_attributelist( \
        geo.get_all_attributes())

    if mesh_dict.has_key('geo_reference') and \
           mesh_dict['geo_reference'] is not None:
        mesh_origin = mesh_dict['geo_reference'].get_origin()
    else:
        mesh_origin = None

    if verbose: print "points file loaded"
    if verbose: print "fitting to mesh"
    f = fit_to_mesh(point_coordinates,
                    vertex_coordinates,
                    triangles,
                    None,
                    point_attributes,
                    alpha=alpha,
                    verbose=verbose,
                    data_origin=None,
                    mesh_origin=mesh_origin)
    if verbose: print "finished fitting to mesh"

    # convert array to list of lists
    new_point_attributes = f.tolist()
    # FIXME have this overwrite attributes with the same title - DSG
    # Put the newer attributes last
    if old_title_list != []:
        old_title_list.extend(title_list)
        # FIXME can this be done a faster way? - DSG
        for i in range(len(old_point_attributes)):
            old_point_attributes[i].extend(new_point_attributes[i])
        mesh_dict['vertex_attributes'] = old_point_attributes
        mesh_dict['vertex_attribute_titles'] = old_title_list
    else:
        mesh_dict['vertex_attributes'] = new_point_attributes
        mesh_dict['vertex_attribute_titles'] = title_list

    if verbose: print "exporting to file ", mesh_output_file

    try:
        export_mesh_file(mesh_output_file, mesh_dict)
    except IOError, e:
        if display_errors:
            print "Could not write file. ", e
        raise IOError