source: anuga_core/source/anuga/fit_interpolate/fit.py @ 7276

Last change on this file since 7276 was 7276, checked in by ole, 15 years ago

Merged numpy branch back into the trunk.

In ~/sandpit/anuga/anuga_core/source
svn merge -r 6246:HEAD ../../branches/numpy .

In ~/sandpit/anuga/anuga_validation
svn merge -r 6417:HEAD ../branches/numpy_anuga_validation .

In ~/sandpit/anuga/misc
svn merge -r 6809:HEAD ../branches/numpy_misc .

For all merges, I used numpy version where conflicts existed

The suites test_all.py (in source/anuga) and validate_all.py passed using Python2.5 with numpy on my Ubuntu Linux box.

File size: 25.8 KB
Rev  Line
[6152]1"""Least squares fitting.
2
3   Implements a penalised least-squares fit
4   for putting point data onto the mesh.
5
6   The penalty term (or smoothing term) is controlled by the smoothing
7   parameter alpha.
8   With a value of alpha=0, the fit function will attempt
9   to interpolate as closely as possible in the least-squares sense.
10   With values alpha > 0, a certain amount of smoothing will be applied.
11   A positive alpha is essential in cases where there are too few
12   data points.
13   A negative alpha is not allowed.
14   A typical value of alpha is 1.0e-6
15
16
17   Ole Nielsen, Stephen Roberts, Duncan Gray, Christopher Zoppou
18   Geoscience Australia, 2004.
19
20   TO DO
21   * test geo_ref, geo_spatial
22
23   IDEAS
24   * (DSG-) Change the interface of fit, so a domain object can
25      be passed in. (I don't know if this is feasible.) It could
26      save time/memory.
27"""
28import types
29
30from anuga.abstract_2d_finite_volumes.neighbour_mesh import Mesh
31from anuga.caching import cache           
32from anuga.geospatial_data.geospatial_data import Geospatial_data, \
33     ensure_absolute
34from anuga.fit_interpolate.general_fit_interpolate import FitInterpolate
35from anuga.utilities.sparse import Sparse, Sparse_CSR
[6244]36from anuga.utilities.polygon import inside_polygon, is_inside_polygon
[6152]37from anuga.fit_interpolate.search_functions import search_tree_of_vertices
38
39from anuga.utilities.cg_solve import conjugate_gradient
40from anuga.utilities.numerical_tools import ensure_numeric, gradient
41from anuga.config import default_smoothing_parameter as DEFAULT_ALPHA
42
43import exceptions
44class TooFewPointsError(exceptions.Exception): pass
45class VertsWithNoTrianglesError(exceptions.Exception): pass
46
[7276]47import numpy as num
[6152]48
49
50class Fit(FitInterpolate):
51   
52    def __init__(self,
53                 vertex_coordinates=None,
54                 triangles=None,
55                 mesh=None,
56                 mesh_origin=None,
57                 alpha = None,
58                 verbose=False,
59                 max_vertices_per_cell=None):
60
61
62        """
63        Fit data at points to the vertices of a mesh.
64
65        Inputs:
66
67          vertex_coordinates: List of coordinate pairs [xi, eta] of
[7276]68              points constituting a mesh (or an m x 2 numeric array or
[6152]69              a geospatial object)
70              Points may appear multiple times
71              (e.g. if vertices have discontinuities)
72
[7276]73          triangles: List of 3-tuples (or a numeric array) of
[6152]74              integers representing indices of all vertices in the mesh.
75
76          mesh_origin: A geo_reference object or 3-tuples consisting of
77              UTM zone, easting and northing.
78              If specified vertex coordinates are assumed to be
79              relative to their respective origins.
80
81          max_vertices_per_cell: Number of vertices in a quad tree cell
82          at which the cell is split into 4.
83
84          Note: Don't supply vertex coordinates as a geospatial object and
85              a mesh origin, since the geospatial object has its own origin.
86
87
88        Usage:
89        To use this in a blocking way, call build_fit_subset with point and
90        z data (possibly repeatedly), then call fit with no point or z data.
91       
92        """
93        # Initialise variables
94        if alpha is None:
95            self.alpha = DEFAULT_ALPHA
96        else:   
97            self.alpha = alpha
98           
99        FitInterpolate.__init__(self,
100                 vertex_coordinates,
101                 triangles,
102                 mesh,
103                 mesh_origin,
104                 verbose,
105                 max_vertices_per_cell)
106       
107        m = self.mesh.number_of_nodes # Nbr of basis functions (vertices)
108       
109        self.AtA = None
110        self.Atz = None
111
112        self.point_count = 0
113        if self.alpha != 0:
114            if verbose: print 'Building smoothing matrix'
115            self._build_smoothing_matrix_D()
116           
[6244]117        bd_poly = self.mesh.get_boundary_polygon()   
118        self.mesh_boundary_polygon = ensure_numeric(bd_poly)
[6152]119           
120    def _build_coefficient_matrix_B(self,
121                                  verbose = False):
122        """
123        Build final coefficient matrix
124
125        Precon
126        If alpha is not zero, matrix D has been built
127        Matrix AtA has been built
128        """
129
130        if self.alpha != 0:
131            #if verbose: print 'Building smoothing matrix'
132            #self._build_smoothing_matrix_D()
133            self.B = self.AtA + self.alpha*self.D
134        else:
135            self.B = self.AtA
136
[6236]137        # Convert self.B matrix to CSR format for faster matrix-vector multiplication
[6152]138        self.B = Sparse_CSR(self.B)
139
140    def _build_smoothing_matrix_D(self):
141        """Build m x m smoothing matrix, where
142        m is the number of basis functions phi_k (one per vertex)
143
144        The smoothing matrix is defined as
145
146        D = D1 + D2
147
148        where
149
150        [D1]_{k,l} = \int_\Omega
151           \frac{\partial \phi_k}{\partial x}
152           \frac{\partial \phi_l}{\partial x}\,
153           dx dy
154
155        [D2]_{k,l} = \int_\Omega
156           \frac{\partial \phi_k}{\partial y}
157           \frac{\partial \phi_l}{\partial y}\,
158           dx dy
159
160
161        The derivatives \frac{\partial \phi_k}{\partial x},
162        \frac{\partial \phi_k}{\partial y} for a particular triangle
163        are obtained by computing the gradient a_k, b_k for basis function k
164        """
165       
[6236]166        # FIXME: algorithm might be optimised by computing local 9x9
167        # "element stiffness matrices:
[6152]168
169        m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)
170
171        self.D = Sparse(m,m)
172
[6236]173        # For each triangle compute contributions to D = D1+D2
[6152]174        for i in range(len(self.mesh)):
175
[6236]176            # Get area
[6152]177            area = self.mesh.areas[i]
178
[6236]179            # Get global vertex indices
[6152]180            v0 = self.mesh.triangles[i,0]
181            v1 = self.mesh.triangles[i,1]
182            v2 = self.mesh.triangles[i,2]
183
[6236]184            # Get the three vertex_points
[6152]185            xi0 = self.mesh.get_vertex_coordinate(i, 0)
186            xi1 = self.mesh.get_vertex_coordinate(i, 1)
187            xi2 = self.mesh.get_vertex_coordinate(i, 2)
188
[6236]189            # Compute gradients for each vertex
[6152]190            a0, b0 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
191                              1, 0, 0)
192
193            a1, b1 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
194                              0, 1, 0)
195
196            a2, b2 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
197                              0, 0, 1)
198
[6236]199            # Compute diagonal contributions
[6152]200            self.D[v0,v0] += (a0*a0 + b0*b0)*area
201            self.D[v1,v1] += (a1*a1 + b1*b1)*area
202            self.D[v2,v2] += (a2*a2 + b2*b2)*area
203
[6236]204            # Compute contributions for basis functions sharing edges
[6152]205            e01 = (a0*a1 + b0*b1)*area
206            self.D[v0,v1] += e01
207            self.D[v1,v0] += e01
208
209            e12 = (a1*a2 + b1*b2)*area
210            self.D[v1,v2] += e12
211            self.D[v2,v1] += e12
212
213            e20 = (a2*a0 + b2*b0)*area
214            self.D[v2,v0] += e20
215            self.D[v0,v2] += e20
216
217    def get_D(self):
218        return self.D.todense()
219
220
221
222    def _build_matrix_AtA_Atz(self,
223                              point_coordinates,
224                              z,
225                              verbose = False):
226        """Build:
227        AtA  m x m  interpolation matrix, and
228        Atz  m x a  right-hand-side matrix, where
229        m is the number of basis functions phi_k (one per vertex)
230        a is the number of data attributes
231
232        This algorithm uses a quad tree data structure for fast binning of
233        data points.
234
235        If AtA is None, the matrices AtA and Atz are created.
236
237        This function can be called again and again, with sub-sets of
238        the point coordinates.  Call fit to get the results.
239       
240        Preconditions
241        z and points are numeric
242        Point_coordinates and mesh vertices have the same origin.
243
244        The number of attributes of the data points does not change
245        """
[6236]246       
247        # Build n x m interpolation matrix
[6152]248        if self.AtA is None:
249            # AtA and Atz need to be initialised.
250            m = self.mesh.number_of_nodes
251            if len(z.shape) > 1:
252                att_num = z.shape[1]
[7276]253                self.Atz = num.zeros((m,att_num), num.float)
[6152]254            else:
255                att_num = 1
[7276]256                self.Atz = num.zeros((m,), num.float)
[6152]257            assert z.shape[0] == point_coordinates.shape[0] 
258
259            AtA = Sparse(m,m)
260            # The memory damage has been done by now.
261        else:
[6537]262             AtA = self.AtA # Did this for speed, did ~nothing
[6152]263        self.point_count += point_coordinates.shape[0]
264
265
[6236]266        inside_indices = inside_polygon(point_coordinates,
267                                        self.mesh_boundary_polygon,
268                                        closed=True,
[6252]269                                        verbose=False) # Suppress output
[6152]270       
[6236]271        n = len(inside_indices)
[6152]272
[6236]273        # Compute matrix elements for points inside the mesh
274        triangles = self.mesh.triangles # Shorthand
275        for d, i in enumerate(inside_indices):
[6152]276            # For each data_coordinate point
277            # if verbose and d%((n+10)/10)==0: print 'Doing %d of %d' %(d, n)
278            x = point_coordinates[i]
[6244]279           
[6152]280            element_found, sigma0, sigma1, sigma2, k = \
[6545]281                           search_tree_of_vertices(self.root, x)
[6152]282           
283            if element_found is True:
[6537]284                j0 = triangles[k,0] # Global vertex id for sigma0
285                j1 = triangles[k,1] # Global vertex id for sigma1
286                j2 = triangles[k,2] # Global vertex id for sigma2
[6152]287
288                sigmas = {j0:sigma0, j1:sigma1, j2:sigma2}
289                js     = [j0,j1,j2]
290
291                for j in js:
292                    self.Atz[j] +=  sigmas[j]*z[i]
293                    #print "self.Atz building", self.Atz
294                    #print "self.Atz[j]", self.Atz[j]
295                    #print " sigmas[j]", sigmas[j]
296                    #print "z[i]",z[i]
297                    #print "result", sigmas[j]*z[i]
298                   
299                    for k in js:
300                        AtA[j,k] += sigmas[j]*sigmas[k]
301            else:
[6244]302                flag = is_inside_polygon(x,
303                                         self.mesh_boundary_polygon,
304                                         closed=True,
[6252]305                                         verbose=False) # Suppress output
[6244]306                msg = 'Point (%f, %f) is not inside mesh boundary' % tuple(x)
307                assert flag is True, msg               
308               
[6252]309                # FIXME(Ole): This is the message referred to in ticket:314
[6244]310                minx = min(self.mesh_boundary_polygon[:,0])
311                maxx = max(self.mesh_boundary_polygon[:,0])               
312                miny = min(self.mesh_boundary_polygon[:,1])
313                maxy = max(self.mesh_boundary_polygon[:,1])
[6236]314                msg = 'Could not find triangle for point %s. ' % str(x) 
[6244]315                msg += 'Mesh boundary extent is (%.f, %.f), (%.f, %.f)'\
316                    % (minx, maxx, miny, maxy)
[6252]317                raise RuntimeError, msg
[7276]318
[6244]319               
[6493]320        self.AtA = AtA
[6236]321
[6152]322       
323    def fit(self, point_coordinates_or_filename=None, z=None,
324            verbose=False,
325            point_origin=None,
326            attribute_name=None,
327            max_read_lines=500):
328        """Fit a smooth surface to given 1d array of data points z.
329
330        The smooth surface is computed at each vertex in the underlying
331        mesh using the formula given in the module doc string.
332
333        Inputs:
334        point_coordinates: The co-ordinates of the data points.
335              List of coordinate pairs [x, y] of
[7276]336              data points or an nx2 numeric array or a Geospatial_data object
[6152]337              or points file filename
338          z: Single 1d vector or array of data at the point_coordinates.
339         
340        """
[7276]341       
[6236]342        # Use blocking to load in the point info
[6152]343        if type(point_coordinates_or_filename) == types.StringType:
344            msg = "Don't set a point origin when reading from a file"
345            assert point_origin is None, msg
346            filename = point_coordinates_or_filename
[7276]347
[6152]348            G_data = Geospatial_data(filename,
349                                     max_read_lines=max_read_lines,
350                                     load_file_now=False,
351                                     verbose=verbose)
352
353            for i, geo_block in enumerate(G_data):
354                if verbose is True and 0 == i%200: 
355                    # The time this will take
356                    # is dependent on the # of triangles
357                       
358                    print 'Processing Block %d' %i
359                    # FIXME (Ole): It would be good to say how many blocks
360                    # there are here. But this is no longer necessary
361                    # for pts files as they are reported in geospatial_data
362                    # I suggest deleting this verbose output and make
363                    # Geospatial_data more informative for txt files.
364                    #
365                    # I still think so (12/12/7, Ole).
366           
367
368                   
369                # Build the array
370
371                points = geo_block.get_data_points(absolute=True)
372                z = geo_block.get_attributes(attribute_name=attribute_name)
373                self.build_fit_subset(points, z, verbose=verbose)
374
[6488]375                # FIXME(Ole): I thought this test would make sense here
376                # See test_fitting_example_that_crashed_2 in test_shallow_water_domain.py
377                # Committed 11 March 2009
[6495]378                msg = 'Matrix AtA was not built'
379                assert self.AtA is not None, msg
[6152]380               
[6405]381                #print 'Matrix was built OK'
382
383               
[6152]384            point_coordinates = None
385        else:
386            point_coordinates =  point_coordinates_or_filename
387           
388        if point_coordinates is None:
389            if verbose: print 'Warning: no data points in fit'
[6493]390            msg = 'No interpolation matrix.'
[6405]391            assert self.AtA is not None, msg
392            assert self.Atz is not None
[6152]393           
[6236]394            # FIXME (DSG) - do  a message
[6152]395        else:
396            point_coordinates = ensure_absolute(point_coordinates,
397                                                geo_reference=point_origin)
[6236]398            # if isinstance(point_coordinates,Geospatial_data) and z is None:
[6152]399            # z will come from the geo-ref
400            self.build_fit_subset(point_coordinates, z, verbose)
401
[6236]402        # Check sanity
[6152]403        m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)
404        n = self.point_count
405        if n<m and self.alpha == 0.0:
406            msg = 'ERROR (least_squares): Too few data points\n'
407            msg += 'There are only %d data points and alpha == 0. ' %n
408            msg += 'Need at least %d\n' %m
409            msg += 'Alternatively, set smoothing parameter alpha to a small '
410            msg += 'positive value,\ne.g. 1.0e-3.'
411            raise TooFewPointsError(msg)
412
413        self._build_coefficient_matrix_B(verbose)
414        loners = self.mesh.get_lone_vertices()
415        # FIXME - make this an error message.
416        # test with
417        # Not_yet_test_smooth_att_to_mesh_with_excess_verts.
418        if len(loners)>0:
419            msg = 'WARNING: (least_squares): \nVertices with no triangles\n'
420            msg += 'All vertices should be part of a triangle.\n'
421            msg += 'In the future this will be enforced.\n'
422            msg += 'The following vertices are not part of a triangle;\n'
423            msg += str(loners)
424            print msg
425            #raise VertsWithNoTrianglesError(msg)
426       
427       
428        return conjugate_gradient(self.B, self.Atz, self.Atz,
429                                  imax=2*len(self.Atz) )
430
431       
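    # Illustrative sketch (not part of the original module) of the blocking
    # workflow described in the constructor's docstring; verts, tris and the
    # blocks_of_data iterable are assumed:
    #
    #   fit = Fit(vertex_coordinates=verts, triangles=tris, alpha=1.0e-6)
    #   for points_block, z_block in blocks_of_data:
    #       fit.build_fit_subset(points_block, z_block)
    #   vertex_values = fit.fit()   # solve once, after all blocks are added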
432    def build_fit_subset(self, point_coordinates, z=None, attribute_name=None,
433                              verbose=False):
434        """Fit a smooth surface to given 1d array of data points z.
435
436        The smooth surface is computed at each vertex in the underlying
437        mesh using the formula given in the module doc string.
438
439        Inputs:
440        point_coordinates: The co-ordinates of the data points.
441              List of coordinate pairs [x, y] of
[7276]442              data points or an nx2 numeric array or a Geospatial_data object
[6152]443        z: Single 1d vector or array of data at the point_coordinates.
444        attribute_name: Used to get the z values from the
445              geospatial object. If no attribute_name is specified,
446              it's a bit of a lucky dip as to which attribute you get.
447              If there is only one attribute it will be that one.
448
449        """
450
451        # FIXME(DSG-DSG): Check that the vert and point coords
452        # have the same zone.
453        if isinstance(point_coordinates,Geospatial_data):
454            point_coordinates = point_coordinates.get_data_points( \
455                absolute = True)
456       
[7276]457        # Convert input to numeric arrays
[6152]458        if z is not None:
[7276]459            z = ensure_numeric(z, num.float)
[6152]460        else:
461            msg = 'z not specified'
462            assert isinstance(point_coordinates,Geospatial_data), msg
463            z = point_coordinates.get_attributes(attribute_name)
464
[7276]465        point_coordinates = ensure_numeric(point_coordinates, num.float)
[6152]466        self._build_matrix_AtA_Atz(point_coordinates, z, verbose)
467
[7276]468
[6152]469############################################################################
470
471def fit_to_mesh(point_coordinates, # this can also be a points file name
472                vertex_coordinates=None,
473                triangles=None,
474                mesh=None,
475                point_attributes=None,
476                alpha=DEFAULT_ALPHA,
477                verbose=False,
478                mesh_origin=None,
479                data_origin=None,
480                max_read_lines=None,
481                attribute_name=None,
[6191]482                use_cache=False):
[6152]483    """Wrapper around internal function _fit_to_mesh for use with caching.
484   
485    """
486   
487    args = (point_coordinates, )
488    kwargs = {'vertex_coordinates': vertex_coordinates,
489              'triangles': triangles,
490              'mesh': mesh,
491              'point_attributes': point_attributes,
492              'alpha': alpha,
493              'verbose': verbose,
494              'mesh_origin': mesh_origin,
495              'data_origin': data_origin,
496              'max_read_lines': max_read_lines,
[6232]497              'attribute_name': attribute_name
[6152]498              }
499
500    if use_cache is True:
501        if isinstance(point_coordinates, basestring):
502            # We assume that point_coordinates is the name of a .csv/.txt
503            # file which must be passed onto caching as a dependency
504            # (in case it has changed on disk)
505            dep = [point_coordinates]
506        else:
507            dep = None
508
509           
510        #from caching import myhash
511        #import copy
[6232]512        #print args
[6152]513        #print kwargs
514        #print 'hashing:'
515        #print 'args', myhash( (args, kwargs) )
516        #print 'again', myhash( copy.deepcopy( (args, kwargs)) )       
517       
518        #print 'mesh hash', myhash( kwargs['mesh'] )       
519       
520        #print '-------------------------'
521        #print 'vertices hash', myhash( kwargs['mesh'].nodes )
522        #print 'triangles hash', myhash( kwargs['mesh'].triangles )
523        #print '-------------------------'       
524       
525        #for key in mesh.__dict__:
526        #    print key, myhash(mesh.__dict__[key])
527       
528        #for key in mesh.quantities.keys():
529        #    print key, myhash(mesh.quantities[key])
530       
531        #import sys; sys.exit()
532           
533        return cache(_fit_to_mesh,
534                     args, kwargs,
535                     verbose=verbose,
536                     compression=False,
537                     dependencies=dep)
538    else:
539        return apply(_fit_to_mesh,
540                     args, kwargs)
541
542def _fit_to_mesh(point_coordinates, # this can also be a points file name
543                 vertex_coordinates=None,
544                 triangles=None,
545                 mesh=None,
546                 point_attributes=None,
547                 alpha=DEFAULT_ALPHA,
548                 verbose=False,
549                 mesh_origin=None,
550                 data_origin=None,
551                 max_read_lines=None,
[6232]552                 attribute_name=None):
[6152]553    """
554    Fit a smooth surface to a triangulation,
555    given data points with attributes.
556
557
558        Inputs:
559        vertex_coordinates: List of coordinate pairs [xi, eta] of
[7276]560              points constituting a mesh (or an m x 2 numeric array or
[6152]561              a geospatial object)
562              Points may appear multiple times
563              (e.g. if vertices have discontinuities)
564
[7276]565          triangles: List of 3-tuples (or a numeric array) of
[6152]566          integers representing indices of all vertices in the mesh.
567
568          point_coordinates: List of coordinate pairs [x, y] of data points
[7276]569          (or an nx2 numeric array). This can also be a .csv/.txt/.pts
[6152]570          file name.
571
572          alpha: Smoothing parameter.
573
574          mesh_origin: A geo_reference object or 3-tuples consisting of
575              UTM zone, easting and northing.
576              If specified vertex coordinates are assumed to be
577              relative to their respective origins.
578
579          point_attributes: Vector or array of data at the
580                            point_coordinates.
581
582    """
583
584    if mesh is None:
585        # FIXME(DSG): Throw errors if triangles or vertex_coordinates
586        # are None
587           
[7276]588        #Convert input to numeric arrays
589        triangles = ensure_numeric(triangles, num.int)
[6152]590        vertex_coordinates = ensure_absolute(vertex_coordinates,
591                                             geo_reference = mesh_origin)
592
593        if verbose: print 'FitInterpolate: Building mesh'       
594        mesh = Mesh(vertex_coordinates, triangles)
595        mesh.check_integrity()
596   
[6244]597   
[6152]598    interp = Fit(mesh=mesh,
599                 verbose=verbose,
600                 alpha=alpha)
601
602    vertex_attributes = interp.fit(point_coordinates,
603                                   point_attributes,
604                                   point_origin=data_origin,
605                                   max_read_lines=max_read_lines,
606                                   attribute_name=attribute_name,
607                                   verbose=verbose)
608
609       
610    # Add the value checking stuff that's in least squares.
611    # Maybe this stuff should get pushed down into Fit,
612    # or at least be a method of Fit.
613    # Or integrate it into the fit method, saving the max and min
614    # as attributes.
615   
616    return vertex_attributes
617
618
619#def _fit(*args, **kwargs):
620#    """Private function for use with caching. Reason is that classes
621#    may change their byte code between runs which is annoying.
622#    """
623#   
624#    return Fit(*args, **kwargs)
625
626
627def fit_to_mesh_file(mesh_file, point_file, mesh_output_file,
628                     alpha=DEFAULT_ALPHA, verbose= False,
629                     expand_search = False,
630                     precrop = False,
631                     display_errors = True):
632    """
633    Given a mesh file (tsh) and a point attribute file, fit
634    point attributes to the mesh and write a mesh file with the
635    results.
636
637    Note: the points file needs titles.  If you want anuga to use the tsh file,
638    make sure the title is elevation.
639
640    NOTE: Throws IOErrors, for a variety of file problems.
641   
642    """
643
644    from load_mesh.loadASCII import import_mesh_file, \
645         export_mesh_file, concatinate_attributelist
646
647
648    try:
649        mesh_dict = import_mesh_file(mesh_file)
650    except IOError,e:
651        if display_errors:
652            print "Could not load bad file. ", e
653        raise IOError  #Could not load bad mesh file.
654   
655    vertex_coordinates = mesh_dict['vertices']
656    triangles = mesh_dict['triangles']
[7276]657    if isinstance(mesh_dict['vertex_attributes'], num.ndarray):
[6152]658        old_point_attributes = mesh_dict['vertex_attributes'].tolist()
659    else:
660        old_point_attributes = mesh_dict['vertex_attributes']
661
[7276]662    if isinstance(mesh_dict['vertex_attribute_titles'], num.ndarray):
[6152]663        old_title_list = mesh_dict['vertex_attribute_titles'].tolist()
664    else:
665        old_title_list = mesh_dict['vertex_attribute_titles']
666
667    if verbose: print 'tsh file %s loaded' %mesh_file
668
669    # load in the points file
670    try:
671        geo = Geospatial_data(point_file, verbose=verbose)
672    except IOError,e:
673        if display_errors:
674            print "Could not load bad file. ", e
675        raise IOError  #Re-raise exception 
676
677    point_coordinates = geo.get_data_points(absolute=True)
678    title_list,point_attributes = concatinate_attributelist( \
679        geo.get_all_attributes())
680
681    if mesh_dict.has_key('geo_reference') and \
682           not mesh_dict['geo_reference'] is None:
683        mesh_origin = mesh_dict['geo_reference'].get_origin()
684    else:
685        mesh_origin = None
686
687    if verbose: print "points file loaded"
688    if verbose: print "fitting to mesh"
689    f = fit_to_mesh(point_coordinates,
690                    vertex_coordinates,
691                    triangles,
692                    None,
693                    point_attributes,
694                    alpha = alpha,
695                    verbose = verbose,
696                    data_origin = None,
697                    mesh_origin = mesh_origin)
698    if verbose: print "finished fitting to mesh"
699
700    # convert array to list of lists
701    new_point_attributes = f.tolist()
702    #FIXME have this overwrite attributes with the same title - DSG
703    #Put the newer attributes last
704    if old_title_list != []:
705        old_title_list.extend(title_list)
706        #FIXME can this be done a faster way? - DSG
707        for i in range(len(old_point_attributes)):
708            old_point_attributes[i].extend(new_point_attributes[i])
709        mesh_dict['vertex_attributes'] = old_point_attributes
710        mesh_dict['vertex_attribute_titles'] = old_title_list
711    else:
712        mesh_dict['vertex_attributes'] = new_point_attributes
713        mesh_dict['vertex_attribute_titles'] = title_list
714
715    if verbose: print "exporting to file ", mesh_output_file
716
717    try:
718        export_mesh_file(mesh_output_file, mesh_dict)
719    except IOError,e:
720        if display_errors:
721            print "Could not write file. ", e
722        raise IOError