source: branches/numpy/anuga/fit_interpolate/fit.py @ 6517

Last change on this file since 6517 was 6517, checked in by rwilson, 15 years ago

Hand-merged recent changes in main trunk. Still work to be done in shallow_water.

File size: 26.7 KB
[6152]1"""Least squares fitting.
2
3   Implements a penalised least-squares fit,
4   putting point data onto the mesh.
5
6   The penalty term (or smoothing term) is controlled by the smoothing
7   parameter alpha.
8   With a value of alpha=0, the fit function will attempt
9   to interpolate as closely as possible in the least-squares sense.
10   With values alpha > 0, a certain amount of smoothing will be applied.
11   A positive alpha is essential in cases where there are too few
12   data points.
13   A negative alpha is not allowed.
14   A typical value of alpha is 1.0e-6
15
16
17   Ole Nielsen, Stephen Roberts, Duncan Gray, Christopher Zoppou
18   Geoscience Australia, 2004.
19
20   TO DO
21   * test geo_ref, geo_spatial
22
23   IDEAS
24   * (DSG-) Change the interface of fit, so a domain object can
25      be passed in. (I don't know if this is feasible.) It could
26      save time/memory.
27"""
28import types
29
30from anuga.abstract_2d_finite_volumes.neighbour_mesh import Mesh
31from anuga.caching import cache           
32from anuga.geospatial_data.geospatial_data import Geospatial_data, \
33     ensure_absolute
34from anuga.fit_interpolate.general_fit_interpolate import FitInterpolate
35from anuga.utilities.sparse import Sparse, Sparse_CSR
[6244]36from anuga.utilities.polygon import inside_polygon, is_inside_polygon
[6152]37from anuga.fit_interpolate.search_functions import search_tree_of_vertices
38
39from anuga.utilities.cg_solve import conjugate_gradient
40from anuga.utilities.numerical_tools import ensure_numeric, gradient
41from anuga.config import default_smoothing_parameter as DEFAULT_ALPHA
42
43import exceptions
44class TooFewPointsError(exceptions.Exception): pass
45class VertsWithNoTrianglesError(exceptions.Exception): pass
46
[6304]47import numpy as num
[6152]48
49
50class Fit(FitInterpolate):
51   
52    def __init__(self,
53                 vertex_coordinates=None,
54                 triangles=None,
55                 mesh=None,
56                 mesh_origin=None,
57                 alpha = None,
58                 verbose=False,
59                 max_vertices_per_cell=None):
60
61
62        """
63        Fit data at points to the vertices of a mesh.
64
65        Inputs:
66
67          vertex_coordinates: List of coordinate pairs [xi, eta] of
[6304]68              points constituting a mesh (or an m x 2 numeric array or
[6152]69              a geospatial object)
70              Points may appear multiple times
71              (e.g. if vertices have discontinuities)
72
[6304]73          triangles: List of 3-tuples (or a numeric array) of
[6152]74              integers representing indices of all vertices in the mesh.
75
76          mesh_origin: A geo_reference object or 3-tuples consisting of
77              UTM zone, easting and northing.
78              If specified vertex coordinates are assumed to be
79              relative to their respective origins.
80
81          max_vertices_per_cell: Number of vertices in a quad tree cell
82          at which the cell is split into 4.
83
84          Note: Don't supply vertex coords as a geospatial object and
85              a mesh origin, since geospatial has its own mesh origin.
86
87
88        Usage:
89        To use this in a blocking way, call build_fit_subset with point and
90        z info, then call fit with no point coord or z info (sketch below).
91       
92        """
93        # Initialise variables
94        if alpha is None:
95            self.alpha = DEFAULT_ALPHA
96        else:   
97            self.alpha = alpha
98           
99        FitInterpolate.__init__(self,
100                 vertex_coordinates,
101                 triangles,
102                 mesh,
103                 mesh_origin,
104                 verbose,
105                 max_vertices_per_cell)
106       
107        m = self.mesh.number_of_nodes # Nbr of basis functions (vertices)
108       
109        self.AtA = None
110        self.Atz = None
111
112        self.point_count = 0
113        if self.alpha <> 0:
114            if verbose: print 'Building smoothing matrix'
115            self._build_smoothing_matrix_D()
116           
[6244]117        bd_poly = self.mesh.get_boundary_polygon()   
118        self.mesh_boundary_polygon = ensure_numeric(bd_poly)
[6152]119           
120    def _build_coefficient_matrix_B(self,
121                                  verbose = False):
122        """
123        Build final coefficient matrix
124
125        Precon
126        If alpha is not zero, matrix D has been built
127        Matrix Ata has been built
128        """
129
130        if self.alpha <> 0:
131            #if verbose: print 'Building smoothing matrix'
132            #self._build_smoothing_matrix_D()
133            self.B = self.AtA + self.alpha*self.D
134        else:
135            self.B = self.AtA
136
[6236]137        # Convert self.B matrix to CSR format for faster matrix-vector products
[6152]138        self.B = Sparse_CSR(self.B)
139
140    def _build_smoothing_matrix_D(self):
141        """Build m x m smoothing matrix, where
142        m is the number of basis functions phi_k (one per vertex)
143
144        The smoothing matrix is defined as
145
146        D = D1 + D2
147
148        where
149
150        [D1]_{k,l} = \int_\Omega
151           \frac{\partial \phi_k}{\partial x}
152           \frac{\partial \phi_l}{\partial x}\,
153           dx dy
154
155        [D2]_{k,l} = \int_\Omega
156           \frac{\partial \phi_k}{\partial y}
157           \frac{\partial \phi_l}{\partial y}\,
158           dx dy
159
160
161        The derivatives \frac{\partial \phi_k}{\partial x},
162        \frac{\partial \phi_k}{\partial x} for a particular triangle
163        are obtained by computing the gradient a_k, b_k for basis function k
164        """
165       
[6236]166        # FIXME: algorithm might be optimised by computing local 9x9
167        # "element stiffness matrices:
[6152]168
169        m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)
170
171        self.D = Sparse(m,m)
172
[6236]173        # For each triangle compute contributions to D = D1+D2
[6152]174        for i in range(len(self.mesh)):
175
[6236]176            # Get area
[6152]177            area = self.mesh.areas[i]
178
[6236]179            # Get global vertex indices
[6152]180            v0 = self.mesh.triangles[i,0]
181            v1 = self.mesh.triangles[i,1]
182            v2 = self.mesh.triangles[i,2]
183
[6236]184            # Get the three vertex_points
[6152]185            xi0 = self.mesh.get_vertex_coordinate(i, 0)
186            xi1 = self.mesh.get_vertex_coordinate(i, 1)
187            xi2 = self.mesh.get_vertex_coordinate(i, 2)
188
[6236]189            # Compute gradients for each vertex
[6152]190            a0, b0 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
191                              1, 0, 0)
192
193            a1, b1 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
194                              0, 1, 0)
195
196            a2, b2 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
197                              0, 0, 1)
198
[6236]199            # Compute diagonal contributions
[6152]200            self.D[v0,v0] += (a0*a0 + b0*b0)*area
201            self.D[v1,v1] += (a1*a1 + b1*b1)*area
202            self.D[v2,v2] += (a2*a2 + b2*b2)*area
203
[6236]204            # Compute contributions for basis functions sharing edges
[6152]205            e01 = (a0*a1 + b0*b1)*area
206            self.D[v0,v1] += e01
207            self.D[v1,v0] += e01
208
209            e12 = (a1*a2 + b1*b2)*area
210            self.D[v1,v2] += e12
211            self.D[v2,v1] += e12
212
213            e20 = (a2*a0 + b2*b0)*area
214            self.D[v2,v0] += e20
215            self.D[v0,v2] += e20
216
217    def get_D(self):
218        return self.D.todense()
219
220
221
222    def _build_matrix_AtA_Atz(self,
223                              point_coordinates,
224                              z,
225                              verbose = False):
226        """Build:
227        AtA  m x m  interpolation matrix, and
228        Atz  m x a  right-hand side matrix, where
229        m is the number of basis functions phi_k (one per vertex)
230        a is the number of data attributes
231
232        This algorithm uses a quad tree data structure for fast binning of
233        data points.
234
235        If AtA is None, the matrices AtA and Atz are created.
236
237        This function can be called again and again, with sub-sets of
238        the point coordinates.  Call fit to get the results.
239       
240        Preconditions
241        z and points are numeric
242        Point_coordinates and mesh vertices have the same origin.
243
244        The number of attributes of the data points does not change
245        """
[6236]246       
247        # Build n x m interpolation matrix
[6152]248        if self.AtA is None:
249            # AtA and Atz need to be initialised.
250            m = self.mesh.number_of_nodes
251            if len(z.shape) > 1:
252                att_num = z.shape[1]
[6304]253                self.Atz = num.zeros((m,att_num), num.float)
[6152]254            else:
255                att_num = 1
[6304]256                self.Atz = num.zeros((m,), num.float)
[6152]257            assert z.shape[0] == point_coordinates.shape[0] 
258
259            AtA = Sparse(m,m)
260            # The memory damage has been done by now.
261        else:
262             AtA = self.AtA #Did this for speed, did ~nothing
263        self.point_count += point_coordinates.shape[0]
264
265
[6236]266        inside_indices = inside_polygon(point_coordinates,
267                                        self.mesh_boundary_polygon,
268                                        closed=True,
269                                        verbose=False) # Too much output if True
[6152]270
271       
[6236]272        n = len(inside_indices)
[6152]273
[6236]274        # Compute matrix elements for points inside the mesh
275        triangles = self.mesh.triangles # Shorthand
276        for d, i in enumerate(inside_indices):
[6152]277            # For each data_coordinate point
278            # if verbose and d%((n+10)/10)==0: print 'Doing %d of %d' %(d, n)
279            x = point_coordinates[i]
[6244]280           
[6152]281            element_found, sigma0, sigma1, sigma2, k = \
282                           search_tree_of_vertices(self.root, self.mesh, x)
283           
284            if element_found is True:
285                j0 = triangles[k,0] #Global vertex id for sigma0
286                j1 = triangles[k,1] #Global vertex id for sigma1
287                j2 = triangles[k,2] #Global vertex id for sigma2
288
289                sigmas = {j0:sigma0, j1:sigma1, j2:sigma2}
290                js     = [j0,j1,j2]
291
292                for j in js:
293                    self.Atz[j] +=  sigmas[j]*z[i]
294                    #print "self.Atz building", self.Atz
295                    #print "self.Atz[j]", self.Atz[j]
296                    #print " sigmas[j]", sigmas[j]
297                    #print "z[i]",z[i]
298                    #print "result", sigmas[j]*z[i]
299                   
300                    for k in js:
301                        AtA[j,k] += sigmas[j]*sigmas[k]
302            else:
[6237]303                # FIXME(Ole): This is the message referred to in ticket:314
[6244]304               
305                flag = is_inside_polygon(x,
306                                         self.mesh_boundary_polygon,
307                                         closed=True,
308                                         verbose=False) # Too much output if True               
309                msg = 'Point (%f, %f) is not inside mesh boundary' % tuple(x)
310                assert flag is True, msg               
311               
[6244]313                minx = min(self.mesh_boundary_polygon[:,0])
314                maxx = max(self.mesh_boundary_polygon[:,0])               
315                miny = min(self.mesh_boundary_polygon[:,1])
316                maxy = max(self.mesh_boundary_polygon[:,1])
[6236]317                msg = 'Could not find triangle for point %s. ' % str(x) 
[6244]318                msg += 'Mesh boundary extent is (%.f, %.f), (%.f, %.f)'\
319                    % (minx, maxx, miny, maxy)
[6517]320                raise RuntimeError, msg
[6244]321               
322
324            self.AtA = AtA
[6236]325
[6152]326       
327    def fit(self, point_coordinates_or_filename=None, z=None,
328            verbose=False,
329            point_origin=None,
330            attribute_name=None,
331            max_read_lines=500):
332        """Fit a smooth surface to given 1d array of data points z.
333
334        The smooth surface is computed at each vertex in the underlying
335        mesh using the formula given in the module doc string.
336
337        Inputs:
338        point_coordinates: The co-ordinates of the data points.
339              List of coordinate pairs [x, y] of
[6304]340              data points or an nx2 numeric array or a Geospatial_data object
[6152]341              or points file filename
342          z: Single 1d vector or array of data at the point_coordinates.
343         
344        """
[6236]345       
346        # Use blocking to load in the point info
[6152]347        if type(point_coordinates_or_filename) == types.StringType:
348            msg = "Don't set a point origin when reading from a file"
349            assert point_origin is None, msg
350            filename = point_coordinates_or_filename
351
352            G_data = Geospatial_data(filename,
353                                     max_read_lines=max_read_lines,
354                                     load_file_now=False,
355                                     verbose=verbose)
356
357            for i, geo_block in enumerate(G_data):
358                if verbose is True and 0 == i%200: 
359                    # The time this will take
360                    # is dependent on the number of triangles
361                       
362                    print 'Processing Block %d' %i
363                    # FIXME (Ole): It would be good to say how many blocks
364                    # there are here. But this is no longer necessary
365                    # for pts files as they are reported in geospatial_data
366                    # I suggest deleting this verbose output and make
367                    # Geospatial_data more informative for txt files.
368                    #
369                    # I still think so (12/12/7, Ole).
370           
371
372                   
373                # Build the array
374
375                points = geo_block.get_data_points(absolute=True)
376                z = geo_block.get_attributes(attribute_name=attribute_name)
377                self.build_fit_subset(points, z, verbose=verbose)
378
[6517]379                # FIXME(Ole): I thought this test would make sense here
380                # See test_fitting_example_that_crashed_2 in test_shallow_water_domain.py
381                # Committed 11 March 2009
382                msg = 'Matrix AtA was not built'
383                assert self.AtA is not None, msg
[6152]384               
[6517]385                #print 'Matrix was built OK'
386
387               
[6152]388            point_coordinates = None
389        else:
390            point_coordinates =  point_coordinates_or_filename
391           
392        if point_coordinates is None:
393            if verbose: print 'Warning: no data points in fit'
[6517]394            msg = 'No interpolation matrix.'
395            assert self.AtA is not None, msg
396            assert self.Atz is not None
[6152]397           
[6236]398            # FIXME (DSG) - do  a message
[6152]399        else:
400            point_coordinates = ensure_absolute(point_coordinates,
401                                                geo_reference=point_origin)
[6236]402            # if isinstance(point_coordinates,Geospatial_data) and z is None:
[6152]403            # z will come from the geo-ref
404            self.build_fit_subset(point_coordinates, z, verbose)
405
[6236]406        # Check sanity
[6152]407        m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)
408        n = self.point_count
409        if n<m and self.alpha == 0.0:
410            msg = 'ERROR (least_squares): Too few data points\n'
411            msg += 'There are only %d data points and alpha == 0. ' %n
412            msg += 'Need at least %d\n' %m
413            msg += 'Alternatively, set smoothing parameter alpha to a small '
414            msg += 'positive value,\ne.g. 1.0e-3.'
415            raise TooFewPointsError(msg)
416
417        self._build_coefficient_matrix_B(verbose)
418        loners = self.mesh.get_lone_vertices()
419        # FIXME - make this an error message.
420        # test with
421        # Not_yet_test_smooth_att_to_mesh_with_excess_verts.
422        if len(loners)>0:
423            msg = 'WARNING: (least_squares): \nVertices with no triangles\n'
424            msg += 'All vertices should be part of a triangle.\n'
425            msg += 'In the future this will be enforced.\n'
426            msg += 'The following vertices are not part of a triangle;\n'
427            msg += str(loners)
428            print msg
429            #raise VertsWithNoTrianglesError(msg)
430       
431       
432        return conjugate_gradient(self.B, self.Atz, self.Atz,
433                                  imax=2*len(self.Atz) )
434
435       
436    def build_fit_subset(self, point_coordinates, z=None, attribute_name=None,
437                              verbose=False):
438        """Fit a smooth surface to given 1d array of data points z.
439
440        The smooth surface is computed at each vertex in the underlying
441        mesh using the formula given in the module doc string.
442
443        Inputs:
444        point_coordinates: The co-ordinates of the data points.
445              List of coordinate pairs [x, y] of
[6304]446              data points or an nx2 numeric array or a Geospatial_data object
[6152]447        z: Single 1d vector or array of data at the point_coordinates.
448        attribute_name: Used to get the z values from the
449              geospatial object. If no attribute_name is specified,
450              it is a bit of a lucky dip as to which attribute you get.
451              If there is only one attribute it will be that one.
452
453        """
454
455        # FIXME(DSG-DSG): Check that the vert and point coords
456        # have the same zone.
457        if isinstance(point_coordinates,Geospatial_data):
458            point_coordinates = point_coordinates.get_data_points( \
459                absolute = True)
460       
[6304]461        # Convert input to numeric arrays
[6152]462        if z is not None:
[6304]463            z = ensure_numeric(z, num.float)
[6152]464        else:
465            msg = 'z not specified'
466            assert isinstance(point_coordinates,Geospatial_data), msg
467            z = point_coordinates.get_attributes(attribute_name)
468
[6304]469        point_coordinates = ensure_numeric(point_coordinates, num.float)
[6152]470        self._build_matrix_AtA_Atz(point_coordinates, z, verbose)
471
472
473############################################################################
474
475def fit_to_mesh(point_coordinates, # this can also be a points file name
476                vertex_coordinates=None,
477                triangles=None,
478                mesh=None,
479                point_attributes=None,
480                alpha=DEFAULT_ALPHA,
481                verbose=False,
[6236]482                acceptable_overshoot=1.01, 
483                # FIXME: Move to config - this value is assumed in caching test
484                # FIXME(Ole): Just realised that this was never implemented (29 Jan 2009). I suggest removing it altogether.
[6152]485                mesh_origin=None,
486                data_origin=None,
487                max_read_lines=None,
488                attribute_name=None,
[6191]489                use_cache=False):
[6152]490    """Wrapper around internal function _fit_to_mesh for use with caching.
491   
492    """
493   
494    args = (point_coordinates, )
495    kwargs = {'vertex_coordinates': vertex_coordinates,
496              'triangles': triangles,
497              'mesh': mesh,
498              'point_attributes': point_attributes,
499              'alpha': alpha,
500              'verbose': verbose,
501              'acceptable_overshoot': acceptable_overshoot,
502              'mesh_origin': mesh_origin,
503              'data_origin': data_origin,
504              'max_read_lines': max_read_lines,
[6232]505              'attribute_name': attribute_name
[6152]506              }
507
508    if use_cache is True:
509        if isinstance(point_coordinates, basestring):
510            # We assume that point_coordinates is the name of a .csv/.txt
511            # file which must be passed onto caching as a dependency
512            # (in case it has changed on disk)
513            dep = [point_coordinates]
514        else:
515            dep = None
516
517           
518        #from caching import myhash
519        #import copy
[6232]520        #print args
[6152]521        #print kwargs
522        #print 'hashing:'
523        #print 'args', myhash( (args, kwargs) )
524        #print 'again', myhash( copy.deepcopy( (args, kwargs)) )       
525       
526        #print 'mesh hash', myhash( kwargs['mesh'] )       
527       
528        #print '-------------------------'
529        #print 'vertices hash', myhash( kwargs['mesh'].nodes )
530        #print 'triangles hash', myhash( kwargs['mesh'].triangles )
531        #print '-------------------------'       
532       
533        #for key in mesh.__dict__:
534        #    print key, myhash(mesh.__dict__[key])
535       
536        #for key in mesh.quantities.keys():
537        #    print key, myhash(mesh.quantities[key])
538       
539        #import sys; sys.exit()
540           
541        return cache(_fit_to_mesh,
542                     args, kwargs,
543                     verbose=verbose,
544                     compression=False,
545                     dependencies=dep)
546    else:
547        return apply(_fit_to_mesh,
548                     args, kwargs)
549
550def _fit_to_mesh(point_coordinates, # this can also be a points file name
551                 vertex_coordinates=None,
552                 triangles=None,
553                 mesh=None,
554                 point_attributes=None,
555                 alpha=DEFAULT_ALPHA,
556                 verbose=False,
557                 acceptable_overshoot=1.01,
558                 mesh_origin=None,
559                 data_origin=None,
560                 max_read_lines=None,
[6232]561                 attribute_name=None):
[6152]562    """
563    Fit a smooth surface to a triangulation,
564    given data points with attributes.
565
566
567        Inputs:
568        vertex_coordinates: List of coordinate pairs [xi, eta] of
[6304]569              points constituting a mesh (or an m x 2 numeric array or
[6152]570              a geospatial object)
571              Points may appear multiple times
572              (e.g. if vertices have discontinuities)
573
[6304]574          triangles: List of 3-tuples (or a numeric array) of
[6152]575          integers representing indices of all vertices in the mesh.
576
577          point_coordinates: List of coordinate pairs [x, y] of data points
[6304]578          (or an nx2 numeric array). This can also be a .csv/.txt/.pts
[6152]579          file name.
580
581          alpha: Smoothing parameter.
582
583          acceptable_overshoot: NOT IMPLEMENTED.
584          Controls the allowed factor by which fitted values
585          may exceed the value of the input data.
586          The lower limit is defined
587          as min(z) - acceptable_overshoot*delta z and the upper limit
588          as max(z) + acceptable_overshoot*delta z.
589         
590
591          mesh_origin: A geo_reference object or 3-tuples consisting of
592              UTM zone, easting and northing.
593              If specified vertex coordinates are assumed to be
594              relative to their respective origins.
595         
596
597          point_attributes: Vector or array of data at the
598                            point_coordinates.
599
600    """
601
602    if mesh is None:
603        # FIXME(DSG): Throw errors if triangles or vertex_coordinates
604        # are None
605           
[6304]606        #Convert input to numeric arrays
607        triangles = ensure_numeric(triangles, num.int)
[6152]608        vertex_coordinates = ensure_absolute(vertex_coordinates,
609                                             geo_reference = mesh_origin)
610
611        if verbose: print 'FitInterpolate: Building mesh'       
612        mesh = Mesh(vertex_coordinates, triangles)
613        mesh.check_integrity()
614   
[6244]615   
[6152]616    interp = Fit(mesh=mesh,
617                 verbose=verbose,
618                 alpha=alpha)
619
620    vertex_attributes = interp.fit(point_coordinates,
621                                   point_attributes,
622                                   point_origin=data_origin,
623                                   max_read_lines=max_read_lines,
624                                   attribute_name=attribute_name,
625                                   verbose=verbose)
626
627       
628    # Add the value checking stuff that's in least squares.
629    # Maybe this stuff should get pushed down into Fit.
630    # or at least be a method of Fit.
631    # Or integrate it into the fit method, saving the max and min
632    # as attributes.
633   
634    return vertex_attributes
635
636
637#def _fit(*args, **kwargs):
638#    """Private function for use with caching. Reason is that classes
639#    may change their byte code between runs which is annoying.
640#    """
641#   
642#    return Fit(*args, **kwargs)
643
644
645def fit_to_mesh_file(mesh_file, point_file, mesh_output_file,
646                     alpha=DEFAULT_ALPHA, verbose= False,
647                     expand_search = False,
648                     precrop = False,
649                     display_errors = True):
650    """
651    Given a mesh file (tsh) and a point attribute file, fit
652    point attributes to the mesh and write a mesh file with the
653    results.
654
655    Note: the points file needs titles.  If you want anuga to use the tsh file,
656    make sure the title is elevation.
657
658    NOTE: Throws IOErrors, for a variety of file problems.
659   
660    """
661
662    from load_mesh.loadASCII import import_mesh_file, \
663         export_mesh_file, concatinate_attributelist
664
665
666    try:
667        mesh_dict = import_mesh_file(mesh_file)
668    except IOError,e:
669        if display_errors:
670            print "Could not load bad file. ", e
671        raise IOError  #Could not load bad mesh file.
672   
673    vertex_coordinates = mesh_dict['vertices']
674    triangles = mesh_dict['triangles']
[6304]675    if isinstance(mesh_dict['vertex_attributes'], num.ndarray):
[6152]676        old_point_attributes = mesh_dict['vertex_attributes'].tolist()
677    else:
678        old_point_attributes = mesh_dict['vertex_attributes']
679
[6304]680    if isinstance(mesh_dict['vertex_attribute_titles'], num.ndarray):
[6152]681        old_title_list = mesh_dict['vertex_attribute_titles'].tolist()
682    else:
683        old_title_list = mesh_dict['vertex_attribute_titles']
684
685    if verbose: print 'tsh file %s loaded' %mesh_file
686
687    # load in the points file
688    try:
689        geo = Geospatial_data(point_file, verbose=verbose)
690    except IOError,e:
691        if display_errors:
692            print "Could not load bad file. ", e
693        raise IOError  #Re-raise exception 
694
695    point_coordinates = geo.get_data_points(absolute=True)
696    title_list,point_attributes = concatinate_attributelist( \
697        geo.get_all_attributes())
698
699    if mesh_dict.has_key('geo_reference') and \
700           not mesh_dict['geo_reference'] is None:
701        mesh_origin = mesh_dict['geo_reference'].get_origin()
702    else:
703        mesh_origin = None
704
705    if verbose: print "points file loaded"
706    if verbose: print "fitting to mesh"
707    f = fit_to_mesh(point_coordinates,
708                    vertex_coordinates,
709                    triangles,
710                    None,
711                    point_attributes,
712                    alpha = alpha,
713                    verbose = verbose,
714                    data_origin = None,
715                    mesh_origin = mesh_origin)
716    if verbose: print "finished fitting to mesh"
717
718    # convert array to list of lists
719    new_point_attributes = f.tolist()
720    #FIXME have this overwrite attributes with the same title - DSG
721    #Put the newer attributes last
722    if old_title_list <> []:
723        old_title_list.extend(title_list)
724        #FIXME can this be done a faster way? - DSG
725        for i in range(len(old_point_attributes)):
726            old_point_attributes[i].extend(new_point_attributes[i])
727        mesh_dict['vertex_attributes'] = old_point_attributes
728        mesh_dict['vertex_attribute_titles'] = old_title_list
729    else:
730        mesh_dict['vertex_attributes'] = new_point_attributes
731        mesh_dict['vertex_attribute_titles'] = title_list
732
733    if verbose: print "exporting to file ", mesh_output_file
734
735    try:
736        export_mesh_file(mesh_output_file, mesh_dict)
737    except IOError,e:
738        if display_errors:
739            print "Could not write file. ", e
740        raise IOError