source: anuga_core/source/anuga/fit_interpolate/fit.py @ 4572

Last change on this file since 4572 was 4572, checked in by duncan, 17 years ago

small fixes

File size: 24.3 KB
1"""Least squares fitting.
2
3   Implements a penalised least-squares fit.
4
5   The penalty term (or smoothing term) is controlled by the smoothing
6   parameter alpha.
7   With a value of alpha=0, the fit function will attempt
8   to interpolate as closely as possible in the least-squares sense.
9   With values alpha > 0, a certain amount of smoothing will be applied.
10   A positive alpha is essential in cases where there are too few
11   data points.
12   A negative alpha is not allowed.
13   A typical value of alpha is 1.0e-6
14
15
16   Ole Nielsen, Stephen Roberts, Duncan Gray, Christopher Zoppou
17   Geoscience Australia, 2004.
18
19   TO DO
20   * test geo_ref, geo_spatial
21
22   IDEAS
23   * (DSG-) Change the interface of fit, so a domain object can
24      be passed in. (I don't know if this is feasible). If could
25      save time/memory.
26"""
import types

from Numeric import zeros, Float, ArrayType, take

from anuga.caching import cache
from anuga.geospatial_data.geospatial_data import Geospatial_data, \
     ensure_absolute
from anuga.fit_interpolate.general_fit_interpolate import FitInterpolate
from anuga.utilities.sparse import Sparse, Sparse_CSR
from anuga.utilities.polygon import in_and_outside_polygon
from anuga.fit_interpolate.search_functions import search_tree_of_vertices
from anuga.utilities.cg_solve import conjugate_gradient
from anuga.utilities.numerical_tools import ensure_numeric, gradient

import exceptions
class ToFewPointsError(exceptions.Exception): pass
class VertsWithNoTrianglesError(exceptions.Exception): pass

DEFAULT_ALPHA = 0.001


class Fit(FitInterpolate):

    def __init__(self,
                 vertex_coordinates,
                 triangles,
                 mesh_origin=None,
                 alpha=None,
                 verbose=False,
                 max_vertices_per_cell=30):
59        """
60        Fit data at points to the vertices of a mesh.
61
62        Inputs:
63
64          vertex_coordinates: List of coordinate pairs [xi, eta] of
65              points constituting a mesh (or an m x 2 Numeric array or
66              a geospatial object)
67              Points may appear multiple times
68              (e.g. if vertices have discontinuities)
69
70          triangles: List of 3-tuples (or a Numeric array) of
71              integers representing indices of all vertices in the mesh.
72
73          mesh_origin: A geo_reference object or 3-tuples consisting of
74              UTM zone, easting and northing.
75              If specified vertex coordinates are assumed to be
76              relative to their respective origins.
77
78          max_vertices_per_cell: Number of vertices in a quad tree cell
79          at which the cell is split into 4.
80
81          Note: Don't supply a vertex coords as a geospatial object and
82              a mesh origin, since geospatial has its own mesh origin.
83
84
85        Usage,
86        To use this in a blocking way, call  build_fit_subset, with z info,
87        and then fit, with no point coord, z info.
88       
89        """
        # Initialise variables
        if alpha is None:
            self.alpha = DEFAULT_ALPHA
        else:
            self.alpha = alpha
        FitInterpolate.__init__(self,
                                vertex_coordinates,
                                triangles,
                                mesh_origin,
                                verbose,
                                max_vertices_per_cell)

        m = self.mesh.number_of_nodes # Nbr of basis functions (vertices)

        self.AtA = None
        self.Atz = None

        self.point_count = 0
        if self.alpha != 0:
            if verbose: print 'Building smoothing matrix'
            self._build_smoothing_matrix_D()

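    # Blocking-usage sketch (illustrative only; the mesh and data variables
    # are hypothetical).  As described in the __init__ docstring, point data
    # can be fed in chunks via build_fit_subset and the combined fit
    # retrieved at the end:
    #
    #     fit_object = Fit(vertex_coordinates, triangles, alpha=1.0e-6)
    #     for points_chunk, z_chunk in chunks:
    #         fit_object.build_fit_subset(points_chunk, z_chunk)
    #     vertex_values = fit_object.fit()
    #
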
    def _build_coefficient_matrix_B(self,
                                    verbose=False):
        """
        Build final coefficient matrix

        Preconditions:
        If alpha is not zero, matrix D has been built
        Matrix AtA has been built
        """

        if self.alpha != 0:
            #if verbose: print 'Building smoothing matrix'
            #self._build_smoothing_matrix_D()
            self.B = self.AtA + self.alpha*self.D
        else:
            self.B = self.AtA

        #Convert self.B matrix to CSR format for faster matrix-vector products
        self.B = Sparse_CSR(self.B)
    def _build_smoothing_matrix_D(self):
        """Build m x m smoothing matrix, where
        m is the number of basis functions phi_k (one per vertex)

        The smoothing matrix is defined as

        D = D1 + D2

        where

        [D1]_{k,l} = \int_\Omega
           \frac{\partial \phi_k}{\partial x}
           \frac{\partial \phi_l}{\partial x}\,
           dx dy

        [D2]_{k,l} = \int_\Omega
           \frac{\partial \phi_k}{\partial y}
           \frac{\partial \phi_l}{\partial y}\,
           dx dy


        The derivatives \frac{\partial \phi_k}{\partial x},
        \frac{\partial \phi_k}{\partial y} for a particular triangle
        are obtained by computing the gradient a_k, b_k for basis function k
        """

        #FIXME: algorithm might be optimised by computing local 9x9
        #"element stiffness matrices"

        m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)

        self.D = Sparse(m,m)

        #For each triangle compute contributions to D = D1+D2
        for i in range(len(self.mesh)):

            #Get area
            area = self.mesh.areas[i]

            #Get global vertex indices
            v0 = self.mesh.triangles[i,0]
            v1 = self.mesh.triangles[i,1]
            v2 = self.mesh.triangles[i,2]

            #Get the three vertex_points
            xi0 = self.mesh.get_vertex_coordinate(i, 0)
            xi1 = self.mesh.get_vertex_coordinate(i, 1)
            xi2 = self.mesh.get_vertex_coordinate(i, 2)

            #Compute gradients for each vertex
            a0, b0 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
                              1, 0, 0)

            a1, b1 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
                              0, 1, 0)

            a2, b2 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
                              0, 0, 1)

            #Compute diagonal contributions
            self.D[v0,v0] += (a0*a0 + b0*b0)*area
            self.D[v1,v1] += (a1*a1 + b1*b1)*area
            self.D[v2,v2] += (a2*a2 + b2*b2)*area

            #Compute contributions for basis functions sharing edges
            e01 = (a0*a1 + b0*b1)*area
            self.D[v0,v1] += e01
            self.D[v1,v0] += e01

            e12 = (a1*a2 + b1*b2)*area
            self.D[v1,v2] += e12
            self.D[v2,v1] += e12

            e20 = (a2*a0 + b2*b0)*area
            self.D[v2,v0] += e20
            self.D[v0,v2] += e20

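    # Worked example (a sketch, not used by the code): for the unit right
    # triangle with vertices (0,0), (1,0), (0,1), the linear basis functions
    # are phi0 = 1-x-y, phi1 = x, phi2 = y, with gradients (-1,-1), (1,0) and
    # (0,1), and area 0.5.  The triangle's contribution to D is therefore
    #
    #     D[0,0] += 1.0    D[1,1] += 0.5    D[2,2] += 0.5
    #     D[0,1] += -0.5   D[0,2] += -0.5   D[1,2] += 0.0
    #
    # (and symmetrically for the transposed entries).
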
    def get_D(self):
        return self.D.todense()


    def _build_matrix_AtA_Atz(self,
                              point_coordinates,
                              z,
                              verbose=False):
        """Build:
        AtA: m x m interpolation matrix, and
        Atz: m x a interpolation matrix, where
        m is the number of basis functions phi_k (one per vertex)
        a is the number of data attributes

        This algorithm uses a quad tree data structure for fast binning of
        data points.

        If AtA is None, the matrices AtA and Atz are created.

        This function can be called again and again, with sub-sets of
        the point coordinates.  Call fit to get the results.

        Preconditions
        z and points are numeric
        point_coordinates and mesh vertices have the same origin.

        The number of attributes of the data points does not change
        """
        #Build n x m interpolation matrix

        if self.AtA is None:
            # AtA and Atz need to be initialised.
            m = self.mesh.number_of_nodes
            if len(z.shape) > 1:
                att_num = z.shape[1]
                self.Atz = zeros((m,att_num), Float)
            else:
                att_num = 1
                self.Atz = zeros((m,), Float)
            assert z.shape[0] == point_coordinates.shape[0]

            self.AtA = Sparse(m,m)
            # The memory damage has been done by now.

        self.point_count += point_coordinates.shape[0]
        #print "_build_matrix_AtA_Atz - self.point_count", self.point_count
        if verbose: print 'Getting indices inside mesh boundary'
        #print 'point_coordinates.shape', point_coordinates.shape
        #print 'self.mesh.get_boundary_polygon()',\
        #      self.mesh.get_boundary_polygon()

        inside_poly_indices, outside_poly_indices = \
                     in_and_outside_polygon(point_coordinates,
                                            self.mesh.get_boundary_polygon(),
                                            closed=True, verbose=verbose)
        #print "self.inside_poly_indices",self.inside_poly_indices
        #print "self.outside_poly_indices",self.outside_poly_indices


        n = len(inside_poly_indices)
        if verbose: print 'Building fitting matrix from %d points' %n
        #Compute matrix elements for points inside the mesh
        for k, i in enumerate(inside_poly_indices):
            #For each data_coordinate point
            if verbose and k%((n+10)/10)==0: print 'Doing %d of %d' %(k, n)
            x = point_coordinates[i]
            # Note: k is reused here; after the search it holds the index of
            # the triangle containing point x.
            element_found, sigma0, sigma1, sigma2, k = \
                           search_tree_of_vertices(self.root, self.mesh, x)

            if element_found is True:
                j0 = self.mesh.triangles[k,0] #Global vertex id for sigma0
                j1 = self.mesh.triangles[k,1] #Global vertex id for sigma1
                j2 = self.mesh.triangles[k,2] #Global vertex id for sigma2

                sigmas = {j0:sigma0, j1:sigma1, j2:sigma2}
                js     = [j0,j1,j2]

                for j in js:
                    self.Atz[j] += sigmas[j]*z[i]
                    #print "self.Atz building", self.Atz
                    #print "self.Atz[j]", self.Atz[j]
                    #print " sigmas[j]", sigmas[j]
                    #print "z[i]",z[i]
                    #print "result", sigmas[j]*z[i]

                    for k in js:
                        self.AtA[j,k] += sigmas[j]*sigmas[k]
            else:
                msg = 'Could not find triangle for point %s' % str(x)
                raise Exception(msg)

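    # How AtA and Atz relate to the design matrix A (an explanatory sketch,
    # not executed by the class): for a data point with barycentric
    # coordinates (sigma0, sigma1, sigma2) in the triangle with global vertex
    # ids (j0, j1, j2) and data value z, the loop above performs
    #
    #     for j, sigma_j in zip((j0, j1, j2), (sigma0, sigma1, sigma2)):
    #         Atz[j] += sigma_j * z
    #         for k, sigma_k in zip((j0, j1, j2), (sigma0, sigma1, sigma2)):
    #             AtA[j, k] += sigma_j * sigma_k
    #
    # i.e. AtA = A^T A and Atz = A^T z are accumulated point by point without
    # ever forming the full n x m interpolation matrix A.
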
    def fit(self, point_coordinates_or_filename=None, z=None,
            verbose=False,
            point_origin=None,
            attribute_name=None,
            max_read_lines=500):
        """Fit a smooth surface to a given 1d array of data points z.

        The smooth surface is computed at each vertex in the underlying
        mesh using the formula given in the module doc string.

        Inputs:
        point_coordinates_or_filename: The co-ordinates of the data points.
              List of coordinate pairs [x, y] of data points, an nx2 Numeric
              array, a Geospatial_data object or a file name (txt, csv or
              pts).
        z: Single 1d vector or array of data at the point_coordinates.

        """
        # use blocking to load in the point info
        if type(point_coordinates_or_filename) == types.StringType:
            msg = "Don't set a point origin when reading from a file"
            assert point_origin is None, msg
            filename = point_coordinates_or_filename

            for i, geo_block in enumerate(Geospatial_data(filename,
                                              max_read_lines=max_read_lines,
                                              load_file_now=False,
                                              verbose=verbose)):
                if verbose is True and 0 == i%200:
                    # Roughly every 5 minutes, but this is dependent on the
                    # number of triangles, so it isn't exactly every 5 minutes.
                    print 'Block %i' %i
                # build the array
                points = geo_block.get_data_points(absolute=True)
                z = geo_block.get_attributes(attribute_name=attribute_name)
                self.build_fit_subset(points, z)
            point_coordinates = None
        else:
            point_coordinates = point_coordinates_or_filename

        if point_coordinates is None:
            assert self.AtA != None
            assert self.Atz != None
            #FIXME (DSG) - add a proper message here
        else:
            point_coordinates = ensure_absolute(point_coordinates,
                                                geo_reference=point_origin)
            #if isinstance(point_coordinates,Geospatial_data) and z is None:
            # z will come from the geo-ref
            self.build_fit_subset(point_coordinates, z, verbose=verbose)

        #Check sanity
        m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)
        n = self.point_count
        if n < m and self.alpha == 0.0:
            msg = 'ERROR (least_squares): Too few data points\n'
            msg += 'There are only %d data points and alpha == 0. ' %n
            msg += 'Need at least %d\n' %m
            msg += 'Alternatively, set smoothing parameter alpha to a small '
            msg += 'positive value,\ne.g. 1.0e-3.'
            raise ToFewPointsError(msg)

        self._build_coefficient_matrix_B(verbose)
        loners = self.mesh.get_lone_vertices()
        # FIXME - make this an error message.
        # test with
        # Not_yet_test_smooth_att_to_mesh_with_excess_verts.
        if len(loners) > 0:
            msg = 'WARNING: (least_squares): \nVertices with no triangles\n'
            msg += 'All vertices should be part of a triangle.\n'
            msg += 'In the future this will be enforced.\n'
            msg += 'The following vertices are not part of a triangle:\n'
            msg += str(loners)
            print msg
            #raise VertsWithNoTrianglesError(msg)


        return conjugate_gradient(self.B, self.Atz, self.Atz,
                                  imax=2*len(self.Atz))

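    # Sketch of fitting directly from a points file (illustrative only; the
    # file name and attribute name are hypothetical).  fit() reads the file
    # in blocks of max_read_lines points and accumulates the fit as it goes:
    #
    #     fit_object = Fit(vertex_coordinates, triangles, alpha=1.0e-6)
    #     vertex_values = fit_object.fit('points.csv',
    #                                    attribute_name='elevation',
    #                                    max_read_lines=500)
    #
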
    def build_fit_subset(self, point_coordinates, z=None, attribute_name=None,
                         verbose=False):
        """Add a subset of data points to the fit.

        The smooth surface itself is computed at each vertex of the
        underlying mesh by calling fit once all subsets have been added,
        using the formula given in the module doc string.

        Inputs:
        point_coordinates: The co-ordinates of the data points.
              List of coordinate pairs [x, y] of
              data points or an nx2 Numeric array or a Geospatial_data object
        z: Single 1d vector or array of data at the point_coordinates.
        attribute_name: Used to get the z values from the
              geospatial object. If no attribute_name is specified it is a
              bit of a lucky dip as to which attribute you get. If there is
              only one attribute it will be that one.

        """

        #FIXME(DSG-DSG): Check that the vert and point coords
        #have the same zone.
        if isinstance(point_coordinates, Geospatial_data):
            # Get z from the geospatial object before converting its
            # coordinates to a plain array (the attributes would no longer
            # be reachable afterwards).
            if z is None:
                z = point_coordinates.get_attributes(attribute_name)
            point_coordinates = point_coordinates.get_data_points( \
                absolute=True)

        #Convert input to Numeric arrays
        msg = 'z not specified'
        assert z is not None, msg
        z = ensure_numeric(z, Float)

        point_coordinates = ensure_numeric(point_coordinates, Float)

        self._build_matrix_AtA_Atz(point_coordinates, z, verbose)


############################################################################

def fit_to_mesh(vertex_coordinates,
                triangles,
                point_coordinates, # this can also be a .csv/.txt file name
                point_attributes=None,
                alpha=DEFAULT_ALPHA,
                verbose=False,
                acceptable_overshoot=1.01,
                mesh_origin=None,
                data_origin=None,
                max_read_lines=None,
                attribute_name=None,
                use_cache=False):
    """Wrapper around internal function _fit_to_mesh for use with caching.

    """

    args = (vertex_coordinates, triangles, point_coordinates, )
    kwargs = {'point_attributes': point_attributes,
              'alpha': alpha,
              'verbose': verbose,
              'acceptable_overshoot': acceptable_overshoot,
              'mesh_origin': mesh_origin,
              'data_origin': data_origin,
              'max_read_lines': max_read_lines,
              'attribute_name': attribute_name,
              'use_cache': use_cache
              }

    if use_cache is True:
        if isinstance(point_coordinates, basestring):
            # We assume that point_coordinates is the name of a .csv/.txt
            # file which must be passed on to caching as a dependency
            # (in case it has changed on disk)
            dep = [point_coordinates]
        else:
            dep = None

        return cache(_fit_to_mesh,
                     args, kwargs,
                     verbose=verbose,
                     compression=False,
                     dependencies=dep)
    else:
        return apply(_fit_to_mesh,
                     args, kwargs)

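# Minimal usage sketch for fit_to_mesh (illustrative values only, assuming a
# simple two-triangle mesh; not part of the original module):
#
#     vertex_coordinates = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]]
#     triangles = [[0, 1, 2], [1, 3, 2]]
#     point_coordinates = [[0.25, 0.25], [0.75, 0.75]]
#     point_attributes = [1.0, 2.0]
#     vertex_values = fit_to_mesh(vertex_coordinates, triangles,
#                                 point_coordinates, point_attributes,
#                                 alpha=1.0e-6)
#
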
def _fit_to_mesh(vertex_coordinates,
                 triangles,
                 point_coordinates, # this can also be a points file name
                 point_attributes=None,
                 alpha=DEFAULT_ALPHA,
                 verbose=False,
                 acceptable_overshoot=1.01,
                 mesh_origin=None,
                 data_origin=None,
                 max_read_lines=None,
                 attribute_name=None,
                 use_cache=False):
    """
    Fit a smooth surface to a triangulation,
    given data points with attributes.

    Inputs:
          vertex_coordinates: List of coordinate pairs [xi, eta] of
              points constituting a mesh (or an m x 2 Numeric array or
              a geospatial object)
              Points may appear multiple times
              (e.g. if vertices have discontinuities)

          triangles: List of 3-tuples (or a Numeric array) of
              integers representing indices of all vertices in the mesh.

          point_coordinates: List of coordinate pairs [x, y] of data points
              (or an nx2 Numeric array). This can also be a .csv/.txt/.pts
              file name.

          alpha: Smoothing parameter.

          acceptable_overshoot: Controls the allowed factor by which fitted
              values may exceed the values of the input data. The lower limit
              is defined as min(z) - acceptable_overshoot*delta z and the
              upper limit as max(z) + acceptable_overshoot*delta z.

          mesh_origin: A geo_reference object or a 3-tuple consisting of
              UTM zone, easting and northing.
              If specified, vertex coordinates are assumed to be
              relative to their respective origins.

          point_attributes: Vector or array of data at the
              point_coordinates.

    """
    #Since this is a wrapper for fit, let's handle the geo_spatial attributes
    if use_cache is True:
        interp = cache(_fit,
                       (vertex_coordinates,
                        triangles),
                       {'verbose': verbose,
                        'mesh_origin': mesh_origin,
                        'alpha': alpha},
                       compression=False,
                       verbose=verbose)

    else:
        interp = Fit(vertex_coordinates,
                     triangles,
                     verbose=verbose,
                     mesh_origin=mesh_origin,
                     alpha=alpha)

    vertex_attributes = interp.fit(point_coordinates,
                                   point_attributes,
                                   point_origin=data_origin,
                                   max_read_lines=max_read_lines,
                                   attribute_name=attribute_name,
                                   verbose=verbose)


    # Add the value checking stuff that's in least squares.
    # Maybe this stuff should get pushed down into Fit, or
    # at least be a method of Fit.
    # Or integrate it into the fit method, saving the max and min values
    # as attributes.

    return vertex_attributes

def _fit(*args, **kwargs):
    """Private function for use with caching. Reason is that classes
    may change their byte code between runs which is annoying.
    """

    return Fit(*args, **kwargs)


def fit_to_mesh_file(mesh_file, point_file, mesh_output_file,
                     alpha=DEFAULT_ALPHA, verbose=False,
                     expand_search=False,
                     precrop=False,
                     display_errors=True):
    """
    Given a mesh file (tsh) and a point attribute file (xya), fit
    point attributes to the mesh and write a mesh file with the
    results.

    Note: the .xya files need titles.  If you want anuga to use the .tsh
    file, make sure the title is elevation.

    NOTE: Throws IOError for a variety of file problems.

    """

    from load_mesh.loadASCII import import_mesh_file, \
         export_mesh_file, concatinate_attributelist


    try:
        mesh_dict = import_mesh_file(mesh_file)
    except IOError, e:
        if display_errors:
            print "Could not load bad file. ", e
        raise IOError  #Could not load bad mesh file.

    vertex_coordinates = mesh_dict['vertices']
    triangles = mesh_dict['triangles']
    if type(mesh_dict['vertex_attributes']) == ArrayType:
        old_point_attributes = mesh_dict['vertex_attributes'].tolist()
    else:
        old_point_attributes = mesh_dict['vertex_attributes']

    if type(mesh_dict['vertex_attribute_titles']) == ArrayType:
        old_title_list = mesh_dict['vertex_attribute_titles'].tolist()
    else:
        old_title_list = mesh_dict['vertex_attribute_titles']

    if verbose: print 'tsh file %s loaded' %mesh_file

    # load in the points file
    try:
        geo = Geospatial_data(point_file, verbose=verbose)
    except IOError, e:
        if display_errors:
            print "Could not load bad file. ", e
        raise IOError  #Re-raise exception

    point_coordinates = geo.get_data_points(absolute=True)
    title_list, point_attributes = concatinate_attributelist( \
        geo.get_all_attributes())

    if mesh_dict.has_key('geo_reference') and \
           mesh_dict['geo_reference'] is not None:
        mesh_origin = mesh_dict['geo_reference'].get_origin()
    else:
        mesh_origin = None

    if verbose: print "points file loaded"
    if verbose: print "fitting to mesh"
    f = fit_to_mesh(vertex_coordinates,
                    triangles,
                    point_coordinates,
                    point_attributes,
                    alpha=alpha,
                    verbose=verbose,
                    data_origin=None,
                    mesh_origin=mesh_origin)
    if verbose: print "finished fitting to mesh"

    # convert array to list of lists
    new_point_attributes = f.tolist()
    #FIXME have this overwrite attributes with the same title - DSG
    #Put the newer attributes last
    if old_title_list != []:
        old_title_list.extend(title_list)
        #FIXME can this be done a faster way? - DSG
        for i in range(len(old_point_attributes)):
            old_point_attributes[i].extend(new_point_attributes[i])
        mesh_dict['vertex_attributes'] = old_point_attributes
        mesh_dict['vertex_attribute_titles'] = old_title_list
    else:
        mesh_dict['vertex_attributes'] = new_point_attributes
        mesh_dict['vertex_attribute_titles'] = title_list

    if verbose: print "exporting to file ", mesh_output_file

    try:
        export_mesh_file(mesh_output_file, mesh_dict)
    except IOError, e:
        if display_errors:
            print "Could not write file. ", e
        raise IOError
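
# Example invocation of fit_to_mesh_file (an illustrative sketch; the file
# names are hypothetical):
#
#     fit_to_mesh_file('mesh.tsh', 'points.csv', 'fitted_mesh.tsh',
#                      alpha=DEFAULT_ALPHA, verbose=True)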