source: anuga_core/source/anuga/fit_interpolate/fit.py @ 4633

Last change on this file since 4633 was 4633, checked in by ole, 17 years ago

Added comments about blocking and deprecated xya format.
Also a small bug fix in sww2timeseries.

File size: 24.3 KB
Line 
1"""Least squares fitting.
2
3   Implements a penalised least-squares fit.
4
5   The penalty term (or smoothing term) is controlled by the smoothing
6   parameter alpha.
7   With a value of alpha=0, the fit function will attempt
8   to interpolate as closely as possible in the least-squares sense.
9   With values alpha > 0, a certain amount of smoothing will be applied.
10   A positive alpha is essential in cases where there are too few
11   data points.
12   A negative alpha is not allowed.
13   A typical value of alpha is 1.0e-6
14
15
16   Ole Nielsen, Stephen Roberts, Duncan Gray, Christopher Zoppou
17   Geoscience Australia, 2004.
18
19   TO DO
20   * test geo_ref, geo_spatial
21
22   IDEAS
23   * (DSG-) Change the interface of fit, so a domain object can
24      be passed in. (I don't know if this is feasible). If could
25      save time/memory.
26"""
27import types
28
29from Numeric import zeros, Float, ArrayType,take
30
31from anuga.caching import cache           
32from anuga.geospatial_data.geospatial_data import Geospatial_data, \
33     ensure_absolute
34from anuga.fit_interpolate.general_fit_interpolate import FitInterpolate
35from anuga.utilities.sparse import Sparse, Sparse_CSR
36from anuga.utilities.polygon import in_and_outside_polygon
37from anuga.fit_interpolate.search_functions import search_tree_of_vertices
38from anuga.utilities.cg_solve import conjugate_gradient
39from anuga.utilities.numerical_tools import ensure_numeric, gradient
40
import exceptions

# Raised by Fit.fit when there are fewer data points than mesh vertices
# and alpha == 0 (the system would be under-determined).
# NOTE(review): name contains a historical typo ('ToFew' should be 'TooFew');
# kept as-is because callers catch it by this name.
class ToFewPointsError(exceptions.Exception): pass

# Intended for meshes containing vertices that belong to no triangle.
# Currently only a warning is printed in Fit.fit; this is never raised.
class VertsWithNoTrianglesError(exceptions.Exception): pass

# Default smoothing parameter used when the caller does not supply alpha.
DEFAULT_ALPHA = 0.001
46
47
class Fit(FitInterpolate):
    """Penalised least-squares fit of point data to a triangular mesh.

    Accumulates the normal-equation matrices AtA (= A^t A) and Atz (= A^t z)
    incrementally, so point data can be supplied in blocks via
    build_fit_subset.  When alpha != 0 a smoothing matrix D is added and
    the fitted vertex values solve

        (AtA + alpha*D) x = Atz

    by conjugate gradients (see fit()).
    """

    def __init__(self,
                 vertex_coordinates,
                 triangles,
                 mesh_origin=None,
                 alpha = None,
                 verbose=False,
                 max_vertices_per_cell=None):


        """
        Fit data at points to the vertices of a mesh.

        Inputs:

          vertex_coordinates: List of coordinate pairs [xi, eta] of
              points constituting a mesh (or an m x 2 Numeric array or
              a geospatial object)
              Points may appear multiple times
              (e.g. if vertices have discontinuities)

          triangles: List of 3-tuples (or a Numeric array) of
              integers representing indices of all vertices in the mesh.

          mesh_origin: A geo_reference object or 3-tuples consisting of
              UTM zone, easting and northing.
              If specified vertex coordinates are assumed to be
              relative to their respective origins.

          alpha: Smoothing parameter (defaults to DEFAULT_ALPHA if None).

          max_vertices_per_cell: Number of vertices in a quad tree cell
          at which the cell is split into 4.

          Note: Don't supply a vertex coords as a geospatial object and
              a mesh origin, since geospatial has its own mesh origin.


        Usage,
        To use this in a blocking way, call  build_fit_subset, with z info,
        and then fit, with no point coord, z info.
        
        """
        # Initialise variables

        if alpha is None:

            self.alpha = DEFAULT_ALPHA
        else:
            self.alpha = alpha
        # Base class builds the mesh and the quad tree used for point search.
        FitInterpolate.__init__(self,
                 vertex_coordinates,
                 triangles,
                 mesh_origin,
                 verbose,
                 max_vertices_per_cell)
        
        m = self.mesh.number_of_nodes # Nbr of basis functions (vertices)

        # Normal-equation matrices; created lazily on the first call to
        # _build_matrix_AtA_Atz so data can arrive in blocks.
        self.AtA = None
        self.Atz = None

        # Total number of data points fed in so far (across all blocks).
        self.point_count = 0
        if self.alpha <> 0:
            if verbose: print 'Building smoothing matrix'
            self._build_smoothing_matrix_D()
            
    def _build_coefficient_matrix_B(self,
                                  verbose = False):
        """
        Build final coefficient matrix B = AtA + alpha*D (or just AtA
        when alpha is zero) and convert it to CSR format.

        Precon
        If alpha is not zero, matrix D has been built
        Matrix AtA has been built
        """

        if self.alpha <> 0:
            #if verbose: print 'Building smoothing matrix'
            #self._build_smoothing_matrix_D()
            self.B = self.AtA + self.alpha*self.D
        else:
            self.B = self.AtA

        # Convert self.B matrix to CSR format for faster matrix-vector
        # products during the conjugate gradient solve.
        self.B = Sparse_CSR(self.B)

    def _build_smoothing_matrix_D(self):
        """Build m x m smoothing matrix, where
        m is the number of basis functions phi_k (one per vertex)

        The smoothing matrix is defined as

        D = D1 + D2

        where

        [D1]_{k,l} = \int_\Omega
           \frac{\partial \phi_k}{\partial x}
           \frac{\partial \phi_l}{\partial x}\,
           dx dy

        [D2]_{k,l} = \int_\Omega
           \frac{\partial \phi_k}{\partial y}
           \frac{\partial \phi_l}{\partial y}\,
           dx dy


        The derivatives \frac{\partial \phi_k}{\partial x},
        \frac{\partial \phi_k}{\partial y} for a particular triangle
        are obtained by computing the gradient a_k, b_k for basis function k
        """
        
        #FIXME: algorithm might be optimised by computing local 9x9
        #"element stiffness matrices:

        m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)

        self.D = Sparse(m,m)

        #For each triangle compute contributions to D = D1+D2
        for i in range(len(self.mesh)):

            #Get area
            area = self.mesh.areas[i]

            #Get global vertex indices
            v0 = self.mesh.triangles[i,0]
            v1 = self.mesh.triangles[i,1]
            v2 = self.mesh.triangles[i,2]

            #Get the three vertex_points
            xi0 = self.mesh.get_vertex_coordinate(i, 0)
            xi1 = self.mesh.get_vertex_coordinate(i, 1)
            xi2 = self.mesh.get_vertex_coordinate(i, 2)

            # Compute gradients (a_k, b_k) of each linear basis function
            # over this triangle: basis k is 1 at vertex k, 0 at the others.
            a0, b0 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
                              1, 0, 0)

            a1, b1 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
                              0, 1, 0)

            a2, b2 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
                              0, 0, 1)

            # Gradients are constant per triangle, so each integral is just
            # (dot product of gradients) * area.
            #Compute diagonal contributions
            self.D[v0,v0] += (a0*a0 + b0*b0)*area
            self.D[v1,v1] += (a1*a1 + b1*b1)*area
            self.D[v2,v2] += (a2*a2 + b2*b2)*area

            #Compute contributions for basis functions sharing edges
            # (D is symmetric, hence the paired updates)
            e01 = (a0*a1 + b0*b1)*area
            self.D[v0,v1] += e01
            self.D[v1,v0] += e01

            e12 = (a1*a2 + b1*b2)*area
            self.D[v1,v2] += e12
            self.D[v2,v1] += e12

            e20 = (a2*a0 + b2*b0)*area
            self.D[v2,v0] += e20
            self.D[v0,v2] += e20


    def get_D(self):
        """Return the smoothing matrix D as a dense matrix (for testing)."""
        return self.D.todense()


    def _build_matrix_AtA_Atz(self,
                              point_coordinates,
                              z,
                              verbose = False):
        """Build:
        AtA  m x m  interpolation matrix, and,
        Atz  m x a  interpolation matrix where,
        m is the number of basis functions phi_k (one per vertex)
        a is the number of data attributes

        This algorithm uses a quad tree data structure for fast binning of
        data points.

        If AtA is None, the matrices AtA and Atz are created.

        This function can be called again and again, with sub-sets of
        the point coordinates.  Call fit to get the results.
        
        Preconditions
        z and points are numeric
        Point_coordinates and mesh vertices have the same origin.

        The number of attributes of the data points does not change
        """
        #Build n x m interpolation matrix

        if self.AtA == None:
            # AtA and Atz need to be initialised.
            # Atz is 1d when z has a single attribute, 2d otherwise.
            m = self.mesh.number_of_nodes
            if len(z.shape) > 1:
                att_num = z.shape[1]
                self.Atz = zeros((m,att_num), Float)
            else:
                att_num = 1
                self.Atz = zeros((m,), Float)
            assert z.shape[0] == point_coordinates.shape[0]

            self.AtA = Sparse(m,m)
            # The memory damage has been done by now.
            
        self.point_count += point_coordinates.shape[0]
        #print "_build_matrix_AtA_Atz - self.point_count", self.point_count
        if verbose: print 'Getting indices inside mesh boundary'
        #print 'point_coordinates.shape', point_coordinates.shape         
        #print 'self.mesh.get_boundary_polygon()',\
        #      self.mesh.get_boundary_polygon()

        # Points outside the mesh boundary contribute nothing; only the
        # inside indices are processed below.
        inside_poly_indices, outside_poly_indices  = \
                     in_and_outside_polygon(point_coordinates,
                                            self.mesh.get_boundary_polygon(),
                                            closed = True, verbose = verbose)
        #print "self.inside_poly_indices",self.inside_poly_indices
        #print "self.outside_poly_indices",self.outside_poly_indices

        
        n = len(inside_poly_indices)
        if verbose: print 'Building fitting matrix from %d points' %n
        #Compute matrix elements for points inside the mesh
        for d, i in enumerate(inside_poly_indices):
            #For each data_coordinate point
            if verbose and d%((n+10)/10)==0: print 'Doing %d of %d' %(d, n)
            x = point_coordinates[i]
            # Locate the triangle k containing x and the barycentric
            # coordinates (sigma0, sigma1, sigma2) of x within it.
            element_found, sigma0, sigma1, sigma2, k = \
                           search_tree_of_vertices(self.root, self.mesh, x)
            
            if element_found is True:
                j0 = self.mesh.triangles[k,0] #Global vertex id for sigma0
                j1 = self.mesh.triangles[k,1] #Global vertex id for sigma1
                j2 = self.mesh.triangles[k,2] #Global vertex id for sigma2

                sigmas = {j0:sigma0, j1:sigma1, j2:sigma2}
                js     = [j0,j1,j2]

                # Accumulate A^t z and A^t A contributions for this point.
                # NOTE(review): the inner loop variable k shadows the
                # triangle index k above; harmless since k is not reused
                # after this point, but worth renaming some day.
                for j in js:
                    self.Atz[j] +=  sigmas[j]*z[i]
                    #print "self.Atz building", self.Atz
                    #print "self.Atz[j]", self.Atz[j]
                    #print " sigmas[j]", sigmas[j]
                    #print "z[i]",z[i]
                    #print "result", sigmas[j]*z[i]
                    
                    for k in js:
                        self.AtA[j,k] += sigmas[j]*sigmas[k]
            else:
                # NOTE(review): msg is a (string, point) tuple here, not a
                # formatted string - looks unintentional; confirm before
                # relying on the exception text.
                msg = 'Could not find triangle for point', x
                raise Exception(msg)
    
        
    def fit(self, point_coordinates_or_filename=None, z=None,
            verbose=False,
            point_origin=None,
            attribute_name=None,
            max_read_lines=500):
        """Fit a smooth surface to given 1d array of data points z.

        The smooth surface is computed at each vertex in the underlying
        mesh using the formula given in the module doc string.

        Inputs:
        point_coordinates_or_filename: The co-ordinates of the data points.
              List of coordinate pairs [x, y] of
              data points or an nx2 Numeric array or a Geospatial_data object
              or filename (txt, csv, pts?).
              May be None if build_fit_subset has already been called.
          z: Single 1d vector or array of data at the point_coordinates.
          point_origin: geo_reference for the points (not allowed when
              reading from a file).
          attribute_name: Attribute to fit when reading from a file.
          max_read_lines: Block size used when reading points from a file.

        Returns the fitted vertex values (conjugate gradient solution).
        Raises ToFewPointsError if there are fewer points than vertices
        and alpha == 0.
        """
        # use blocking to load in the point info
        if type(point_coordinates_or_filename) == types.StringType:
            msg = "Don't set a point origin when reading from a file"
            assert point_origin is None, msg
            filename = point_coordinates_or_filename

            # Read the file lazily, block by block, to bound memory use.
            G_data = Geospatial_data(filename,
                                     max_read_lines=max_read_lines,
                                     load_file_now=False,
                                     verbose=verbose)

            for i, geo_block in enumerate(G_data):
                if verbose is True and 0 == i%200: # round every 5 minutes
                    # But this is dependant on the # of Triangles, so it
                    #isn't every 5 minutes.
                        
                    print 'Processing Block %d' %i
                    # FIXME (Ole): It would be good to say how many blocks
                    # there are here. But this is no longer necessary
                    # for pts files as they are reported in geospatial_data
                    # I suggest deleting this verbose output and make
                    # Geospatial_data more informative for txt files.
            

                    
                # build the array
                points = geo_block.get_data_points(absolute=True)
                z = geo_block.get_attributes(attribute_name=attribute_name)
                self.build_fit_subset(points, z)
            point_coordinates = None
        else:
            point_coordinates =  point_coordinates_or_filename
            
        if point_coordinates is None:
            # Data must already have been accumulated via build_fit_subset.
            assert self.AtA <> None
            assert self.Atz <> None
            #FIXME (DSG) - do  a message
        else:
            point_coordinates = ensure_absolute(point_coordinates,
                                                geo_reference=point_origin)
            #if isinstance(point_coordinates,Geospatial_data) and z is None:
            # z will come from the geo-ref
            self.build_fit_subset(point_coordinates, z, verbose)

        #Check sanity
        m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)
        n = self.point_count
        if n<m and self.alpha == 0.0:
            msg = 'ERROR (least_squares): Too few data points\n'
            msg += 'There are only %d data points and alpha == 0. ' %n
            msg += 'Need at least %d\n' %m
            msg += 'Alternatively, set smoothing parameter alpha to a small '
            msg += 'positive value,\ne.g. 1.0e-3.'
            raise ToFewPointsError(msg)

        self._build_coefficient_matrix_B(verbose)
        loners = self.mesh.get_lone_vertices()
        # FIXME  - make this as error message.
        # test with
        # Not_yet_test_smooth_att_to_mesh_with_excess_verts.
        if len(loners)>0:
            msg = 'WARNING: (least_squares): \nVertices with no triangles\n'
            msg += 'All vertices should be part of a triangle.\n'
            msg += 'In the future this will be inforced.\n'
            msg += 'The following vertices are not part of a triangle;\n'
            msg += str(loners)
            print msg
            #raise VertsWithNoTrianglesError(msg)
        
        # Solve B x = Atz by conjugate gradients, using Atz itself as the
        # initial guess.
        return conjugate_gradient(self.B, self.Atz, self.Atz,
                                  imax=2*len(self.Atz) )

        
    def build_fit_subset(self, point_coordinates, z=None, attribute_name=None,
                              verbose=False):
        """Accumulate a block of data points into AtA and Atz.

        Call this once per block, then call fit() with no point
        coordinates to obtain the fitted surface.

        Inputs:
        point_coordinates: The co-ordinates of the data points.
              List of coordinate pairs [x, y] of
              data points or an nx2 Numeric array or a Geospatial_data object
        z: Single 1d vector or array of data at the point_coordinates.
        attribute_name: Used to get the z values from the
              geospatial object if no attribute_name is specified,
              it's a bit of a lucky dip as to what attributes you get.
              If there is only one attribute it will be that one.

        """

        #FIXME(DSG-DSG): Check that the vert and point coords
        #have the same zone.
        if isinstance(point_coordinates,Geospatial_data):
            point_coordinates = point_coordinates.get_data_points( \
                absolute = True)
        
        #Convert input to Numeric arrays
        if z is not None:
            z = ensure_numeric(z, Float)
        else:
            # No explicit z: pull the attribute values from the
            # geospatial object instead.
            msg = 'z not specified'
            assert isinstance(point_coordinates,Geospatial_data), msg
            z = point_coordinates.get_attributes(attribute_name)
            
        point_coordinates = ensure_numeric(point_coordinates, Float)

        self._build_matrix_AtA_Atz(point_coordinates, z, verbose)
432
433
434############################################################################
435
def fit_to_mesh(vertex_coordinates,
                triangles,
                point_coordinates, # this can also be a .csv/.txt file name
                point_attributes=None,
                alpha=DEFAULT_ALPHA,
                verbose=False,
                acceptable_overshoot=1.01,
                mesh_origin=None,
                data_origin=None,
                max_read_lines=None,
                attribute_name=None,
                use_cache = False):
    """Wrapper around internal function _fit_to_mesh for use with caching.

    All arguments are passed through unchanged to _fit_to_mesh; see its
    docstring for their meaning.

    If use_cache is True the result is memoised via anuga.caching.  When
    point_coordinates is a file name it is registered as a cache
    dependency so the cache is invalidated if the file changes on disk.

    Returns the fitted vertex attribute values.
    """

    args = (vertex_coordinates, triangles, point_coordinates, )
    kwargs = {'point_attributes': point_attributes,
              'alpha': alpha,
              'verbose': verbose,
              'acceptable_overshoot': acceptable_overshoot,
              'mesh_origin': mesh_origin,
              'data_origin': data_origin,
              'max_read_lines': max_read_lines,
              'attribute_name': attribute_name,
              'use_cache':use_cache
              }

    if use_cache is True:
        if isinstance(point_coordinates, basestring):
            # We assume that point_coordinates is the name of a .csv/.txt
            # file which must be passed onto caching as a dependency
            # (in case it has changed on disk)
            dep = [point_coordinates]
        else:
            dep = None

        return cache(_fit_to_mesh,
                     args, kwargs,
                     verbose=verbose,
                     compression=False,
                     dependencies=dep)
    else:
        # Call directly; apply() is deprecated (and removed in Python 3).
        return _fit_to_mesh(*args, **kwargs)
481
def _fit_to_mesh(vertex_coordinates,
                 triangles,
                 point_coordinates, # this can also be a points file name
                 point_attributes=None,
                 alpha=DEFAULT_ALPHA,
                 verbose=False,
                 acceptable_overshoot=1.01,
                 mesh_origin=None,
                 data_origin=None,
                 max_read_lines=None,
                 attribute_name=None,
                 use_cache = False):
    """Fit a smooth surface to a triangulation, given data points
    with attributes.

    Inputs:
        vertex_coordinates: List of coordinate pairs [xi, eta] of
            points constituting a mesh (or an m x 2 Numeric array or
            a geospatial object).  Points may appear multiple times
            (e.g. if vertices have discontinuities).

        triangles: List of 3-tuples (or a Numeric array) of integers
            representing indices of all vertices in the mesh.

        point_coordinates: List of coordinate pairs [x, y] of data
            points (or an nx2 Numeric array).  This can also be a
            .csv/.txt/.pts file name.

        point_attributes: Vector or array of data at the
            point_coordinates.

        alpha: Smoothing parameter.

        acceptable_overshoot: controls the allowed factor by which
            fitted values may exceed the value of input data.  The lower
            limit is defined as min(z) - acceptable_overshoot*delta z
            and the upper limit as max(z) + acceptable_overshoot*delta z.
            (Not currently applied here.)

        mesh_origin: A geo_reference object or 3-tuples consisting of
            UTM zone, easting and northing.  If specified, vertex
            coordinates are assumed to be relative to their respective
            origins.

        use_cache: Accepted for signature compatibility; caching is
            handled by the public wrapper fit_to_mesh.

    Returns the fitted attribute values at the mesh vertices.
    """
    # Duncan and Ole think that this isn't worth caching here;
    # caching happens at the higher level (fit_to_mesh) anyway.
    fitter = Fit(vertex_coordinates,
                 triangles,
                 verbose=verbose,
                 mesh_origin=mesh_origin,
                 alpha=alpha)

    # FIXME: the value-checking (overshoot clamping) from least squares
    # could be pushed down into Fit, e.g. by saving max/min as attributes.
    return fitter.fit(point_coordinates,
                      point_attributes,
                      point_origin=data_origin,
                      max_read_lines=max_read_lines,
                      attribute_name=attribute_name,
                      verbose=verbose)
554
555
556#def _fit(*args, **kwargs):
557#    """Private function for use with caching. Reason is that classes
558#    may change their byte code between runs which is annoying.
559#    """
560#   
561#    return Fit(*args, **kwargs)
562
563
564def fit_to_mesh_file(mesh_file, point_file, mesh_output_file,
565                     alpha=DEFAULT_ALPHA, verbose= False,
566                     expand_search = False,
567                     precrop = False,
568                     display_errors = True):
569    """
570    Given a mesh file (tsh) and a point attribute file (xya), fit
571    point attributes to the mesh and write a mesh file with the
572    results.
573
574    Note: the .xya files need titles.  If you want anuga to use the tsh file,
575    make sure the title is elevation.
576
577    NOTE: Throws IOErrors, for a variety of file problems.
578   
579    """
580
581    from load_mesh.loadASCII import import_mesh_file, \
582         export_mesh_file, concatinate_attributelist
583
584
585    try:
586        mesh_dict = import_mesh_file(mesh_file)
587    except IOError,e:
588        if display_errors:
589            print "Could not load bad file. ", e
590        raise IOError  #Could not load bad mesh file.
591   
592    vertex_coordinates = mesh_dict['vertices']
593    triangles = mesh_dict['triangles']
594    if type(mesh_dict['vertex_attributes']) == ArrayType:
595        old_point_attributes = mesh_dict['vertex_attributes'].tolist()
596    else:
597        old_point_attributes = mesh_dict['vertex_attributes']
598
599    if type(mesh_dict['vertex_attribute_titles']) == ArrayType:
600        old_title_list = mesh_dict['vertex_attribute_titles'].tolist()
601    else:
602        old_title_list = mesh_dict['vertex_attribute_titles']
603
604    if verbose: print 'tsh file %s loaded' %mesh_file
605
606    # load in the points file
607    try:
608        geo = Geospatial_data(point_file, verbose=verbose)
609    except IOError,e:
610        if display_errors:
611            print "Could not load bad file. ", e
612        raise IOError  #Re-raise exception 
613
614    point_coordinates = geo.get_data_points(absolute=True)
615    title_list,point_attributes = concatinate_attributelist( \
616        geo.get_all_attributes())
617
618    if mesh_dict.has_key('geo_reference') and \
619           not mesh_dict['geo_reference'] is None:
620        mesh_origin = mesh_dict['geo_reference'].get_origin()
621    else:
622        mesh_origin = None
623
624    if verbose: print "points file loaded"
625    if verbose: print "fitting to mesh"
626    f = fit_to_mesh(vertex_coordinates,
627                    triangles,
628                    point_coordinates,
629                    point_attributes,
630                    alpha = alpha,
631                    verbose = verbose,
632                    data_origin = None,
633                    mesh_origin = mesh_origin)
634    if verbose: print "finished fitting to mesh"
635
636    # convert array to list of lists
637    new_point_attributes = f.tolist()
638    #FIXME have this overwrite attributes with the same title - DSG
639    #Put the newer attributes last
640    if old_title_list <> []:
641        old_title_list.extend(title_list)
642        #FIXME can this be done a faster way? - DSG
643        for i in range(len(old_point_attributes)):
644            old_point_attributes[i].extend(new_point_attributes[i])
645        mesh_dict['vertex_attributes'] = old_point_attributes
646        mesh_dict['vertex_attribute_titles'] = old_title_list
647    else:
648        mesh_dict['vertex_attributes'] = new_point_attributes
649        mesh_dict['vertex_attribute_titles'] = title_list
650
651    if verbose: print "exporting to file ", mesh_output_file
652
653    try:
654        export_mesh_file(mesh_output_file, mesh_dict)
655    except IOError,e:
656        if display_errors:
657            print "Could not write file. ", e
658        raise IOError
Note: See TracBrowser for help on using the repository browser.