Timestamp:
Feb 13, 2013, 3:26:15 PM
Author:
steve
Message:

Rolling in Padarn's update to fit_interpolation
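
For orientation, here is a minimal, hypothetical usage sketch of the updated Fit class, based only on the signatures visible in the diff below; vertex_coordinates, triangles, 'points.pts', 'elevation' and the alpha value are placeholders, while cg_precon and use_c_cg are the new keyword arguments introduced by this changeset (shown with their defaults).

    # Hedged sketch only; data names and values are placeholders.
    from anuga.fit_interpolate.fit import Fit

    fitter = Fit(vertex_coordinates=vertex_coordinates,  # m x 2 array of mesh vertices
                 triangles=triangles,                    # triangle index array
                 alpha=0.001,                            # smoothing parameter
                 cg_precon='None',                       # new: preconditioner for the CG solver
                 use_c_cg=True)                          # new: use the C conjugate gradient code

    # For a .pts file, blocking and matrix assembly are now done in C (see fit() below).
    vertex_values = fitter.fit('points.pts', attribute_name='elevation')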

File:
1 edited

  • trunk/anuga_core/source/anuga/fit_interpolate/fit.py

    r8624 r8690  
    2828
    2929from anuga.abstract_2d_finite_volumes.neighbour_mesh import Mesh
    30 from anuga.caching import cache           
     30from anuga.caching import cache
    3131from anuga.geospatial_data.geospatial_data import Geospatial_data, \
    3232     ensure_absolute
    3333from anuga.fit_interpolate.general_fit_interpolate import FitInterpolate
    34 from anuga.utilities.sparse import Sparse, Sparse_CSR
    35 from anuga.geometry.polygon import inside_polygon, is_inside_polygon
    36 from anuga.pmesh.mesh_quadtree import MeshQuadtree
    37 
     34
     35from anuga.utilities.sparse import Sparse_CSR
     36from anuga.utilities.numerical_tools import ensure_numeric
    3837from anuga.utilities.cg_solve import conjugate_gradient
    39 from anuga.utilities.numerical_tools import ensure_numeric, gradient
    4038from anuga.config import default_smoothing_parameter as DEFAULT_ALPHA
    4139import anuga.utilities.log as log
     
    4846import sys
    4947
     48# Setup for c fitting routines
     49from anuga.utilities import compile
     50if compile.can_use_C_extension('fitsmooth.c'):
     51    import fitsmooth
     52else:
     53    msg = "C implementation of fitting algorithms (fitsmooth.c) not avalaible"
     54    raise Exception(msg)
     55
     56# Setup for quad_tree extension
     57from anuga.utilities import compile
     58if compile.can_use_C_extension('quad_tree_ext.c'):
     59    import quad_tree_ext
     60else:
      61    msg = "C implementation of quad tree extension not available"
     62    raise Exception(msg)
     63
     64# Setup for sparse_matrix extension
     65from anuga.utilities import compile
     66if compile.can_use_C_extension('sparse_matrix_ext.c'):
     67    import sparse_matrix_ext
     68else:
      69    msg = "C implementation of sparse_matrix extension not available"
     70    raise Exception(msg)
     71
     72
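
Because these checks run at module import time, importing anuga.fit_interpolate.fit now fails outright if any of the three C extensions is missing. A minimal sketch, assuming only the can_use_C_extension helper shown above, of probing for them up front:

    # Hedged sketch: probe the three C extensions this module now requires.
    from anuga.utilities import compile

    for ext in ('fitsmooth.c', 'quad_tree_ext.c', 'sparse_matrix_ext.c'):
        if not compile.can_use_C_extension(ext):
            raise RuntimeError('Required C extension not available: %s' % ext)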
    5073
    5174class Fit(FitInterpolate):
    52    
     75
    5376    def __init__(self,
    5477                 vertex_coordinates=None,
     
    5679                 mesh=None,
    5780                 mesh_origin=None,
    58                  alpha = None,
    59                  verbose=False):
    60 
     81                 alpha=None,
     82                 verbose=False,
     83                 cg_precon='None',
     84                 use_c_cg=True):
    6185
    6286        """
     87        Padarn Note 05/12/12: This documentation should probably
     88        be updated to account for the fact that the fitting is now
      89        done in C. I wasn't sure what details were necessary though.
     90
    6391        Fit data at points to the vertices of a mesh.
    6492
     
    6694
    6795          vertex_coordinates: List of coordinate pairs [xi, eta] of
    68               points constituting a mesh (or an m x 2 numeric array or
     96          points constituting a mesh (or an m x 2 numeric array or
    6997              a geospatial object)
    7098              Points may appear multiple times
     
    73101          triangles: List of 3-tuples (or a numeric array) of
    74102              integers representing indices of all vertices in the mesh.
    75 
    76           mesh: Object containing vertex_coordinates and triangles. Either
    77               mesh = None or both vertex_coordinates and triangles = None
    78103
    79104          mesh_origin: A geo_reference object or 3-tuples consisting of
     
     89114        To use this in a blocking way, call build_fit_subset with point and z info,
     90115        and then call fit with no point or z info.
    91        
    92116        """
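
A short, hypothetical sketch of the blocking usage described in the docstring above; vertex_coordinates, triangles and blocks are placeholders supplied by the caller.

    # Hedged sketch of block-wise fitting; all data names below are placeholders.
    fitter = Fit(vertex_coordinates=vertex_coordinates, triangles=triangles)

    for points, z in blocks:                 # iterable of (n x 2 points, n values) chunks
        fitter.build_fit_subset(points, z)   # accumulates AtA and Atz
    vertex_values = fitter.fit()             # no point data: solve with the accumulated matrices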
     93117        # Initialise variables
    94118        if alpha is None:
    95119            self.alpha = DEFAULT_ALPHA
    96         else:   
     120        else:
    97121            self.alpha = alpha
    98            
     122
    99123        FitInterpolate.__init__(self,
    100124                 vertex_coordinates,
     
    103127                 mesh_origin=mesh_origin,
    104128                 verbose=verbose)
    105        
    106         m = self.mesh.number_of_nodes # Nbr of basis functions (vertices)
    107        
     129
    108130        self.AtA = None
    109131        self.Atz = None
    110 
     132        self.D = None
    111133        self.point_count = 0
    112         if self.alpha <> 0:
    113             if verbose: log.critical('Fit: Building smoothing matrix')
    114             self._build_smoothing_matrix_D(verbose=verbose)
    115 
    116         if verbose: log.critical('Fit: Get Boundary Polygon')
    117         bd_poly = self.mesh.get_boundary_polygon()   
     134
     135        # NOTE PADARN: NEEDS FIXING - currently need smoothing matrix
      136        # even if alpha is zero, due to the C function expecting it. This
     137        # could and should be removed.
     138        if True:
     139            if verbose:
     140                log.critical('Building smoothing matrix')
     141            self.D = self._build_smoothing_matrix_D()
     142
     143        bd_poly = self.mesh.get_boundary_polygon()
    118144        self.mesh_boundary_polygon = ensure_numeric(bd_poly)
    119145
    120         if verbose: log.critical('Fit: Done')
    121 
    122            
     146        self.cg_precon=cg_precon
     147        self.use_c_cg=use_c_cg
     148
    123149    def _build_coefficient_matrix_B(self,
    124                                   verbose = False):
     150                                  verbose=False):
    125151        """
    126         Build final coefficient matrix
    127 
    128         Precon
    129         If alpha is not zero, matrix D has been built
    130         Matrix Ata has been built
     152        Build final coefficient matrix from AtA and D
    131153        """
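
In the notation of the surrounding docstrings, the matrix assembled here and solved at the end of fit() is

    B = A^{\mathsf T} A + \alpha D, \qquad B \, \hat{z} = A^{\mathsf T} z,

where A^T A and A^T z are accumulated by _build_matrix_AtA_Atz, D is the smoothing matrix, \alpha the smoothing parameter, and \hat{z} holds the fitted vertex values returned by conjugate_gradient.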
    132154
    133         if verbose:
    134             import time
    135             t0 = time.time()
    136             print 'Fit: Build Coefficient Matrix B ',
    137 
    138 
    139         if self.alpha <> 0:
    140             #if verbose: log.critical('Building smoothing matrix')
    141             #self._build_smoothing_matrix_D()
    142             # FIXME SR: This is quite time consuming.
    143             # As AtA and D have same structure it should be possible
    144             # to speed this up.
    145             self.B = self.AtA + self.alpha*self.D
    146         else:
    147             self.B = self.AtA
    148 
    149 
    150         if verbose:
    151             import time
    152             dt = time.time()-t0
    153             print '%g secs' % dt
    154             print 'Fit: Convert Coefficient Matrix B to Sparse_CSR'
    155            
    156         # Convert self.B matrix to CSR format for faster matrix vector
    157         self.B = Sparse_CSR(self.B)
    158 
    159     def _build_smoothing_matrix_D(self, verbose=False):
     155        msize = self.mesh.number_of_nodes
     156
     157        self.B = fitsmooth.build_matrix_B(self.D, \
     158                                          self.AtA, self.alpha)
     159
     160        # Convert self.B matrix to CSR format
     161        self.B = Sparse_CSR(data=num.array(self.B[0]),\
     162                                  Colind=num.array(self.B[1]),\
     163                                  rowptr=num.array(self.B[2]), \
     164                                  m=msize, n=msize)
     165        # NOTE PADARN: The above step could potentially be removed
     166        # and the sparse matrix worked with directly in C. Not sure
     167        # if this would be worthwhile.
     168
     169    def _build_smoothing_matrix_D(self):
    160170        """Build m x m smoothing matrix, where
    161171        m is the number of basis functions phi_k (one per vertex)
     
    181191        \frac{\partial \phi_k}{\partial x} for a particular triangle
    182192        are obtained by computing the gradient a_k, b_k for basis function k
     193
     194        NOTE PADARN: All of this is now done in an external C function, and the
     195        result is stored in a Capsule object, meaning the entries cannot be directly
     196        accessed.
    183197        """
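
For reference, the smoothing matrix referred to above (its definition is elided in this hunk, but appears in full in the docstring of the removed original further down) is

    D = D_1 + D_2, \qquad
    [D_1]_{k,l} = \int_\Omega \frac{\partial \phi_k}{\partial x}
                              \frac{\partial \phi_l}{\partial x} \, dx \, dy, \qquad
    [D_2]_{k,l} = \int_\Omega \frac{\partial \phi_k}{\partial y}
                              \frac{\partial \phi_l}{\partial y} \, dx \, dy.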
    184        
    185         # FIXME: algorithm might be optimised by computing local 9x9
    186         # "element stiffness matrices:
    187 
    188         m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)
    189 
    190         self.D = Sparse(m,m)
    191 
    192         import time
    193         t0 = time.time()
    194 
    195         if verbose :
    196             print '['+60*' '+']',
    197             sys.stdout.flush()
    198 
    199         N = len(self.mesh)
    200         M = N/60
    201 
    202         # For each triangle compute contributions to D = D1+D2
    203         for i in xrange(N):
    204 
    205             if verbose and i%M == 0 :
    206                 #restart_line()
    207                 print '\r['+(i/M)*'.'+(60-(i/M))*' ' +']',
    208                 sys.stdout.flush()
    209 
    210             # Get area
    211             area = self.mesh.areas[i]
    212 
    213             # Get global vertex indices
    214             v0 = self.mesh.triangles[i,0]
    215             v1 = self.mesh.triangles[i,1]
    216             v2 = self.mesh.triangles[i,2]
    217 
    218             # Get the three vertex_points
    219             xi0 = self.mesh.get_vertex_coordinate(i, 0)
    220             xi1 = self.mesh.get_vertex_coordinate(i, 1)
    221             xi2 = self.mesh.get_vertex_coordinate(i, 2)
    222 
    223             # Compute gradients for each vertex
    224             a0, b0 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
    225                               1, 0, 0)
    226 
    227             a1, b1 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
    228                               0, 1, 0)
    229 
    230             a2, b2 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
    231                               0, 0, 1)
    232 
    233             # Compute diagonal contributions
    234             self.D[v0,v0] += (a0*a0 + b0*b0)*area
    235             self.D[v1,v1] += (a1*a1 + b1*b1)*area
    236             self.D[v2,v2] += (a2*a2 + b2*b2)*area
    237 
    238             # Compute contributions for basis functions sharing edges
    239             e01 = (a0*a1 + b0*b1)*area
    240             self.D[v0,v1] += e01
    241             self.D[v1,v0] += e01
    242 
    243             e12 = (a1*a2 + b1*b2)*area
    244             self.D[v1,v2] += e12
    245             self.D[v2,v1] += e12
    246 
    247             e20 = (a2*a0 + b2*b0)*area
    248             self.D[v2,v0] += e20
    249             self.D[v0,v2] += e20
    250 
    251         if verbose:
    252             print ' %f secs' % (time.time()-t0)
    253 
     198
     199        # NOTE PADARN: Should the input arguments here be checked - making
     200        # sure that they are floats? Not sure if this is done elsewhere.
     201        # NOTE PADARN: Should global coordinates be used for the smoothing
      202        # matrix, or is this not important?
     203        return fitsmooth.build_smoothing_matrix(self.mesh.triangles, \
     204          self.mesh.areas, self.mesh.vertex_coordinates)
     205
     206
     207    # NOTE PADARN: This function was added to emulate behavior of the original
      208    # class not using external C functions. This method is dangerous as D could
     209    # be very large - it was added as it is used in a unit test.
    254210    def get_D(self):
    255         return self.D.todense()
    256 
    257 
    258 
    259     def _build_matrix_AtA_Atz(self,
    260                               point_coordinates,
    261                               z,
    262                               verbose = False,
    263                               output=None):
    264 
    265 
     211        return fitsmooth.return_full_D(self.D, self.mesh.number_of_nodes)
     212
     213    # NOTE PADARN: This function was added to emulate behavior of the original
      214    # class so as to pass a unit test. It is completely unneeded.
     215    def build_fit_subset(self, point_coordinates, z=None, attribute_name=None,
     216                              verbose=False, output='dot'):
     217        self._build_matrix_AtA_Atz(point_coordinates, z, attribute_name, verbose, output)
     218
     219    def _build_matrix_AtA_Atz(self, point_coordinates, z=None, attribute_name=None,
     220                              verbose=False, output='dot'):
    266221        """Build:
    267222        AtA  m x m  interpolation matrix, and,
     
    277232        This function can be called again and again, with sub-sets of
    278233        the point coordinates.  Call fit to get the results.
    279        
     234
    280235        Preconditions
    281236        z and points are numeric
     
    285240        """
    286241
    287 
    288         if verbose and output == 'counter':
    289             print 'Fit: Build Matrix AtA Atz'
    290 
    291         import time
    292         t0 = time.time()
    293 
    294         # Build n x m interpolation matrix
    295         if self.AtA == None:
    296             # AtA and Atz need to be initialised.
    297             m = self.mesh.number_of_nodes
    298             if len(z.shape) > 1:
    299                 att_num = z.shape[1]
    300                 self.Atz = num.zeros((m,att_num), num.float)
     242        if isinstance(point_coordinates, Geospatial_data):
     243            point_coordinates = point_coordinates.get_data_points( \
     244                absolute=True)
     245
     246        # Convert input to numeric arrays
     247        if z is not None:
     248            z = ensure_numeric(z, num.float)
     249        else:
     250            msg = 'z not specified'
     251            assert isinstance(point_coordinates, Geospatial_data), msg
     252            z = point_coordinates.get_attributes(attribute_name)
     253
     254        point_coordinates = ensure_numeric(point_coordinates, num.float)
     255
     256        npts = len(z)
     257        z = num.array(z)
      258        # NOTE PADARN: This copy might be needed to
      259        # make sure memory is contiguous - would be better to read in C.
     260        z = z.copy()
     261
     262        self.point_count += z.shape[0]
     263
     264        zdim = 1
     265        if len(z.shape) != 1:
     266            zdim = z.shape[1]
     267
     268        [AtA, Atz] = fitsmooth.build_matrix_AtA_Atz_points(self.root.root, \
     269               self.mesh.number_of_nodes, \
     270               self.mesh.triangles, \
     271               num.array(point_coordinates), z, zdim, npts)
     272
     273        if verbose and output == 'dot':
     274            print '\b.',
     275            sys.stdout.flush()
     276        if zdim == 1:
     277            Atz = num.array(Atz[0])
     278        else:
     279            Atz = num.array(Atz).transpose()
     280
     281        if self.AtA is None and self.Atz is None:
     282            self.AtA = AtA
     283            self.Atz = Atz
     284        else:
     285            fitsmooth.combine_partial_AtA_Atz(self.AtA, AtA,\
     286                    self.Atz, Atz, zdim, self.mesh.number_of_nodes)
     287
     288    def _build_matrix_AtA_Atz_file(self, filename, attribute_name=None, max_read_lines=500,\
     289                                   verbose=False):
     290        """Build:
     291        AtA  m x m  interpolation matrix, and,
     292        Atz  m x a  interpolation matrix where,
     293        m is the number of basis functions phi_k (one per vertex)
     294        a is the number of data attributes
     295
     296        This algorithm uses a quad tree data structure for fast binning of
     297        data points.
     298
      299        If AtA is None, the matrices AtA and Atz are created.
     300
     301        This function can be called again and again, with sub-sets of
     302        the point coordinates.  Call fit to get the results.
     303
     304        Preconditions
     305        z and points are numeric
      306        Point_coordinates and mesh vertices have the same origin.
     307
     308        The number of attributes of the data points does not change
     309        """
      310        # PADARN NOTE: The following block of code is translated from
      311        # things that were being done in the Geospatial_data object,
      312        # which is no longer required if the data is read from file in C.
     313        #---------------------------------------------------------
      314        # The default attribute name here seems to only ever
      315        # be 'None'.
     316        G_data = Geospatial_data(filename,
     317                                         max_read_lines=1,
     318                                         load_file_now=True,
     319                                         verbose=verbose)
     320
     321
     322        if attribute_name is None:
     323            if G_data.default_attribute_name is not None:
     324                attribute_name = G_data.default_attribute_name
    301325            else:
    302                 att_num = 1
    303                 self.Atz = num.zeros((m,), num.float)
    304             assert z.shape[0] == point_coordinates.shape[0]
    305 
    306             AtA = Sparse(m,m)
    307             # The memory damage has been done by now.
    308         else:
    309              AtA = self.AtA # Did this for speed, did ~nothing
    310         self.point_count += point_coordinates.shape[0]
    311 
    312 
    313         inside_indices = inside_polygon(point_coordinates,
    314                                         self.mesh_boundary_polygon,
    315                                         closed=True,
    316                                         verbose=False) # Suppress output
    317        
    318         n = len(inside_indices)
    319 
    320         # Compute matrix elements for points inside the mesh
    321         triangles = self.mesh.triangles # Shorthand
    322 
    323 
    324         if verbose and output == 'counter' :
    325             print '['+60*' '+']',
    326             #print '\b.',
    327             sys.stdout.flush()
    328 
    329         m = max(n/60,1)
    330 
    331 
    332         for d in xrange(n):
    333             i = inside_indices[d]
    334 
    335 #        for d, i in enumerate(inside_indices):
    336 #            # For each data_coordinate point
    337 #            # if verbose and d%((n+10)/10)==0: log.critical('Doing %d of %d'
    338 #                                                            # %(d, n))
    339 #            x = point_coordinates[i]
    340 
    341             # For each data_coordinate point
    342             # if verbose and d%((n+10)/10)==0: log.critical('Doing %d of %d'
    343                                                             # %(d, n))
    344 
    345 
    346             if verbose and output == 'counter' and i%m == 0 :
    347                 print '\r['+(i/m)*'.'+(60-(i/m))*' '+']',
    348                 sys.stdout.flush()
    349 
    350 
    351             x = point_coordinates[i]
    352            
    353             element_found, sigma0, sigma1, sigma2, k = \
    354                            self.root.search_fast(x)
    355            
    356             if element_found is True:
    357                 j0 = triangles[k,0] # Global vertex id for sigma0
    358                 j1 = triangles[k,1] # Global vertex id for sigma1
    359                 j2 = triangles[k,2] # Global vertex id for sigma2
    360 
    361                 sigmas = {j0:sigma0, j1:sigma1, j2:sigma2}
    362                 js     = [j0,j1,j2]
    363 
    364                 for j in js:
    365                     self.Atz[j] +=  sigmas[j]*z[i]
    366                    
    367                     for k in js:
    368                         AtA[j,k] += sigmas[j]*sigmas[k]
    369             else:
    370                 flag = is_inside_polygon(x,
    371                                          self.mesh_boundary_polygon,
    372                                          closed=True,
    373                                          verbose=False) # Suppress output
    374                 msg = 'Point (%f, %f) is not inside mesh boundary' % tuple(x)
    375                 assert flag is True, msg               
    376                
    377                 # data point has fallen within a hole - so ignore it.
    378 
    379 
    380         if verbose and output == 'counter':
    381             print ' %f secs' % (time.time()-t0)
    382            
     326                attribute_name = G_data.attributes.keys()[0]
      327                # The line above takes the first attribute key.
     328
     329        if verbose is True:
     330            log.critical('Using attribute %s' % attribute_name)
     331            log.critical('Available attributes: %s' % (G_data.attributes.keys()))
     332
     333        msg = 'Attribute name %s does not exist in data set' % attribute_name
     334        assert G_data.attributes.has_key(attribute_name), msg
     335        #---------------------------------------------------------
     336
     337        # MAKE SURE READING ABSOLUTE POINT COORDINATES
     338        [AtA, Atz, npts] = fitsmooth.build_matrix_AtA_Atz_fileread(self.root.root, \
     339               self.mesh.number_of_nodes, \
     340               self.mesh.triangles, \
     341               filename, \
     342               attribute_name, \
     343               max_read_lines)
     344        self.point_count = npts
    383345        self.AtA = AtA
    384 
    385        
     346        self.Atz = Atz
     347
    386348    def fit(self, point_coordinates_or_filename=None, z=None,
    387349            verbose=False,
    388350            point_origin=None,
    389351            attribute_name=None,
    390             max_read_lines=None):
     352            max_read_lines=500,
     353            use_blocking_option2=True):
    391354        """Fit a smooth surface to given 1d array of data points z.
    392355
     
    397360        point_coordinates: The co-ordinates of the data points.
    398361              List of coordinate pairs [x, y] of
    399               data points or an nx2 numeric array or a Geospatial_data object
    400               or points file filename 
     362        data points or an nx2 numeric array or a Geospatial_data object
      363              or a points file name
    401364          z: Single 1d vector or array of data at the point_coordinates.
    402          
     365
    403366        """
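
A minimal, hypothetical example of the in-memory path described above (array values are placeholders, and fitter is a Fit instance constructed as earlier):

    import numpy as num
    # Hedged sketch: fit scattered data given as an n x 2 array plus one value per point.
    points = num.array([[0.2, 0.3], [0.7, 0.1], [0.4, 0.8]])
    z = num.array([1.0, 2.5, 0.7])
    vertex_values = fitter.fit(points, z, verbose=True)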
    404 
    405 
    406         if verbose:
    407             print 'Fit.fit: Initializing'
    408 
    409         # Use blocking to load in the point info
    410367        if isinstance(point_coordinates_or_filename, basestring):
    411             msg = "Don't set a point origin when reading from a file"
    412             assert point_origin is None, msg
    413             filename = point_coordinates_or_filename
    414 
    415             G_data = Geospatial_data(filename,
    416                                      max_read_lines=max_read_lines,
    417                                      load_file_now=False,
    418                                      verbose=verbose)
    419 
    420 
    421             for i, geo_block in enumerate(G_data):
    422 
    423                 if verbose is True and 0 == i%200:
    424                     pass
    425                     # The time this will take
    426                     # is dependant on the # of Triangles
    427                        
    428                     #log.critical('Processing Block %d' % i)
    429                     # FIXME (Ole): It would be good to say how many blocks
    430                     # there are here. But this is no longer necessary
    431                     # for pts files as they are reported in geospatial_data
    432                     # I suggest deleting this verbose output and make
    433                     # Geospatial_data more informative for txt files.
    434                     #
    435                     # I still think so (12/12/7, Ole).
    436            
    437 
    438                    
    439                 # Build the array
    440 
    441                 points = geo_block.get_data_points(absolute=True)
    442                 z = geo_block.get_attributes(attribute_name=attribute_name)
    443                 self.build_fit_subset(points, z, attribute_name, verbose)
    444 
    445 
    446                 # FIXME(Ole): I thought this test would make sense here
    447                 # See test_fitting_example_that_crashed_2 in test_shallow_water_domain.py
    448                 # Committed 11 March 2009
    449                 msg = 'Matrix AtA was not built'
    450                 assert self.AtA is not None, msg
    451 
    452             point_coordinates = None
    453 
    454             if verbose: print ''
    455            
     368            if point_coordinates_or_filename[-4:] != ".pts":
     369                use_blocking_option2 = False
      370        # NOTE PADARN 12/12/12: Currently trying to get all blocking done
      371        # in C, but blocking option 1, which does everything through the Python
      372        # interface, can be used in the meantime (much slower).
     373        #-----------------------BLOCKING OPTION 1----------------------------
     374        if use_blocking_option2 is False:
     375            if verbose:
     376                print 'Fit.fit: Initializing'
     377
     378            # Use blocking to load in the point info
     379            if isinstance(point_coordinates_or_filename, basestring):
     380                msg = "Don't set a point origin when reading from a file"
     381                assert point_origin is None, msg
     382                filename = point_coordinates_or_filename
     383
     384                G_data = Geospatial_data(filename,
     385                                         max_read_lines=max_read_lines,
     386                                         load_file_now=False,
     387                                         verbose=verbose)
     388
     389                for i, geo_block in enumerate(G_data):
     390
     391                   # Build the array
     392                    points = geo_block.get_data_points(absolute=True)
     393                    z = geo_block.get_attributes(attribute_name=attribute_name)
     394
     395                    self._build_matrix_AtA_Atz(points, z, attribute_name, verbose)
     396
     397                point_coordinates = None
     398
     399                if verbose:
     400                    print ''
     401            else:
     402                point_coordinates = point_coordinates_or_filename
     403        #-----------------------BLOCKING OPTION 2----------------------------
    456404        else:
    457             point_coordinates =  point_coordinates_or_filename
    458 
    459 
    460 
     405            if verbose:
     406                print 'Fit.fit: Initializing'
     407
     408            if isinstance(point_coordinates_or_filename, basestring):
     409                msg = "Don't set a point origin when reading from a file"
     410                assert point_origin is None, msg
     411                filename = point_coordinates_or_filename
     412
     413                self._build_matrix_AtA_Atz_file(filename, attribute_name=attribute_name,\
     414                                                verbose=verbose)
     415
     416                point_coordinates = None
     417            else:
     418                point_coordinates = point_coordinates_or_filename
     419        #--------------------------------------------------------------------
    461420
    462421        if point_coordinates is None:
    463             if verbose: log.critical('Fit.fit: Warning: no data points in fit')
     422            if verbose:
     423                log.critical('Fit.fit: Warning: no data points in fit')
    464424            msg = 'No interpolation matrix.'
    465425            assert self.AtA is not None, msg
    466426            assert self.Atz is not None
    467            
     427
    468428            # FIXME (DSG) - do  a message
    469429        else:
     
    472432            # if isinstance(point_coordinates,Geospatial_data) and z is None:
    473433            # z will come from the geo-ref
    474             self.build_fit_subset(point_coordinates, z, verbose=verbose, output='counter')
    475 
    476 
    477 
    478 
     434
     435            self._build_matrix_AtA_Atz(point_coordinates, z, verbose=verbose, output='counter')
    479436
    480437        # Check sanity
    481         m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)
     438        m = self.mesh.number_of_nodes  # Nbr of basis functions (1/vertex)
    482439        n = self.point_count
    483         if n<m and self.alpha == 0.0:
     440        if n < m and self.alpha == 0.0:
    484441            msg = 'ERROR (least_squares): Too few data points\n'
    485             msg += 'There are only %d data points and alpha == 0. ' %n
    486             msg += 'Need at least %d\n' %m
     442            msg += 'There are only %d data points and alpha == 0. ' % n
     443            msg += 'Need at least %d\n' % m
    487444            msg += 'Alternatively, set smoothing parameter alpha to a small '
    488             msg += 'positive value,\ne.g. 1.0e-3.'
     445            msg += 'positive value,\ne.g. 1.0e-3.'
    489446            raise TooFewPointsError(msg)
    490 
    491447
    492448        self._build_coefficient_matrix_B(verbose)
     
    495451        # test with
    496452        # Not_yet_test_smooth_att_to_mesh_with_excess_verts.
    497         if len(loners)>0:
     453        if len(loners) > 0:
    498454            msg = 'WARNING: (least_squares): \nVertices with no triangles\n'
    499455            msg += 'All vertices should be part of a triangle.\n'
     500456            msg += 'In the future this will be enforced.\n'
    501             msg += 'The following vertices are not part of a triangle;\n'
     457            msg += 'The following vertices are not part of a triangle;\n'
    502458            msg += str(loners)
    503459            log.critical(msg)
     460
    504461            #raise VertsWithNoTrianglesError(msg)
    505        
    506         if verbose:
    507             print 'Fit.fit: Solve Fitting Equation'
    508 
    509         #x0 = num.zeros_like(self.Atz)
    510462        return conjugate_gradient(self.B, self.Atz, self.Atz,
    511                                   imax=2*len(self.Atz), iprint=1)
    512 
    513        
    514     def build_fit_subset(self, point_coordinates, z=None, attribute_name=None,
    515                               verbose=False, output='dot'):
    516         """Fit a smooth surface to given 1d array of data points z.
    517 
    518         The smooth surface is computed at each vertex in the underlying
    519         mesh using the formula given in the module doc string.
    520 
    521         Inputs:
    522         point_coordinates: The co-ordinates of the data points.
    523               List of coordinate pairs [x, y] of
    524               data points or an nx2 numeric array or a Geospatial_data object
    525         z: Single 1d vector or array of data at the point_coordinates.
    526         attribute_name: Used to get the z values from the
    527               geospatial object if no attribute_name is specified,
    528               it's a bit of a lucky dip as to what attributes you get.
    529               If there is only one attribute it will be that one.
    530 
    531         """
    532 
    533         # FIXME(DSG-DSG): Check that the vert and point coords
    534         # have the same zone.
    535         if isinstance(point_coordinates,Geospatial_data):
    536             point_coordinates = point_coordinates.get_data_points( \
    537                 absolute = True)
    538 
    539        
    540         # Convert input to numeric arrays
    541         if z is not None:
    542             z = ensure_numeric(z, num.float)
    543         else:
    544             msg = 'z not specified'
    545             assert isinstance(point_coordinates,Geospatial_data), msg
    546             z = point_coordinates.get_attributes(attribute_name)
    547 
    548         point_coordinates = ensure_numeric(point_coordinates, num.float)
    549         self._build_matrix_AtA_Atz(point_coordinates, z, verbose, output)
    550        
    551         if verbose and output == 'dot':
    552             print '\b.',
    553             sys.stdout.flush()
    554 
    555 
    556 
    557 
    558 ############################################################################
    559 
    560 #class Fit_old(FitInterpolate):
    561 #
    562 #    def __init__(self,
    563 #                 vertex_coordinates=None,
    564 #                 triangles=None,
    565 #                 mesh=None,
    566 #                 mesh_origin=None,
    567 #                 alpha = None,
    568 #                 verbose=False):
    569 #
    570 #
    571 #        """
    572 #        Fit data at points to the vertices of a mesh.
    573 #
    574 #        Inputs:
    575 #
    576 #          vertex_coordinates: List of coordinate pairs [xi, eta] of
    577 #             points constituting a mesh (or an m x 2 numeric array or
    578 #              a geospatial object)
    579 #              Points may appear multiple times
    580 #              (e.g. if vertices have discontinuities)
    581 #
    582 #          triangles: List of 3-tuples (or a numeric array) of
    583 #              integers representing indices of all vertices in the mesh.
    584 #
    585 #          mesh_origin: A geo_reference object or 3-tuples consisting of
    586 #              UTM zone, easting and northing.
    587 #              If specified vertex coordinates are assumed to be
    588 #              relative to their respective origins.
    589 #
    590 #          Note: Don't supply a vertex coords as a geospatial object and
    591 #              a mesh origin, since geospatial has its own mesh origin.
    592 #
    593 #
    594 #        Usage,
    595 #        To use this in a blocking way, call  build_fit_subset, with z info,
    596 #        and then fit, with no point coord, z info.
    597 #
    598 #        """
    599 #        # Initialise variabels
    600 #        if alpha is None:
    601 #            self.alpha = DEFAULT_ALPHA
    602 #        else:
    603 #            self.alpha = alpha
    604 #
    605 #        FitInterpolate.__init__(self,
    606 #                 vertex_coordinates,
    607 #                 triangles,
    608 #                 mesh,
    609 #                 mesh_origin,
    610 #                 verbose)
    611 #
    612 #        m = self.mesh.number_of_nodes # Nbr of basis functions (vertices)
    613 #
    614 #        self.AtA = None
    615 #        self.Atz = None
    616 #
    617 #        self.point_count = 0
    618 #        if self.alpha <> 0:
    619 #            if verbose: log.critical('Fit: Building smoothing matrix')
    620 #            self._build_smoothing_matrix_D(verbose=verbose)
    621 #
    622 #        bd_poly = self.mesh.get_boundary_polygon()
    623 #        self.mesh_boundary_polygon = ensure_numeric(bd_poly)
    624 #
    625 #    def _build_coefficient_matrix_B(self,
    626 #                                  verbose = False):
    627 #        """
    628 #        Build final coefficient matrix
    629 #
    630 #        Precon
    631 #        If alpha is not zero, matrix D has been built
    632 #        Matrix Ata has been built
    633 #        """
    634 #
    635 #        if self.alpha <> 0:
    636 #            #if verbose: log.critical('Building smoothing matrix')
    637 #            #self._build_smoothing_matrix_D()
    638 #            self.B = self.AtA + self.alpha*self.D
    639 #        else:
    640 #            self.B = self.AtA
    641 #
    642 #        # Convert self.B matrix to CSR format for faster matrix vector
    643 #        self.B = Sparse_CSR(self.B)
    644 #
    645 #    def _build_smoothing_matrix_D(self):
    646 #        """Build m x m smoothing matrix, where
    647 #        m is the number of basis functions phi_k (one per vertex)
    648 #
    649 #        The smoothing matrix is defined as
    650 #
    651 #        D = D1 + D2
    652 #
    653 #        where
    654 #
    655 #        [D1]_{k,l} = \int_\Omega
    656 #           \frac{\partial \phi_k}{\partial x}
    657 #           \frac{\partial \phi_l}{\partial x}\,
    658 #           dx dy
    659 #
    660 #        [D2]_{k,l} = \int_\Omega
    661 #           \frac{\partial \phi_k}{\partial y}
    662 #           \frac{\partial \phi_l}{\partial y}\,
    663 #           dx dy
    664 #
    665 #
    666 #        The derivatives \frac{\partial \phi_k}{\partial x},
    667 #        \frac{\partial \phi_k}{\partial x} for a particular triangle
    668 #        are obtained by computing the gradient a_k, b_k for basis function k
    669 #        """
    670 #
    671 #        # FIXME: algorithm might be optimised by computing local 9x9
    672 #        # "element stiffness matrices:
    673 #
    674 #        m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)
    675 #
    676 #        self.D = Sparse(m,m)
    677 #
    678 #        # For each triangle compute contributions to D = D1+D2
    679 #        for i in range(len(self.mesh)):
    680 #
    681 #            # Get area
    682 #            area = self.mesh.areas[i]
    683 #
    684 #            # Get global vertex indices
    685 #            v0 = self.mesh.triangles[i,0]
    686 #            v1 = self.mesh.triangles[i,1]
    687 #            v2 = self.mesh.triangles[i,2]
    688 #
    689 #            # Get the three vertex_points
    690 #            xi0 = self.mesh.get_vertex_coordinate(i, 0)
    691 #            xi1 = self.mesh.get_vertex_coordinate(i, 1)
    692 #            xi2 = self.mesh.get_vertex_coordinate(i, 2)
    693 #
    694 #            # Compute gradients for each vertex
    695 #            a0, b0 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
    696 #                              1, 0, 0)
    697 #
    698 #            a1, b1 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
    699 #                              0, 1, 0)
    700 #
    701 #            a2, b2 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
    702 #                              0, 0, 1)
    703 #
    704 #            # Compute diagonal contributions
    705 #            self.D[v0,v0] += (a0*a0 + b0*b0)*area
    706 #            self.D[v1,v1] += (a1*a1 + b1*b1)*area
    707 #            self.D[v2,v2] += (a2*a2 + b2*b2)*area
    708 #
    709 #            # Compute contributions for basis functions sharing edges
    710 #            e01 = (a0*a1 + b0*b1)*area
    711 #            self.D[v0,v1] += e01
    712 #            self.D[v1,v0] += e01
    713 #
    714 #            e12 = (a1*a2 + b1*b2)*area
    715 #            self.D[v1,v2] += e12
    716 #            self.D[v2,v1] += e12
    717 #
    718 #            e20 = (a2*a0 + b2*b0)*area
    719 #            self.D[v2,v0] += e20
    720 #            self.D[v0,v2] += e20
    721 #
    722 #    def get_D(self):
    723 #        return self.D.todense()
    724 #
    725 #
    726 #
    727 #    def _build_matrix_AtA_Atz(self,
    728 #                              point_coordinates,
    729 #                              z,
    730 #                              verbose = False):
    731 #        """Build:
    732 #        AtA  m x m  interpolation matrix, and,
    733 #        Atz  m x a  interpolation matrix where,
    734 #        m is the number of basis functions phi_k (one per vertex)
    735 #        a is the number of data attributes
    736 #
    737 #        This algorithm uses a quad tree data structure for fast binning of
    738 #        data points.
    739 #
    740 #        If Ata is None, the matrices AtA and Atz are created.
    741 #
    742 #        This function can be called again and again, with sub-sets of
    743 #        the point coordinates.  Call fit to get the results.
    744 #
    745 #        Preconditions
    746 #        z and points are numeric
    747 #        Point_coordindates and mesh vertices have the same origin.
    748 #
    749 #        The number of attributes of the data points does not change
    750 #        """
    751 #
    752 #        # Build n x m interpolation matrix
    753 #        if self.AtA == None:
    754 #            # AtA and Atz need to be initialised.
    755 #            m = self.mesh.number_of_nodes
    756 #            if len(z.shape) > 1:
    757 #                att_num = z.shape[1]
    758 #                self.Atz = num.zeros((m,att_num), num.float)
    759 #            else:
    760 #                att_num = 1
    761 #                self.Atz = num.zeros((m,), num.float)
    762 #            assert z.shape[0] == point_coordinates.shape[0]
    763 #
    764 #            AtA = Sparse(m,m)
    765 #            # The memory damage has been done by now.
    766 #        else:
    767 #             AtA = self.AtA # Did this for speed, did ~nothing
    768 #        self.point_count += point_coordinates.shape[0]
    769 #
    770 #
    771 #        inside_indices = inside_polygon(point_coordinates,
    772 #                                        self.mesh_boundary_polygon,
    773 #                                        closed=True,
    774 #                                        verbose=False) # Suppress output
    775 #
    776 #        n = len(inside_indices)
    777 #
    778 #        # Compute matrix elements for points inside the mesh
    779 #        triangles = self.mesh.triangles # Shorthand
    780 #        for d, i in enumerate(inside_indices):
    781 #            # For each data_coordinate point
    782 #            # if verbose and d%((n+10)/10)==0: log.critical('Doing %d of %d'
    783 #                                                            # %(d, n))
    784 #            x = point_coordinates[i]
    785 #
    786 #            element_found, sigma0, sigma1, sigma2, k = \
    787 #                           self.root.search_fast(x)
    788 #
    789 #            if element_found is True:
    790 #                j0 = triangles[k,0] # Global vertex id for sigma0
    791 #                j1 = triangles[k,1] # Global vertex id for sigma1
    792 #                j2 = triangles[k,2] # Global vertex id for sigma2
    793 #
    794 #                sigmas = {j0:sigma0, j1:sigma1, j2:sigma2}
    795 #                js     = [j0,j1,j2]
    796 #
    797 #                for j in js:
    798 #                    self.Atz[j] +=  sigmas[j]*z[i]
    799 #
    800 #                    for k in js:
    801 #                        AtA[j,k] += sigmas[j]*sigmas[k]
    802 #            else:
    803 #                flag = is_inside_polygon(x,
    804 #                                         self.mesh_boundary_polygon,
    805 #                                         closed=True,
    806 #                                         verbose=False) # Suppress output
    807 #                msg = 'Point (%f, %f) is not inside mesh boundary' % tuple(x)
    808 #                assert flag is True, msg
    809 #
    810 #                # data point has fallen within a hole - so ignore it.
    811 #
    812 #        self.AtA = AtA
    813 #
    814 #
    815 #    def fit(self, point_coordinates_or_filename=None, z=None,
    816 #            verbose=False,
    817 #            point_origin=None,
    818 #            attribute_name=None,
    819 #            max_read_lines=500):
    820 #        """Fit a smooth surface to given 1d array of data points z.
    821 #
    822 #        The smooth surface is computed at each vertex in the underlying
    823 #        mesh using the formula given in the module doc string.
    824 #
    825 #        Inputs:
    826 #        point_coordinates: The co-ordinates of the data points.
    827 #              List of coordinate pairs [x, y] of
    828 #             data points or an nx2 numeric array or a Geospatial_data object
    829 #              or points file filename
    830 #          z: Single 1d vector or array of data at the point_coordinates.
    831 #
    832 #        """
    833 #
    834 #        # Use blocking to load in the point info
    835 #        if isinstance(point_coordinates_or_filename, basestring):
    836 #            msg = "Don't set a point origin when reading from a file"
    837 #            assert point_origin is None, msg
    838 #            filename = point_coordinates_or_filename
    839 #
    840 #            G_data = Geospatial_data(filename,
    841 #                                     max_read_lines=max_read_lines,
    842 #                                     load_file_now=False,
    843 #                                     verbose=verbose)
    844 #
    845 #            for i, geo_block in enumerate(G_data):
    846 #                if verbose is True and 0 == i%200:
    847 #                    # The time this will take
    848 #                    # is dependant on the # of Triangles
    849 #
    850 #                    log.critical('Processing Block %d' % i)
    851 #                    # FIXME (Ole): It would be good to say how many blocks
    852 #                    # there are here. But this is no longer necessary
    853 #                    # for pts files as they are reported in geospatial_data
    854 #                    # I suggest deleting this verbose output and make
    855 #                    # Geospatial_data more informative for txt files.
    856 #                    #
    857 #                    # I still think so (12/12/7, Ole).
    858 #
    859 #
    860 #
    861 #                # Build the array
    862 #
    863 #                points = geo_block.get_data_points(absolute=True)
    864 #                z = geo_block.get_attributes(attribute_name=attribute_name)
    865 #                self.build_fit_subset(points, z, verbose=verbose)
    866 #
    867 #                # FIXME(Ole): I thought this test would make sense here
    868 #                # See test_fitting_example_that_crashed_2 in test_shallow_water_domain.py
    869 #                # Committed 11 March 2009
    870 #                msg = 'Matrix AtA was not built'
    871 #                assert self.AtA is not None, msg
    872 #
    873 #            point_coordinates = None
    874 #        else:
    875 #            point_coordinates =  point_coordinates_or_filename
    876 #
    877 #        if point_coordinates is None:
    878 #            if verbose: log.critical('Warning: no data points in fit')
    879 #            msg = 'No interpolation matrix.'
    880 #            assert self.AtA is not None, msg
    881 #            assert self.Atz is not None
    882 #
    883 #            # FIXME (DSG) - do  a message
    884 #        else:
    885 #            point_coordinates = ensure_absolute(point_coordinates,
    886 #                                                geo_reference=point_origin)
    887 #            # if isinstance(point_coordinates,Geospatial_data) and z is None:
    888 #            # z will come from the geo-ref
    889 #            self.build_fit_subset(point_coordinates, z, verbose)
    890 #
    891 #        # Check sanity
    892 #        m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)
    893 #        n = self.point_count
    894 #        if n<m and self.alpha == 0.0:
    895 #            msg = 'ERROR (least_squares): Too few data points\n'
    896 #            msg += 'There are only %d data points and alpha == 0. ' %n
    897 #           msg += 'Need at least %d\n' %m
    898 #            msg += 'Alternatively, set smoothing parameter alpha to a small '
    899 #           msg += 'positive value,\ne.g. 1.0e-3.'
    900 #            raise TooFewPointsError(msg)
    901 #
    902 #        self._build_coefficient_matrix_B(verbose)
    903 #        loners = self.mesh.get_lone_vertices()
    904 #        # FIXME  - make this as error message.
    905 #        # test with
    906 #        # Not_yet_test_smooth_att_to_mesh_with_excess_verts.
    907 #        if len(loners)>0:
    908 #            msg = 'WARNING: (least_squares): \nVertices with no triangles\n'
    909 #            msg += 'All vertices should be part of a triangle.\n'
    910 #            msg += 'In the future this will be inforced.\n'
    911 #           msg += 'The following vertices are not part of a triangle;\n'
    912 #            msg += str(loners)
    913 #            log.critical(msg)
    914 #            #raise VertsWithNoTrianglesError(msg)
    915 #
    916 #
    917 #        return conjugate_gradient(self.B, self.Atz, self.Atz,
    918 #                                  imax=2*len(self.Atz) )
    919 #
    920 #
    921 #    def build_fit_subset(self, point_coordinates, z=None, attribute_name=None,
    922 #                              verbose=False):
    923 #        """Fit a smooth surface to given 1d array of data points z.
    924 #
    925 #        The smooth surface is computed at each vertex in the underlying
    926 #        mesh using the formula given in the module doc string.
    927 #
    928 #        Inputs:
    929 #        point_coordinates: The co-ordinates of the data points.
    930 #              List of coordinate pairs [x, y] of
    931 #             data points or an nx2 numeric array or a Geospatial_data object
    932 #        z: Single 1d vector or array of data at the point_coordinates.
    933 #        attribute_name: Used to get the z values from the
    934 #              geospatial object if no attribute_name is specified,
    935 #              it's a bit of a lucky dip as to what attributes you get.
    936 #              If there is only one attribute it will be that one.
    937 #
    938 #        """
    939 #
    940 #        # FIXME(DSG-DSG): Check that the vert and point coords
    941 #        # have the same zone.
    942 #        if isinstance(point_coordinates,Geospatial_data):
    943 #            point_coordinates = point_coordinates.get_data_points( \
    944 #                absolute = True)
    945 #
    946 #        # Convert input to numeric arrays
    947 #        if z is not None:
    948 #            z = ensure_numeric(z, num.float)
    949 #        else:
    950 #            msg = 'z not specified'
    951 #            assert isinstance(point_coordinates,Geospatial_data), msg
    952 #            z = point_coordinates.get_attributes(attribute_name)
    953 #
    954 #        point_coordinates = ensure_numeric(point_coordinates, num.float)
    955 #        self._build_matrix_AtA_Atz(point_coordinates, z, verbose)
    956 #
    957 #
    958 
    959 
    960 #===========================================================================
    961 
    962 #class Fit_test(FitInterpolate):
    963 #
    964 #    def __init__(self,
    965 #                 vertex_coordinates=None,
    966 #                 triangles=None,
    967 #                 mesh=None,
    968 #                 mesh_origin=None,
    969 #                 alpha = None,
    970 #                 verbose=False):
    971 #
    972 #
    973 #        """
    974 #        Fit data at points to the vertices of a mesh.
    975 #
    976 #        Inputs:
    977 #
    978 #          vertex_coordinates: List of coordinate pairs [xi, eta] of
    979 #             points constituting a mesh (or an m x 2 numeric array or
    980 #              a geospatial object)
    981 #              Points may appear multiple times
    982 #              (e.g. if vertices have discontinuities)
    983 #
    984 #          triangles: List of 3-tuples (or a numeric array) of
    985 #              integers representing indices of all vertices in the mesh.
    986 #
    987 #          mesh_origin: A geo_reference object or 3-tuples consisting of
    988 #              UTM zone, easting and northing.
    989 #              If specified vertex coordinates are assumed to be
    990 #              relative to their respective origins.
    991 #
    992 #          Note: Don't supply a vertex coords as a geospatial object and
    993 #              a mesh origin, since geospatial has its own mesh origin.
    994 #
    995 #
    996 #        Usage,
    997 #        To use this in a blocking way, call  build_fit_subset, with z info,
    998 #        and then fit, with no point coord, z info.
    999 #
    1000 #        """
    1001 #        # Initialise variabels
    1002 #        if alpha is None:
    1003 #            self.alpha = DEFAULT_ALPHA
    1004 #        else:
    1005 #            self.alpha = alpha
    1006 #
    1007 #        FitInterpolate.__init__(self,
    1008 #                 vertex_coordinates,
    1009 #                 triangles,
    1010 #                 mesh,
    1011 #                 mesh_origin,
    1012 #                 verbose)
    1013 #
    1014 #        m = self.mesh.number_of_nodes # Nbr of basis functions (vertices)
    1015 #
    1016 #        self.AtA = None
    1017 #        self.Atz = None
    1018 #
    1019 #        self.point_count = 0
    1020 #        if self.alpha <> 0:
    1021 #            if verbose: log.critical('Fit: Building smoothing matrix')
    1022 #            self._build_smoothing_matrix_D(verbose=verbose)
    1023 #
    1024 #        if verbose: log.critical('Fit: Get Boundary Polygon')
    1025 #        bd_poly = self.mesh.get_boundary_polygon()
    1026 #        self.mesh_boundary_polygon = ensure_numeric(bd_poly)
    1027 #
    1028 #    def _build_coefficient_matrix_B(self,
    1029 #                                  verbose = False):
    1030 #        """
    1031 #        Build final coefficient matrix
    1032 #
    1033 #        Precon
    1034 #        If alpha is not zero, matrix D has been built
    1035 #        Matrix Ata has been built
    1036 #        """
    1037 #
    1038 #        if verbose:
    1039 #            print 'Fit: Build Coefficient Matrix B'
    1040 #
    1041 #
    1042 #        if self.alpha <> 0:
    1043 #            #if verbose: log.critical('Building smoothing matrix')
    1044 #            #self._build_smoothing_matrix_D()
    1045 #            # FIXME SR: This is quite time consuming.
    1046 #            # As AtA and D have same structure it should be possible
    1047 #            # to speed this up.
    1048 #            self.B = self.AtA + self.alpha*self.D
    1049 #        else:
    1050 #            self.B = self.AtA
    1051 #
    1052 #
    1053 #        if verbose:
    1054 #            print 'Fit: Convert Coefficient Matrix B to Sparse_CSR'
    1055 #
    1056 #        # Convert self.B matrix to CSR format for faster matrix vector
    1057 #        self.B = Sparse_CSR(self.B)
    1058 #
    1059 #    def _build_coefficient_matrix_B_old(self,
    1060 #                                  verbose = False):
    1061 #        """
    1062 #        Build final coefficient matrix
    1063 #
    1064 #        Precon
    1065 #        If alpha is not zero, matrix D has been built
    1066 #        Matrix Ata has been built
    1067 #        """
    1068 #
    1069 #        if self.alpha <> 0:
    1070 #            #if verbose: log.critical('Building smoothing matrix')
    1071 #            #self._build_smoothing_matrix_D()
    1072 #            self.B = self.AtA + self.alpha*self.D
    1073 #        else:
    1074 #            self.B = self.AtA
    1075 #
    1076 #        # Convert self.B matrix to CSR format for faster matrix vector
    1077 #        self.B = Sparse_CSR(self.B)
    1078 #
    1079 #    def _build_smoothing_matrix_D(self, verbose=False):
    1080 #        """Build m x m smoothing matrix, where
    1081 #        m is the number of basis functions phi_k (one per vertex)
    1082 #
    1083 #        The smoothing matrix is defined as
    1084 #
    1085 #        D = D1 + D2
    1086 #
    1087 #        where
    1088 #
    1089 #        [D1]_{k,l} = \int_\Omega
    1090 #           \frac{\partial \phi_k}{\partial x}
    1091 #           \frac{\partial \phi_l}{\partial x}\,
    1092 #           dx dy
    1093 #
    1094 #        [D2]_{k,l} = \int_\Omega
    1095 #           \frac{\partial \phi_k}{\partial y}
    1096 #           \frac{\partial \phi_l}{\partial y}\,
    1097 #           dx dy
    1098 #
    1099 #
    1100 #        The derivatives \frac{\partial \phi_k}{\partial x},
     1101 #        \frac{\partial \phi_k}{\partial y} for a particular triangle
    1102 #        are obtained by computing the gradient a_k, b_k for basis function k
    1103 #        """
    1104 #
    1105 #        # FIXME: algorithm might be optimised by computing local 9x9
    1106 #        # "element stiffness matrices:
    1107 #
    1108 #        m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)
    1109 #
    1110 #        self.D = Sparse(m,m)
    1111 #
    1112 #        if verbose :
    1113 #            print '['+60*' '+']',
    1114 #            sys.stdout.flush()
    1115 #
    1116 #        N = len(self.mesh)
    1117 #        M = N/60
    1118 #
    1119 #        # For each triangle compute contributions to D = D1+D2
    1120 #        for i in xrange(N):
    1121 #
    1122 #            if verbose and i%M == 0 :
    1123 #                #restart_line()
    1124 #                print '\r['+(i/M)*'.'+(60-(i/M))*' ' +']',
    1125 #                sys.stdout.flush()
    1126 #
    1127 #            # Get area
    1128 #            area = self.mesh.areas[i]
    1129 #
    1130 #            # Get global vertex indices
    1131 #            v0 = self.mesh.triangles[i,0]
    1132 #            v1 = self.mesh.triangles[i,1]
    1133 #            v2 = self.mesh.triangles[i,2]
    1134 #
    1135 #            # Get the three vertex_points
    1136 #            xi0 = self.mesh.get_vertex_coordinate(i, 0)
    1137 #            xi1 = self.mesh.get_vertex_coordinate(i, 1)
    1138 #            xi2 = self.mesh.get_vertex_coordinate(i, 2)
    1139 #
    1140 #            # Compute gradients for each vertex
    1141 #            a0, b0 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
    1142 #                              1, 0, 0)
    1143 #
    1144 #            a1, b1 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
    1145 #                              0, 1, 0)
    1146 #
    1147 #            a2, b2 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
    1148 #                              0, 0, 1)
    1149 #
    1150 #            # Compute diagonal contributions
    1151 #            self.D[v0,v0] += (a0*a0 + b0*b0)*area
    1152 #            self.D[v1,v1] += (a1*a1 + b1*b1)*area
    1153 #            self.D[v2,v2] += (a2*a2 + b2*b2)*area
    1154 #
    1155 #            # Compute contributions for basis functions sharing edges
    1156 #            e01 = (a0*a1 + b0*b1)*area
    1157 #            self.D[v0,v1] += e01
    1158 #            self.D[v1,v0] += e01
    1159 #
    1160 #            e12 = (a1*a2 + b1*b2)*area
    1161 #            self.D[v1,v2] += e12
    1162 #            self.D[v2,v1] += e12
    1163 #
    1164 #            e20 = (a2*a0 + b2*b0)*area
    1165 #            self.D[v2,v0] += e20
    1166 #            self.D[v0,v2] += e20
    1167 #
    1168 #        if verbose:
    1169 #            print ''
    1170 #
    1171 #
    1172 #    def _build_smoothing_matrix_D_old(self):
    1173 #        """Build m x m smoothing matrix, where
    1174 #        m is the number of basis functions phi_k (one per vertex)
    1175 #
    1176 #        The smoothing matrix is defined as
    1177 #
    1178 #        D = D1 + D2
    1179 #
    1180 #        where
    1181 #
    1182 #        [D1]_{k,l} = \int_\Omega
    1183 #           \frac{\partial \phi_k}{\partial x}
    1184 #           \frac{\partial \phi_l}{\partial x}\,
    1185 #           dx dy
    1186 #
    1187 #        [D2]_{k,l} = \int_\Omega
    1188 #           \frac{\partial \phi_k}{\partial y}
    1189 #           \frac{\partial \phi_l}{\partial y}\,
    1190 #           dx dy
    1191 #
    1192 #
    1193 #        The derivatives \frac{\partial \phi_k}{\partial x},
     1194 #        \frac{\partial \phi_k}{\partial y} for a particular triangle
    1195 #        are obtained by computing the gradient a_k, b_k for basis function k
    1196 #        """
    1197 #
    1198 #        # FIXME: algorithm might be optimised by computing local 9x9
    1199 #        # "element stiffness matrices:
    1200 #
    1201 #        m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)
    1202 #
    1203 #        self.D = Sparse(m,m)
    1204 #
    1205 #        # For each triangle compute contributions to D = D1+D2
    1206 #        for i in range(len(self.mesh)):
    1207 #
    1208 #            # Get area
    1209 #            area = self.mesh.areas[i]
    1210 #
    1211 #            # Get global vertex indices
    1212 #            v0 = self.mesh.triangles[i,0]
    1213 #            v1 = self.mesh.triangles[i,1]
    1214 #            v2 = self.mesh.triangles[i,2]
    1215 #
    1216 #            # Get the three vertex_points
    1217 #            xi0 = self.mesh.get_vertex_coordinate(i, 0)
    1218 #            xi1 = self.mesh.get_vertex_coordinate(i, 1)
    1219 #            xi2 = self.mesh.get_vertex_coordinate(i, 2)
    1220 #
    1221 #            # Compute gradients for each vertex
    1222 #            a0, b0 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
    1223 #                              1, 0, 0)
    1224 #
    1225 #            a1, b1 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
    1226 #                              0, 1, 0)
    1227 #
    1228 #            a2, b2 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
    1229 #                              0, 0, 1)
    1230 #
    1231 #            # Compute diagonal contributions
    1232 #            self.D[v0,v0] += (a0*a0 + b0*b0)*area
    1233 #            self.D[v1,v1] += (a1*a1 + b1*b1)*area
    1234 #            self.D[v2,v2] += (a2*a2 + b2*b2)*area
    1235 #
    1236 #            # Compute contributions for basis functions sharing edges
    1237 #            e01 = (a0*a1 + b0*b1)*area
    1238 #            self.D[v0,v1] += e01
    1239 #            self.D[v1,v0] += e01
    1240 #
    1241 #            e12 = (a1*a2 + b1*b2)*area
    1242 #            self.D[v1,v2] += e12
    1243 #            self.D[v2,v1] += e12
    1244 #
    1245 #            e20 = (a2*a0 + b2*b0)*area
    1246 #            self.D[v2,v0] += e20
    1247 #            self.D[v0,v2] += e20
    1248 #
    1249 #    def get_D(self):
    1250 #        return self.D.todense()
    1251 #
    1252 #
    1253 #    def _build_matrix_AtA_Atz(self,
    1254 #                              point_coordinates,
    1255 #                              z,
    1256 #                              verbose = False):
    1257 #        """Build:
    1258 #        AtA  m x m  interpolation matrix, and,
    1259 #        Atz  m x a  interpolation matrix where,
    1260 #        m is the number of basis functions phi_k (one per vertex)
    1261 #        a is the number of data attributes
    1262 #
    1263 #        This algorithm uses a quad tree data structure for fast binning of
    1264 #        data points.
    1265 #
     1266 #        If AtA is None, the matrices AtA and Atz are created.
    1267 #
    1268 #        This function can be called again and again, with sub-sets of
    1269 #        the point coordinates.  Call fit to get the results.
    1270 #
    1271 #        Preconditions
    1272 #        z and points are numeric
     1273 #        Point_coordinates and mesh vertices have the same origin.
    1274 #
    1275 #        The number of attributes of the data points does not change
    1276 #        """
    1277 #
    1278 #
    1279 #        #if verbose:
    1280 #        #    print 'Fit: Build Matrix AtA Atz'
    1281 #
    1282 #        # Build n x m interpolation matrix
    1283 #        if self.AtA == None:
    1284 #            # AtA and Atz need to be initialised.
    1285 #            m = self.mesh.number_of_nodes
    1286 #            if len(z.shape) > 1:
    1287 #                att_num = z.shape[1]
    1288 #                self.Atz = num.zeros((m,att_num), num.float)
    1289 #            else:
    1290 #                att_num = 1
    1291 #                self.Atz = num.zeros((m,), num.float)
    1292 #            assert z.shape[0] == point_coordinates.shape[0]
    1293 #
    1294 #            AtA = Sparse(m,m)
    1295 #            # The memory damage has been done by now.
    1296 #        else:
    1297 #             AtA = self.AtA # Did this for speed, did ~nothing
    1298 #        self.point_count += point_coordinates.shape[0]
    1299 #
    1300 #
    1301 #        inside_indices = inside_polygon(point_coordinates,
    1302 #                                        self.mesh_boundary_polygon,
    1303 #                                        closed=True,
    1304 #                                        verbose=False) # Suppress output
    1305 #
    1306 #        n = len(inside_indices)
    1307 #
    1308 #        # Compute matrix elements for points inside the mesh
    1309 #        triangles = self.mesh.triangles # Shorthand
    1310 #
    1311 #
    1312 #        #if verbose :
    1313 #        #    print '['+60*' '+']',
    1314 #        #    sys.stdout.flush()
    1315 #
    1316 #        m = max(n/60,1)
    1317 #
    1318 #
    1319 #        for d in xrange(n):
    1320 #            i = inside_indices[d]
    1321 #
    1322 ##        for d, i in enumerate(inside_indices):
    1323 ##            # For each data_coordinate point
    1324 ##            # if verbose and d%((n+10)/10)==0: log.critical('Doing %d of %d'
    1325 ##                                                            # %(d, n))
    1326 ##            x = point_coordinates[i]
    1327 #
    1328 #            # For each data_coordinate point
    1329 #            # if verbose and d%((n+10)/10)==0: log.critical('Doing %d of %d'
    1330 #                                                            # %(d, n))
    1331 #
    1332 #
    1333 #            #if verbose and i%m == 0 :
    1334 #            #    print '\r['+(i/m)*'.'+(60-(i/m))*' '+']',
    1335 #            #    sys.stdout.flush()
    1336 #
    1337 #
    1338 #            x = point_coordinates[i]
    1339 #
    1340 #            element_found, sigma0, sigma1, sigma2, k = \
    1341 #                           self.root.search_fast(x)
    1342 #
    1343 #            if element_found is True:
    1344 #                j0 = triangles[k,0] # Global vertex id for sigma0
    1345 #                j1 = triangles[k,1] # Global vertex id for sigma1
    1346 #                j2 = triangles[k,2] # Global vertex id for sigma2
    1347 #
    1348 #                sigmas = {j0:sigma0, j1:sigma1, j2:sigma2}
    1349 #                js     = [j0,j1,j2]
    1350 #
    1351 #                for j in js:
    1352 #                    self.Atz[j] +=  sigmas[j]*z[i]
    1353 #
    1354 #                    for k in js:
    1355 #                        AtA[j,k] += sigmas[j]*sigmas[k]
    1356 #            else:
    1357 #                flag = is_inside_polygon(x,
    1358 #                                         self.mesh_boundary_polygon,
    1359 #                                         closed=True,
    1360 #                                         verbose=False) # Suppress output
    1361 #                msg = 'Point (%f, %f) is not inside mesh boundary' % tuple(x)
    1362 #                assert flag is True, msg
    1363 #
    1364 #                # data point has fallen within a hole - so ignore it.
    1365 #
    1366 #
    1367 #        #if verbose:
    1368 #        #    print ''
    1369 #
    1370 #        self.AtA = AtA
    1371 #
    1372 #
    1373 #
    1374 #    def _build_matrix_AtA_Atz_old(self,
    1375 #                              point_coordinates,
    1376 #                              z,
    1377 #                              verbose = False):
    1378 #        """Build:
    1379 #        AtA  m x m  interpolation matrix, and,
    1380 #        Atz  m x a  interpolation matrix where,
    1381 #        m is the number of basis functions phi_k (one per vertex)
    1382 #        a is the number of data attributes
    1383 #
    1384 #        This algorithm uses a quad tree data structure for fast binning of
    1385 #        data points.
    1386 #
     1387 #        If AtA is None, the matrices AtA and Atz are created.
    1388 #
    1389 #        This function can be called again and again, with sub-sets of
    1390 #        the point coordinates.  Call fit to get the results.
    1391 #
    1392 #        Preconditions
    1393 #        z and points are numeric
     1394 #        Point_coordinates and mesh vertices have the same origin.
    1395 #
    1396 #        The number of attributes of the data points does not change
    1397 #        """
    1398 #
    1399 #        # Build n x m interpolation matrix
    1400 #        if self.AtA == None:
    1401 #            # AtA and Atz need to be initialised.
    1402 #            m = self.mesh.number_of_nodes
    1403 #            if len(z.shape) > 1:
    1404 #                att_num = z.shape[1]
    1405 #                self.Atz = num.zeros((m,att_num), num.float)
    1406 #            else:
    1407 #                att_num = 1
    1408 #                self.Atz = num.zeros((m,), num.float)
    1409 #            assert z.shape[0] == point_coordinates.shape[0]
    1410 #
    1411 #            AtA = Sparse(m,m)
    1412 #            # The memory damage has been done by now.
    1413 #        else:
    1414 #             AtA = self.AtA # Did this for speed, did ~nothing
    1415 #        self.point_count += point_coordinates.shape[0]
    1416 #
    1417 #
    1418 #        inside_indices = inside_polygon(point_coordinates,
    1419 #                                        self.mesh_boundary_polygon,
    1420 #                                        closed=True,
    1421 #                                        verbose=False) # Suppress output
    1422 #
    1423 #        n = len(inside_indices)
    1424 #
    1425 #        # Compute matrix elements for points inside the mesh
    1426 #        triangles = self.mesh.triangles # Shorthand
    1427 #        for d, i in enumerate(inside_indices):
    1428 #            # For each data_coordinate point
    1429 #            # if verbose and d%((n+10)/10)==0: log.critical('Doing %d of %d'
    1430 #                                                            # %(d, n))
    1431 #            x = point_coordinates[i]
    1432 #
    1433 #            element_found, sigma0, sigma1, sigma2, k = \
    1434 #                           self.root.search_fast(x)
    1435 #
    1436 #            if element_found is True:
    1437 #                j0 = triangles[k,0] # Global vertex id for sigma0
    1438 #                j1 = triangles[k,1] # Global vertex id for sigma1
    1439 #                j2 = triangles[k,2] # Global vertex id for sigma2
    1440 #
    1441 #                sigmas = {j0:sigma0, j1:sigma1, j2:sigma2}
    1442 #                js     = [j0,j1,j2]
    1443 #
    1444 #                for j in js:
    1445 #                    self.Atz[j] +=  sigmas[j]*z[i]
    1446 #
    1447 #                    for k in js:
    1448 #                        AtA[j,k] += sigmas[j]*sigmas[k]
    1449 #            else:
    1450 #                flag = is_inside_polygon(x,
    1451 #                                         self.mesh_boundary_polygon,
    1452 #                                         closed=True,
    1453 #                                         verbose=False) # Suppress output
    1454 #                msg = 'Point (%f, %f) is not inside mesh boundary' % tuple(x)
    1455 #                assert flag is True, msg
    1456 #
    1457 #                # data point has fallen within a hole - so ignore it.
    1458 #
    1459 #        self.AtA = AtA
    1460 #
    1461 #    def fit(self, point_coordinates_or_filename=None, z=None,
    1462 #            verbose=False,
    1463 #            point_origin=None,
    1464 #            attribute_name=None,
    1465 #            max_read_lines=None):
    1466 #        """Fit a smooth surface to given 1d array of data points z.
    1467 #
    1468 #        The smooth surface is computed at each vertex in the underlying
    1469 #        mesh using the formula given in the module doc string.
    1470 #
    1471 #        Inputs:
    1472 #        point_coordinates: The co-ordinates of the data points.
    1473 #              List of coordinate pairs [x, y] of
    1474 #             data points or an nx2 numeric array or a Geospatial_data object
    1475 #              or points file filename
    1476 #          z: Single 1d vector or array of data at the point_coordinates.
    1477 #
    1478 #        """
    1479 #
    1480 #
    1481 #        if verbose:
    1482 #            print 'Fit.fit: Initializing'
    1483 #
    1484 #        # Use blocking to load in the point info
    1485 #        if isinstance(point_coordinates_or_filename, basestring):
    1486 #            msg = "Don't set a point origin when reading from a file"
    1487 #            assert point_origin is None, msg
    1488 #            filename = point_coordinates_or_filename
    1489 #
    1490 #            G_data = Geospatial_data(filename,
    1491 #                                     max_read_lines=max_read_lines,
    1492 #                                     load_file_now=False,
    1493 #                                     verbose=verbose)
    1494 #
    1495 #
    1496 #            for i, geo_block in enumerate(G_data):
    1497 #                if verbose is True and 0 == i%200:
    1498 #                    # The time this will take
     1499 #                    # is dependent on the # of Triangles
    1500 #
    1501 #                    log.critical('Processing Block %d' % i)
    1502 #                    # FIXME (Ole): It would be good to say how many blocks
    1503 #                    # there are here. But this is no longer necessary
    1504 #                    # for pts files as they are reported in geospatial_data
    1505 #                    # I suggest deleting this verbose output and make
    1506 #                    # Geospatial_data more informative for txt files.
    1507 #                    #
    1508 #                    # I still think so (12/12/7, Ole).
    1509 #
    1510 #
    1511 #
    1512 #                # Build the array
    1513 #
    1514 #                points = geo_block.get_data_points(absolute=True)
    1515 #                z = geo_block.get_attributes(attribute_name=attribute_name)
    1516 #                self.build_fit_subset(points, z, attribute_name, verbose)
    1517 #
    1518 #                # FIXME(Ole): I thought this test would make sense here
    1519 #                # See test_fitting_example_that_crashed_2 in test_shallow_water_domain.py
    1520 #                # Committed 11 March 2009
    1521 #                msg = 'Matrix AtA was not built'
    1522 #                assert self.AtA is not None, msg
    1523 #
    1524 #            point_coordinates = None
    1525 #        else:
    1526 #            point_coordinates =  point_coordinates_or_filename
    1527 #
    1528 #
    1529 #
    1530 #        if point_coordinates is None:
    1531 #            if verbose: log.critical('Fit.fit: Warning: no data points in fit')
    1532 #            msg = 'No interpolation matrix.'
    1533 #            assert self.AtA is not None, msg
    1534 #            assert self.Atz is not None
    1535 #
    1536 #            # FIXME (DSG) - do  a message
    1537 #        else:
    1538 #            point_coordinates = ensure_absolute(point_coordinates,
    1539 #                                                geo_reference=point_origin)
    1540 #            # if isinstance(point_coordinates,Geospatial_data) and z is None:
    1541 #            # z will come from the geo-ref
    1542 #            self.build_fit_subset(point_coordinates, z, verbose=verbose)
    1543 #
    1544 #
    1545 #
    1546 #
    1547 #
    1548 #        # Check sanity
    1549 #        m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)
    1550 #        n = self.point_count
    1551 #        if n<m and self.alpha == 0.0:
    1552 #            msg = 'ERROR (least_squares): Too few data points\n'
    1553 #            msg += 'There are only %d data points and alpha == 0. ' %n
    1554 #           msg += 'Need at least %d\n' %m
    1555 #            msg += 'Alternatively, set smoothing parameter alpha to a small '
    1556 #           msg += 'positive value,\ne.g. 1.0e-3.'
    1557 #            raise TooFewPointsError(msg)
    1558 #
    1559 #
    1560 #        self._build_coefficient_matrix_B(verbose)
    1561 #        loners = self.mesh.get_lone_vertices()
    1562 #        # FIXME  - make this as error message.
    1563 #        # test with
    1564 #        # Not_yet_test_smooth_att_to_mesh_with_excess_verts.
    1565 #        if len(loners)>0:
    1566 #            msg = 'WARNING: (least_squares): \nVertices with no triangles\n'
    1567 #            msg += 'All vertices should be part of a triangle.\n'
     1568 #            msg += 'In the future this will be enforced.\n'
    1569 #           msg += 'The following vertices are not part of a triangle;\n'
    1570 #            msg += str(loners)
    1571 #            log.critical(msg)
    1572 #            #raise VertsWithNoTrianglesError(msg)
    1573 #
    1574 #        if verbose:
    1575 #            print 'Fit.fit: Solve Fitting Equation'
    1576 #
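#        # Solve B x = Atz (B = AtA + alpha*D, or just AtA when alpha is 0)
#        # for the vertex values x.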
    1577 #        x0 = num.zeros_like(self.Atz)
    1578 #        return conjugate_gradient(self.B, self.Atz, x0,
    1579 #                                  imax=2*len(self.Atz), iprint=1 )
    1580 #
    1581 #
    1582 #    def fit_old(self, point_coordinates_or_filename=None, z=None,
    1583 #            verbose=False,
    1584 #            point_origin=None,
    1585 #            attribute_name=None,
    1586 #            max_read_lines=500):
    1587 #        """Fit a smooth surface to given 1d array of data points z.
    1588 #
    1589 #        The smooth surface is computed at each vertex in the underlying
    1590 #        mesh using the formula given in the module doc string.
    1591 #
    1592 #        Inputs:
    1593 #        point_coordinates: The co-ordinates of the data points.
    1594 #              List of coordinate pairs [x, y] of
    1595 #             data points or an nx2 numeric array or a Geospatial_data object
    1596 #              or points file filename
    1597 #          z: Single 1d vector or array of data at the point_coordinates.
    1598 #
    1599 #        """
    1600 #
    1601 #        if verbose: log.critical('Fit.fit: Start')
    1602 #
    1603 #        # Use blocking to load in the point info
    1604 #        if isinstance(point_coordinates_or_filename, basestring):
    1605 #            msg = "Don't set a point origin when reading from a file"
    1606 #            assert point_origin is None, msg
    1607 #            filename = point_coordinates_or_filename
    1608 #
    1609 #            G_data = Geospatial_data(filename,
    1610 #                                     max_read_lines=max_read_lines,
    1611 #                                     load_file_now=False,
    1612 #                                     verbose=verbose)
    1613 #
    1614 #            for i, geo_block in enumerate(G_data):
    1615 #                if verbose is True and 0 == i%200:
    1616 #                    # The time this will take
     1617 #                    # is dependent on the # of Triangles
    1618 #
    1619 #                    log.critical('Processing Block %d' % i)
    1620 #                    # FIXME (Ole): It would be good to say how many blocks
    1621 #                    # there are here. But this is no longer necessary
    1622 #                    # for pts files as they are reported in geospatial_data
    1623 #                    # I suggest deleting this verbose output and make
    1624 #                    # Geospatial_data more informative for txt files.
    1625 #                    #
    1626 #                    # I still think so (12/12/7, Ole).
    1627 #
    1628 #
    1629 #
    1630 #                # Build the array
    1631 #
    1632 #                points = geo_block.get_data_points(absolute=True)
    1633 #                z = geo_block.get_attributes(attribute_name=attribute_name)
    1634 #                self.build_fit_subset(points, z, verbose=verbose)
    1635 #
    1636 #                # FIXME(Ole): I thought this test would make sense here
    1637 #                # See test_fitting_example_that_crashed_2 in test_shallow_water_domain.py
    1638 #                # Committed 11 March 2009
    1639 #                msg = 'Matrix AtA was not built'
    1640 #                assert self.AtA is not None, msg
    1641 #
    1642 #            point_coordinates = None
    1643 #        else:
    1644 #            point_coordinates =  point_coordinates_or_filename
    1645 #
    1646 #        if point_coordinates is None:
    1647 #            if verbose: log.critical('Warning: no data points in fit')
    1648 #            msg = 'No interpolation matrix.'
    1649 #            assert self.AtA is not None, msg
    1650 #            assert self.Atz is not None
    1651 #
    1652 #            # FIXME (DSG) - do  a message
    1653 #        else:
    1654 #            point_coordinates = ensure_absolute(point_coordinates,
    1655 #                                                geo_reference=point_origin)
    1656 #            # if isinstance(point_coordinates,Geospatial_data) and z is None:
    1657 #            # z will come from the geo-ref
    1658 #            self.build_fit_subset(point_coordinates, z, verbose)
    1659 #
    1660 #        # Check sanity
    1661 #        m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)
    1662 #        n = self.point_count
    1663 #        if n<m and self.alpha == 0.0:
    1664 #            msg = 'ERROR (least_squares): Too few data points\n'
    1665 #            msg += 'There are only %d data points and alpha == 0. ' %n
    1666 #           msg += 'Need at least %d\n' %m
    1667 #            msg += 'Alternatively, set smoothing parameter alpha to a small '
    1668 #           msg += 'positive value,\ne.g. 1.0e-3.'
    1669 #            raise TooFewPointsError(msg)
    1670 #
    1671 #        self._build_coefficient_matrix_B(verbose)
    1672 #        loners = self.mesh.get_lone_vertices()
    1673 #        # FIXME  - make this as error message.
    1674 #        # test with
    1675 #        # Not_yet_test_smooth_att_to_mesh_with_excess_verts.
    1676 #        if len(loners)>0:
    1677 #            msg = 'WARNING: (least_squares): \nVertices with no triangles\n'
    1678 #            msg += 'All vertices should be part of a triangle.\n'
     1679 #            msg += 'In the future this will be enforced.\n'
    1680 #           msg += 'The following vertices are not part of a triangle;\n'
    1681 #            msg += str(loners)
    1682 #            log.critical(msg)
    1683 #            #raise VertsWithNoTrianglesError(msg)
    1684 #
    1685 #        if verbose: log.critical('Fit.fit: Conjugate Gradient')
    1686 #        return conjugate_gradient(self.B, self.Atz, self.Atz,
    1687 #                                  imax=2*len(self.Atz), iprint=1 )
    1688 #
    1689 #    def build_fit_subset(self, point_coordinates, z=None, attribute_name=None,
    1690 #                              verbose=False):
    1691 #        """Fit a smooth surface to given 1d array of data points z.
    1692 #
    1693 #        The smooth surface is computed at each vertex in the underlying
    1694 #        mesh using the formula given in the module doc string.
    1695 #
    1696 #        Inputs:
    1697 #        point_coordinates: The co-ordinates of the data points.
    1698 #              List of coordinate pairs [x, y] of
    1699 #             data points or an nx2 numeric array or a Geospatial_data object
    1700 #        z: Single 1d vector or array of data at the point_coordinates.
     1701 #        attribute_name: Used to get the z values from the
     1702 #              geospatial object. If no attribute_name is specified,
     1703 #              it's a bit of a lucky dip as to which attribute you get.
     1704 #              If there is only one attribute it will be that one.
    1705 #
    1706 #        """
    1707 #
    1708 #        # FIXME(DSG-DSG): Check that the vert and point coords
    1709 #        # have the same zone.
    1710 #        if isinstance(point_coordinates,Geospatial_data):
    1711 #            point_coordinates = point_coordinates.get_data_points( \
    1712 #                absolute = True)
    1713 #
    1714 #        # Convert input to numeric arrays
    1715 #        if z is not None:
    1716 #            z = ensure_numeric(z, num.float)
    1717 #        else:
    1718 #            msg = 'z not specified'
    1719 #            assert isinstance(point_coordinates,Geospatial_data), msg
    1720 #            z = point_coordinates.get_attributes(attribute_name)
    1721 #
    1722 #        point_coordinates = ensure_numeric(point_coordinates, num.float)
    1723 #        self._build_matrix_AtA_Atz(point_coordinates, z, verbose)
    1724 #
    1725 #
    1726 #
    1727 #    def build_fit_subset_old(self, point_coordinates, z=None, attribute_name=None,
    1728 #                              verbose=False):
    1729 #        """Fit a smooth surface to given 1d array of data points z.
    1730 #
    1731 #        The smooth surface is computed at each vertex in the underlying
    1732 #        mesh using the formula given in the module doc string.
    1733 #
    1734 #        Inputs:
    1735 #        point_coordinates: The co-ordinates of the data points.
    1736 #              List of coordinate pairs [x, y] of
    1737 #             data points or an nx2 numeric array or a Geospatial_data object
    1738 #        z: Single 1d vector or array of data at the point_coordinates.
     1739 #        attribute_name: Used to get the z values from the
     1740 #              geospatial object. If no attribute_name is specified,
     1741 #              it's a bit of a lucky dip as to which attribute you get.
     1742 #              If there is only one attribute it will be that one.
    1743 #
    1744 #        """
    1745 #
    1746 #        # FIXME(DSG-DSG): Check that the vert and point coords
    1747 #        # have the same zone.
    1748 #        if isinstance(point_coordinates,Geospatial_data):
    1749 #            point_coordinates = point_coordinates.get_data_points( \
    1750 #                absolute = True)
    1751 #
    1752 #        # Convert input to numeric arrays
    1753 #        if z is not None:
    1754 #            z = ensure_numeric(z, num.float)
    1755 #        else:
    1756 #            msg = 'z not specified'
    1757 #            assert isinstance(point_coordinates,Geospatial_data), msg
    1758 #            z = point_coordinates.get_attributes(attribute_name)
    1759 #
    1760 #        point_coordinates = ensure_numeric(point_coordinates, num.float)
    1761 #        self._build_matrix_AtA_Atz(point_coordinates, z, verbose)
    1762 #
    1763 
    1764 #===============================================================================
    1765 
    1766 
    1767 def fit_to_mesh(point_coordinates, # this can also be a points file name
     463                                  imax=2 * len(self.Atz)+1000, use_c_cg=self.use_c_cg,
     464                                  precon=self.cg_precon)
     465
     466
      467# point_coordinates can also be a points file name
     468
     469def fit_to_mesh(point_coordinates,
    1768470                vertex_coordinates=None,
    1769471                triangles=None,
     
    1776478                max_read_lines=None,
    1777479                attribute_name=None,
    1778                 use_cache=False):
     480                use_cache=False,
     481                cg_precon='None',
     482                use_c_cg=False):
    1779483    """Wrapper around internal function _fit_to_mesh for use with caching.
    1780    
    1781484    """
    1782    
     485
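    # A minimal usage sketch (hypothetical array names; cg_precon and use_c_cg
    # are the new solver options added in this changeset):
    #
    #     vertex_values = fit_to_mesh(points,            # nx2 array or points file name
    #                                 vertex_coordinates=nodes,
    #                                 triangles=tris,
    #                                 alpha=0.01,
    #                                 use_c_cg=True,
    #                                 cg_precon='None')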
    1783486    args = (point_coordinates, )
    1784487    kwargs = {'vertex_coordinates': vertex_coordinates,
     
    1791494              'data_origin': data_origin,
    1792495              'max_read_lines': max_read_lines,
    1793               'attribute_name': attribute_name
     496              'attribute_name': attribute_name,
     497              'cg_precon': cg_precon,
     498              'use_c_cg': use_c_cg
    1794499              }
    1795500
    1796501    if use_cache is True:
    1797502        if isinstance(point_coordinates, basestring):
    1798             # We assume that point_coordinates is the name of a .csv/.txt/.pts
    1799             # file which must be passed onto caching as a dependency 
     503            # We assume that point_coordinates is the name of a .csv/.txt
     504            # file which must be passed onto caching as a dependency
    1800505            # (in case it has changed on disk)
    1801506            dep = [point_coordinates]
     
    1803508            dep = None
    1804509
    1805            
    1806         #from caching import myhash
    1807         #import copy
    1808         #print args
    1809         #print kwargs
    1810         #print 'hashing:'
    1811         #print 'args', myhash( (args, kwargs) )
    1812         #print 'again', myhash( copy.deepcopy( (args, kwargs)) )       
    1813        
    1814         #print 'mesh hash', myhash( kwargs['mesh'] )       
    1815        
    1816         #print '-------------------------'
    1817         #print 'vertices hash', myhash( kwargs['mesh'].nodes )
    1818         #print 'triangles hash', myhash( kwargs['mesh'].triangles )
    1819         #print '-------------------------'       
    1820        
    1821         #for key in mesh.__dict__:
    1822         #    print key, myhash(mesh.__dict__[key])
    1823        
    1824         #for key in mesh.quantities.keys():
    1825         #    print key, myhash(mesh.quantities[key])
    1826        
    1827         #import sys; sys.exit()
    1828            
    1829510        return cache(_fit_to_mesh,
    1830511                     args, kwargs,
     
    1833514                     dependencies=dep)
    1834515    else:
    1835         return apply(_fit_to_mesh,
     516        res = apply(_fit_to_mesh,
    1836517                     args, kwargs)
    1837 
    1838 def _fit_to_mesh(point_coordinates, # this can also be a points file name
     518        "print intep should go out of range"
     519        return res
     520
     521
     522# point_coordinates can also be a points file name
     523
     524def _fit_to_mesh(point_coordinates,
    1839525                 vertex_coordinates=None,
    1840526                 triangles=None,
     
    1846532                 data_origin=None,
    1847533                 max_read_lines=None,
    1848                  attribute_name=None):
     534                 attribute_name=None,
     535                 cg_precon='None',
     536                 use_c_cg=False):
    1849537    """
    1850538    Fit a smooth surface to a triangulation,
     
    1854542        Inputs:
    1855543        vertex_coordinates: List of coordinate pairs [xi, eta] of
    1856               points constituting a mesh (or an m x 2 numeric array or
     544        points constituting a mesh (or an m x 2 numeric array or
    1857545              a geospatial object)
    1858546              Points may appear multiple times
     
    1881569        # FIXME(DSG): Throw errors if triangles or vertex_coordinates
    1882570        # are None
    1883            
     571
    1884572        #Convert input to numeric arrays
    1885573        triangles = ensure_numeric(triangles, num.int)
     
    1887575                                             geo_reference = mesh_origin)
    1888576
    1889         if verbose: log.critical('FitInterpolate: Building mesh')
    1890         mesh = Mesh(vertex_coordinates, triangles, verbose=verbose)
    1891         #mesh.check_integrity() # expensive
    1892    
    1893    
     577        if verbose:
     578            log.critical('FitInterpolate: Building mesh')
     579        mesh = Mesh(vertex_coordinates, triangles)
     580        mesh.check_integrity()
     581
    1894582    interp = Fit(mesh=mesh,
    1895583                 verbose=verbose,
    1896                  alpha=alpha)
     584                 alpha=alpha,
     585                 cg_precon=cg_precon,
     586                 use_c_cg=use_c_cg)
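    # Note: cg_precon and use_c_cg are handed on to the conjugate gradient
    # solver inside Fit.fit (see the conjugate_gradient call updated above:
    # use_c_cg=self.use_c_cg, precon=self.cg_precon).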
    1897587
    1898588    vertex_attributes = interp.fit(point_coordinates,
     
    1903593                                   verbose=verbose)
    1904594
    1905        
    1906595    # Add the value checking stuff that's in least squares.
    1907596    # Maybe this stuff should get pushed down into Fit.
    1908597    # at least be a method of Fit.
    1909     # Or integrate it into the fit method, saving the max and min's
      598    # Or integrate it into the fit method, saving the max and min's
    1910599    # as att's.
    1911    
     600
    1912601    return vertex_attributes
    1913 
    1914 
    1915 #def _fit(*args, **kwargs):
    1916 #    """Private function for use with caching. Reason is that classes
    1917 #    may change their byte code between runs which is annoying.
    1918 #    """
    1919 #   
    1920 #    return Fit(*args, **kwargs)
    1921602
    1922603
     
    1927608                     display_errors = True):
    1928609    """
    1929     Given a mesh file (tsh or msh) and a point attribute file, fit
     610    Given a mesh file (tsh) and a point attribute file, fit
    1930611    point attributes to the mesh and write a mesh file with the
    1931612    results.
     
    1938619    """
    1939620
    1940     from anuga.load_mesh.loadASCII import import_mesh_file, \
     621    from load_mesh.loadASCII import import_mesh_file, \
    1941622         export_mesh_file, concatinate_attributelist
    1942 
    1943623
    1944624    try:
    1945625        mesh_dict = import_mesh_file(mesh_file)
    1946     except IOError,e:
     626    except IOError, e:
    1947627        if display_errors:
    1948628            log.critical("Could not load bad file: %s" % str(e))
    1949629        raise IOError  #Could not load bad mesh file.
    1950    
     630
    1951631    vertex_coordinates = mesh_dict['vertices']
    1952632    triangles = mesh_dict['triangles']
    1953633    if isinstance(mesh_dict['vertex_attributes'], num.ndarray):
    1954         old_vertex_attributes = mesh_dict['vertex_attributes'].tolist()
     634        old_point_attributes = mesh_dict['vertex_attributes'].tolist()
    1955635    else:
    1956         old_vertex_attributes = mesh_dict['vertex_attributes']
     636        old_point_attributes = mesh_dict['vertex_attributes']
    1957637
    1958638    if isinstance(mesh_dict['vertex_attribute_titles'], num.ndarray):
     
    1961641        old_title_list = mesh_dict['vertex_attribute_titles']
    1962642
    1963     if verbose: log.critical('Fit_to_Mesh_File: tsh file %s loaded' % mesh_file)
     643    if verbose:
     644        log.critical('tsh file %s loaded' % mesh_file)
    1964645
    1965646    # load in the points file
    1966647    try:
    1967648        geo = Geospatial_data(point_file, verbose=verbose)
    1968     except IOError,e:
     649    except IOError, e:
    1969650        if display_errors:
    1970651            log.critical("Could not load bad file: %s" % str(e))
     
    1972653
    1973654    point_coordinates = geo.get_data_points(absolute=True)
    1974     title_list,point_attributes = concatinate_attributelist( \
     655    title_list, point_attributes = concatinate_attributelist( \
    1975656        geo.get_all_attributes())
    1976657
     
    1981662        mesh_origin = None
    1982663
    1983     if verbose: log.critical("Fit_to_Mesh_File: points file loaded")
    1984     if verbose: log.critical("Fit_to_Mesh_File: fitting to mesh")
    1985     new_vertex_attributes = fit_to_mesh(point_coordinates,
     664    if verbose:
     665        log.critical("points file loaded")
     666    if verbose:
     667        log.critical("fitting to mesh")
     668    f = fit_to_mesh(point_coordinates,
    1986669                    vertex_coordinates,
    1987670                    triangles,
     
    1992675                    data_origin = None,
    1993676                    mesh_origin = mesh_origin)
    1994     if verbose: log.critical("Fit_to_Mesh_File: finished fitting to mesh")
     677    if verbose:
     678        log.critical("finished fitting to mesh")
    1995679
    1996680    # convert array to list of lists
    1997     new_vertex_attributes = new_vertex_attributes.tolist()
     681    new_point_attributes = f.tolist()
    1998682    #FIXME have this overwrite attributes with the same title - DSG
    1999683    #Put the newer attributes last
    2000     if old_title_list <> []:
     684    if old_title_list != []:
    2001685        old_title_list.extend(title_list)
    2002686        #FIXME can this be done a faster way? - DSG
    2003         for i in xrange(len(old_vertex_attributes)):
    2004             old_vertex_attributes[i].extend(new_vertex_attributes[i])
    2005         mesh_dict['vertex_attributes'] = old_vertex_attributes
     687        for i in range(len(old_point_attributes)):
     688            old_point_attributes[i].extend(new_point_attributes[i])
     689        mesh_dict['vertex_attributes'] = old_point_attributes
    2006690        mesh_dict['vertex_attribute_titles'] = old_title_list
    2007691    else:
    2008         mesh_dict['vertex_attributes'] = new_vertex_attributes
     692        mesh_dict['vertex_attributes'] = new_point_attributes
    2009693        mesh_dict['vertex_attribute_titles'] = title_list
    2010694
    2011     if verbose: log.critical("Fit_to_Mesh_File: exporting to file %s" % mesh_output_file)
     695    if verbose:
     696        log.critical("exporting to file %s" % mesh_output_file)
    2012697
    2013698    try:
    2014699        export_mesh_file(mesh_output_file, mesh_dict)
    2015     except IOError,e:
     700    except IOError, e:
    2016701        if display_errors:
    2017702            log.critical("Could not write file %s", str(e))