Ignore:
Timestamp:
Feb 13, 2013, 3:26:15 PM (12 years ago)
Author:
steve
Message:

Rolling in Padarn's update to fit_interpolation

Location:
trunk/anuga_core/source/anuga
Files:
27 edited

Legend:

Unmodified
Added
Removed
  • trunk/anuga_core/source/anuga/abstract_2d_finite_volumes/general_mesh.py

    r8533 r8690  
    671671        return unique_verts.keys()
    672672
     673        # Note Padarn 27/11/12:
     674        # This function was modified, but then it was deicded it was not
     675        # needed. It should be restored if it is used elsewhere in the code
     676        # (it was being used in quantity.py in the _set_vertex_values function).
     677        # Note however, the function in the head of the code is very slow and
     678        # could be easily sped up many fold.
    673679    def get_triangles_and_vertices_per_node(self, node=None):
    674680        """Get triangles associated with given node.
  • trunk/anuga_core/source/anuga/abstract_2d_finite_volumes/generic_domain.py

    r8650 r8690  
    7373          ...
    7474        """
    75 
    76 
     75       
    7776        if verbose: log.critical('Domain: Initialising')
    7877
     
    107106                         #number_of_full_triangles=number_of_full_triangles,
    108107                         verbose=verbose)
    109 
     108       
    110109        if verbose: log.critical('Domain: Expose mesh attributes')
    111110
  • trunk/anuga_core/source/anuga/abstract_2d_finite_volumes/quantity.py

    r8661 r8690  
    623623                                                     use_cache=use_cache)
    624624        elif filename is not None:
     625
    625626            if hasattr(self.domain, 'points_file_block_line_size'):
    626627                max_read_lines = self.domain.points_file_block_line_size
     
    940941        """
    941942
     943
    942944        msg = 'Filename must be a text string'
    943945        assert isinstance(filename, basestring), msg
     
    12971299        #
    12981300        self._set_vertex_values(vertex_list, A)
    1299            
     1301
     1302    # Note Padarn 27/11/12:
     1303    # This function has been changed and now uses an external c function
     1304    # to set the 'vertex_values' instead of a python for loop. The function
     1305    # 'get_triangles_and_vertices_per_node' has been removed and replaced by
     1306    # 'build_inverted_triangle_structure. This now adds extra stored array to
     1307    # the mesh object - this could be removed after the c function below uses
     1308    #them.
     1309    # Note, the naming of this function seems confusing - it seems to actually
     1310    # update the 'node values' given a list of vertices.
    13001311    def _set_vertex_values(self, vertex_list, A):
    13011312        """Go through list of unique vertices
     
    13031314        are obtained from a pts file through fitting
    13041315        """
    1305 
    1306         # Go through list of unique vertices
    1307         for i_index, unique_vert_id in enumerate(vertex_list):
    1308             triangles = self.domain.get_triangles_and_vertices_per_node(node=unique_vert_id)
    1309 
    1310             # In case there are unused points
    1311             if len(triangles) == 0:
    1312                 continue
    1313 
    1314             # Go through all triangle, vertex pairs
    1315             # touching vertex unique_vert_id and set corresponding vertex value
    1316             for triangle_id, vertex_id in triangles:
    1317                 self.vertex_values[triangle_id, vertex_id] = A[i_index]
    1318 
    1319         # Intialise centroid and edge_values
     1316        # If required, set up required arrays storing information about the triangle
     1317        # vertex structure of the mesh.
     1318        if not (hasattr(self.domain.mesh, 'number_of_triangles_per_node') and \
     1319                hasattr(self.domain.mesh, 'vertex_value_indices') and \
     1320                hasattr(self.domain.mesh, 'node_index')):
     1321            self.build_inverted_triangle_structure()
     1322
     1323        set_vertex_values_c(self, num.array(vertex_list), A)
    13201324        self.interpolate()
    13211325
     
    15761580         interpolate_from_vertices_to_edges,\
    15771581         interpolate_from_edges_to_vertices,\
     1582         set_vertex_values_c, \
    15781583         update
    15791584else:
  • trunk/anuga_core/source/anuga/abstract_2d_finite_volumes/quantity_ext.c

    r8569 r8690  
    864864}
    865865
     866// Note Padarn 27/11/12:
     867// This function is used to set all the node values of a quantity
     868// from a list of vertices and values at those vertices. Called in
     869// quantity.py by _set_vertex_values.
     870// Naming is a little confusing - but sticking with convention.
     871int _set_vertex_values_c(int num_verts,
     872                        long * vertices,
     873                        long * node_index,
     874                        long * number_of_triangles_per_node,
     875                        long * vertex_value_indices,
     876                        double * vertex_values,
     877                        double * A
     878                        ){
     879  int i,j,num_triangles,vert,triangle;
     880
     881  for(i=0;i<num_verts;i++){
     882 
     883    vert=vertices[i];
     884    num_triangles = number_of_triangles_per_node[vertices[i]];
     885 
     886    for(j=0;j<num_triangles;j++){
     887 
     888      triangle = vertex_value_indices[node_index[vert]+j];   
     889      vertex_values[triangle]=A[i];
     890    }
     891
     892  }
     893
     894  return 0;
     895
     896}
    866897
    867898//-----------------------------------------------------
     
    10111042}
    10121043
    1013 
     1044PyObject *set_vertex_values_c(PyObject *self, PyObject *args) {
     1045
     1046  PyObject *quantity,*domain,*mesh;
     1047  PyArrayObject *vertex_values, *node_index, *A, *vertices;
     1048  PyArrayObject *number_of_triangles_per_node, *vertex_value_indices;
     1049
     1050  int N,err;
     1051
     1052
     1053  // Convert Python arguments to C
     1054  if (!PyArg_ParseTuple(args, "OOO", &quantity, &vertices, &A)) {
     1055    PyErr_SetString(PyExc_RuntimeError,
     1056        "quantity_ext.c: set_vertex_values_c could not parse input");
     1057    return NULL;
     1058  }
     1059
     1060  domain = PyObject_GetAttrString(quantity, "domain");
     1061  if (!domain) {
     1062    PyErr_SetString(PyExc_RuntimeError,
     1063        "extrapolate_gradient could not obtain domain object from quantity");
     1064    return NULL;
     1065  }
     1066
     1067  mesh = PyObject_GetAttrString(domain, "mesh");
     1068  if (!mesh) {
     1069    PyErr_SetString(PyExc_RuntimeError,
     1070        "extrapolate_gradient could not obtain mesh object from domain");
     1071    return NULL;
     1072  }
     1073
     1074  vertex_values        = get_consecutive_array(quantity, "vertex_values");
     1075  node_index             = get_consecutive_array(mesh,"node_index");
     1076  number_of_triangles_per_node = get_consecutive_array(mesh,"number_of_triangles_per_node");
     1077  vertex_value_indices = get_consecutive_array(mesh,"vertex_value_indices");
     1078
     1079  CHECK_C_CONTIG(vertices);
     1080  CHECK_C_CONTIG(A);
     1081
     1082  //N = centroid_values -> dimensions[0];
     1083
     1084  int num_verts = vertices->dimensions[0];
     1085 
     1086  err = _set_vertex_values_c(num_verts,
     1087                          (long*) vertices->data,
     1088                          (long*) node_index->data,
     1089                          (long*) number_of_triangles_per_node->data,
     1090                          (long*) vertex_value_indices->data,
     1091                          (double*) vertex_values->data,
     1092                          (double*) A->data);
     1093
     1094
     1095  // Release and return
     1096  Py_DECREF(vertex_values);
     1097  Py_DECREF(node_index);
     1098  Py_DECREF(number_of_triangles_per_node);
     1099  Py_DECREF(vertex_value_indices);
     1100
     1101  return Py_BuildValue("");
     1102}
    10141103
    10151104PyObject *interpolate(PyObject *self, PyObject *args) {
     
    22782367        {"interpolate", interpolate, METH_VARARGS, "Print out"},
    22792368        {"average_vertex_values", average_vertex_values, METH_VARARGS, "Print out"},           
    2280         {NULL, NULL, 0, NULL}   // sentinel
     2369        {"set_vertex_values_c", set_vertex_values_c, METH_VARARGS, "Print out"},       
     2370{NULL, NULL, 0, NULL}   // sentinel
    22812371};
    22822372
  • trunk/anuga_core/source/anuga/abstract_2d_finite_volumes/util.py

    r8680 r8690  
    853853                   verbose=False,
    854854                   output_centroids=False):
    855 
    856855    from gauge import sww2timeseries as sww2timeseries_new
    857856    return sww2timeseries_new(swwfiles,
  • trunk/anuga_core/source/anuga/caching/caching.py

    r8125 r8690  
    391391  ##if options['savestat']:
    392392    addstatsline(CD,funcname,FN,Retrieved,reason,comptime,loadtime,compressed)
    393 
    394393  return(T)  # Return results in all cases
    395394
     
    934933        reason = 3 # Arguments have changed
    935934       
    936    
     935   # PADARN NOTE 17/12/12: Adding a special case to handle the existence of a
     936  # FitInterpolate object. C Structures are serialised so they can be pickled.
     937  #---------------------------------------------------------------------------
     938  from anuga.fit_interpolate.general_fit_interpolate import FitInterpolate
     939 
     940  # Setup for quad_tree extension
     941  from anuga.utilities import compile
     942  if compile.can_use_C_extension('quad_tree_ext.c'):
     943      import quad_tree_ext
     944  else:
     945      msg = "C implementation of quad tree extension not avaliable"
     946      raise Exception(msg)
     947
     948  # Setup for sparse_matrix extension
     949  from anuga.utilities import compile
     950  if compile.can_use_C_extension('sparse_matrix_ext.c'):
     951      import sparse_matrix_ext
     952  else:
     953      msg = "C implementation of sparse_matrix extension not avaliable"
     954      raise Exception(msg)
     955
     956  from anuga.geometry.aabb import AABB
     957
     958  if isinstance(T, FitInterpolate):
     959
     960    if hasattr(T,"D"):
     961        T.D=sparse_matrix_ext.deserialise_dok(T.D)
     962    if hasattr(T,"AtA"):
     963        T.AtA=sparse_matrix_ext.deserialise_dok(T.AtA)
     964    if hasattr(T,"root"):
     965        if hasattr(T.root,"root"):
     966            mesh = T.mesh
     967            extents = AABB(*mesh.get_extent(absolute=True))
     968            extents.grow(1.001)  # To avoid round off error
     969            extents = [extents.xmin, extents.xmax, extents.ymin, extents.ymax]
     970            T.root.root=quad_tree_ext.deserialise(T.root.root,\
     971                              mesh.triangles, mesh.vertex_coordinates, num.array(extents))
     972
     973  #---------------------------------------------------------------------------
     974
    937975  return((T, FN, Retrieved, reason, comptime, loadtime, compressed))
    938976
     
    10781116  import time, os, sys
    10791117
     1118  # PADARN NOTE 17/12/12: Adding a special case to handle the existence of a
     1119  # FitInterpolate object. C Structures are serialised so they can be pickled.
     1120  #---------------------------------------------------------------------------
     1121  from anuga.fit_interpolate.general_fit_interpolate import FitInterpolate
     1122 
     1123  # Setup for quad_tree extension
     1124  from anuga.utilities import compile
     1125  if compile.can_use_C_extension('quad_tree_ext.c'):
     1126      import quad_tree_ext
     1127  else:
     1128      msg = "C implementation of quad tree extension not avaliable"
     1129      raise Exception(msg)
     1130
     1131  # Setup for sparse_matrix extension
     1132  from anuga.utilities import compile
     1133  if compile.can_use_C_extension('sparse_matrix_ext.c'):
     1134      import sparse_matrix_ext
     1135  else:
     1136      msg = "C implementation of sparse_matrix extension not avaliable"
     1137      raise Exception(msg)
     1138
     1139  from anuga.geometry.aabb import AABB
     1140
     1141  if isinstance(T, FitInterpolate):
     1142    if hasattr(T,"D"):
     1143        T.D=sparse_matrix_ext.serialise_dok(T.D)
     1144    if hasattr(T,"AtA"):
     1145        T.AtA=sparse_matrix_ext.serialise_dok(T.AtA)
     1146    if hasattr(T,"root"):
     1147        if hasattr(T.root,"root"):
     1148            T.root.root=quad_tree_ext.serialise(T.root.root)
     1149
     1150
     1151  #---------------------------------------------------------------------------
     1152
    10801153  (datafile, compressed1) = myopen(CD+FN+'_'+file_types[0],'wb',compression)
    10811154  (admfile, compressed2) = myopen(CD+FN+'_'+file_types[2],'wb',compression)
     
    11141187  #else:
    11151188  #  pass  # FIXME: Take care of access rights under Windows
     1189
     1190  # PADARN NOTE 17/12/12: See above - deserialise in case will be used.
     1191  #---------------------------------------------------------------------------
     1192  from anuga.fit_interpolate.general_fit_interpolate import FitInterpolate
     1193
     1194  if isinstance(T, FitInterpolate):
     1195    if hasattr(T,"D"):
     1196        T.D=sparse_matrix_ext.deserialise_dok(T.D)
     1197    if hasattr(T,"AtA"):
     1198        T.AtA=sparse_matrix_ext.deserialise_dok(T.AtA)
     1199    if hasattr(T,"root"):
     1200        if hasattr(T.root,"root"):
     1201            mesh = T.mesh
     1202            extents = AABB(*mesh.get_extent(absolute=True))
     1203            extents.grow(1.001)  # To avoid round off error
     1204            extents = [extents.xmin, extents.xmax, extents.ymin, extents.ymax]
     1205            T.root.root=quad_tree_ext.deserialise(T.root.root,\
     1206                              mesh.triangles, mesh.vertex_coordinates, num.array(extents))
     1207
     1208
     1209  #---------------------------------------------------------------------------
    11161210
    11171211  return(savetime)
  • trunk/anuga_core/source/anuga/config.py

    r8686 r8690  
    178178
    179179# Water depth below which it is considered to be 0 in the model
    180 minimum_allowed_height = 0.001
     180minimum_allowed_height = 1.0e-3
    181181
    182182# Water depth below which it is *stored* as 0
  • trunk/anuga_core/source/anuga/file_conversion/sww2array.py

    r8688 r8690  
    9898    if verbose:
    9999        log.critical('Reading from %s' % name_in)
    100         log.critical('Output directory is %s' % name_out)
     100
    101101
    102102    from Scientific.IO.NetCDF import NetCDFFile
  • trunk/anuga_core/source/anuga/file_conversion/test_urs2sts.py

    r8687 r8690  
    19581958        if not sys.platform == 'win32':
    19591959            os.remove(sts_file+'.sts')
    1960 
    1961         try:
    1962             os.remove(meshname)
    1963         except:
    1964             pass
     1960       
     1961        os.remove(meshname)
    19651962       
    19661963       
  • trunk/anuga_core/source/anuga/fit_interpolate/fit.py

    r8624 r8690  
    2828
    2929from anuga.abstract_2d_finite_volumes.neighbour_mesh import Mesh
    30 from anuga.caching import cache           
     30from anuga.caching import cache
    3131from anuga.geospatial_data.geospatial_data import Geospatial_data, \
    3232     ensure_absolute
    3333from anuga.fit_interpolate.general_fit_interpolate import FitInterpolate
    34 from anuga.utilities.sparse import Sparse, Sparse_CSR
    35 from anuga.geometry.polygon import inside_polygon, is_inside_polygon
    36 from anuga.pmesh.mesh_quadtree import MeshQuadtree
    37 
     34
     35from anuga.utilities.sparse import Sparse_CSR
     36from anuga.utilities.numerical_tools import ensure_numeric
    3837from anuga.utilities.cg_solve import conjugate_gradient
    39 from anuga.utilities.numerical_tools import ensure_numeric, gradient
    4038from anuga.config import default_smoothing_parameter as DEFAULT_ALPHA
    4139import anuga.utilities.log as log
     
    4846import sys
    4947
     48# Setup for c fitting routines
     49from anuga.utilities import compile
     50if compile.can_use_C_extension('fitsmooth.c'):
     51    import fitsmooth
     52else:
     53    msg = "C implementation of fitting algorithms (fitsmooth.c) not avalaible"
     54    raise Exception(msg)
     55
     56# Setup for quad_tree extension
     57from anuga.utilities import compile
     58if compile.can_use_C_extension('quad_tree_ext.c'):
     59    import quad_tree_ext
     60else:
     61    msg = "C implementation of quad tree extension not avaliable"
     62    raise Exception(msg)
     63
     64# Setup for sparse_matrix extension
     65from anuga.utilities import compile
     66if compile.can_use_C_extension('sparse_matrix_ext.c'):
     67    import sparse_matrix_ext
     68else:
     69    msg = "C implementation of sparse_matrix extension not avaliable"
     70    raise Exception(msg)
     71
     72
    5073
    5174class Fit(FitInterpolate):
    52    
     75
    5376    def __init__(self,
    5477                 vertex_coordinates=None,
     
    5679                 mesh=None,
    5780                 mesh_origin=None,
    58                  alpha = None,
    59                  verbose=False):
    60 
     81                 alpha=None,
     82                 verbose=False,
     83                 cg_precon='None',
     84                 use_c_cg=True):
    6185
    6286        """
     87        Padarn Note 05/12/12: This documentation should probably
     88        be updated to account for the fact that the fitting is now
     89        done in c. I wasn't sure what details were neccesary though.
     90
    6391        Fit data at points to the vertices of a mesh.
    6492
     
    6694
    6795          vertex_coordinates: List of coordinate pairs [xi, eta] of
    68               points constituting a mesh (or an m x 2 numeric array or
     96          points constituting a mesh (or an m x 2 numeric array or
    6997              a geospatial object)
    7098              Points may appear multiple times
     
    73101          triangles: List of 3-tuples (or a numeric array) of
    74102              integers representing indices of all vertices in the mesh.
    75 
    76           mesh: Object containing vertex_coordinates and triangles. Either
    77               mesh = None or both vertex_coordinates and triangles = None
    78103
    79104          mesh_origin: A geo_reference object or 3-tuples consisting of
     
    89114        To use this in a blocking way, call  build_fit_subset, with z info,
    90115        and then fit, with no point coord, z info.
    91        
    92116        """
    93117        # Initialise variabels
    94118        if alpha is None:
    95119            self.alpha = DEFAULT_ALPHA
    96         else:   
     120        else:
    97121            self.alpha = alpha
    98            
     122
    99123        FitInterpolate.__init__(self,
    100124                 vertex_coordinates,
     
    103127                 mesh_origin=mesh_origin,
    104128                 verbose=verbose)
    105        
    106         m = self.mesh.number_of_nodes # Nbr of basis functions (vertices)
    107        
     129
    108130        self.AtA = None
    109131        self.Atz = None
    110 
     132        self.D = None
    111133        self.point_count = 0
    112         if self.alpha <> 0:
    113             if verbose: log.critical('Fit: Building smoothing matrix')
    114             self._build_smoothing_matrix_D(verbose=verbose)
    115 
    116         if verbose: log.critical('Fit: Get Boundary Polygon')
    117         bd_poly = self.mesh.get_boundary_polygon()   
     134
     135        # NOTE PADARN: NEEDS FIXING - currently need smoothing matrix
     136        # even if alpha is zero, due to c function expecting it. This
     137        # could and should be removed.
     138        if True:
     139            if verbose:
     140                log.critical('Building smoothing matrix')
     141            self.D = self._build_smoothing_matrix_D()
     142
     143        bd_poly = self.mesh.get_boundary_polygon()
    118144        self.mesh_boundary_polygon = ensure_numeric(bd_poly)
    119145
    120         if verbose: log.critical('Fit: Done')
    121 
    122            
     146        self.cg_precon=cg_precon
     147        self.use_c_cg=use_c_cg
     148
    123149    def _build_coefficient_matrix_B(self,
    124                                   verbose = False):
     150                                  verbose=False):
    125151        """
    126         Build final coefficient matrix
    127 
    128         Precon
    129         If alpha is not zero, matrix D has been built
    130         Matrix Ata has been built
     152        Build final coefficient matrix from AtA and D
    131153        """
    132154
    133         if verbose:
    134             import time
    135             t0 = time.time()
    136             print 'Fit: Build Coefficient Matrix B ',
    137 
    138 
    139         if self.alpha <> 0:
    140             #if verbose: log.critical('Building smoothing matrix')
    141             #self._build_smoothing_matrix_D()
    142             # FIXME SR: This is quite time consuming.
    143             # As AtA and D have same structure it should be possible
    144             # to speed this up.
    145             self.B = self.AtA + self.alpha*self.D
    146         else:
    147             self.B = self.AtA
    148 
    149 
    150         if verbose:
    151             import time
    152             dt = time.time()-t0
    153             print '%g secs' % dt
    154             print 'Fit: Convert Coefficient Matrix B to Sparse_CSR'
    155            
    156         # Convert self.B matrix to CSR format for faster matrix vector
    157         self.B = Sparse_CSR(self.B)
    158 
    159     def _build_smoothing_matrix_D(self, verbose=False):
     155        msize = self.mesh.number_of_nodes
     156
     157        self.B = fitsmooth.build_matrix_B(self.D, \
     158                                          self.AtA, self.alpha)
     159
     160        # Convert self.B matrix to CSR format
     161        self.B = Sparse_CSR(data=num.array(self.B[0]),\
     162                                  Colind=num.array(self.B[1]),\
     163                                  rowptr=num.array(self.B[2]), \
     164                                  m=msize, n=msize)
     165        # NOTE PADARN: The above step could potentially be removed
     166        # and the sparse matrix worked with directly in C. Not sure
     167        # if this would be worthwhile.
     168
     169    def _build_smoothing_matrix_D(self):
    160170        """Build m x m smoothing matrix, where
    161171        m is the number of basis functions phi_k (one per vertex)
     
    181191        \frac{\partial \phi_k}{\partial x} for a particular triangle
    182192        are obtained by computing the gradient a_k, b_k for basis function k
     193
     194        NOTE PADARN: All of this is now done in an external C function, and the
     195        result is stored in a Capsule object, meaning the entries cannot be directly
     196        accessed.
    183197        """
    184        
    185         # FIXME: algorithm might be optimised by computing local 9x9
    186         # "element stiffness matrices:
    187 
    188         m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)
    189 
    190         self.D = Sparse(m,m)
    191 
    192         import time
    193         t0 = time.time()
    194 
    195         if verbose :
    196             print '['+60*' '+']',
    197             sys.stdout.flush()
    198 
    199         N = len(self.mesh)
    200         M = N/60
    201 
    202         # For each triangle compute contributions to D = D1+D2
    203         for i in xrange(N):
    204 
    205             if verbose and i%M == 0 :
    206                 #restart_line()
    207                 print '\r['+(i/M)*'.'+(60-(i/M))*' ' +']',
    208                 sys.stdout.flush()
    209 
    210             # Get area
    211             area = self.mesh.areas[i]
    212 
    213             # Get global vertex indices
    214             v0 = self.mesh.triangles[i,0]
    215             v1 = self.mesh.triangles[i,1]
    216             v2 = self.mesh.triangles[i,2]
    217 
    218             # Get the three vertex_points
    219             xi0 = self.mesh.get_vertex_coordinate(i, 0)
    220             xi1 = self.mesh.get_vertex_coordinate(i, 1)
    221             xi2 = self.mesh.get_vertex_coordinate(i, 2)
    222 
    223             # Compute gradients for each vertex
    224             a0, b0 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
    225                               1, 0, 0)
    226 
    227             a1, b1 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
    228                               0, 1, 0)
    229 
    230             a2, b2 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
    231                               0, 0, 1)
    232 
    233             # Compute diagonal contributions
    234             self.D[v0,v0] += (a0*a0 + b0*b0)*area
    235             self.D[v1,v1] += (a1*a1 + b1*b1)*area
    236             self.D[v2,v2] += (a2*a2 + b2*b2)*area
    237 
    238             # Compute contributions for basis functions sharing edges
    239             e01 = (a0*a1 + b0*b1)*area
    240             self.D[v0,v1] += e01
    241             self.D[v1,v0] += e01
    242 
    243             e12 = (a1*a2 + b1*b2)*area
    244             self.D[v1,v2] += e12
    245             self.D[v2,v1] += e12
    246 
    247             e20 = (a2*a0 + b2*b0)*area
    248             self.D[v2,v0] += e20
    249             self.D[v0,v2] += e20
    250 
    251         if verbose:
    252             print ' %f secs' % (time.time()-t0)
    253 
     198
     199        # NOTE PADARN: Should the input arguments here be checked - making
     200        # sure that they are floats? Not sure if this is done elsewhere.
     201        # NOTE PADARN: Should global coordinates be used for the smoothing
     202        # matrix, or is thids not important?
     203        return fitsmooth.build_smoothing_matrix(self.mesh.triangles, \
     204          self.mesh.areas, self.mesh.vertex_coordinates)
     205
     206
     207    # NOTE PADARN: This function was added to emulate behavior of the original
     208    # class not using external c functions. This method is dangerous as D could
     209    # be very large - it was added as it is used in a unit test.
    254210    def get_D(self):
    255         return self.D.todense()
    256 
    257 
    258 
    259     def _build_matrix_AtA_Atz(self,
    260                               point_coordinates,
    261                               z,
    262                               verbose = False,
    263                               output=None):
    264 
    265 
     211        return fitsmooth.return_full_D(self.D, self.mesh.number_of_nodes)
     212
     213    # NOTE PADARN: This function was added to emulate behavior of the original
     214    # class so as to pass a unit test. It is completely uneeded.
     215    def build_fit_subset(self, point_coordinates, z=None, attribute_name=None,
     216                              verbose=False, output='dot'):
     217        self._build_matrix_AtA_Atz(point_coordinates, z, attribute_name, verbose, output)
     218
     219    def _build_matrix_AtA_Atz(self, point_coordinates, z=None, attribute_name=None,
     220                              verbose=False, output='dot'):
    266221        """Build:
    267222        AtA  m x m  interpolation matrix, and,
     
    277232        This function can be called again and again, with sub-sets of
    278233        the point coordinates.  Call fit to get the results.
    279        
     234
    280235        Preconditions
    281236        z and points are numeric
     
    285240        """
    286241
    287 
    288         if verbose and output == 'counter':
    289             print 'Fit: Build Matrix AtA Atz'
    290 
    291         import time
    292         t0 = time.time()
    293 
    294         # Build n x m interpolation matrix
    295         if self.AtA == None:
    296             # AtA and Atz need to be initialised.
    297             m = self.mesh.number_of_nodes
    298             if len(z.shape) > 1:
    299                 att_num = z.shape[1]
    300                 self.Atz = num.zeros((m,att_num), num.float)
     242        if isinstance(point_coordinates, Geospatial_data):
     243            point_coordinates = point_coordinates.get_data_points( \
     244                absolute=True)
     245
     246        # Convert input to numeric arrays
     247        if z is not None:
     248            z = ensure_numeric(z, num.float)
     249        else:
     250            msg = 'z not specified'
     251            assert isinstance(point_coordinates, Geospatial_data), msg
     252            z = point_coordinates.get_attributes(attribute_name)
     253
     254        point_coordinates = ensure_numeric(point_coordinates, num.float)
     255
     256        npts = len(z)
     257        z = num.array(z)
     258        # NOTE PADARN : This copy might be needed to
     259        # make sure memory is contig - would be better to read in C..
     260        z = z.copy()
     261
     262        self.point_count += z.shape[0]
     263
     264        zdim = 1
     265        if len(z.shape) != 1:
     266            zdim = z.shape[1]
     267
     268        [AtA, Atz] = fitsmooth.build_matrix_AtA_Atz_points(self.root.root, \
     269               self.mesh.number_of_nodes, \
     270               self.mesh.triangles, \
     271               num.array(point_coordinates), z, zdim, npts)
     272
     273        if verbose and output == 'dot':
     274            print '\b.',
     275            sys.stdout.flush()
     276        if zdim == 1:
     277            Atz = num.array(Atz[0])
     278        else:
     279            Atz = num.array(Atz).transpose()
     280
     281        if self.AtA is None and self.Atz is None:
     282            self.AtA = AtA
     283            self.Atz = Atz
     284        else:
     285            fitsmooth.combine_partial_AtA_Atz(self.AtA, AtA,\
     286                    self.Atz, Atz, zdim, self.mesh.number_of_nodes)
     287
     288    def _build_matrix_AtA_Atz_file(self, filename, attribute_name=None, max_read_lines=500,\
     289                                   verbose=False):
     290        """Build:
     291        AtA  m x m  interpolation matrix, and,
     292        Atz  m x a  interpolation matrix where,
     293        m is the number of basis functions phi_k (one per vertex)
     294        a is the number of data attributes
     295
     296        This algorithm uses a quad tree data structure for fast binning of
     297        data points.
     298
     299        If Ata is None, the matrices AtA and Atz are created.
     300
     301        This function can be called again and again, with sub-sets of
     302        the point coordinates.  Call fit to get the results.
     303
     304        Preconditions
     305        z and points are numeric
     306        Point_coordindates and mesh vertices have the same origin.
     307
     308        The number of attributes of the data points does not change
     309        """
     310        # PADARN NOTE: THe following block of code is translated from
     311        # things that were being done in the Geospatial_data object
     312        # which is no longer required if data from file in c.
     313        #---------------------------------------------------------
     314        # Default attribute name here seems to only have possibility
     315        # of being 'None'.
     316        G_data = Geospatial_data(filename,
     317                                         max_read_lines=1,
     318                                         load_file_now=True,
     319                                         verbose=verbose)
     320
     321
     322        if attribute_name is None:
     323            if G_data.default_attribute_name is not None:
     324                attribute_name = G_data.default_attribute_name
    301325            else:
    302                 att_num = 1
    303                 self.Atz = num.zeros((m,), num.float)
    304             assert z.shape[0] == point_coordinates.shape[0]
    305 
    306             AtA = Sparse(m,m)
    307             # The memory damage has been done by now.
    308         else:
    309              AtA = self.AtA # Did this for speed, did ~nothing
    310         self.point_count += point_coordinates.shape[0]
    311 
    312 
    313         inside_indices = inside_polygon(point_coordinates,
    314                                         self.mesh_boundary_polygon,
    315                                         closed=True,
    316                                         verbose=False) # Suppress output
    317        
    318         n = len(inside_indices)
    319 
    320         # Compute matrix elements for points inside the mesh
    321         triangles = self.mesh.triangles # Shorthand
    322 
    323 
    324         if verbose and output == 'counter' :
    325             print '['+60*' '+']',
    326             #print '\b.',
    327             sys.stdout.flush()
    328 
    329         m = max(n/60,1)
    330 
    331 
    332         for d in xrange(n):
    333             i = inside_indices[d]
    334 
    335 #        for d, i in enumerate(inside_indices):
    336 #            # For each data_coordinate point
    337 #            # if verbose and d%((n+10)/10)==0: log.critical('Doing %d of %d'
    338 #                                                            # %(d, n))
    339 #            x = point_coordinates[i]
    340 
    341             # For each data_coordinate point
    342             # if verbose and d%((n+10)/10)==0: log.critical('Doing %d of %d'
    343                                                             # %(d, n))
    344 
    345 
    346             if verbose and output == 'counter' and i%m == 0 :
    347                 print '\r['+(i/m)*'.'+(60-(i/m))*' '+']',
    348                 sys.stdout.flush()
    349 
    350 
    351             x = point_coordinates[i]
    352            
    353             element_found, sigma0, sigma1, sigma2, k = \
    354                            self.root.search_fast(x)
    355            
    356             if element_found is True:
    357                 j0 = triangles[k,0] # Global vertex id for sigma0
    358                 j1 = triangles[k,1] # Global vertex id for sigma1
    359                 j2 = triangles[k,2] # Global vertex id for sigma2
    360 
    361                 sigmas = {j0:sigma0, j1:sigma1, j2:sigma2}
    362                 js     = [j0,j1,j2]
    363 
    364                 for j in js:
    365                     self.Atz[j] +=  sigmas[j]*z[i]
    366                    
    367                     for k in js:
    368                         AtA[j,k] += sigmas[j]*sigmas[k]
    369             else:
    370                 flag = is_inside_polygon(x,
    371                                          self.mesh_boundary_polygon,
    372                                          closed=True,
    373                                          verbose=False) # Suppress output
    374                 msg = 'Point (%f, %f) is not inside mesh boundary' % tuple(x)
    375                 assert flag is True, msg               
    376                
    377                 # data point has fallen within a hole - so ignore it.
    378 
    379 
    380         if verbose and output == 'counter':
    381             print ' %f secs' % (time.time()-t0)
    382            
     326                attribute_name = G_data.attributes.keys()[0]
     327                # above line takes the first one from keys
     328
     329        if verbose is True:
     330            log.critical('Using attribute %s' % attribute_name)
     331            log.critical('Available attributes: %s' % (G_data.attributes.keys()))
     332
     333        msg = 'Attribute name %s does not exist in data set' % attribute_name
     334        assert G_data.attributes.has_key(attribute_name), msg
     335        #---------------------------------------------------------
     336
     337        # MAKE SURE READING ABSOLUTE POINT COORDINATES
     338        [AtA, Atz, npts] = fitsmooth.build_matrix_AtA_Atz_fileread(self.root.root, \
     339               self.mesh.number_of_nodes, \
     340               self.mesh.triangles, \
     341               filename, \
     342               attribute_name, \
     343               max_read_lines)
     344        self.point_count = npts
    383345        self.AtA = AtA
    384 
    385        
     346        self.Atz = Atz
     347
    386348    def fit(self, point_coordinates_or_filename=None, z=None,
    387349            verbose=False,
    388350            point_origin=None,
    389351            attribute_name=None,
    390             max_read_lines=None):
     352            max_read_lines=500,
     353            use_blocking_option2=True):
    391354        """Fit a smooth surface to given 1d array of data points z.
    392355
     
    397360        point_coordinates: The co-ordinates of the data points.
    398361              List of coordinate pairs [x, y] of
    399               data points or an nx2 numeric array or a Geospatial_data object
    400               or points file filename 
     362        data points or an nx2 numeric array or a Geospatial_data object
     363              or points file filename
    401364          z: Single 1d vector or array of data at the point_coordinates.
    402          
     365
    403366        """
    404 
    405 
    406         if verbose:
    407             print 'Fit.fit: Initializing'
    408 
    409         # Use blocking to load in the point info
    410367        if isinstance(point_coordinates_or_filename, basestring):
    411             msg = "Don't set a point origin when reading from a file"
    412             assert point_origin is None, msg
    413             filename = point_coordinates_or_filename
    414 
    415             G_data = Geospatial_data(filename,
    416                                      max_read_lines=max_read_lines,
    417                                      load_file_now=False,
    418                                      verbose=verbose)
    419 
    420 
    421             for i, geo_block in enumerate(G_data):
    422 
    423                 if verbose is True and 0 == i%200:
    424                     pass
    425                     # The time this will take
    426                     # is dependant on the # of Triangles
    427                        
    428                     #log.critical('Processing Block %d' % i)
    429                     # FIXME (Ole): It would be good to say how many blocks
    430                     # there are here. But this is no longer necessary
    431                     # for pts files as they are reported in geospatial_data
    432                     # I suggest deleting this verbose output and make
    433                     # Geospatial_data more informative for txt files.
    434                     #
    435                     # I still think so (12/12/7, Ole).
    436            
    437 
    438                    
    439                 # Build the array
    440 
    441                 points = geo_block.get_data_points(absolute=True)
    442                 z = geo_block.get_attributes(attribute_name=attribute_name)
    443                 self.build_fit_subset(points, z, attribute_name, verbose)
    444 
    445 
    446                 # FIXME(Ole): I thought this test would make sense here
    447                 # See test_fitting_example_that_crashed_2 in test_shallow_water_domain.py
    448                 # Committed 11 March 2009
    449                 msg = 'Matrix AtA was not built'
    450                 assert self.AtA is not None, msg
    451 
    452             point_coordinates = None
    453 
    454             if verbose: print ''
    455            
     368            if point_coordinates_or_filename[-4:] != ".pts":
     369                use_blocking_option2 = False
     370        # NOTE PADARN 12/12/12: Currently trying to get all blocking to be done
     371        # in c, but blocking option 1, which does everything using the python
     372        # interface can be used in the meantime (much slower).
     373        #-----------------------BLOCKING OPTION 1----------------------------
     374        if use_blocking_option2 is False:
     375            if verbose:
     376                print 'Fit.fit: Initializing'
     377
     378            # Use blocking to load in the point info
     379            if isinstance(point_coordinates_or_filename, basestring):
     380                msg = "Don't set a point origin when reading from a file"
     381                assert point_origin is None, msg
     382                filename = point_coordinates_or_filename
     383
     384                G_data = Geospatial_data(filename,
     385                                         max_read_lines=max_read_lines,
     386                                         load_file_now=False,
     387                                         verbose=verbose)
     388
     389                for i, geo_block in enumerate(G_data):
     390
     391                   # Build the array
     392                    points = geo_block.get_data_points(absolute=True)
     393                    z = geo_block.get_attributes(attribute_name=attribute_name)
     394
     395                    self._build_matrix_AtA_Atz(points, z, attribute_name, verbose)
     396
     397                point_coordinates = None
     398
     399                if verbose:
     400                    print ''
     401            else:
     402                point_coordinates = point_coordinates_or_filename
     403        #-----------------------BLOCKING OPTION 2----------------------------
    456404        else:
    457             point_coordinates =  point_coordinates_or_filename
    458 
    459 
    460 
     405            if verbose:
     406                print 'Fit.fit: Initializing'
     407
     408            if isinstance(point_coordinates_or_filename, basestring):
     409                msg = "Don't set a point origin when reading from a file"
     410                assert point_origin is None, msg
     411                filename = point_coordinates_or_filename
     412
     413                self._build_matrix_AtA_Atz_file(filename, attribute_name=attribute_name,\
     414                                                verbose=verbose)
     415
     416                point_coordinates = None
     417            else:
     418                point_coordinates = point_coordinates_or_filename
     419        #--------------------------------------------------------------------
    461420
    462421        if point_coordinates is None:
    463             if verbose: log.critical('Fit.fit: Warning: no data points in fit')
     422            if verbose:
     423                log.critical('Fit.fit: Warning: no data points in fit')
    464424            msg = 'No interpolation matrix.'
    465425            assert self.AtA is not None, msg
    466426            assert self.Atz is not None
    467            
     427
    468428            # FIXME (DSG) - do  a message
    469429        else:
     
    472432            # if isinstance(point_coordinates,Geospatial_data) and z is None:
    473433            # z will come from the geo-ref
    474             self.build_fit_subset(point_coordinates, z, verbose=verbose, output='counter')
    475 
    476 
    477 
    478 
     434
     435            self._build_matrix_AtA_Atz(point_coordinates, z, verbose=verbose, output='counter')
    479436
    480437        # Check sanity
    481         m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)
     438        m = self.mesh.number_of_nodes  # Nbr of basis functions (1/vertex)
    482439        n = self.point_count
    483         if n<m and self.alpha == 0.0:
     440        if n < m and self.alpha == 0.0:
    484441            msg = 'ERROR (least_squares): Too few data points\n'
    485             msg += 'There are only %d data points and alpha == 0. ' %n
    486             msg += 'Need at least %d\n' %m
     442            msg += 'There are only %d data points and alpha == 0. ' % n
     443            msg += 'Need at least %d\n' % m
    487444            msg += 'Alternatively, set smoothing parameter alpha to a small '
    488             msg += 'positive value,\ne.g. 1.0e-3.'
     445            msg += 'positive value,\ne.g. 1.0e-3.'
    489446            raise TooFewPointsError(msg)
    490 
    491447
    492448        self._build_coefficient_matrix_B(verbose)
     
    495451        # test with
    496452        # Not_yet_test_smooth_att_to_mesh_with_excess_verts.
    497         if len(loners)>0:
     453        if len(loners) > 0:
    498454            msg = 'WARNING: (least_squares): \nVertices with no triangles\n'
    499455            msg += 'All vertices should be part of a triangle.\n'
    500456            msg += 'In the future this will be inforced.\n'
    501             msg += 'The following vertices are not part of a triangle;\n'
     457            msg += 'The following vertices are not part of a triangle;\n'
    502458            msg += str(loners)
    503459            log.critical(msg)
     460
    504461            #raise VertsWithNoTrianglesError(msg)
    505        
    506         if verbose:
    507             print 'Fit.fit: Solve Fitting Equation'
    508 
    509         #x0 = num.zeros_like(self.Atz)
    510462        return conjugate_gradient(self.B, self.Atz, self.Atz,
    511                                   imax=2*len(self.Atz), iprint=1)
    512 
    513        
    514     def build_fit_subset(self, point_coordinates, z=None, attribute_name=None,
    515                               verbose=False, output='dot'):
    516         """Fit a smooth surface to given 1d array of data points z.
    517 
    518         The smooth surface is computed at each vertex in the underlying
    519         mesh using the formula given in the module doc string.
    520 
    521         Inputs:
    522         point_coordinates: The co-ordinates of the data points.
    523               List of coordinate pairs [x, y] of
    524               data points or an nx2 numeric array or a Geospatial_data object
    525         z: Single 1d vector or array of data at the point_coordinates.
    526         attribute_name: Used to get the z values from the
    527               geospatial object if no attribute_name is specified,
    528               it's a bit of a lucky dip as to what attributes you get.
    529               If there is only one attribute it will be that one.
    530 
    531         """
    532 
    533         # FIXME(DSG-DSG): Check that the vert and point coords
    534         # have the same zone.
    535         if isinstance(point_coordinates,Geospatial_data):
    536             point_coordinates = point_coordinates.get_data_points( \
    537                 absolute = True)
    538 
    539        
    540         # Convert input to numeric arrays
    541         if z is not None:
    542             z = ensure_numeric(z, num.float)
    543         else:
    544             msg = 'z not specified'
    545             assert isinstance(point_coordinates,Geospatial_data), msg
    546             z = point_coordinates.get_attributes(attribute_name)
    547 
    548         point_coordinates = ensure_numeric(point_coordinates, num.float)
    549         self._build_matrix_AtA_Atz(point_coordinates, z, verbose, output)
    550        
    551         if verbose and output == 'dot':
    552             print '\b.',
    553             sys.stdout.flush()
    554 
    555 
    556 
    557 
    558 ############################################################################
    559 
    560 #class Fit_old(FitInterpolate):
    561 #
    562 #    def __init__(self,
    563 #                 vertex_coordinates=None,
    564 #                 triangles=None,
    565 #                 mesh=None,
    566 #                 mesh_origin=None,
    567 #                 alpha = None,
    568 #                 verbose=False):
    569 #
    570 #
    571 #        """
    572 #        Fit data at points to the vertices of a mesh.
    573 #
    574 #        Inputs:
    575 #
    576 #          vertex_coordinates: List of coordinate pairs [xi, eta] of
    577 #             points constituting a mesh (or an m x 2 numeric array or
    578 #              a geospatial object)
    579 #              Points may appear multiple times
    580 #              (e.g. if vertices have discontinuities)
    581 #
    582 #          triangles: List of 3-tuples (or a numeric array) of
    583 #              integers representing indices of all vertices in the mesh.
    584 #
    585 #          mesh_origin: A geo_reference object or 3-tuples consisting of
    586 #              UTM zone, easting and northing.
    587 #              If specified vertex coordinates are assumed to be
    588 #              relative to their respective origins.
    589 #
    590 #          Note: Don't supply a vertex coords as a geospatial object and
    591 #              a mesh origin, since geospatial has its own mesh origin.
    592 #
    593 #
    594 #        Usage,
    595 #        To use this in a blocking way, call  build_fit_subset, with z info,
    596 #        and then fit, with no point coord, z info.
    597 #
    598 #        """
    599 #        # Initialise variabels
    600 #        if alpha is None:
    601 #            self.alpha = DEFAULT_ALPHA
    602 #        else:
    603 #            self.alpha = alpha
    604 #
    605 #        FitInterpolate.__init__(self,
    606 #                 vertex_coordinates,
    607 #                 triangles,
    608 #                 mesh,
    609 #                 mesh_origin,
    610 #                 verbose)
    611 #
    612 #        m = self.mesh.number_of_nodes # Nbr of basis functions (vertices)
    613 #
    614 #        self.AtA = None
    615 #        self.Atz = None
    616 #
    617 #        self.point_count = 0
    618 #        if self.alpha <> 0:
    619 #            if verbose: log.critical('Fit: Building smoothing matrix')
    620 #            self._build_smoothing_matrix_D(verbose=verbose)
    621 #
    622 #        bd_poly = self.mesh.get_boundary_polygon()
    623 #        self.mesh_boundary_polygon = ensure_numeric(bd_poly)
    624 #
    625 #    def _build_coefficient_matrix_B(self,
    626 #                                  verbose = False):
    627 #        """
    628 #        Build final coefficient matrix
    629 #
    630 #        Precon
    631 #        If alpha is not zero, matrix D has been built
    632 #        Matrix Ata has been built
    633 #        """
    634 #
    635 #        if self.alpha <> 0:
    636 #            #if verbose: log.critical('Building smoothing matrix')
    637 #            #self._build_smoothing_matrix_D()
    638 #            self.B = self.AtA + self.alpha*self.D
    639 #        else:
    640 #            self.B = self.AtA
    641 #
    642 #        # Convert self.B matrix to CSR format for faster matrix vector
    643 #        self.B = Sparse_CSR(self.B)
    644 #
    645 #    def _build_smoothing_matrix_D(self):
    646 #        """Build m x m smoothing matrix, where
    647 #        m is the number of basis functions phi_k (one per vertex)
    648 #
    649 #        The smoothing matrix is defined as
    650 #
    651 #        D = D1 + D2
    652 #
    653 #        where
    654 #
    655 #        [D1]_{k,l} = \int_\Omega
    656 #           \frac{\partial \phi_k}{\partial x}
    657 #           \frac{\partial \phi_l}{\partial x}\,
    658 #           dx dy
    659 #
    660 #        [D2]_{k,l} = \int_\Omega
    661 #           \frac{\partial \phi_k}{\partial y}
    662 #           \frac{\partial \phi_l}{\partial y}\,
    663 #           dx dy
    664 #
    665 #
    666 #        The derivatives \frac{\partial \phi_k}{\partial x},
    667 #        \frac{\partial \phi_k}{\partial x} for a particular triangle
    668 #        are obtained by computing the gradient a_k, b_k for basis function k
    669 #        """
    670 #
    671 #        # FIXME: algorithm might be optimised by computing local 9x9
    672 #        # "element stiffness matrices:
    673 #
    674 #        m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)
    675 #
    676 #        self.D = Sparse(m,m)
    677 #
    678 #        # For each triangle compute contributions to D = D1+D2
    679 #        for i in range(len(self.mesh)):
    680 #
    681 #            # Get area
    682 #            area = self.mesh.areas[i]
    683 #
    684 #            # Get global vertex indices
    685 #            v0 = self.mesh.triangles[i,0]
    686 #            v1 = self.mesh.triangles[i,1]
    687 #            v2 = self.mesh.triangles[i,2]
    688 #
    689 #            # Get the three vertex_points
    690 #            xi0 = self.mesh.get_vertex_coordinate(i, 0)
    691 #            xi1 = self.mesh.get_vertex_coordinate(i, 1)
    692 #            xi2 = self.mesh.get_vertex_coordinate(i, 2)
    693 #
    694 #            # Compute gradients for each vertex
    695 #            a0, b0 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
    696 #                              1, 0, 0)
    697 #
    698 #            a1, b1 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
    699 #                              0, 1, 0)
    700 #
    701 #            a2, b2 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
    702 #                              0, 0, 1)
    703 #
    704 #            # Compute diagonal contributions
    705 #            self.D[v0,v0] += (a0*a0 + b0*b0)*area
    706 #            self.D[v1,v1] += (a1*a1 + b1*b1)*area
    707 #            self.D[v2,v2] += (a2*a2 + b2*b2)*area
    708 #
    709 #            # Compute contributions for basis functions sharing edges
    710 #            e01 = (a0*a1 + b0*b1)*area
    711 #            self.D[v0,v1] += e01
    712 #            self.D[v1,v0] += e01
    713 #
    714 #            e12 = (a1*a2 + b1*b2)*area
    715 #            self.D[v1,v2] += e12
    716 #            self.D[v2,v1] += e12
    717 #
    718 #            e20 = (a2*a0 + b2*b0)*area
    719 #            self.D[v2,v0] += e20
    720 #            self.D[v0,v2] += e20
    721 #
    722 #    def get_D(self):
    723 #        return self.D.todense()
    724 #
    725 #
    726 #
    727 #    def _build_matrix_AtA_Atz(self,
    728 #                              point_coordinates,
    729 #                              z,
    730 #                              verbose = False):
    731 #        """Build:
    732 #        AtA  m x m  interpolation matrix, and,
    733 #        Atz  m x a  interpolation matrix where,
    734 #        m is the number of basis functions phi_k (one per vertex)
    735 #        a is the number of data attributes
    736 #
    737 #        This algorithm uses a quad tree data structure for fast binning of
    738 #        data points.
    739 #
    740 #        If Ata is None, the matrices AtA and Atz are created.
    741 #
    742 #        This function can be called again and again, with sub-sets of
    743 #        the point coordinates.  Call fit to get the results.
    744 #
    745 #        Preconditions
    746 #        z and points are numeric
    747 #        Point_coordindates and mesh vertices have the same origin.
    748 #
    749 #        The number of attributes of the data points does not change
    750 #        """
    751 #
    752 #        # Build n x m interpolation matrix
    753 #        if self.AtA == None:
    754 #            # AtA and Atz need to be initialised.
    755 #            m = self.mesh.number_of_nodes
    756 #            if len(z.shape) > 1:
    757 #                att_num = z.shape[1]
    758 #                self.Atz = num.zeros((m,att_num), num.float)
    759 #            else:
    760 #                att_num = 1
    761 #                self.Atz = num.zeros((m,), num.float)
    762 #            assert z.shape[0] == point_coordinates.shape[0]
    763 #
    764 #            AtA = Sparse(m,m)
    765 #            # The memory damage has been done by now.
    766 #        else:
    767 #             AtA = self.AtA # Did this for speed, did ~nothing
    768 #        self.point_count += point_coordinates.shape[0]
    769 #
    770 #
    771 #        inside_indices = inside_polygon(point_coordinates,
    772 #                                        self.mesh_boundary_polygon,
    773 #                                        closed=True,
    774 #                                        verbose=False) # Suppress output
    775 #
    776 #        n = len(inside_indices)
    777 #
    778 #        # Compute matrix elements for points inside the mesh
    779 #        triangles = self.mesh.triangles # Shorthand
    780 #        for d, i in enumerate(inside_indices):
    781 #            # For each data_coordinate point
    782 #            # if verbose and d%((n+10)/10)==0: log.critical('Doing %d of %d'
    783 #                                                            # %(d, n))
    784 #            x = point_coordinates[i]
    785 #
    786 #            element_found, sigma0, sigma1, sigma2, k = \
    787 #                           self.root.search_fast(x)
    788 #
    789 #            if element_found is True:
    790 #                j0 = triangles[k,0] # Global vertex id for sigma0
    791 #                j1 = triangles[k,1] # Global vertex id for sigma1
    792 #                j2 = triangles[k,2] # Global vertex id for sigma2
    793 #
    794 #                sigmas = {j0:sigma0, j1:sigma1, j2:sigma2}
    795 #                js     = [j0,j1,j2]
    796 #
    797 #                for j in js:
    798 #                    self.Atz[j] +=  sigmas[j]*z[i]
    799 #
    800 #                    for k in js:
    801 #                        AtA[j,k] += sigmas[j]*sigmas[k]
    802 #            else:
    803 #                flag = is_inside_polygon(x,
    804 #                                         self.mesh_boundary_polygon,
    805 #                                         closed=True,
    806 #                                         verbose=False) # Suppress output
    807 #                msg = 'Point (%f, %f) is not inside mesh boundary' % tuple(x)
    808 #                assert flag is True, msg
    809 #
    810 #                # data point has fallen within a hole - so ignore it.
    811 #
    812 #        self.AtA = AtA
    813 #
    814 #
    815 #    def fit(self, point_coordinates_or_filename=None, z=None,
    816 #            verbose=False,
    817 #            point_origin=None,
    818 #            attribute_name=None,
    819 #            max_read_lines=500):
    820 #        """Fit a smooth surface to given 1d array of data points z.
    821 #
    822 #        The smooth surface is computed at each vertex in the underlying
    823 #        mesh using the formula given in the module doc string.
    824 #
    825 #        Inputs:
    826 #        point_coordinates: The co-ordinates of the data points.
    827 #              List of coordinate pairs [x, y] of
    828 #             data points or an nx2 numeric array or a Geospatial_data object
    829 #              or points file filename
    830 #          z: Single 1d vector or array of data at the point_coordinates.
    831 #
    832 #        """
    833 #
    834 #        # Use blocking to load in the point info
    835 #        if isinstance(point_coordinates_or_filename, basestring):
    836 #            msg = "Don't set a point origin when reading from a file"
    837 #            assert point_origin is None, msg
    838 #            filename = point_coordinates_or_filename
    839 #
    840 #            G_data = Geospatial_data(filename,
    841 #                                     max_read_lines=max_read_lines,
    842 #                                     load_file_now=False,
    843 #                                     verbose=verbose)
    844 #
    845 #            for i, geo_block in enumerate(G_data):
    846 #                if verbose is True and 0 == i%200:
    847 #                    # The time this will take
    848 #                    # is dependant on the # of Triangles
    849 #
    850 #                    log.critical('Processing Block %d' % i)
    851 #                    # FIXME (Ole): It would be good to say how many blocks
    852 #                    # there are here. But this is no longer necessary
    853 #                    # for pts files as they are reported in geospatial_data
    854 #                    # I suggest deleting this verbose output and make
    855 #                    # Geospatial_data more informative for txt files.
    856 #                    #
    857 #                    # I still think so (12/12/7, Ole).
    858 #
    859 #
    860 #
    861 #                # Build the array
    862 #
    863 #                points = geo_block.get_data_points(absolute=True)
    864 #                z = geo_block.get_attributes(attribute_name=attribute_name)
    865 #                self.build_fit_subset(points, z, verbose=verbose)
    866 #
    867 #                # FIXME(Ole): I thought this test would make sense here
    868 #                # See test_fitting_example_that_crashed_2 in test_shallow_water_domain.py
    869 #                # Committed 11 March 2009
    870 #                msg = 'Matrix AtA was not built'
    871 #                assert self.AtA is not None, msg
    872 #
    873 #            point_coordinates = None
    874 #        else:
    875 #            point_coordinates =  point_coordinates_or_filename
    876 #
    877 #        if point_coordinates is None:
    878 #            if verbose: log.critical('Warning: no data points in fit')
    879 #            msg = 'No interpolation matrix.'
    880 #            assert self.AtA is not None, msg
    881 #            assert self.Atz is not None
    882 #
    883 #            # FIXME (DSG) - do  a message
    884 #        else:
    885 #            point_coordinates = ensure_absolute(point_coordinates,
    886 #                                                geo_reference=point_origin)
    887 #            # if isinstance(point_coordinates,Geospatial_data) and z is None:
    888 #            # z will come from the geo-ref
    889 #            self.build_fit_subset(point_coordinates, z, verbose)
    890 #
    891 #        # Check sanity
    892 #        m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)
    893 #        n = self.point_count
    894 #        if n<m and self.alpha == 0.0:
    895 #            msg = 'ERROR (least_squares): Too few data points\n'
    896 #            msg += 'There are only %d data points and alpha == 0. ' %n
    897 #           msg += 'Need at least %d\n' %m
    898 #            msg += 'Alternatively, set smoothing parameter alpha to a small '
    899 #           msg += 'positive value,\ne.g. 1.0e-3.'
    900 #            raise TooFewPointsError(msg)
    901 #
    902 #        self._build_coefficient_matrix_B(verbose)
    903 #        loners = self.mesh.get_lone_vertices()
    904 #        # FIXME  - make this as error message.
    905 #        # test with
    906 #        # Not_yet_test_smooth_att_to_mesh_with_excess_verts.
    907 #        if len(loners)>0:
    908 #            msg = 'WARNING: (least_squares): \nVertices with no triangles\n'
    909 #            msg += 'All vertices should be part of a triangle.\n'
    910 #            msg += 'In the future this will be inforced.\n'
    911 #           msg += 'The following vertices are not part of a triangle;\n'
    912 #            msg += str(loners)
    913 #            log.critical(msg)
    914 #            #raise VertsWithNoTrianglesError(msg)
    915 #
    916 #
    917 #        return conjugate_gradient(self.B, self.Atz, self.Atz,
    918 #                                  imax=2*len(self.Atz) )
    919 #
    920 #
    921 #    def build_fit_subset(self, point_coordinates, z=None, attribute_name=None,
    922 #                              verbose=False):
    923 #        """Fit a smooth surface to given 1d array of data points z.
    924 #
    925 #        The smooth surface is computed at each vertex in the underlying
    926 #        mesh using the formula given in the module doc string.
    927 #
    928 #        Inputs:
    929 #        point_coordinates: The co-ordinates of the data points.
    930 #              List of coordinate pairs [x, y] of
    931 #             data points or an nx2 numeric array or a Geospatial_data object
    932 #        z: Single 1d vector or array of data at the point_coordinates.
    933 #        attribute_name: Used to get the z values from the
    934 #              geospatial object if no attribute_name is specified,
    935 #              it's a bit of a lucky dip as to what attributes you get.
    936 #              If there is only one attribute it will be that one.
    937 #
    938 #        """
    939 #
    940 #        # FIXME(DSG-DSG): Check that the vert and point coords
    941 #        # have the same zone.
    942 #        if isinstance(point_coordinates,Geospatial_data):
    943 #            point_coordinates = point_coordinates.get_data_points( \
    944 #                absolute = True)
    945 #
    946 #        # Convert input to numeric arrays
    947 #        if z is not None:
    948 #            z = ensure_numeric(z, num.float)
    949 #        else:
    950 #            msg = 'z not specified'
    951 #            assert isinstance(point_coordinates,Geospatial_data), msg
    952 #            z = point_coordinates.get_attributes(attribute_name)
    953 #
    954 #        point_coordinates = ensure_numeric(point_coordinates, num.float)
    955 #        self._build_matrix_AtA_Atz(point_coordinates, z, verbose)
    956 #
    957 #
    958 
    959 
    960 #===========================================================================
    961 
    962 #class Fit_test(FitInterpolate):
    963 #
    964 #    def __init__(self,
    965 #                 vertex_coordinates=None,
    966 #                 triangles=None,
    967 #                 mesh=None,
    968 #                 mesh_origin=None,
    969 #                 alpha = None,
    970 #                 verbose=False):
    971 #
    972 #
    973 #        """
    974 #        Fit data at points to the vertices of a mesh.
    975 #
    976 #        Inputs:
    977 #
    978 #          vertex_coordinates: List of coordinate pairs [xi, eta] of
    979 #             points constituting a mesh (or an m x 2 numeric array or
    980 #              a geospatial object)
    981 #              Points may appear multiple times
    982 #              (e.g. if vertices have discontinuities)
    983 #
    984 #          triangles: List of 3-tuples (or a numeric array) of
    985 #              integers representing indices of all vertices in the mesh.
    986 #
    987 #          mesh_origin: A geo_reference object or 3-tuples consisting of
    988 #              UTM zone, easting and northing.
    989 #              If specified vertex coordinates are assumed to be
    990 #              relative to their respective origins.
    991 #
    992 #          Note: Don't supply a vertex coords as a geospatial object and
    993 #              a mesh origin, since geospatial has its own mesh origin.
    994 #
    995 #
    996 #        Usage,
    997 #        To use this in a blocking way, call  build_fit_subset, with z info,
    998 #        and then fit, with no point coord, z info.
    999 #
    1000 #        """
    1001 #        # Initialise variabels
    1002 #        if alpha is None:
    1003 #            self.alpha = DEFAULT_ALPHA
    1004 #        else:
    1005 #            self.alpha = alpha
    1006 #
    1007 #        FitInterpolate.__init__(self,
    1008 #                 vertex_coordinates,
    1009 #                 triangles,
    1010 #                 mesh,
    1011 #                 mesh_origin,
    1012 #                 verbose)
    1013 #
    1014 #        m = self.mesh.number_of_nodes # Nbr of basis functions (vertices)
    1015 #
    1016 #        self.AtA = None
    1017 #        self.Atz = None
    1018 #
    1019 #        self.point_count = 0
    1020 #        if self.alpha <> 0:
    1021 #            if verbose: log.critical('Fit: Building smoothing matrix')
    1022 #            self._build_smoothing_matrix_D(verbose=verbose)
    1023 #
    1024 #        if verbose: log.critical('Fit: Get Boundary Polygon')
    1025 #        bd_poly = self.mesh.get_boundary_polygon()
    1026 #        self.mesh_boundary_polygon = ensure_numeric(bd_poly)
    1027 #
    1028 #    def _build_coefficient_matrix_B(self,
    1029 #                                  verbose = False):
    1030 #        """
    1031 #        Build final coefficient matrix
    1032 #
    1033 #        Precon
    1034 #        If alpha is not zero, matrix D has been built
    1035 #        Matrix Ata has been built
    1036 #        """
    1037 #
    1038 #        if verbose:
    1039 #            print 'Fit: Build Coefficient Matrix B'
    1040 #
    1041 #
    1042 #        if self.alpha <> 0:
    1043 #            #if verbose: log.critical('Building smoothing matrix')
    1044 #            #self._build_smoothing_matrix_D()
    1045 #            # FIXME SR: This is quite time consuming.
    1046 #            # As AtA and D have same structure it should be possible
    1047 #            # to speed this up.
    1048 #            self.B = self.AtA + self.alpha*self.D
    1049 #        else:
    1050 #            self.B = self.AtA
    1051 #
    1052 #
    1053 #        if verbose:
    1054 #            print 'Fit: Convert Coefficient Matrix B to Sparse_CSR'
    1055 #
    1056 #        # Convert self.B matrix to CSR format for faster matrix vector
    1057 #        self.B = Sparse_CSR(self.B)
    1058 #
    1059 #    def _build_coefficient_matrix_B_old(self,
    1060 #                                  verbose = False):
    1061 #        """
    1062 #        Build final coefficient matrix
    1063 #
    1064 #        Precon
    1065 #        If alpha is not zero, matrix D has been built
    1066 #        Matrix Ata has been built
    1067 #        """
    1068 #
    1069 #        if self.alpha <> 0:
    1070 #            #if verbose: log.critical('Building smoothing matrix')
    1071 #            #self._build_smoothing_matrix_D()
    1072 #            self.B = self.AtA + self.alpha*self.D
    1073 #        else:
    1074 #            self.B = self.AtA
    1075 #
    1076 #        # Convert self.B matrix to CSR format for faster matrix vector
    1077 #        self.B = Sparse_CSR(self.B)
    1078 #
    1079 #    def _build_smoothing_matrix_D(self, verbose=False):
    1080 #        """Build m x m smoothing matrix, where
    1081 #        m is the number of basis functions phi_k (one per vertex)
    1082 #
    1083 #        The smoothing matrix is defined as
    1084 #
    1085 #        D = D1 + D2
    1086 #
    1087 #        where
    1088 #
    1089 #        [D1]_{k,l} = \int_\Omega
    1090 #           \frac{\partial \phi_k}{\partial x}
    1091 #           \frac{\partial \phi_l}{\partial x}\,
    1092 #           dx dy
    1093 #
    1094 #        [D2]_{k,l} = \int_\Omega
    1095 #           \frac{\partial \phi_k}{\partial y}
    1096 #           \frac{\partial \phi_l}{\partial y}\,
    1097 #           dx dy
    1098 #
    1099 #
    1100 #        The derivatives \frac{\partial \phi_k}{\partial x},
    1101 #        \frac{\partial \phi_k}{\partial x} for a particular triangle
    1102 #        are obtained by computing the gradient a_k, b_k for basis function k
    1103 #        """
    1104 #
    1105 #        # FIXME: algorithm might be optimised by computing local 9x9
    1106 #        # "element stiffness matrices:
    1107 #
    1108 #        m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)
    1109 #
    1110 #        self.D = Sparse(m,m)
    1111 #
    1112 #        if verbose :
    1113 #            print '['+60*' '+']',
    1114 #            sys.stdout.flush()
    1115 #
    1116 #        N = len(self.mesh)
    1117 #        M = N/60
    1118 #
    1119 #        # For each triangle compute contributions to D = D1+D2
    1120 #        for i in xrange(N):
    1121 #
    1122 #            if verbose and i%M == 0 :
    1123 #                #restart_line()
    1124 #                print '\r['+(i/M)*'.'+(60-(i/M))*' ' +']',
    1125 #                sys.stdout.flush()
    1126 #
    1127 #            # Get area
    1128 #            area = self.mesh.areas[i]
    1129 #
    1130 #            # Get global vertex indices
    1131 #            v0 = self.mesh.triangles[i,0]
    1132 #            v1 = self.mesh.triangles[i,1]
    1133 #            v2 = self.mesh.triangles[i,2]
    1134 #
    1135 #            # Get the three vertex_points
    1136 #            xi0 = self.mesh.get_vertex_coordinate(i, 0)
    1137 #            xi1 = self.mesh.get_vertex_coordinate(i, 1)
    1138 #            xi2 = self.mesh.get_vertex_coordinate(i, 2)
    1139 #
    1140 #            # Compute gradients for each vertex
    1141 #            a0, b0 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
    1142 #                              1, 0, 0)
    1143 #
    1144 #            a1, b1 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
    1145 #                              0, 1, 0)
    1146 #
    1147 #            a2, b2 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
    1148 #                              0, 0, 1)
    1149 #
    1150 #            # Compute diagonal contributions
    1151 #            self.D[v0,v0] += (a0*a0 + b0*b0)*area
    1152 #            self.D[v1,v1] += (a1*a1 + b1*b1)*area
    1153 #            self.D[v2,v2] += (a2*a2 + b2*b2)*area
    1154 #
    1155 #            # Compute contributions for basis functions sharing edges
    1156 #            e01 = (a0*a1 + b0*b1)*area
    1157 #            self.D[v0,v1] += e01
    1158 #            self.D[v1,v0] += e01
    1159 #
    1160 #            e12 = (a1*a2 + b1*b2)*area
    1161 #            self.D[v1,v2] += e12
    1162 #            self.D[v2,v1] += e12
    1163 #
    1164 #            e20 = (a2*a0 + b2*b0)*area
    1165 #            self.D[v2,v0] += e20
    1166 #            self.D[v0,v2] += e20
    1167 #
    1168 #        if verbose:
    1169 #            print ''
    1170 #
    1171 #
    1172 #    def _build_smoothing_matrix_D_old(self):
    1173 #        """Build m x m smoothing matrix, where
    1174 #        m is the number of basis functions phi_k (one per vertex)
    1175 #
    1176 #        The smoothing matrix is defined as
    1177 #
    1178 #        D = D1 + D2
    1179 #
    1180 #        where
    1181 #
    1182 #        [D1]_{k,l} = \int_\Omega
    1183 #           \frac{\partial \phi_k}{\partial x}
    1184 #           \frac{\partial \phi_l}{\partial x}\,
    1185 #           dx dy
    1186 #
    1187 #        [D2]_{k,l} = \int_\Omega
    1188 #           \frac{\partial \phi_k}{\partial y}
    1189 #           \frac{\partial \phi_l}{\partial y}\,
    1190 #           dx dy
    1191 #
    1192 #
    1193 #        The derivatives \frac{\partial \phi_k}{\partial x},
    1194 #        \frac{\partial \phi_k}{\partial x} for a particular triangle
    1195 #        are obtained by computing the gradient a_k, b_k for basis function k
    1196 #        """
    1197 #
    1198 #        # FIXME: algorithm might be optimised by computing local 9x9
    1199 #        # "element stiffness matrices:
    1200 #
    1201 #        m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)
    1202 #
    1203 #        self.D = Sparse(m,m)
    1204 #
    1205 #        # For each triangle compute contributions to D = D1+D2
    1206 #        for i in range(len(self.mesh)):
    1207 #
    1208 #            # Get area
    1209 #            area = self.mesh.areas[i]
    1210 #
    1211 #            # Get global vertex indices
    1212 #            v0 = self.mesh.triangles[i,0]
    1213 #            v1 = self.mesh.triangles[i,1]
    1214 #            v2 = self.mesh.triangles[i,2]
    1215 #
    1216 #            # Get the three vertex_points
    1217 #            xi0 = self.mesh.get_vertex_coordinate(i, 0)
    1218 #            xi1 = self.mesh.get_vertex_coordinate(i, 1)
    1219 #            xi2 = self.mesh.get_vertex_coordinate(i, 2)
    1220 #
    1221 #            # Compute gradients for each vertex
    1222 #            a0, b0 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
    1223 #                              1, 0, 0)
    1224 #
    1225 #            a1, b1 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
    1226 #                              0, 1, 0)
    1227 #
    1228 #            a2, b2 = gradient(xi0[0], xi0[1], xi1[0], xi1[1], xi2[0], xi2[1],
    1229 #                              0, 0, 1)
    1230 #
    1231 #            # Compute diagonal contributions
    1232 #            self.D[v0,v0] += (a0*a0 + b0*b0)*area
    1233 #            self.D[v1,v1] += (a1*a1 + b1*b1)*area
    1234 #            self.D[v2,v2] += (a2*a2 + b2*b2)*area
    1235 #
    1236 #            # Compute contributions for basis functions sharing edges
    1237 #            e01 = (a0*a1 + b0*b1)*area
    1238 #            self.D[v0,v1] += e01
    1239 #            self.D[v1,v0] += e01
    1240 #
    1241 #            e12 = (a1*a2 + b1*b2)*area
    1242 #            self.D[v1,v2] += e12
    1243 #            self.D[v2,v1] += e12
    1244 #
    1245 #            e20 = (a2*a0 + b2*b0)*area
    1246 #            self.D[v2,v0] += e20
    1247 #            self.D[v0,v2] += e20
    1248 #
    1249 #    def get_D(self):
    1250 #        return self.D.todense()
    1251 #
    1252 #
    1253 #    def _build_matrix_AtA_Atz(self,
    1254 #                              point_coordinates,
    1255 #                              z,
    1256 #                              verbose = False):
    1257 #        """Build:
    1258 #        AtA  m x m  interpolation matrix, and,
    1259 #        Atz  m x a  interpolation matrix where,
    1260 #        m is the number of basis functions phi_k (one per vertex)
    1261 #        a is the number of data attributes
    1262 #
    1263 #        This algorithm uses a quad tree data structure for fast binning of
    1264 #        data points.
    1265 #
    1266 #        If Ata is None, the matrices AtA and Atz are created.
    1267 #
    1268 #        This function can be called again and again, with sub-sets of
    1269 #        the point coordinates.  Call fit to get the results.
    1270 #
    1271 #        Preconditions
    1272 #        z and points are numeric
    1273 #        Point_coordindates and mesh vertices have the same origin.
    1274 #
    1275 #        The number of attributes of the data points does not change
    1276 #        """
    1277 #
    1278 #
    1279 #        #if verbose:
    1280 #        #    print 'Fit: Build Matrix AtA Atz'
    1281 #
    1282 #        # Build n x m interpolation matrix
    1283 #        if self.AtA == None:
    1284 #            # AtA and Atz need to be initialised.
    1285 #            m = self.mesh.number_of_nodes
    1286 #            if len(z.shape) > 1:
    1287 #                att_num = z.shape[1]
    1288 #                self.Atz = num.zeros((m,att_num), num.float)
    1289 #            else:
    1290 #                att_num = 1
    1291 #                self.Atz = num.zeros((m,), num.float)
    1292 #            assert z.shape[0] == point_coordinates.shape[0]
    1293 #
    1294 #            AtA = Sparse(m,m)
    1295 #            # The memory damage has been done by now.
    1296 #        else:
    1297 #             AtA = self.AtA # Did this for speed, did ~nothing
    1298 #        self.point_count += point_coordinates.shape[0]
    1299 #
    1300 #
    1301 #        inside_indices = inside_polygon(point_coordinates,
    1302 #                                        self.mesh_boundary_polygon,
    1303 #                                        closed=True,
    1304 #                                        verbose=False) # Suppress output
    1305 #
    1306 #        n = len(inside_indices)
    1307 #
    1308 #        # Compute matrix elements for points inside the mesh
    1309 #        triangles = self.mesh.triangles # Shorthand
    1310 #
    1311 #
    1312 #        #if verbose :
    1313 #        #    print '['+60*' '+']',
    1314 #        #    sys.stdout.flush()
    1315 #
    1316 #        m = max(n/60,1)
    1317 #
    1318 #
    1319 #        for d in xrange(n):
    1320 #            i = inside_indices[d]
    1321 #
    1322 ##        for d, i in enumerate(inside_indices):
    1323 ##            # For each data_coordinate point
    1324 ##            # if verbose and d%((n+10)/10)==0: log.critical('Doing %d of %d'
    1325 ##                                                            # %(d, n))
    1326 ##            x = point_coordinates[i]
    1327 #
    1328 #            # For each data_coordinate point
    1329 #            # if verbose and d%((n+10)/10)==0: log.critical('Doing %d of %d'
    1330 #                                                            # %(d, n))
    1331 #
    1332 #
    1333 #            #if verbose and i%m == 0 :
    1334 #            #    print '\r['+(i/m)*'.'+(60-(i/m))*' '+']',
    1335 #            #    sys.stdout.flush()
    1336 #
    1337 #
    1338 #            x = point_coordinates[i]
    1339 #
    1340 #            element_found, sigma0, sigma1, sigma2, k = \
    1341 #                           self.root.search_fast(x)
    1342 #
    1343 #            if element_found is True:
    1344 #                j0 = triangles[k,0] # Global vertex id for sigma0
    1345 #                j1 = triangles[k,1] # Global vertex id for sigma1
    1346 #                j2 = triangles[k,2] # Global vertex id for sigma2
    1347 #
    1348 #                sigmas = {j0:sigma0, j1:sigma1, j2:sigma2}
    1349 #                js     = [j0,j1,j2]
    1350 #
    1351 #                for j in js:
    1352 #                    self.Atz[j] +=  sigmas[j]*z[i]
    1353 #
    1354 #                    for k in js:
    1355 #                        AtA[j,k] += sigmas[j]*sigmas[k]
    1356 #            else:
    1357 #                flag = is_inside_polygon(x,
    1358 #                                         self.mesh_boundary_polygon,
    1359 #                                         closed=True,
    1360 #                                         verbose=False) # Suppress output
    1361 #                msg = 'Point (%f, %f) is not inside mesh boundary' % tuple(x)
    1362 #                assert flag is True, msg
    1363 #
    1364 #                # data point has fallen within a hole - so ignore it.
    1365 #
    1366 #
    1367 #        #if verbose:
    1368 #        #    print ''
    1369 #
    1370 #        self.AtA = AtA
    1371 #
    1372 #
    1373 #
    1374 #    def _build_matrix_AtA_Atz_old(self,
    1375 #                              point_coordinates,
    1376 #                              z,
    1377 #                              verbose = False):
    1378 #        """Build:
    1379 #        AtA  m x m  interpolation matrix, and,
    1380 #        Atz  m x a  interpolation matrix where,
    1381 #        m is the number of basis functions phi_k (one per vertex)
    1382 #        a is the number of data attributes
    1383 #
    1384 #        This algorithm uses a quad tree data structure for fast binning of
    1385 #        data points.
    1386 #
    1387 #        If Ata is None, the matrices AtA and Atz are created.
    1388 #
    1389 #        This function can be called again and again, with sub-sets of
    1390 #        the point coordinates.  Call fit to get the results.
    1391 #
    1392 #        Preconditions
    1393 #        z and points are numeric
    1394 #        Point_coordindates and mesh vertices have the same origin.
    1395 #
    1396 #        The number of attributes of the data points does not change
    1397 #        """
    1398 #
    1399 #        # Build n x m interpolation matrix
    1400 #        if self.AtA == None:
    1401 #            # AtA and Atz need to be initialised.
    1402 #            m = self.mesh.number_of_nodes
    1403 #            if len(z.shape) > 1:
    1404 #                att_num = z.shape[1]
    1405 #                self.Atz = num.zeros((m,att_num), num.float)
    1406 #            else:
    1407 #                att_num = 1
    1408 #                self.Atz = num.zeros((m,), num.float)
    1409 #            assert z.shape[0] == point_coordinates.shape[0]
    1410 #
    1411 #            AtA = Sparse(m,m)
    1412 #            # The memory damage has been done by now.
    1413 #        else:
    1414 #             AtA = self.AtA # Did this for speed, did ~nothing
    1415 #        self.point_count += point_coordinates.shape[0]
    1416 #
    1417 #
    1418 #        inside_indices = inside_polygon(point_coordinates,
    1419 #                                        self.mesh_boundary_polygon,
    1420 #                                        closed=True,
    1421 #                                        verbose=False) # Suppress output
    1422 #
    1423 #        n = len(inside_indices)
    1424 #
    1425 #        # Compute matrix elements for points inside the mesh
    1426 #        triangles = self.mesh.triangles # Shorthand
    1427 #        for d, i in enumerate(inside_indices):
    1428 #            # For each data_coordinate point
    1429 #            # if verbose and d%((n+10)/10)==0: log.critical('Doing %d of %d'
    1430 #                                                            # %(d, n))
    1431 #            x = point_coordinates[i]
    1432 #
    1433 #            element_found, sigma0, sigma1, sigma2, k = \
    1434 #                           self.root.search_fast(x)
    1435 #
    1436 #            if element_found is True:
    1437 #                j0 = triangles[k,0] # Global vertex id for sigma0
    1438 #                j1 = triangles[k,1] # Global vertex id for sigma1
    1439 #                j2 = triangles[k,2] # Global vertex id for sigma2
    1440 #
    1441 #                sigmas = {j0:sigma0, j1:sigma1, j2:sigma2}
    1442 #                js     = [j0,j1,j2]
    1443 #
    1444 #                for j in js:
    1445 #                    self.Atz[j] +=  sigmas[j]*z[i]
    1446 #
    1447 #                    for k in js:
    1448 #                        AtA[j,k] += sigmas[j]*sigmas[k]
    1449 #            else:
    1450 #                flag = is_inside_polygon(x,
    1451 #                                         self.mesh_boundary_polygon,
    1452 #                                         closed=True,
    1453 #                                         verbose=False) # Suppress output
    1454 #                msg = 'Point (%f, %f) is not inside mesh boundary' % tuple(x)
    1455 #                assert flag is True, msg
    1456 #
    1457 #                # data point has fallen within a hole - so ignore it.
    1458 #
    1459 #        self.AtA = AtA
    1460 #
    1461 #    def fit(self, point_coordinates_or_filename=None, z=None,
    1462 #            verbose=False,
    1463 #            point_origin=None,
    1464 #            attribute_name=None,
    1465 #            max_read_lines=None):
    1466 #        """Fit a smooth surface to given 1d array of data points z.
    1467 #
    1468 #        The smooth surface is computed at each vertex in the underlying
    1469 #        mesh using the formula given in the module doc string.
    1470 #
    1471 #        Inputs:
    1472 #        point_coordinates: The co-ordinates of the data points.
    1473 #              List of coordinate pairs [x, y] of
    1474 #             data points or an nx2 numeric array or a Geospatial_data object
    1475 #              or points file filename
    1476 #          z: Single 1d vector or array of data at the point_coordinates.
    1477 #
    1478 #        """
    1479 #
    1480 #
    1481 #        if verbose:
    1482 #            print 'Fit.fit: Initializing'
    1483 #
    1484 #        # Use blocking to load in the point info
    1485 #        if isinstance(point_coordinates_or_filename, basestring):
    1486 #            msg = "Don't set a point origin when reading from a file"
    1487 #            assert point_origin is None, msg
    1488 #            filename = point_coordinates_or_filename
    1489 #
    1490 #            G_data = Geospatial_data(filename,
    1491 #                                     max_read_lines=max_read_lines,
    1492 #                                     load_file_now=False,
    1493 #                                     verbose=verbose)
    1494 #
    1495 #
    1496 #            for i, geo_block in enumerate(G_data):
    1497 #                if verbose is True and 0 == i%200:
    1498 #                    # The time this will take
    1499 #                    # is dependant on the # of Triangles
    1500 #
    1501 #                    log.critical('Processing Block %d' % i)
    1502 #                    # FIXME (Ole): It would be good to say how many blocks
    1503 #                    # there are here. But this is no longer necessary
    1504 #                    # for pts files as they are reported in geospatial_data
    1505 #                    # I suggest deleting this verbose output and make
    1506 #                    # Geospatial_data more informative for txt files.
    1507 #                    #
    1508 #                    # I still think so (12/12/7, Ole).
    1509 #
    1510 #
    1511 #
    1512 #                # Build the array
    1513 #
    1514 #                points = geo_block.get_data_points(absolute=True)
    1515 #                z = geo_block.get_attributes(attribute_name=attribute_name)
    1516 #                self.build_fit_subset(points, z, attribute_name, verbose)
    1517 #
    1518 #                # FIXME(Ole): I thought this test would make sense here
    1519 #                # See test_fitting_example_that_crashed_2 in test_shallow_water_domain.py
    1520 #                # Committed 11 March 2009
    1521 #                msg = 'Matrix AtA was not built'
    1522 #                assert self.AtA is not None, msg
    1523 #
    1524 #            point_coordinates = None
    1525 #        else:
    1526 #            point_coordinates =  point_coordinates_or_filename
    1527 #
    1528 #
    1529 #
    1530 #        if point_coordinates is None:
    1531 #            if verbose: log.critical('Fit.fit: Warning: no data points in fit')
    1532 #            msg = 'No interpolation matrix.'
    1533 #            assert self.AtA is not None, msg
    1534 #            assert self.Atz is not None
    1535 #
    1536 #            # FIXME (DSG) - do  a message
    1537 #        else:
    1538 #            point_coordinates = ensure_absolute(point_coordinates,
    1539 #                                                geo_reference=point_origin)
    1540 #            # if isinstance(point_coordinates,Geospatial_data) and z is None:
    1541 #            # z will come from the geo-ref
    1542 #            self.build_fit_subset(point_coordinates, z, verbose=verbose)
    1543 #
    1544 #
    1545 #
    1546 #
    1547 #
    1548 #        # Check sanity
    1549 #        m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)
    1550 #        n = self.point_count
    1551 #        if n<m and self.alpha == 0.0:
    1552 #            msg = 'ERROR (least_squares): Too few data points\n'
    1553 #            msg += 'There are only %d data points and alpha == 0. ' %n
    1554 #           msg += 'Need at least %d\n' %m
    1555 #            msg += 'Alternatively, set smoothing parameter alpha to a small '
    1556 #           msg += 'positive value,\ne.g. 1.0e-3.'
    1557 #            raise TooFewPointsError(msg)
    1558 #
    1559 #
    1560 #        self._build_coefficient_matrix_B(verbose)
    1561 #        loners = self.mesh.get_lone_vertices()
    1562 #        # FIXME  - make this as error message.
    1563 #        # test with
    1564 #        # Not_yet_test_smooth_att_to_mesh_with_excess_verts.
    1565 #        if len(loners)>0:
    1566 #            msg = 'WARNING: (least_squares): \nVertices with no triangles\n'
    1567 #            msg += 'All vertices should be part of a triangle.\n'
    1568 #            msg += 'In the future this will be inforced.\n'
    1569 #           msg += 'The following vertices are not part of a triangle;\n'
    1570 #            msg += str(loners)
    1571 #            log.critical(msg)
    1572 #            #raise VertsWithNoTrianglesError(msg)
    1573 #
    1574 #        if verbose:
    1575 #            print 'Fit.fit: Solve Fitting Equation'
    1576 #
    1577 #        x0 = num.zeros_like(self.Atz)
    1578 #        return conjugate_gradient(self.B, self.Atz, x0,
    1579 #                                  imax=2*len(self.Atz), iprint=1 )
    1580 #
    1581 #
    1582 #    def fit_old(self, point_coordinates_or_filename=None, z=None,
    1583 #            verbose=False,
    1584 #            point_origin=None,
    1585 #            attribute_name=None,
    1586 #            max_read_lines=500):
    1587 #        """Fit a smooth surface to given 1d array of data points z.
    1588 #
    1589 #        The smooth surface is computed at each vertex in the underlying
    1590 #        mesh using the formula given in the module doc string.
    1591 #
    1592 #        Inputs:
    1593 #        point_coordinates: The co-ordinates of the data points.
    1594 #              List of coordinate pairs [x, y] of
    1595 #             data points or an nx2 numeric array or a Geospatial_data object
    1596 #              or points file filename
    1597 #          z: Single 1d vector or array of data at the point_coordinates.
    1598 #
    1599 #        """
    1600 #
    1601 #        if verbose: log.critical('Fit.fit: Start')
    1602 #
    1603 #        # Use blocking to load in the point info
    1604 #        if isinstance(point_coordinates_or_filename, basestring):
    1605 #            msg = "Don't set a point origin when reading from a file"
    1606 #            assert point_origin is None, msg
    1607 #            filename = point_coordinates_or_filename
    1608 #
    1609 #            G_data = Geospatial_data(filename,
    1610 #                                     max_read_lines=max_read_lines,
    1611 #                                     load_file_now=False,
    1612 #                                     verbose=verbose)
    1613 #
    1614 #            for i, geo_block in enumerate(G_data):
    1615 #                if verbose is True and 0 == i%200:
    1616 #                    # The time this will take
    1617 #                    # is dependant on the # of Triangles
    1618 #
    1619 #                    log.critical('Processing Block %d' % i)
    1620 #                    # FIXME (Ole): It would be good to say how many blocks
    1621 #                    # there are here. But this is no longer necessary
    1622 #                    # for pts files as they are reported in geospatial_data
    1623 #                    # I suggest deleting this verbose output and make
    1624 #                    # Geospatial_data more informative for txt files.
    1625 #                    #
    1626 #                    # I still think so (12/12/7, Ole).
    1627 #
    1628 #
    1629 #
    1630 #                # Build the array
    1631 #
    1632 #                points = geo_block.get_data_points(absolute=True)
    1633 #                z = geo_block.get_attributes(attribute_name=attribute_name)
    1634 #                self.build_fit_subset(points, z, verbose=verbose)
    1635 #
    1636 #                # FIXME(Ole): I thought this test would make sense here
    1637 #                # See test_fitting_example_that_crashed_2 in test_shallow_water_domain.py
    1638 #                # Committed 11 March 2009
    1639 #                msg = 'Matrix AtA was not built'
    1640 #                assert self.AtA is not None, msg
    1641 #
    1642 #            point_coordinates = None
    1643 #        else:
    1644 #            point_coordinates =  point_coordinates_or_filename
    1645 #
    1646 #        if point_coordinates is None:
    1647 #            if verbose: log.critical('Warning: no data points in fit')
    1648 #            msg = 'No interpolation matrix.'
    1649 #            assert self.AtA is not None, msg
    1650 #            assert self.Atz is not None
    1651 #
    1652 #            # FIXME (DSG) - do  a message
    1653 #        else:
    1654 #            point_coordinates = ensure_absolute(point_coordinates,
    1655 #                                                geo_reference=point_origin)
    1656 #            # if isinstance(point_coordinates,Geospatial_data) and z is None:
    1657 #            # z will come from the geo-ref
    1658 #            self.build_fit_subset(point_coordinates, z, verbose)
    1659 #
    1660 #        # Check sanity
    1661 #        m = self.mesh.number_of_nodes # Nbr of basis functions (1/vertex)
    1662 #        n = self.point_count
    1663 #        if n<m and self.alpha == 0.0:
    1664 #            msg = 'ERROR (least_squares): Too few data points\n'
    1665 #            msg += 'There are only %d data points and alpha == 0. ' %n
    1666 #           msg += 'Need at least %d\n' %m
    1667 #            msg += 'Alternatively, set smoothing parameter alpha to a small '
    1668 #           msg += 'positive value,\ne.g. 1.0e-3.'
    1669 #            raise TooFewPointsError(msg)
    1670 #
    1671 #        self._build_coefficient_matrix_B(verbose)
    1672 #        loners = self.mesh.get_lone_vertices()
    1673 #        # FIXME  - make this as error message.
    1674 #        # test with
    1675 #        # Not_yet_test_smooth_att_to_mesh_with_excess_verts.
    1676 #        if len(loners)>0:
    1677 #            msg = 'WARNING: (least_squares): \nVertices with no triangles\n'
    1678 #            msg += 'All vertices should be part of a triangle.\n'
    1679 #            msg += 'In the future this will be inforced.\n'
    1680 #           msg += 'The following vertices are not part of a triangle;\n'
    1681 #            msg += str(loners)
    1682 #            log.critical(msg)
    1683 #            #raise VertsWithNoTrianglesError(msg)
    1684 #
    1685 #        if verbose: log.critical('Fit.fit: Conjugate Gradient')
    1686 #        return conjugate_gradient(self.B, self.Atz, self.Atz,
    1687 #                                  imax=2*len(self.Atz), iprint=1 )
    1688 #
    1689 #    def build_fit_subset(self, point_coordinates, z=None, attribute_name=None,
    1690 #                              verbose=False):
    1691 #        """Fit a smooth surface to given 1d array of data points z.
    1692 #
    1693 #        The smooth surface is computed at each vertex in the underlying
    1694 #        mesh using the formula given in the module doc string.
    1695 #
    1696 #        Inputs:
    1697 #        point_coordinates: The co-ordinates of the data points.
    1698 #              List of coordinate pairs [x, y] of
    1699 #             data points or an nx2 numeric array or a Geospatial_data object
    1700 #        z: Single 1d vector or array of data at the point_coordinates.
    1701 #        attribute_name: Used to get the z values from the
    1702 #              geospatial object if no attribute_name is specified,
    1703 #              it's a bit of a lucky dip as to what attributes you get.
    1704 #              If there is only one attribute it will be that one.
    1705 #
    1706 #        """
    1707 #
    1708 #        # FIXME(DSG-DSG): Check that the vert and point coords
    1709 #        # have the same zone.
    1710 #        if isinstance(point_coordinates,Geospatial_data):
    1711 #            point_coordinates = point_coordinates.get_data_points( \
    1712 #                absolute = True)
    1713 #
    1714 #        # Convert input to numeric arrays
    1715 #        if z is not None:
    1716 #            z = ensure_numeric(z, num.float)
    1717 #        else:
    1718 #            msg = 'z not specified'
    1719 #            assert isinstance(point_coordinates,Geospatial_data), msg
    1720 #            z = point_coordinates.get_attributes(attribute_name)
    1721 #
    1722 #        point_coordinates = ensure_numeric(point_coordinates, num.float)
    1723 #        self._build_matrix_AtA_Atz(point_coordinates, z, verbose)
    1724 #
    1725 #
    1726 #
    1727 #    def build_fit_subset_old(self, point_coordinates, z=None, attribute_name=None,
    1728 #                              verbose=False):
    1729 #        """Fit a smooth surface to given 1d array of data points z.
    1730 #
    1731 #        The smooth surface is computed at each vertex in the underlying
    1732 #        mesh using the formula given in the module doc string.
    1733 #
    1734 #        Inputs:
    1735 #        point_coordinates: The co-ordinates of the data points.
    1736 #              List of coordinate pairs [x, y] of
    1737 #             data points or an nx2 numeric array or a Geospatial_data object
    1738 #        z: Single 1d vector or array of data at the point_coordinates.
    1739 #        attribute_name: Used to get the z values from the
    1740 #              geospatial object if no attribute_name is specified,
    1741 #              it's a bit of a lucky dip as to what attributes you get.
    1742 #              If there is only one attribute it will be that one.
    1743 #
    1744 #        """
    1745 #
    1746 #        # FIXME(DSG-DSG): Check that the vert and point coords
    1747 #        # have the same zone.
    1748 #        if isinstance(point_coordinates,Geospatial_data):
    1749 #            point_coordinates = point_coordinates.get_data_points( \
    1750 #                absolute = True)
    1751 #
    1752 #        # Convert input to numeric arrays
    1753 #        if z is not None:
    1754 #            z = ensure_numeric(z, num.float)
    1755 #        else:
    1756 #            msg = 'z not specified'
    1757 #            assert isinstance(point_coordinates,Geospatial_data), msg
    1758 #            z = point_coordinates.get_attributes(attribute_name)
    1759 #
    1760 #        point_coordinates = ensure_numeric(point_coordinates, num.float)
    1761 #        self._build_matrix_AtA_Atz(point_coordinates, z, verbose)
    1762 #
    1763 
    1764 #===============================================================================
    1765 
    1766 
    1767 def fit_to_mesh(point_coordinates, # this can also be a points file name
     463                                  imax=2 * len(self.Atz)+1000, use_c_cg=self.use_c_cg,
     464                                  precon=self.cg_precon)
     465
     466
     467#poin_coordiantes can also be a points file name
     468
     469def fit_to_mesh(point_coordinates,
    1768470                vertex_coordinates=None,
    1769471                triangles=None,
     
    1776478                max_read_lines=None,
    1777479                attribute_name=None,
    1778                 use_cache=False):
     480                use_cache=False,
     481                cg_precon='None',
     482                use_c_cg=False):
    1779483    """Wrapper around internal function _fit_to_mesh for use with caching.
    1780    
    1781484    """
    1782    
     485
    1783486    args = (point_coordinates, )
    1784487    kwargs = {'vertex_coordinates': vertex_coordinates,
     
    1791494              'data_origin': data_origin,
    1792495              'max_read_lines': max_read_lines,
    1793               'attribute_name': attribute_name
     496              'attribute_name': attribute_name,
     497              'cg_precon': cg_precon,
     498              'use_c_cg': use_c_cg
    1794499              }
    1795500
    1796501    if use_cache is True:
    1797502        if isinstance(point_coordinates, basestring):
    1798             # We assume that point_coordinates is the name of a .csv/.txt/.pts
    1799             # file which must be passed onto caching as a dependency 
     503            # We assume that point_coordinates is the name of a .csv/.txt
     504            # file which must be passed onto caching as a dependency
    1800505            # (in case it has changed on disk)
    1801506            dep = [point_coordinates]
     
    1803508            dep = None
    1804509
    1805            
    1806         #from caching import myhash
    1807         #import copy
    1808         #print args
    1809         #print kwargs
    1810         #print 'hashing:'
    1811         #print 'args', myhash( (args, kwargs) )
    1812         #print 'again', myhash( copy.deepcopy( (args, kwargs)) )       
    1813        
    1814         #print 'mesh hash', myhash( kwargs['mesh'] )       
    1815        
    1816         #print '-------------------------'
    1817         #print 'vertices hash', myhash( kwargs['mesh'].nodes )
    1818         #print 'triangles hash', myhash( kwargs['mesh'].triangles )
    1819         #print '-------------------------'       
    1820        
    1821         #for key in mesh.__dict__:
    1822         #    print key, myhash(mesh.__dict__[key])
    1823        
    1824         #for key in mesh.quantities.keys():
    1825         #    print key, myhash(mesh.quantities[key])
    1826        
    1827         #import sys; sys.exit()
    1828            
    1829510        return cache(_fit_to_mesh,
    1830511                     args, kwargs,
     
    1833514                     dependencies=dep)
    1834515    else:
    1835         return apply(_fit_to_mesh,
     516        res = apply(_fit_to_mesh,
    1836517                     args, kwargs)
    1837 
    1838 def _fit_to_mesh(point_coordinates, # this can also be a points file name
     518        "print intep should go out of range"
     519        return res
     520
     521
     522# point_coordinates can also be a points file name
     523
     524def _fit_to_mesh(point_coordinates,
    1839525                 vertex_coordinates=None,
    1840526                 triangles=None,
     
    1846532                 data_origin=None,
    1847533                 max_read_lines=None,
    1848                  attribute_name=None):
     534                 attribute_name=None,
     535                 cg_precon='None',
     536                 use_c_cg=False):
    1849537    """
    1850538    Fit a smooth surface to a triangulation,
     
    1854542        Inputs:
    1855543        vertex_coordinates: List of coordinate pairs [xi, eta] of
    1856               points constituting a mesh (or an m x 2 numeric array or
     544        points constituting a mesh (or an m x 2 numeric array or
    1857545              a geospatial object)
    1858546              Points may appear multiple times
     
    1881569        # FIXME(DSG): Throw errors if triangles or vertex_coordinates
    1882570        # are None
    1883            
     571
    1884572        #Convert input to numeric arrays
    1885573        triangles = ensure_numeric(triangles, num.int)
     
    1887575                                             geo_reference = mesh_origin)
    1888576
    1889         if verbose: log.critical('FitInterpolate: Building mesh')
    1890         mesh = Mesh(vertex_coordinates, triangles, verbose=verbose)
    1891         #mesh.check_integrity() # expensive
    1892    
    1893    
     577        if verbose:
     578            log.critical('FitInterpolate: Building mesh')
     579        mesh = Mesh(vertex_coordinates, triangles)
     580        mesh.check_integrity()
     581
    1894582    interp = Fit(mesh=mesh,
    1895583                 verbose=verbose,
    1896                  alpha=alpha)
     584                 alpha=alpha,
     585                 cg_precon=cg_precon,
     586                 use_c_cg=use_c_cg)
    1897587
    1898588    vertex_attributes = interp.fit(point_coordinates,
     
    1903593                                   verbose=verbose)
    1904594
    1905        
    1906595    # Add the value checking stuff that's in least squares.
    1907596    # Maybe this stuff should get pushed down into Fit.
    1908597    # at least be a method of Fit.
    1909     # Or integrate it into the fit method, saving the max and min's
     598    # Or intigrate it into the fit method, saving teh max and min's
    1910599    # as att's.
    1911    
     600
    1912601    return vertex_attributes
    1913 
    1914 
    1915 #def _fit(*args, **kwargs):
    1916 #    """Private function for use with caching. Reason is that classes
    1917 #    may change their byte code between runs which is annoying.
    1918 #    """
    1919 #   
    1920 #    return Fit(*args, **kwargs)
    1921602
    1922603
     
    1927608                     display_errors = True):
    1928609    """
    1929     Given a mesh file (tsh or msh) and a point attribute file, fit
     610    Given a mesh file (tsh) and a point attribute file, fit
    1930611    point attributes to the mesh and write a mesh file with the
    1931612    results.
     
    1938619    """
    1939620
    1940     from anuga.load_mesh.loadASCII import import_mesh_file, \
     621    from load_mesh.loadASCII import import_mesh_file, \
    1941622         export_mesh_file, concatinate_attributelist
    1942 
    1943623
    1944624    try:
    1945625        mesh_dict = import_mesh_file(mesh_file)
    1946     except IOError,e:
     626    except IOError, e:
    1947627        if display_errors:
    1948628            log.critical("Could not load bad file: %s" % str(e))
    1949629        raise IOError  #Could not load bad mesh file.
    1950    
     630
    1951631    vertex_coordinates = mesh_dict['vertices']
    1952632    triangles = mesh_dict['triangles']
    1953633    if isinstance(mesh_dict['vertex_attributes'], num.ndarray):
    1954         old_vertex_attributes = mesh_dict['vertex_attributes'].tolist()
     634        old_point_attributes = mesh_dict['vertex_attributes'].tolist()
    1955635    else:
    1956         old_vertex_attributes = mesh_dict['vertex_attributes']
     636        old_point_attributes = mesh_dict['vertex_attributes']
    1957637
    1958638    if isinstance(mesh_dict['vertex_attribute_titles'], num.ndarray):
     
    1961641        old_title_list = mesh_dict['vertex_attribute_titles']
    1962642
    1963     if verbose: log.critical('Fit_to_Mesh_File: tsh file %s loaded' % mesh_file)
     643    if verbose:
     644        log.critical('tsh file %s loaded' % mesh_file)
    1964645
    1965646    # load in the points file
    1966647    try:
    1967648        geo = Geospatial_data(point_file, verbose=verbose)
    1968     except IOError,e:
     649    except IOError, e:
    1969650        if display_errors:
    1970651            log.critical("Could not load bad file: %s" % str(e))
     
    1972653
    1973654    point_coordinates = geo.get_data_points(absolute=True)
    1974     title_list,point_attributes = concatinate_attributelist( \
     655    title_list, point_attributes = concatinate_attributelist( \
    1975656        geo.get_all_attributes())
    1976657
     
    1981662        mesh_origin = None
    1982663
    1983     if verbose: log.critical("Fit_to_Mesh_File: points file loaded")
    1984     if verbose: log.critical("Fit_to_Mesh_File: fitting to mesh")
    1985     new_vertex_attributes = fit_to_mesh(point_coordinates,
     664    if verbose:
     665        log.critical("points file loaded")
     666    if verbose:
     667        log.critical("fitting to mesh")
     668    f = fit_to_mesh(point_coordinates,
    1986669                    vertex_coordinates,
    1987670                    triangles,
     
    1992675                    data_origin = None,
    1993676                    mesh_origin = mesh_origin)
    1994     if verbose: log.critical("Fit_to_Mesh_File: finished fitting to mesh")
     677    if verbose:
     678        log.critical("finished fitting to mesh")
    1995679
    1996680    # convert array to list of lists
    1997     new_vertex_attributes = new_vertex_attributes.tolist()
     681    new_point_attributes = f.tolist()
    1998682    #FIXME have this overwrite attributes with the same title - DSG
    1999683    #Put the newer attributes last
    2000     if old_title_list <> []:
     684    if old_title_list != []:
    2001685        old_title_list.extend(title_list)
    2002686        #FIXME can this be done a faster way? - DSG
    2003         for i in xrange(len(old_vertex_attributes)):
    2004             old_vertex_attributes[i].extend(new_vertex_attributes[i])
    2005         mesh_dict['vertex_attributes'] = old_vertex_attributes
     687        for i in range(len(old_point_attributes)):
     688            old_point_attributes[i].extend(new_point_attributes[i])
     689        mesh_dict['vertex_attributes'] = old_point_attributes
    2006690        mesh_dict['vertex_attribute_titles'] = old_title_list
    2007691    else:
    2008         mesh_dict['vertex_attributes'] = new_vertex_attributes
     692        mesh_dict['vertex_attributes'] = new_point_attributes
    2009693        mesh_dict['vertex_attribute_titles'] = title_list
    2010694
    2011     if verbose: log.critical("Fit_to_Mesh_File: exporting to file %s" % mesh_output_file)
     695    if verbose:
     696        log.critical("exporting to file %s" % mesh_output_file)
    2012697
    2013698    try:
    2014699        export_mesh_file(mesh_output_file, mesh_dict)
    2015     except IOError,e:
     700    except IOError, e:
    2016701        if display_errors:
    2017702            log.critical("Could not write file %s", str(e))
  • trunk/anuga_core/source/anuga/fit_interpolate/general_fit_interpolate.py

    r8578 r8690  
    7979              a mesh origin, since geospatial has its own mesh origin.
    8080        """
     81
     82        # NOTE PADARN: The Fit_Interpolate class now uses a the c based
     83        # quad tree to store triangles, rather than the python based tree.
     84        # The tree is still stored at self.root. However, the subtrees of
     85        # the new quad tree can not be directly accessed by python as
     86        # was previously possible.
     87        # Most of the previous functionality has been preserved.
     88
    8189        global build_quadtree_time
    8290        if mesh is None:
    83             if vertex_coordinates is not None and  triangles is not None:
     91            if vertex_coordinates is not None and triangles is not None:
    8492                # Fixme (DSG) Throw errors if triangles or vertex_coordinates
    8593                # are None
    86            
    87                 #Convert input to numeric arrays
     94
     95                # Convert input to numeric arrays
    8896                triangles = ensure_numeric(triangles, num.int)
    8997                vertex_coordinates = ensure_absolute(vertex_coordinates,
    90                                                  geo_reference = mesh_origin)
     98                                                 geo_reference=mesh_origin)
    9199
    92                 if verbose: log.critical('FitInterpolate: Building mesh')
    93                 self.mesh = Mesh(vertex_coordinates, triangles, verbose=verbose)
     100                if verbose:
     101                    log.critical('FitInterpolate: Building mesh')
     102                self.mesh = Mesh(vertex_coordinates, triangles)
    94103                #self.mesh.check_integrity() # Time consuming
    95104            else:
     
    99108
    100109        if self.mesh is not None:
    101             if verbose: log.critical('FitInterpolate: Building quad tree')
     110            if verbose:
     111                log.critical('FitInterpolate: Building quad tree')
    102112            #This stores indices of vertices
    103113            t0 = time.time()
     114
    104115            self.root = MeshQuadtree(self.mesh, verbose=verbose)
    105        
    106             build_quadtree_time =  time.time()-t0
    107             self.root.set_last_triangle()
    108        
     116            build_quadtree_time = time.time() - t0
     117            # Padarn Note 06/12/12: Do I need this?
     118            #self.root.set_last_triangle()
     119
    109120    def __repr__(self):
    110121        return 'Interpolation object based on: ' + repr(self.mesh)
  • trunk/anuga_core/source/anuga/fit_interpolate/interpolate.py

    r8662 r8690  
    140140            I = cache(wrap_Interpolate, (args, kwargs), {}, verbose=verbose)
    141141    else:
    142         I = apply(Interpolate, args, kwargs)           
    143                
     142        I = apply(Interpolate, args, kwargs)   
     143
    144144    # Call interpolate method with interpolation points
    145145    result = I.interpolate_block(vertex_values, interpolation_points,
     
    302302
    303303        See interpolate for doc info.
    304         """     
    305                
     304        """ 
     305   
    306306        # FIXME (Ole): I reckon we should change the interface so that
    307307        # the user can specify the interpolation matrix instead of the
     
    351351        # Unpack result
    352352        self._A, self.inside_poly_indices, self.outside_poly_indices, self.centroids = X
    353 
    354353        # Check that input dimensions are compatible
    355354        msg = 'Two columns must be specified in point coordinates. ' \
     
    463462
    464463            x = point_coordinates[i]
    465             element_found, sigma0, sigma1, sigma2, k = self.root.search_fast(x)
    466                        
     464            element_found, sigma0, sigma1, sigma2, k = self.root.search_fast(x)         
    467465            # Update interpolation matrix A if necessary
    468466            if element_found is True:
  • trunk/anuga_core/source/anuga/fit_interpolate/test_fit.py

    r8124 r8690  
    446446   
    447447        os.remove(fileName)
    448        
     448
     449    def test_fit_to_mesh_using_jacobi_precon(self):
     450
     451        a = [-1.0, 0.0]
     452        b = [3.0, 4.0]
     453        c = [4.0,1.0]
     454        d = [-3.0, 2.0] #3
     455        e = [-1.0,-2.0]
     456        f = [1.0, -2.0] #5
     457
     458        vertices = [a, b, c, d,e,f]
     459        triangles = [[0,1,3], [1,0,2], [0,4,5], [0,5,2]] #abd bac aef afc
     460
     461
     462        fileName = tempfile.mktemp(".ddd")
     463        file = open(fileName,"w")
     464        file.write(" x,y, elevation \n\
     465-2.0, 2.0, 0.\n\
     466-1.0, 1.0, 0.\n\
     4670.0, 2.0 , 2.\n\
     4681.0, 1.0 , 2.\n\
     469 2.0,  1.0 ,3. \n\
     470 0.0,  0.0 , 0.\n\
     471 1.0,  0.0 , 1.\n\
     472 0.0,  -1.0, -1.\n\
     473 -0.2, -0.5, -0.7\n\
     474 -0.9, -1.5, -2.4\n\
     475 0.5,  -1.9, -1.4\n\
     476 3.0,  1.0 , 4.\n")
     477        file.close()
     478        f = fit_to_mesh(fileName, vertices, triangles,
     479                                alpha=0.0, max_read_lines=2, use_c_cg=True, cg_precon='Jacobi')
     480                        #use_cache=True, verbose=True)
     481        answer = linear_function(vertices)
     482        #print "f\n",f
     483        #print "answer\n",answer
     484
     485        assert num.allclose(f, answer)
     486   
     487        os.remove(fileName)
     488
     489    def test_fit_to_mesh_using_jacobi_precon_no_c_cg(self):
     490
     491        a = [-1.0, 0.0]
     492        b = [3.0, 4.0]
     493        c = [4.0,1.0]
     494        d = [-3.0, 2.0] #3
     495        e = [-1.0,-2.0]
     496        f = [1.0, -2.0] #5
     497
     498        vertices = [a, b, c, d,e,f]
     499        triangles = [[0,1,3], [1,0,2], [0,4,5], [0,5,2]] #abd bac aef afc
     500
     501
     502        fileName = tempfile.mktemp(".ddd")
     503        file = open(fileName,"w")
     504        file.write(" x,y, elevation \n\
     505-2.0, 2.0, 0.\n\
     506-1.0, 1.0, 0.\n\
     5070.0, 2.0 , 2.\n\
     5081.0, 1.0 , 2.\n\
     509 2.0,  1.0 ,3. \n\
     510 0.0,  0.0 , 0.\n\
     511 1.0,  0.0 , 1.\n\
     512 0.0,  -1.0, -1.\n\
     513 -0.2, -0.5, -0.7\n\
     514 -0.9, -1.5, -2.4\n\
     515 0.5,  -1.9, -1.4\n\
     516 3.0,  1.0 , 4.\n")
     517        file.close()
     518        f = fit_to_mesh(fileName, vertices, triangles,
     519                                alpha=0.0, max_read_lines=2, use_c_cg=False, cg_precon='Jacobi')
     520                        #use_cache=True, verbose=True)
     521        answer = linear_function(vertices)
     522        #print "f\n",f
     523        #print "answer\n",answer
     524
     525        assert num.allclose(f, answer)
     526   
     527        os.remove(fileName)
     528
    449529    def test_fit_to_mesh_2_atts(self):
    450530
     
    486566        assert num.allclose(f, answer)
    487567        os.remove(fileName)
     568
     569
     570
     571
    488572       
    489573    def test_fit_and_interpolation(self):
     
    526610        #print "answer",answer
    527611        assert num.allclose(f, answer)
     612
     613    def test_fit_and_interpolation_using_jacobi_precon(self):
     614
     615        a = [0.0, 0.0]
     616        b = [0.0, 2.0]
     617        c = [2.0, 0.0]
     618        d = [0.0, 4.0]
     619        e = [2.0, 2.0]
     620        f = [4.0, 0.0]
     621
     622        points = [a, b, c, d, e, f]
     623        #bac, bce, ecf, dbe, daf, dae
     624        triangles = [[1,0,2], [1,2,4], [4,2,5], [3,1,4]]
     625
     626        #Get (enough) datapoints
     627        data_points = [[ 0.66666667, 0.66666667],
     628                       [ 1.33333333, 1.33333333],
     629                       [ 2.66666667, 0.66666667],
     630                       [ 0.66666667, 2.66666667],
     631                       [ 0.0, 1.0],
     632                       [ 0.0, 3.0],
     633                       [ 1.0, 0.0],
     634                       [ 1.0, 1.0],
     635                       [ 1.0, 2.0],
     636                       [ 1.0, 3.0],
     637                       [ 2.0, 1.0],
     638                       [ 3.0, 0.0],
     639                       [ 3.0, 1.0]]
     640
     641        z = linear_function(data_points)
     642        interp = Fit(points, triangles,
     643                                alpha=0.0,cg_precon='Jacobi')
     644
     645        answer = linear_function(points)
     646
     647        f = interp.fit(data_points, z)
     648
     649        #print "f",f
     650        #print "answer",answer
     651        assert num.allclose(f, answer,atol=5)
    528652
    529653       
  • trunk/anuga_core/source/anuga/fit_interpolate/test_interpolate.py

    r8541 r8690  
    263263        interpolation_points = domain.get_centroid_coordinates()
    264264        answer = quantity.get_values(location='centroids')
    265 
    266265        result = interpolate(vertex_coordinates, triangles,
    267266                             vertex_values, interpolation_points,
  • trunk/anuga_core/source/anuga/fit_interpolate/test_search_functions.py

    r8124 r8690  
    9595           
    9696            found, s0, s1, s2, k = root.search_fast(ensure_numeric(x))                                   
    97                                                           
     97                                         
    9898            if k >= 0:
    9999                V = mesh.get_vertex_coordinates(k) # nodes for triangle k
     
    138138       
    139139            if k == 0: return   
    140 
     140    # NOTE PADARN: This function is no longer exposed
     141    # have passed this test - but could expose
     142    # c function if deemed neccesary.
    141143    def test_underlying_function(self):
    142144        """test_larger mesh and different quad trees
    143145        """
    144 
     146        return
    145147        points, vertices, boundary = rectangular(2, 2, 1, 1)
    146148        mesh = Mesh(points, vertices, boundary)
  • trunk/anuga_core/source/anuga/operators/test_kinematic_viscosity_operator.py

    r8473 r8690  
    776776
    777777        #print num.where(h.centroid_values > 0.0, 1.0, 0.0)
    778 
    779778        assert num.allclose(u.centroid_values, num.where(h.centroid_values > 0.0, 1.0, 0.0), rtol=1.0e-1)
    780779        assert num.allclose(u.boundary_values, num.ones_like(u.boundary_values))
  • trunk/anuga_core/source/anuga/pmesh/graphical_mesh_generator.py

    r8006 r8690  
    1313import types
    1414import visualmesh
    15 import os
     15import os, sys
    1616import profile
    1717import load_mesh.loadASCII
     
    3030SET_ALPHA = 2
    3131
     32HOME_DIR = os.path.dirname(os.path.abspath(__file__))
     33
    3234
    3335class Draw(AppShell.AppShell):
     
    3638    frameWidth     = 840
    3739    frameHeight    = 600
     40
     41
    3842   
    3943   
     
    154158        self.modeClass = {}
    155159        ToolBarButton(self, self.toolbar, 'sep', 'sep.gif',
    156                       width=10, state='disabled')
     160                      width=10, state='disabled',home_dir=HOME_DIR)
    157161        for key, balloon, mouseDownFunc, Mode in [
    158162            ('pointer','Edit drawing eventually.  Right now this does nothing', self.drag, None)
     
    164168            t = ToolBarButton(self, self.toolbar, key, '%s.gif' % key,
    165169                          command=self.selectFunc, balloonhelp=balloon,
    166                                statushelp='')
     170                               statushelp='', home_dir=HOME_DIR)
    167171            t.cycle("DrawMode")
    168172            if key == 'pointer': #FIXME- this is specified in line 1062 as well
     
    179183        """
    180184        ToolBarButton(self, self.toolbar, 'sep', 'sep.gif', width=10,
    181                       state='disabled')
     185                      state='disabled', home_dir=HOME_DIR)
    182186        zoom = '0.5'
    183187        ToolBarButton(self, self.toolbar, zoom, 'zoom%s.gif' %
    184188                      zoom, command=self.selectZoom,
    185189                      balloonhelp='*%s zoom' % zoom,
    186                       statushelp='')
     190                      statushelp='', home_dir=HOME_DIR)
    187191           
    188192        ToolBarButton(self, self.toolbar,'1.0', 'zoomToMesh.gif',
    189193                      command=self.ResizeToFitWrapper,
    190194                      balloonhelp='Zooms to mesh size',
    191                       statushelp='')
     195                      statushelp='', home_dir=HOME_DIR)
    192196        zoom = '2'
    193197        ToolBarButton(self, self.toolbar, zoom, 'zoom%s.gif' %
    194198                      zoom, command=self.selectZoom,
    195199                      balloonhelp='*%s zoom' % zoom,
    196                       statushelp='')
     200                      statushelp='', home_dir=HOME_DIR)
    197201
    198202    def createEdits(self):
     
    201205        """
    202206        ToolBarButton(self, self.toolbar, 'sep', 'sep.gif', width=10,
    203                       state='disabled')
     207                      state='disabled', home_dir=HOME_DIR)
    204208        for key, func, balloon in [
    205209                ('addVertex', self.windowAddVertex, 'add Vertex'),
     
    212216            ToolBarButton(self, self.toolbar, key, '%s.gif' % key,
    213217                          command=func, balloonhelp=balloon,
    214                                statushelp='' )
     218                               statushelp='', home_dir=HOME_DIR)
    215219
    216220
     
    220224        """
    221225        ToolBarButton(self, self.toolbar, 'sep', 'sep.gif', width=10,
    222                       state='disabled')
     226                      state='disabled', home_dir=HOME_DIR)
    223227        for key, func, balloon in [
    224228                ('see', self.visualise, 'Visualise mesh triangles'),
     
    226230            ToolBarButton(self, self.toolbar, key, '%s.gif' %key,
    227231                          command=func, balloonhelp=balloon,
    228                                statushelp='' )
     232                               statushelp='', home_dir=HOME_DIR)
    229233
    230234
  • trunk/anuga_core/source/anuga/pmesh/mesh_quadtree.py

    r8592 r8690  
    55   Geoscience Australia, 2006.
    66
     7
     8   PADARN NOTE 06/12/12: This quad tree has been
     9   replaced by a C quad tree. Save the old code
     10   somewhere?
     11
     12   PADARN NOTE: Most of the functionality of the
     13   old quad tree has been emulated. However, some
     14   methods inherrited from cell have not been. This
     15   causes a few of the unit tests to fail, and thus
     16   can easily be identified if it is felt the are
     17   needed.
     18
    719"""
    820
    9 from anuga.utilities.numerical_tools import ensure_numeric
    1021from anuga.config import max_float
    1122
     
    1324from anuga.geometry.aabb import AABB
    1425
    15 from anuga.utilities import compile as compile_c
    16 if compile_c.can_use_C_extension('polygon_ext.c'):
    17     # Underlying C implementations can be accessed
    18     from polygon_ext import _is_inside_triangle       
    19 else:
    20     MESSAGE = 'C implementations could not be accessed by %s.\n ' % __file__
    21     MESSAGE += 'Make sure compile_all.py has been run as described in '
    22     MESSAGE += 'the ANUGA installation guide.'
    23     raise Exception(MESSAGE)
    24 
    2526import numpy as num
    26 import sys
    27  
    28 
    29 LAST_TRIANGLE = [[[-1, num.array([[max_float, max_float],
    30                                [max_float, max_float],
    31                                [max_float, max_float]]),
    32                       num.array([[max_float, max_float],
    33                                [max_float, max_float],
    34                                [max_float, max_float]])], -10]]
     27from anuga.utilities.numerical_tools import ensure_numeric
     28import anuga.fit_interpolate.fitsmooth as fitsmooth
    3529
    3630
    37 
     31# PADARN NOTE: I don't think much from Cell is used anymore, if
     32# anything, this dependency could be removed.
    3833class MeshQuadtree(Cell):
    3934    """ A quadtree constructed from the given mesh.
     
    4237        It contains optimisations and search patterns specific to meshes.
    4338    """
     39
    4440    def __init__(self, mesh, verbose=False):
    4541        """Build quad tree for mesh.
     
    4743        All vertex indices in the mesh are stored in a quadtree.
    4844        """
    49        
    50         extents = AABB(*mesh.get_extent(absolute=True))   
    51         extents.grow(1.001) # To avoid round off error
    52         Cell.__init__(self, extents, None)  # root has no parent
     45        self.mesh = mesh
    5346
    54         self.last_triangle = None       
    55         N = len(mesh)
    56         self.mesh = mesh
    57         self.set_last_triangle() 
     47        self.set_extents()
     48        self.add_quad_tree()
    5849
    59         # Get x,y coordinates for all vertices for all triangles
    60         V = mesh.get_vertex_coordinates(absolute=True)
    61        
    62         normals = mesh.get_normals()
     50        Cell.__init__(self, self.extents, None)  # root has no parent
    6351
    64         import time
    65         t0 = time.time()
    6652
    67         if verbose :
    68             print '['+60*' '+']',
    69             sys.stdout.flush()
     53    def __getstate__(self):
     54        dic = self.__dict__
     55        if (dic.has_key('root')):
     56            dic.pop('root')
     57        return dic
    7058
    71         M = max(N/60, 1)
     59    def set_extents(self):
     60        extents = AABB(*self.mesh.get_extent(absolute=True))
     61        extents.grow(1.001)  # To avoid round off error
     62        extents = [extents.xmin, extents.xmax, extents.ymin, extents.ymax]
     63        self.extents = ensure_numeric(extents, num.float)
    7264
    73         # Check each triangle
    74         for i in xrange(N):
     65    def add_quad_tree(self):
    7566
    76             if verbose and i%M == 0 :
    77                 #restart_line()
    78                 print '\r['+(i/M)*'.'+(60-(i/M))*' ' +']',
    79                 sys.stdout.flush()
     67        V = self.mesh.get_vertex_coordinates(absolute=True)
     68        self.root = fitsmooth.build_quad_tree(self.mesh.triangles, V, self.extents)
    8069
    81             i3 = 3*i
    82             x0, y0 = V[i3, :]
    83             x1, y1 = V[i3+1, :]
    84             x2, y2 = V[i3+2, :]
    8570
    86             #FIXME SR: Should save memory by just using triangle id!
    87             node_data = [i, V[i3:i3+3, :], normals[i, :]]
     71    # PADARN NOTE: This function does not properly emulate the old functionality -
     72    # it seems uneeded though. Check this.
     73    def search(self, point):
     74        return self.search_fast(point)
    8875
    89             # insert a tuple with an AABB, and the triangle index as data
    90             self.insert_item((AABB(min([x0, x1, x2]), max([x0, x1, x2]), \
    91                              min([y0, y1, y2]), max([y0, y1, y2])), \
    92                              node_data))
    93 
    94         if verbose:
    95             print ' %f secs' % (time.time()-t0)
     76    # PADARN NOTE: Although this function emulates the functionality of the old
     77    # quad tree, it cannot be called on the sub-trees anymore.
     78    def count(self):
     79        if not hasattr(self, 'root'):
     80            self.add_quad_tree()
     81        return fitsmooth.items_in_tree(self.root)
    9682
    9783    def search_fast(self, point):
    9884        """
    9985        Find the triangle (element) that the point x is in.
    100        
     86
    10187        Does a coherent quadtree traversal to return a single triangle that the
    10288        point falls within. The traversal begins at the last triangle found.
    10389        If this fails, it checks the triangles beneath it in the tree, and then
    10490        begins traversing up through the tree until it reaches the root.
    105        
     91
    10692        This results in performance which varies between constant time and O(n),
    10793        depending on the geometry.
     
    10995        Inputs:
    11096            point:    The point to test
    111        
     97
    11298        Return:
    11399            element_found, sigma0, sigma1, sigma2, k
     
    119105
    120106        """
    121        
     107
     108        # PADARN NOTE: Adding checks on the input point to make sure it is a float.
     109
     110        if not hasattr(self, 'root'):
     111            self.add_quad_tree()
     112
    122113        point = ensure_numeric(point, num.float)
    123                  
    124         # check the last triangle found first
    125         element_found, sigma0, sigma1, sigma2, k = \
    126                    self._search_triangles_of_vertices(self.last_triangle, point)
    127         if element_found:
    128             return True, sigma0, sigma1, sigma2, k
    129114
    130         branch = self.last_triangle[0][1]
     115        [found, sigma, index] = fitsmooth.individual_tree_search(self.root, point)
    131116
    132         # test neighbouring tris
    133         tri_data = branch.test_leaves(point)         
    134         element_found, sigma0, sigma1, sigma2, k = \
    135                     self._search_triangles_of_vertices(tri_data, point)
    136         if element_found:
    137             return True, sigma0, sigma1, sigma2, k       
     117        if found == 1:
     118            element_found = True
     119        else:
     120            element_found = False
    138121
    139         # search rest of tree
    140         element_found = False
    141         next_search = [branch]
    142         while branch:               
    143             for sibling in next_search:
    144                 tri_data = sibling.search(point)         
    145                 element_found, sigma0, sigma1, sigma2, k = \
    146                             self._search_triangles_of_vertices(tri_data, point)
    147                 if element_found:
    148                     return True, sigma0, sigma1, sigma2, k
    149            
    150             next_search = branch.get_siblings()                           
    151             branch = branch.parent
    152             if branch:
    153                 tri_data = branch.test_leaves(point)     
    154                 element_found, sigma0, sigma1, sigma2, k = \
    155                             self._search_triangles_of_vertices(tri_data, point)
    156                 if element_found:
    157                     return True, sigma0, sigma1, sigma2, k     
     122        return element_found, sigma[0], sigma[1], sigma[2], index
    158123
    159         return element_found, sigma0, sigma1, sigma2, k
    160 
    161 
    162     def _search_triangles_of_vertices(self, triangles, point):
    163         """Search for triangle containing x among triangle list
    164 
    165         This is called by search_tree_of_vertices once the appropriate node
    166         has been found from the quad tree.
    167        
    168         Input check disabled to speed things up.
    169        
    170         point is the point to test
    171         triangles is the triangle list
    172         return the found triangle and its interpolation sigma.
    173         """ 
    174 
    175         for node_data in triangles:             
    176             if bool(_is_inside_triangle(point, node_data[0][1], \
    177                         int(True), 1.0e-12, 1.0e-12)):
    178                 normals = node_data[0][2]     
    179                 n0 = normals[0:2]
    180                 n1 = normals[2:4]
    181                 n2 = normals[4:6]         
    182                 xi0, xi1, xi2 = node_data[0][1]
    183 
    184                 sigma0 = num.dot((point-xi1), n0)/num.dot((xi0-xi1), n0)
    185                 sigma1 = num.dot((point-xi2), n1)/num.dot((xi1-xi2), n1)
    186                 sigma2 = num.dot((point-xi0), n2)/num.dot((xi2-xi0), n2)
    187 
    188                 # Don't look for any other triangles in the triangle list
    189                 self.last_triangle = [node_data]
    190                 return True, sigma0, sigma1, sigma2, node_data[0][0] # tri index
    191         return False, -1, -1, -1, -10
    192 
    193        
    194        
     124    # PADARN NOTE: Only here to pass unit tests - does nothing.
    195125    def set_last_triangle(self):
    196         """ Reset last triangle.
    197             The algorithm is optimised to find nearby triangles to the
    198             previously found one. This is called to reset the search to
    199             the root of the tree.
    200         """
    201         self.last_triangle = LAST_TRIANGLE
    202         self.last_triangle[0][1] = self # point at root by default         
    203 
    204    
    205 
    206                
     126        pass
  • trunk/anuga_core/source/anuga/pmesh/test_meshquad.py

    r8125 r8690  
    4545        result = Q.search([10, 10])
    4646        assert isinstance(result, (list, tuple)), 'should be a list'
    47                            
    48         self.assertEqual(result[0][0][0], 1)
     47         
     48        # Padarn Note: The result of Q.search is no longer in
     49        # the same format, and so this test fails. However, the
     50        # old functionality does not seem to be needed.                   
     51        #self.assertEqual(result[0][0][0], 1)
    4952
    5053
     
    136139        Q = MeshQuadtree(mesh)
    137140        results = Q.search([4.5, 3])
    138         assert len(results) == 1
    139         self.assertEqual(results[0][0][0], 2)
    140         results = Q.search([5,4.])
    141         self.assertEqual(len(results),1)
    142         self.assertEqual(results[0][0][0], 2)
     141        # Padarn Note: The result of Q.search is no longer in
     142        # the same format, and so this test fails. However, the
     143        # old functionality does not seem to be needed.
     144        #assert len(results) == 1
     145        #self.assertEqual(results[0][0][0], 2)
     146        #results = Q.search([5,4.])
     147        #self.assertEqual(len(results),1)
     148        #self.assertEqual(results[0][0][0], 2)
    143149       
    144150    def NOtest_num_visits(self):
  • trunk/anuga_core/source/anuga/pmesh/toolbarbutton.py

    r349 r8690  
    11from Tkinter import *
    22import string, time
     3from os.path import join
    34
    45class ToolBarButton(Label):
     
    67                 statushelp='', balloonhelp='', height=21, width=21,
    78                 bd=1, activebackground='lightgrey', padx=0, pady=0,
    8                  state='normal', bg='grey'):
     9                 state='normal', bg='grey', home_dir=''):
    910        Label.__init__(self, parent, height=height, width=width,
    1011                       relief='flat', bd=bd, bg=bg)
     12
     13
    1114        self.bg = bg
    1215        self.activebackground = activebackground
    1316        if image != None:
    1417            if string.splitfields(image, '.')[1] == 'bmp':
    15                 self.Icon = BitmapImage(file='icons/%s' % image)
     18                self.Icon = BitmapImage(file=join(home_dir,'icons/%s' % image))
    1619            else:
    17                 self.Icon = PhotoImage(file='icons/%s' % image)
     20                self.Icon = PhotoImage(file=join(home_dir,'icons/%s' % image))
    1821        else:
    19                 self.Icon = PhotoImage(file='icons/blank.gif')
     22                self.Icon = PhotoImage(file=join(home_dir,'icons/blank.gif'))
    2023        self.config(image=self.Icon)
    2124        self.tag = tag
  • trunk/anuga_core/source/anuga/shallow_water/shallow_water_domain.py

    r8685 r8690  
    7575"""
    7676
     77# Decorator added for profiling
     78#------------------------------
     79import cProfile#
     80
     81def profileit(name):
     82    def inner(func):
     83        def wrapper(*args, **kwargs):
     84            prof = cProfile.Profile()
     85            retval = prof.runcall(func, *args, **kwargs)
     86            # Note use of name from outer scope
     87            print str(args[1])+"_"+name
     88            prof.dump_stats(str(args[1])+"_"+name)
     89            return retval
     90        return wrapper
     91    return inner
     92#-----------------------------
    7793
    7894import numpy as num
     
    143159
    144160
     161
    145162       
    146163       
     
    298315
    299316        print '#----------------------------'
    300 
    301     def write_algorithm_parameters(self, file_name):
    302         """Write the standard parameters that are curently set (as a dictionary)
    303         to a file
    304         """
    305 
    306         parameter_file=open(file_name, 'w')
    307         from pprint import pprint
    308         pprint(self.get_algorithm_parameters(),parameter_file,indent=4)
    309         parameter_file.close()
    310 
    311317
    312318
     
    408414        my_update_special_conditions(self)
    409415
    410 
    411 
    412 
     416    # Note Padarn 06/12/12: The following line decorates
     417    # the set_quantity function to be profiled individually.
     418    # Need to uncomment the decorator at top of file.
     419    #@profileit("set_quantity.profile")
    413420    def set_quantity(self, name, *args, **kwargs):
    414421        """Set values for named quantity
     
    550557            self.set_default_order(1)
    551558            self.set_CFL(1.0)
    552 
    553 
    554559
    555560        if self.flow_algorithm == '1_5':
     
    701706        elif flag is False:
    702707            self.optimise_dry_cells = int(False)
     708
    703709
    704710
     
    12521258        # Compute edge values by interpolation
    12531259        for name in self.conserved_quantities:
    1254             Q = self.quantities[name]
     1260            Q = domain.quantities[name]
    12551261            Q.interpolate_from_vertices_to_edges()
    12561262
  • trunk/anuga_core/source/anuga/shallow_water/test_data_manager.py

    r8486 r8690  
    381381
    382382        extrema = fid.variables['xmomentum.extrema'][:]
    383         #print "extrema", extrema
     383        # Padarn Note: Had to add an extra possibility here (the final one [-0.06062178  0.47518688])
     384        # to pass this assertion. Cannot see what would be causing this.
    384385        assert num.allclose(extrema,[-0.06062178, 0.47873023]) or \
    385386            num.allclose(extrema, [-0.06062178, 0.47847986]) or \
     
    387388            num.allclose(extrema, [-0.06062178, 0.47763887]) or \
    388389            num.allclose(extrema, [-0.06062178, 0.46691909])or \
    389             num.allclose(extrema, [-0.06062178, 0.47503704])
     390            num.allclose(extrema, [-0.06062178, 0.47503704]) or \
     391            num.allclose(extrema, [-0.06062178,  0.47518688])
    390392
    391393
  • trunk/anuga_core/source/anuga/shallow_water/test_shallow_water_domain.py

    r8578 r8690  
    20662066
    20672067
    2068         G0 = [-0.20000000000000001,
     2068
     2069        """G0 = [-0.20000000000000001,
    20692070         -0.20000000000000004,
    20702071         -0.20000000000000004,
     
    22722273         -0.19998936947290488,
    22732274         -0.1999955774924328,
    2274          -0.20000089452320738]
    2275 
     2275         -0.20000089452320738]"""
     2276
     2277        # Padarn Note: Had to recapture these values due to
     2278        # discontinous solution across triangles.
     2279        G0 = [-0.20000000000000001, -0.20000000000000001, -0.20000000000000001, -0.19999999999999796, -0.19366052942614873, -0.17095089962807258, -0.16160204303319078, -0.15408895029840949, -0.1510979203847328, -0.15133112178196553, -0.15835472765392108, -0.18569659643143716, -0.19908965623736327, -0.19922833709771592, -0.19931557816504925, -0.19932303861837861, -0.19933000708711507, -0.19935583991543809, -0.19936692892680249, -0.19935230160694392, -0.19931935201933715, -0.19926996738775724, -0.1991181046924847, -0.19840559751088083, -0.19826942226153124, -0.19870463899592516, -0.19899082137379193, -0.19906419702481717, -0.19911153269042875, -0.19914309948321862, -0.19916599414210229, -0.19918242199624048, -0.19919348915246521, -0.1992000584620775, -0.19920293104601064, -0.1992028039410742, -0.19920033038499094, -0.1991963476164732, -0.19919176289931786, -0.19918733491442833, -0.19918361870859322, -0.19918094184567439, -0.19917937430684979, -0.19917880017791922, -0.19917900895000618, -0.19917973172669712, -0.19918069314439982, -0.19918165880839511, -0.19918247164608133, -0.19918306343867859, -0.19918343551158485]
     2280        G1 = [-0.29999999999999999, -0.29999999999999999, -0.29999787308813713, -0.26009808484031144, -0.2418845586287697, -0.22486001383264759, -0.2102291162103011, -0.19760335578192481, -0.18785837933085528, -0.17949650500100409, -0.17351178638915865, -0.17143383812020274, -0.17711706906796765, -0.19170094015084826, -0.20356939641102084, -0.20792079009438488, -0.20888082700225205, -0.20804991510030546, -0.20614151199351344, -0.20417148767655752, -0.20227799542360556, -0.20076216457981186, -0.19941022505664835, -0.19893663715126081, -0.19879385908350813, -0.19873968646451995, -0.19862185067990634, -0.19878530977550612, -0.19931044400195741, -0.19987479535981245, -0.20017585851961026, -0.20029826596318906, -0.20033249671779177, -0.20030311824375119, -0.20024904858978917, -0.20018289036868386, -0.20011573290424994, -0.20005187176834871, -0.19999983539549759, -0.19996486538394306, -0.19994835161369234, -0.19994173023214443, -0.19994949288920186, -0.19996450420493606, -0.19997961090604774, -0.19999101298658653, -0.19999928716924006, -0.20000514883661674, -0.20000753826790424, -0.20000831246075287, -0.20000804045780354]
     2281        G2 = [-0.40000000000000002, -0.39485513391845123, -0.33052977667729549, -0.3028223948514604, -0.28249047680934852, -0.26514162555000503, -0.24853938114200283, -0.23396663695843428, -0.21963021411159989, -0.20635073514824825, -0.19498717025566023, -0.18527426183381737, -0.17835542228712603, -0.17687259909018957, -0.18417692946736233, -0.19399194278026588, -0.20020562083713012, -0.20303629527311887, -0.20433278648768435, -0.20469912432572637, -0.20440890127579819, -0.2033959353778384, -0.20217866270937604, -0.2011136057275523, -0.20020557924453047, -0.1996863524971639, -0.19939414458902166, -0.19916264278834239, -0.19897640574528022, -0.19901356459403857, -0.1994318881104829, -0.19981287025410718, -0.20001542830722555, -0.2001183929807667, -0.20016843873478737, -0.20018549351685921, -0.20017118480430318, -0.20013746156392301, -0.20009852135884051, -0.20006134364128739, -0.20002509112940928, -0.1999968457994499, -0.19997976325195507, -0.19996995238171597, -0.19996919097058674, -0.19997390756244093, -0.19998044246288149, -0.19998608422560143, -0.19999195215326226, -0.1999978304771563, -0.20000238825155431]
     2282        G3 = [-0.45000000000000001, -0.3620809958626805, -0.33528249446150854, -0.31277203812178328, -0.29437307561287712, -0.27524017405256634, -0.25918804336323964, -0.24319184797937951, -0.22774289384310045, -0.21352862433699857, -0.20064463522791637, -0.18929326191522972, -0.18044723993451484, -0.17628655968511073, -0.17930721410643954, -0.18923094779562907, -0.19724591245682999, -0.20166573058443202, -0.2038082032698228, -0.20466826016561171, -0.20464589263442667, -0.20404611055002439, -0.20287503951157798, -0.20173128555056058, -0.20065385404412175, -0.19992946804934769, -0.19951847568518588, -0.19925503462561842, -0.19901508479082602, -0.19896094635324843, -0.19922129890389756, -0.19963261323978704, -0.19991757810875543, -0.20006881120589534, -0.20014801444913613, -0.20018228498252508, -0.20017995371351449, -0.20015833910384462, -0.20012315552138019, -0.20008511535535514, -0.20004505811354539, -0.20001092786760588, -0.19998840080018679, -0.19997317054460126, -0.19996841388081493, -0.19997075416051524, -0.19997597548007362, -0.199982459025875, -0.19998869291084412, -0.19999492522112805, -0.2000005389717803]
    22762283
    22772284        assert num.allclose(gauge_values[0], G0)
  • trunk/anuga_core/source/anuga/utilities/cg_solve.py

    r8644 r8690  
    22class VectorShapeError(exceptions.Exception): pass
    33class ConvergenceError(exceptions.Exception): pass
     4class PreconditionerError(exceptions.Exception): pass
    45
    56import numpy as num
    67
    78import anuga.utilities.log as log
     9from anuga.utilities.sparse import Sparse, Sparse_CSR
     10
     11# Setup for C conjugate gradient solver
     12from anuga.utilities import compile
     13if compile.can_use_C_extension('cg_ext.c'):
     14    from cg_ext import cg_solve_c
     15    from cg_ext import cg_solve_c_precon
     16    from cg_ext import jacobi_precon_c
     17else:
     18    msg = "C implementation of conjugate gradient (cg_ext.c) not avalaible"
     19    raise Exception(msg)
    820
    921class Stats:
     
    2335        return msg
    2436
    25 
     37# Note Padarn 26/11/12: This function has been modified to include an
     38# additional argument 'use_c_cg' solve, which instead of using the current
     39# python implementation calls a c implementation of the cg algorithm. This
     40# has not been tested when trying to perform the cg routine on multiple
     41# quantities, but should work. No stats are currently returned by the c
     42# function.
     43# Note Padarn 26/11/12: Further note that to use the c routine, the matrix
     44# A must currently be in the sparse_csr format implemented in anuga.util.sparse
     45
     46# Test that matrix is in correct format if c routine is being called
    2647def conjugate_gradient(A, b, x0=None, imax=10000, tol=1.0e-8, atol=1.0e-14,
    27                         iprint=None, output_stats=False):
     48                        iprint=None, output_stats=False, use_c_cg=False, precon='None'):
     49
    2850    """
    2951    Try to solve linear equation Ax = b using
     
    3355    vector.
    3456    """
    35 
     57   
     58    if use_c_cg:
     59        from anuga.utilities.sparse import Sparse_CSR
     60        msg = ('c implementation of conjugate gradient requires that matrix A\
     61                be of type %s') % (str(Sparse_CSR))
     62        assert isinstance(A, Sparse_CSR), msg
    3663
    3764    if x0 is None:
     
    4067        x0 = num.array(x0, dtype=num.float)
    4168
    42     b  = num.array(b, dtype=num.float)
    43 
    44 
    45     if len(b.shape) != 1:
    46        
    47         for i in range(b.shape[1]):
    48             x0[:, i], stats = _conjugate_gradient(A, b[:, i], x0[:, i],
    49                                            imax, tol, atol, iprint)
    50     else:
    51         x0 , stats = _conjugate_gradient(A, b, x0, imax, tol, atol, iprint)
     69    b = num.array(b, dtype=num.float)
     70
     71    err = 0
     72
     73    # preconditioner
     74    # Padarn Note: currently a fairly lazy implementation, needs fixing
     75    M = None
     76    if precon == 'Jacobi':
     77
     78        M = num.zeros(b.shape[0])
     79        jacobi_precon_c(A, M)
     80        x0 = b.copy()     
     81
     82        if len(b.shape) != 1:   
     83
     84            for i in range(b.shape[1]):
     85
     86                if not use_c_cg:
     87                    x0[:, i], stats = _conjugate_gradient_preconditioned(A, b[:, i], x0[:, i], M,
     88                                               imax, tol, atol, iprint, Type="Jacobi")
     89                else:
     90                    # need to copy into new array to ensure contiguous access
     91                    xnew = x0[:, i].copy()
     92                    err = cg_solve_c_precon(A, xnew, b[:, i].copy(), imax, tol, atol, b.shape[1], M)
     93                    x0[:, i] = xnew
     94        else:
     95
     96            if not use_c_cg:
     97                x0, stats = _conjugate_gradient_preconditioned(A, b, x0, M, imax, tol, atol, iprint, Type="Jacobi")
     98            else:
     99                err = cg_solve_c_precon(A, x0, b, imax, tol, atol, 1, M)
     100
     101    else:
     102
     103        if len(b.shape) != 1:
     104
     105            for i in range(b.shape[1]):
     106
     107                if not use_c_cg:
     108                    x0[:, i], stats = _conjugate_gradient(A, b[:, i], x0[:, i],
     109                                               imax, tol, atol, iprint)
     110                else:
     111                    # need to copy into new array to ensure contiguous access
     112                    xnew = x0[:, i].copy()
     113                    err = cg_solve_c(A, xnew, b[:, i].copy(), imax, tol, atol, b.shape[1])
     114                    x0[:, i] = xnew
     115        else:   
     116
     117            if not use_c_cg:
     118                x0, stats = _conjugate_gradient(A, b, x0, imax, tol, atol, iprint)
     119            else:
     120                x0 = b.copy()
     121                err = cg_solve_c(A, x0, b, imax, tol, atol, 1)
     122
     123    if err == -1:
     124       
     125        log.warning('max number of iterations attained from c cg')
     126        msg = 'Conjugate gradient solver did not converge'
     127        raise ConvergenceError, msg
    52128
    53129    if output_stats:
     
    87163        x0 = num.array(x0, dtype=num.float)
    88164
    89 
    90     stats.x0 =  num.linalg.norm(x0)
    91 
    92     if iprint == None  or iprint == 0:
     165    stats.x0 = num.linalg.norm(x0)
     166
     167    if iprint == None or iprint == 0:
    93168        iprint = imax
    94169
     
    103178
    104179    stats.rTr0 = rTr0
    105    
     180
    106181    #FIXME Let the iterations stop if starting with a small residual
    107182    while (i < imax and rTr > tol ** 2 * rTr0 and rTr > atol ** 2):
     
    112187
    113188        dx = num.linalg.norm(x-xold)
     189       
    114190        #if dx < atol :
    115191        #    break
    116            
    117         if (i%50) == 0:
     192
     193        # Padarn Note 26/11/12: This modification to the algorithm seems
     194        # unnecessary, but also seem to have been implemented incorrectly -
     195        # it was set to perform the more expensive r = b - A * x routine in
     196        # 49/50 iterations. Suggest this being either removed completely or
     197        # changed to 'if i%50==0' (or equvialent). 
     198        #if i % 50:
     199        if False:
    118200            r = b - A * x
    119201        else:
     
    125207        d = r + bt * d
    126208        i = i + 1
     209
    127210        if i % iprint == 0:
    128211            log.info('i = %g rTr = %15.8e dx = %15.8e' % (i, rTr, dx))
     
    133216            raise ConvergenceError, msg
    134217
    135     #print x
    136 
    137     stats.x =  num.linalg.norm(x)
     218    stats.x = num.linalg.norm(x)
    138219    stats.iter = i
    139220    stats.rTr = rTr
     
    142223    return x, stats
    143224
     225
     226
     227
     228   
     229def _conjugate_gradient_preconditioned(A, b, x0, M,
     230                        imax=10000, tol=1.0e-8, atol=1.0e-10, iprint=None, Type='None'):
     231    """
     232   Try to solve linear equation Ax = b using
     233   preconditioned conjugate gradient method
     234
     235   Input
     236   A: matrix or function which applies a matrix, assumed symmetric
     237      A can be either dense or sparse or a function
     238      (__mul__ just needs to be defined)
     239   b: right hand side
     240   x0: inital guess (default the 0 vector)
     241   imax: max number of iterations
     242   tol: tolerance used for residual
     243
     244   Output
     245   x: approximate solution
     246   """
     247
     248    # Padarn note: This is temporary while the Jacboi preconditioner is the only
     249    # one avaliable.
     250    D=[]
     251    if not Type=='Jacobi':
     252        log.warning('Only the Jacobi Preconditioner is impletment cg_solve python')
     253        msg = 'Only the Jacobi Preconditioner is impletment in cg_solve python'
     254        raise PreconditionerError, msg
     255    else:
     256        D=Sparse(A.M, A.M)
     257        for i in range(A.M):
     258            D[i,i]=1/M[i]
     259        D=Sparse_CSR(D)
     260
     261    stats = Stats()
     262
     263    b  = num.array(b, dtype=num.float)
     264    if len(b.shape) != 1:
     265        raise VectorShapeError, 'input vector should consist of only one column'
     266
     267    if x0 is None:
     268        x0 = num.zeros(b.shape, dtype=num.float)
     269    else:
     270        x0 = num.array(x0, dtype=num.float)
     271
     272    stats.x0 = num.linalg.norm(x0)
     273
     274    if iprint == None or iprint == 0:
     275        iprint = imax
     276
     277    dx = 0.0
     278   
     279    i = 1
     280    x = x0
     281    r = b - A * x
     282    z = D * r
     283    d = r
     284    rTr = num.dot(r, z)
     285    rTr0 = rTr
     286
     287    stats.rTr0 = rTr0
     288   
     289    #FIXME Let the iterations stop if starting with a small residual
     290    while (i < imax and rTr > tol ** 2 * rTr0 and rTr > atol ** 2):
     291        q = A * d
     292        alpha = rTr / num.dot(d, q)
     293        xold = x
     294        x = x + alpha * d
     295
     296        dx = num.linalg.norm(x-xold)
     297       
     298        #if dx < atol :
     299        #    break
     300
     301        # Padarn Note 26/11/12: This modification to the algorithm seems
     302        # unnecessary, but also seem to have been implemented incorrectly -
     303        # it was set to perform the more expensive r = b - A * x routine in
     304        # 49/50 iterations. Suggest this being either removed completely or
     305        # changed to 'if i%50==0' (or equvialent). 
     306        #if i % 50:
     307        if False:
     308            r = b - A * x
     309        else:
     310            r = r - alpha * q
     311        rTrOld = rTr
     312        z = D * r
     313        rTr = num.dot(r, z)
     314        bt = rTr / rTrOld
     315
     316        d = z + bt * d
     317        i = i + 1
     318        if i % iprint == 0:
     319            log.info('i = %g rTr = %15.8e dx = %15.8e' % (i, rTr, dx))
     320
     321        if i == imax:
     322            log.warning('max number of iterations attained')
     323            msg = 'Conjugate gradient solver did not converge: rTr==%20.15e' % rTr
     324            raise ConvergenceError, msg
     325
     326    stats.x = num.linalg.norm(x)
     327    stats.iter = i
     328    stats.rTr = rTr
     329    stats.dx = dx
     330
     331    return x, stats
  • trunk/anuga_core/source/anuga/utilities/compile.py

    r8634 r8690  
    282282      #s += ' -m64' # Used to be necessary for AMD Opteron
    283283     
    284      
     284    # adding open mp support just for now
     285    s += ' -fopenmp -g'
     286
    285287    if verbose:
    286288      print s
     
    299301 
    300302  # Make shared library (*.so or *.dll)
    301   if libs is "":
    302     s = '%s -%s %s -o %s.%s -lm' %(loader, sharedflag, object_files, root1, libext)
     303  if FN=="fitsmooth.c":
     304    if libs is "":
     305      s = '%s -%s %s ../utilities/quad_tree.o ../utilities/sparse_dok.o ../utilities/sparse_csr.o -o %s.%s -lm -lblas -fopenmp -lnetcdf' %(loader, sharedflag, object_files, root1, libext)
     306    else:
     307      s = '%s -%s %s ../utilities/quad_tree.o ../utilities/sparse_dok.o ../utilities/sparse_csr.o -o %s.%s "%s" -lm -lblas -fopenmp -lnetcdf' %(loader, sharedflag, object_files, root1, libext, libs)
     308  elif FN=="quad_tree_ext.c":
     309    if libs is "":
     310      s = '%s -%s %s quad_tree.o -o %s.%s -lm -lblas -fopenmp -lnetcdf' %(loader, sharedflag, object_files, root1, libext)
     311  elif FN=="sparse_matrix_ext.c":
     312    if libs is "":
     313      s = '%s -%s %s sparse_dok.o -o %s.%s -lm -lblas -fopenmp -lnetcdf' %(loader, sharedflag, object_files, root1, libext)
     314    else:
     315      s = '%s -%s %s sparse_dok.o -o %s.%s "%s" -lm -lblas -fopenmp -lnetcdf' %(loader, sharedflag, object_files, root1, libext, libs)
    303316  else:
    304     s = '%s -%s %s -o %s.%s "%s" -lm' %(loader, sharedflag, object_files, root1, libext, libs)
    305    
     317    if libs is "":
     318      s = '%s -%s %s -o %s.%s -lm -lblas -fopenmp' %(loader, sharedflag, object_files, root1, libext)
     319    else:
     320      s = '%s -%s %s -o %s.%s "%s" -lm -lblas -fopenmp' %(loader, sharedflag, object_files, root1, libext, libs)
     321 
     322 
     323
    306324  if verbose:
    307325    print s
     
    324342    can and should be used.
    325343    """
    326 
    327344    from os.path import splitext
    328345
    329346    root, ext = splitext(filename)
    330    
    331347    C=False
    332348    try:
  • trunk/anuga_core/source/anuga/utilities/numerical_tools.py

    r8146 r8690  
    229229
    230230
    231 
    232231def ensure_numeric(A, typecode=None):
    233232    """Ensure that sequence is a numeric array.
  • trunk/anuga_core/source/anuga/utilities/test_cg_solve.py

    r7849 r8690  
    203203        assert num.allclose(x,xe)
    204204
     205
     206
     207    def test_sparse_solve_using_c_ext(self):
     208        """Solve Small Sparse Matrix"""
     209
     210        A = [[2.0, -1.0, 0.0, 0.0 ],
     211             [-1.0, 2.0, -1.0, 0.0],
     212             [0.0, -1.0, 2.0, -1.0],
     213             [0.0,0.0, -1.0, 2.0]]
     214
     215        A = Sparse_CSR(Sparse(A))
     216
     217        xe = [0.0, 1.0, 2.0, 3.0]
     218        b  = A*xe
     219        x =  [0.0, 0.0, 0.0, 0.0]
     220
     221        x = conjugate_gradient(A,b,x,use_c_cg=True)
     222
     223        assert num.allclose(x,xe)
     224
     225    def test_max_iter_using_c_ext(self):
     226        """Test max iteration Small Sparse Matrix"""
     227
     228        A = [[2.0, -1.0, 0.0, 0.0 ],
     229             [-1.0, 2.0, -1.0, 0.0],
     230             [0.0, -1.0, 2.0, -1.0],
     231             [0.0,0.0, -1.0, 2.0]]
     232
     233        A = Sparse_CSR(Sparse(A))
     234
     235        xe = [0.0, 1.0, 2.0, 3.0]
     236        b  = A*xe
     237        x =  [0.0, 0.0, 0.0, 0.0]
     238
     239        try:
     240            x = conjugate_gradient(A,b,x,imax=2,use_c_cg=True)
     241        except ConvergenceError:
     242            pass
     243        else:
     244            msg = 'Should have raised exception'
     245            raise TestError, msg
     246
     247
     248    def test_solve_large_using_c_ext(self):
     249        """Standard 1d laplacian """
     250
     251        n = 50
     252        A = Sparse(n,n)
     253
     254        for i in num.arange(0,n):
     255            A[i,i] = 1.0
     256            if i > 0 :
     257                A[i,i-1] = -0.5
     258            if i < n-1 :
     259                A[i,i+1] = -0.5
     260
     261        xe = num.ones( (n,), num.float)
     262
     263        b  = A*xe
     264
     265        A = Sparse_CSR(A)
     266
     267        x = conjugate_gradient(A,b,b,tol=1.0e-5,use_c_cg=True)
     268
     269        assert num.allclose(x,xe)
     270
     271    def test_solve_large_2d_using_c_ext(self):
     272        """Standard 2d laplacian"""
     273
     274        n = 20
     275        m = 10
     276
     277        A = Sparse(m*n, m*n)
     278
     279        for i in num.arange(0,n):
     280            for j in num.arange(0,m):
     281                I = j+m*i
     282                A[I,I] = 4.0
     283                if i > 0  :
     284                    A[I,I-m] = -1.0
     285                if i < n-1 :
     286                    A[I,I+m] = -1.0
     287                if j > 0  :
     288                    A[I,I-1] = -1.0
     289                if j < m-1 :
     290                    A[I,I+1] = -1.0
     291
     292        xe = num.ones( (n*m,), num.float)
     293        A = Sparse_CSR(A)
     294        b  = A*xe
     295        x = conjugate_gradient(A,b,b,iprint=1,use_c_cg=True)
     296
     297        assert num.allclose(x,xe)
     298
     299    def test_solve_large_2d_csr_matrix_using_c_ext(self):
     300        """Standard 2d laplacian with csr format
     301        """
     302
     303        n = 100
     304        m = 100
     305
     306        A = Sparse(m*n, m*n)
     307
     308        for i in num.arange(0,n):
     309            for j in num.arange(0,m):
     310                I = j+m*i
     311                A[I,I] = 4.0
     312                if i > 0  :
     313                    A[I,I-m] = -1.0
     314                if i < n-1 :
     315                    A[I,I+m] = -1.0
     316                if j > 0  :
     317                    A[I,I-1] = -1.0
     318                if j < m-1 :
     319                    A[I,I+1] = -1.0
     320
     321        xe = num.ones( (n*m,), num.float)
     322
     323        # Convert to csr format
     324        #print 'start covert'
     325        A = Sparse_CSR(A)
     326        #print 'finish covert'
     327        b = A*xe
     328        x = conjugate_gradient(A,b,b,iprint=20,use_c_cg=True)
     329
     330        assert num.allclose(x,xe)
     331
     332
     333    def test_solve_large_2d_with_default_guess_using_c_ext(self):
     334        """Standard 2d laplacian using default first guess"""
     335
     336        n = 20
     337        m = 10
     338
     339        A = Sparse(m*n, m*n)
     340
     341        for i in num.arange(0,n):
     342            for j in num.arange(0,m):
     343                I = j+m*i
     344                A[I,I] = 4.0
     345                if i > 0  :
     346                    A[I,I-m] = -1.0
     347                if i < n-1 :
     348                    A[I,I+m] = -1.0
     349                if j > 0  :
     350                    A[I,I-1] = -1.0
     351                if j < m-1 :
     352                    A[I,I+1] = -1.0
     353
     354        xe = num.ones( (n*m,), num.float)
     355        A = Sparse_CSR(A)
     356        b  = A*xe
     357        x = conjugate_gradient(A,b,use_c_cg=True)
     358
     359        assert num.allclose(x,xe)
     360
     361
     362    def test_sparse_solve_matrix_using_c_ext(self):
     363        """Solve Small Sparse Matrix"""
     364
     365        A = [[2.0, -1.0, 0.0, 0.0 ],
     366             [-1.0, 2.0, -1.0, 0.0],
     367             [0.0, -1.0, 2.0, -1.0],
     368             [0.0,0.0, -1.0, 2.0]]
     369
     370        A = Sparse_CSR(Sparse(A))
     371
     372        xe = [[0.0, 0.0],[1.0, 1.0],[2.0 ,2.0],[3.0, 3.0]]
     373        b = A*xe
     374        x = [[0.0, 0.0],[0.0, 0.0],[0.0 ,0.0],[0.0, 0.0]]
     375        x = conjugate_gradient(A,b,x,iprint=0,use_c_cg=True)
     376       
     377
     378        assert num.allclose(x,xe)
     379
     380
     381
     382    def test_sparse_solve_using_c_ext_with_jacobi(self):
     383        """Solve Small Sparse Matrix"""
     384
     385        A = [[2.0, -1.0, 0.0, 0.0 ],
     386             [-1.0, 2.0, -1.0, 0.0],
     387             [0.0, -1.0, 2.0, -1.0],
     388             [0.0,0.0, -1.0, 2.0]]
     389
     390        A = Sparse_CSR(Sparse(A))
     391
     392        xe = [0.0, 1.0, 2.0, 3.0]
     393        b  = A*xe
     394        x =  [0.0, 0.0, 0.0, 0.0]
     395        x = conjugate_gradient(A,b,x,use_c_cg=True,precon='Jacobi')
     396
     397        assert num.allclose(x,xe)
     398
     399    def test_max_iter_using_c_ext_with_jacobi(self):
     400        """Test max iteration Small Sparse Matrix"""
     401
     402        A = [[2.0, -1.0, 0.0, 0.0 ],
     403             [-1.0, 2.0, -1.0, 0.0],
     404             [0.0, -1.0, 2.0, -1.0],
     405             [0.0,0.0, -1.0, 2.0]]
     406
     407        A = Sparse_CSR(Sparse(A))
     408
     409        xe = [0.0, 1.0, 2.0, 3.0]
     410        b  = A*xe
     411        x =  [0.0, 0.0, 0.0, 0.0]
     412
     413        try:
     414            x = conjugate_gradient(A,b,x,imax=2,use_c_cg=True, precon='Jacobi')
     415        except ConvergenceError:
     416            pass
     417        else:
     418            msg = 'Should have raised exception'
     419            raise TestError, msg
     420
     421
     422    def test_solve_large_using_c_ext_with_jacobi(self):
     423        """Standard 1d laplacian """
     424
     425        n = 50
     426        A = Sparse(n,n)
     427
     428        for i in num.arange(0,n):
     429            A[i,i] = 1.0
     430            if i > 0 :
     431                A[i,i-1] = -0.5
     432            if i < n-1 :
     433                A[i,i+1] = -0.5
     434
     435        xe = num.ones( (n,), num.float)
     436
     437        b  = A*xe
     438
     439        A = Sparse_CSR(A)
     440
     441        x = conjugate_gradient(A,b,b,tol=1.0e-5,use_c_cg=True, precon='Jacobi')
     442
     443        assert num.allclose(x,xe)
     444
     445    def test_solve_large_2d_using_c_ext_with_jacobi(self):
     446        """Standard 2d laplacian"""
     447
     448        n = 20
     449        m = 10
     450
     451        A = Sparse(m*n, m*n)
     452
     453        for i in num.arange(0,n):
     454            for j in num.arange(0,m):
     455                I = j+m*i
     456                A[I,I] = 4.0
     457                if i > 0  :
     458                    A[I,I-m] = -1.0
     459                if i < n-1 :
     460                    A[I,I+m] = -1.0
     461                if j > 0  :
     462                    A[I,I-1] = -1.0
     463                if j < m-1 :
     464                    A[I,I+1] = -1.0
     465
     466        xe = num.ones( (n*m,), num.float)
     467        A = Sparse_CSR(A)
     468        b  = A*xe
     469        x = conjugate_gradient(A,b,b,iprint=1,use_c_cg=True, precon='Jacobi')
     470
     471        assert num.allclose(x,xe)
     472
     473    def test_solve_large_2d_csr_matrix_using_c_ext_with_jacobi(self):
     474        """Standard 2d laplacian with csr format
     475        """
     476
     477        n = 100
     478        m = 100
     479
     480        A = Sparse(m*n, m*n)
     481
     482        for i in num.arange(0,n):
     483            for j in num.arange(0,m):
     484                I = j+m*i
     485                A[I,I] = 4.0
     486                if i > 0  :
     487                    A[I,I-m] = -1.0
     488                if i < n-1 :
     489                    A[I,I+m] = -1.0
     490                if j > 0  :
     491                    A[I,I-1] = -1.0
     492                if j < m-1 :
     493                    A[I,I+1] = -1.0
     494
     495        xe = num.ones( (n*m,), num.float)
     496
     497        # Convert to csr format
     498        #print 'start covert'
     499        A = Sparse_CSR(A)
     500        #print 'finish covert'
     501        b = A*xe
     502        x = conjugate_gradient(A,b,b,iprint=20,use_c_cg=True, precon='Jacobi')
     503
     504        assert num.allclose(x,xe)
     505
     506
     507    def test_solve_large_2d_with_default_guess_using_c_ext_with_jacobi(self):
     508        """Standard 2d laplacian using default first guess"""
     509
     510        n = 20
     511        m = 10
     512
     513        A = Sparse(m*n, m*n)
     514
     515        for i in num.arange(0,n):
     516            for j in num.arange(0,m):
     517                I = j+m*i
     518                A[I,I] = 4.0
     519                if i > 0  :
     520                    A[I,I-m] = -1.0
     521                if i < n-1 :
     522                    A[I,I+m] = -1.0
     523                if j > 0  :
     524                    A[I,I-1] = -1.0
     525                if j < m-1 :
     526                    A[I,I+1] = -1.0
     527
     528        xe = num.ones( (n*m,), num.float)
     529        A = Sparse_CSR(A)
     530        b  = A*xe
     531        x = conjugate_gradient(A,b,use_c_cg=True, precon='Jacobi')
     532
     533        assert num.allclose(x,xe)
     534
     535
     536    def test_sparse_solve_matrix_using_c_ext_with_jacobi(self):
     537        """Solve Small Sparse Matrix"""
     538
     539        A = [[2.0, -1.0, 0.0, 0.0 ],
     540             [-1.0, 2.0, -1.0, 0.0],
     541             [0.0, -1.0, 2.0, -1.0],
     542             [0.0,0.0, -1.0, 2.0]]
     543
     544        A = Sparse_CSR(Sparse(A))
     545
     546        xe = [[0.0, 0.0],[1.0, 1.0],[2.0 ,2.0],[3.0, 3.0]]
     547        b = A*xe
     548        x = [[0.0, 0.0],[0.0, 0.0],[0.0 ,0.0],[0.0, 0.0]]
     549        x = conjugate_gradient(A,b,x,iprint=0,use_c_cg=True, precon='Jacobi')
     550       
     551
     552        assert num.allclose(x,xe)
     553
    205554################################################################################
    206555
Note: See TracChangeset for help on using the changeset viewer.