Changeset 484
- Timestamp: Nov 4, 2004, 2:13:02 PM
- Location: inundation/ga/storm_surge/pyvolution
- Files: 2 added, 2 deleted, 5 edited
inundation/ga/storm_surge/pyvolution/doit.py
(r267 → r484)

-os.system('python compile.py')
+#os.system('python compile.py')
 os.system('python test_all.py')
 os.system('python run_profile.py')
inundation/ga/storm_surge/pyvolution/general_mesh.py
(r482 → r484)

         #Edgelengths
         self.edgelengths[i, :] = [l0, l1, l2]
+
+        #Build vertex list
+        self.build_vertexlist()

     def __len__(self):
 …
         M = 3*m #Total number of unique vertices
         return reshape(array(range(M)).astype(Int), (m,3))
+
+
+    def build_vertexlist(self):
+        """Build vertexlist index by vertex ids and for each entry (point id)
+        build a list of (triangles, vertex_id) pairs that use the point
+        as vertex.
+
+        Preconditions:
+          self.coordinates and self.triangles are defined
+
+        Postcondition:
+          self.vertexlist is built
+        """
+
+        vertexlist = [None]*len(self.coordinates)
+        for i in range(self.number_of_elements):
+
+            a = self.triangles[i, 0]
+            b = self.triangles[i, 1]
+            c = self.triangles[i, 2]
+
+            #Register the vertices v as lists of
+            #(triangle_id, vertex_id) tuples associated with them
+            #This is used for smoothing
+            for vertex_id, v in enumerate([a,b,c]):
+                if vertexlist[v] is None:
+                    vertexlist[v] = []
+
+                vertexlist[v].append( (i, vertex_id) )
+
+        self.vertexlist = vertexlist
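The new vertexlist maps every point id to the (triangle_id, vertex_id) pairs that reference it, which is what the smoothing step later walks over. Below is a minimal standalone sketch of that pattern; the toy mesh and the helper name average_at_points are illustrative assumptions, not part of this changeset.

    # Toy mesh: two triangles sharing the edge between points 1 and 2.
    coordinates = [(0.0, 0.0), (1.0, 0.0), (0.0, 1.0), (1.0, 1.0)]
    triangles   = [(0, 1, 2), (1, 3, 2)]

    # Build the vertexlist the same way build_vertexlist does:
    # for each point id, collect the (triangle_id, vertex_id) pairs that use it.
    vertexlist = [None]*len(coordinates)
    for i, tri in enumerate(triangles):
        for vertex_id, v in enumerate(tri):
            if vertexlist[v] is None:
                vertexlist[v] = []
            vertexlist[v].append((i, vertex_id))

    # Hypothetical smoothing step: average a quantity stored per
    # (triangle, vertex) slot back onto each shared point.
    def average_at_points(values, vertexlist):
        averaged = []
        for entries in vertexlist:
            if entries:
                averaged.append(sum(values[t][v] for t, v in entries)/len(entries))
            else:
                averaged.append(None)   # point not used by any triangle
        return averaged

    values = [[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]   # one value per triangle vertex
    print(average_at_points(values, vertexlist))  # [1.0, 3.0, 4.5, 5.0]

Shared points (1 and 2 in this toy mesh) end up with the average of the values contributed by every triangle that uses them.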
inundation/ga/storm_surge/pyvolution/least_squares.py
(r482 → r484)

-#FIXME: Current implementation uses full matrices and a general solver.
-#Later on we may consider using sparse techniques
-
 import exceptions
 class ShapeError(exceptions.Exception): pass
 …
 from Numeric import zeros, array, Float, Int, dot, transpose
 from LinearAlgebra import solve_linear_equations
-#from scipy import sparse
 from sparse import Sparse
 from cg_solve import conjugate_gradient, VectorShapeError
 …
         self.B = self.AtA

     def build_interpolation_matrix_A(self, point_coordinates):
         """Build n x m interpolation matrix, where
         n is the number of data points and
-        m is the number of basis functions phi_k (one per vertex)"""
+        m is the number of basis functions phi_k (one per vertex)
+
+        This algorithm uses a quad tree data structure for fast binning of data points
+        """
+
+        from quad import build_quadtree

         #Convert input to Numeric arrays
 …
         n = point_coordinates.shape[0] #Nbr of data points

-        #self.A = zeros((n,m), Float)
         self.A = Sparse(n,m)
         self.AtA = Sparse(m,m)
+
+        #Build quad tree of vertices
+        #print 'Building quad tree'
+        root = build_quadtree(self.mesh)
+        #print 'Quad tree built'
+        #root.show()

         #Compute matrix elements
         for i in range(n):
             #For each data_coordinate point

-            #print 'Doing %d/%d' %(i, n)
+            #print 'Doing %d of %d' %(i, n)
+
+            x = point_coordinates[i]
+
+            #Find vertices near x
+            candidate_vertices = root.search(x[0], x[1])
+
+            #Find triangle containing x
+            element_found = False
+            for v in candidate_vertices:
+                for triangle_id, vertex_id in self.mesh.vertexlist[v]:
+
+                    k = triangle_id
+
+                    #Get the three vertex_points of candidate triangle
+                    xi0 = self.mesh.get_vertex_coordinate(k, 0)
+                    xi1 = self.mesh.get_vertex_coordinate(k, 1)
+                    xi2 = self.mesh.get_vertex_coordinate(k, 2)
+
+                    #Get the three normals
+                    n0 = self.mesh.get_normal(k, 0)
+                    n1 = self.mesh.get_normal(k, 1)
+                    n2 = self.mesh.get_normal(k, 2)
+
+                    #Compute interpolation
+                    sigma2 = dot((x-xi0), n2)/dot((xi2-xi0), n2)
+                    sigma0 = dot((x-xi1), n0)/dot((xi0-xi1), n0)
+                    sigma1 = dot((x-xi2), n1)/dot((xi1-xi2), n1)
+
+                    #FIXME: Maybe move out to test or something
+                    epsilon = 1.0e-6
+                    assert abs(sigma0 + sigma1 + sigma2 - 1.0) < epsilon
+
+                    #Check that this triangle contains the data point
+                    if sigma0 >= 0 and sigma1 >= 0 and sigma2 >= 0:
+                        element_found = True
+                        break
+
+                if element_found is True:
+                    #Don't look for any other triangle
+                    break
+
+            if element_found is True:
+                #Assign values to matrix A
+
+                j0 = self.mesh.triangles[k,0] #Global vertex id
+                #self.A[i, j0] = sigma0
+
+                j1 = self.mesh.triangles[k,1] #Global vertex id
+                #self.A[i, j1] = sigma1
+
+                j2 = self.mesh.triangles[k,2] #Global vertex id
+                #self.A[i, j2] = sigma2
+
+                sigmas = {j0:sigma0, j1:sigma1, j2:sigma2}
+                js = [j0,j1,j2]
+
+                for j in js:
+                    self.A[i,j] = sigmas[j]
+                    for k in js:
+                        self.AtA[j,k] += sigmas[j]*sigmas[k]
+            else:
+                pass
+                #Ok if there is no triangle for datapoint (as in brute force version)
+                #raise 'Could not find triangle for point', x
+
+
+
+    def build_interpolation_matrix_A_brute(self, point_coordinates):
+        """Build n x m interpolation matrix, where
+        n is the number of data points and
+        m is the number of basis functions phi_k (one per vertex)
+
+        This is the brute force which is too slow for large problems,
+        but good for testing
+        """
+
+        #Convert input to Numeric arrays
+        point_coordinates = array(point_coordinates).astype(Float)
+
+        #Build n x m interpolation matrix
+        m = self.mesh.coordinates.shape[0] #Nbr of basis functions (1/vertex)
+        n = point_coordinates.shape[0] #Nbr of data points
+
+        self.A = Sparse(n,m)
+        self.AtA = Sparse(m,m)
+
+        #Compute matrix elements
+        for i in range(n):
+            #For each data_coordinate point

             x = point_coordinates[i]
 …
             #Assign values to matrix A

             j0 = self.mesh.triangles[k,0] #Global vertex id
             #self.A[i, j0] = sigma0
 …
             k = k+1

-        ## self.A = (self.A).tocsc()
-        ## self.AtA = (self.AtA).tocsc()
-        ## self.At = self.A.transp()
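The sigma values computed above are the barycentric coordinates of the data point with respect to the candidate triangle: sigma_k is 1 at vertex k, 0 along the opposite edge, and the three always sum to 1, so sigma0, sigma1, sigma2 >= 0 is exactly the point-in-triangle test, and the same sigmas become the row of interpolation weights in A. The standalone sketch below reproduces that computation with explicit edge normals; the perp helper and the toy triangle are illustrative assumptions, not part of the changeset (the real code takes its normals from mesh.get_normal).

    def perp(p, q):
        # A vector normal to the edge from p to q; its length cancels
        # because each sigma is a ratio of two dot products with it.
        return (-(q[1] - p[1]), q[0] - p[0])

    def dot2(u, v):
        return u[0]*v[0] + u[1]*v[1]

    def sub(u, v):
        return (u[0] - v[0], u[1] - v[1])

    def sigmas(x, xi0, xi1, xi2):
        # Normals to the edges opposite vertices 0, 1 and 2
        # (the role played by mesh.get_normal(k, i) in the changeset).
        n0 = perp(xi1, xi2)
        n1 = perp(xi2, xi0)
        n2 = perp(xi0, xi1)
        sigma0 = dot2(sub(x, xi1), n0) / dot2(sub(xi0, xi1), n0)
        sigma1 = dot2(sub(x, xi2), n1) / dot2(sub(xi1, xi2), n1)
        sigma2 = dot2(sub(x, xi0), n2) / dot2(sub(xi2, xi0), n2)
        return sigma0, sigma1, sigma2

    xi0, xi1, xi2 = (0.0, 0.0), (1.0, 0.0), (0.0, 1.0)

    s = sigmas((0.25, 0.25), xi0, xi1, xi2)
    print(s, abs(sum(s) - 1.0) < 1.0e-6)     # (0.5, 0.25, 0.25) True -> inside
    print(sigmas((1.0, 1.0), xi0, xi1, xi2)) # (-1.0, 1.0, 1.0)       -> outside

Accumulating self.AtA[j,k] += sigmas[j]*sigmas[k] while each row is built assembles the normal-equations matrix AtA = A^T A directly in sparse form, so a full dense A is never needed.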
inundation/ga/storm_surge/pyvolution/mesh.py
(r482 → r484)

         #Build neighbour structure
         self.build_neighbour_structure()
-
-        #Build vertex list
-        self.build_vertexlist()

         #Build surrogate neighbour structure
 …
                     self.number_of_boundaries[i] -= 1

-
-    def build_vertexlist(self):
-        """Build vertexlist index by vertex ids and for each entry (point id)
-        build a list of (triangles, vertex_id) pairs that use the point
-        as vertex.
 …
-        self.vertexlist = vertexlist

     def build_surrogate_neighbour_structure(self):
inundation/ga/storm_surge/pyvolution/test_least_squares.py
(r475 → r484)

         fd.close()

-        mesh_output_file = "new_trianlge.tsh"
+        mesh_output_file = "new_triangle.tsh"
         fit_to_mesh_file(mesh_file,
                          point_file,
 …
         #clean up
         os.remove(mesh_file)
+        os.remove(mesh_output_file)
         os.remove(point_file)