1 | #!/usr/bin/env python |
---|
2 | # |
---|
3 | |
---|
4 | # This file was reverted from changeset:5484 to changeset:5470 on 10th July |
---|
5 | # by Ole. |
---|
6 | |
---|
7 | import unittest |
---|
8 | import copy |
---|
9 | import numpy as num |
---|
10 | |
---|
11 | import tempfile |
---|
12 | import os |
---|
13 | import shutil |
---|
14 | from struct import pack, unpack |
---|
15 | |
---|
16 | from Scientific.IO.NetCDF import NetCDFFile |
---|
17 | |
---|
18 | from anuga.shallow_water.data_manager import * |
---|
19 | from anuga.shallow_water.sww_file import SWW_file |
---|
20 | from anuga.coordinate_transforms.geo_reference import Geo_reference |
---|
21 | from anuga.coordinate_transforms.redfearn import degminsec2decimal_degrees |
---|
22 | from anuga.abstract_2d_finite_volumes.util import file_function |
---|
23 | from anuga.utilities.system_tools import get_pathname_from_package |
---|
24 | from anuga.utilities.file_utils import del_dir, load_csv_as_dict, \ |
---|
25 | load_csv_as_array |
---|
26 | from anuga.anuga_exceptions import ANUGAError |
---|
27 | from anuga.utilities.numerical_tools import ensure_numeric, mean |
---|
28 | from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a |
---|
29 | from anuga.config import netcdf_float, epsilon, g |
---|
30 | |
---|
31 | # import all the boundaries - some are generic, some are shallow water |
---|
32 | from boundaries import Reflective_boundary, \ |
---|
33 | Field_boundary, Transmissive_momentum_set_stage_boundary, \ |
---|
34 | Transmissive_stage_zero_momentum_boundary |
---|
35 | from anuga.abstract_2d_finite_volumes.generic_boundary_conditions\ |
---|
36 | import Transmissive_boundary, Dirichlet_boundary, \ |
---|
37 | Time_boundary, File_boundary, AWI_boundary |
---|
38 | |
---|
39 | # This is needed to run the tests of local functions |
---|
40 | import data_manager |
---|
41 | from anuga.coordinate_transforms.redfearn import redfearn |
---|
42 | from anuga.coordinate_transforms.geo_reference import Geo_reference, \ |
---|
43 | DEFAULT_ZONE |
---|
44 | from anuga.geospatial_data.geospatial_data import Geospatial_data |
---|
45 | |
---|
46 | |
---|
class Test_Data_Manager(unittest.TestCase):
    """Tests of sww output via data_manager / SWW_file.

    setUp builds a small rectangular shallow-water domain and writes
    four small MOST-format NetCDF fixture files; tearDown removes them.
    """

    # Class variable: when True, tests print progress information.
    # Shared by all instances; flipped via set_verbose().
    verbose = False

    def set_verbose(self):
        # Mutate the class attribute (not the instance) so the setting
        # is visible to every test instance created afterwards.
        Test_Data_Manager.verbose = True
---|
53 | |
---|
    def setUp(self):
        """Build a small shallow-water domain plus MOST NetCDF fixtures.

        Creates a 2x2 rectangular mesh domain with transmissive
        boundaries and a 'jumpy' initial stage, stores reference data
        on self (domain, X, Y, F, initial_stage), then writes four
        small MOST-format NetCDF files (_ha/_ua/_va/_e) of synthetic
        data for the conversion tests.
        """
        import time
        from mesh_factory import rectangular

        self.verbose = Test_Data_Manager.verbose
        # Create basic mesh
        points, vertices, boundary = rectangular(2, 2)

        # Create shallow water domain
        domain = Domain(points, vertices, boundary)
        domain.default_order = 2

        # Set some field values
        domain.set_quantity('elevation', lambda x,y: -x)
        domain.set_quantity('friction', 0.03)


        ######################
        # Boundary conditions: transmissive on all four sides
        B = Transmissive_boundary(domain)
        domain.set_boundary( {'left': B, 'right': B, 'top': B, 'bottom': B})


        ######################
        # Initial condition - with jumps: alternate triangles get
        # stage = bed + h (wet) and stage = bed (dry)
        bed = domain.quantities['elevation'].vertex_values
        # NOTE(review): num.float is the legacy numpy alias (removed in
        # numpy >= 1.20) — fine for the numpy version this code targets.
        stage = num.zeros(bed.shape, num.float)

        h = 0.3
        for i in range(stage.shape[0]):
            if i % 2 == 0:
                stage[i,:] = bed[i,:] + h
            else:
                stage[i,:] = bed[i,:]

        domain.set_quantity('stage', stage)


        domain.distribute_to_vertices_and_edges()
        # Keep a copy of the initial stage for later comparison (test_sync)
        self.initial_stage = copy.copy(domain.quantities['stage'].vertex_values)


        self.domain = domain

        # Vertex coordinates: x values in even columns, y in odd columns
        C = domain.get_vertex_coordinates()
        self.X = C[:,0:6:2].copy()
        self.Y = C[:,1:6:2].copy()

        self.F = bed

        #Write A testfile (not realistic. Values aren't realistic)
        self.test_MOST_file = 'most_small'

        longitudes = [150.66667, 150.83334, 151., 151.16667]
        latitudes = [-34.5, -34.33333, -34.16667, -34]

        long_name = 'LON'
        lat_name = 'LAT'

        nx = 4
        ny = 4
        six = 6


        # One file per MOST quantity: height (_ha), u (_ua), v (_va)
        # and elevation (_e); all share the same grid, times and data.
        for ext in ['_ha.nc', '_ua.nc', '_va.nc', '_e.nc']:
            fid = NetCDFFile(self.test_MOST_file + ext, netcdf_mode_w)

            fid.createDimension(long_name,nx)
            fid.createVariable(long_name,netcdf_float,(long_name,))
            fid.variables[long_name].point_spacing='uneven'
            fid.variables[long_name].units='degrees_east'
            fid.variables[long_name].assignValue(longitudes)

            fid.createDimension(lat_name,ny)
            fid.createVariable(lat_name,netcdf_float,(lat_name,))
            fid.variables[lat_name].point_spacing='uneven'
            fid.variables[lat_name].units='degrees_north'
            fid.variables[lat_name].assignValue(latitudes)

            fid.createDimension('TIME',six)
            fid.createVariable('TIME',netcdf_float,('TIME',))
            fid.variables['TIME'].point_spacing='uneven'
            fid.variables['TIME'].units='seconds'
            fid.variables['TIME'].assignValue([0.0, 0.1, 0.6, 1.1, 1.6, 2.1])


            # Variable name derived from the extension, e.g. '_ha.nc' -> 'HA';
            # '_e.nc' would give 'E.' so it is special-cased to 'ELEVATION'.
            name = ext[1:3].upper()
            if name == 'E.': name = 'ELEVATION'
            fid.createVariable(name,netcdf_float,('TIME', lat_name, long_name))
            fid.variables[name].units='CENTIMETERS'
            fid.variables[name].missing_value=-1.e+034

            # Synthetic 6 x 4 x 4 data block (TIME, LAT, LON)
            fid.variables[name].assignValue([[[0.3400644, 0, -46.63519, -6.50198],
                                              [-0.1214216, 0, 0, 0],
                                              [0, 0, 0, 0],
                                              [0, 0, 0, 0]],
                                             [[0.3400644, 2.291054e-005, -23.33335, -6.50198],
                                              [-0.1213987, 4.581959e-005, -1.594838e-007, 1.421085e-012],
                                              [2.291054e-005, 4.582107e-005, 4.581715e-005, 1.854517e-009],
                                              [0, 2.291054e-005, 2.291054e-005, 0]],
                                             [[0.3400644, 0.0001374632, -23.31503, -6.50198],
                                              [-0.1212842, 0.0002756907, 0.006325484, 1.380492e-006],
                                              [0.0001374632, 0.0002749264, 0.0002742863, 6.665601e-008],
                                              [0, 0.0001374632, 0.0001374632, 0]],
                                             [[0.3400644, 0.0002520159, -23.29672, -6.50198],
                                              [-0.1211696, 0.0005075303, 0.01264618, 6.208276e-006],
                                              [0.0002520159, 0.0005040318, 0.0005027961, 2.23865e-007],
                                              [0, 0.0002520159, 0.0002520159, 0]],
                                             [[0.3400644, 0.0003665686, -23.27842, -6.50198],
                                              [-0.1210551, 0.0007413362, 0.01896192, 1.447638e-005],
                                              [0.0003665686, 0.0007331371, 0.0007313463, 4.734126e-007],
                                              [0, 0.0003665686, 0.0003665686, 0]],
                                             [[0.3400644, 0.0004811212, -23.26012, -6.50198],
                                              [-0.1209405, 0.0009771062, 0.02527271, 2.617787e-005],
                                              [0.0004811212, 0.0009622425, 0.0009599366, 8.152277e-007],
                                              [0, 0.0004811212, 0.0004811212, 0]]])


            fid.close()
---|
173 | |
---|
174 | |
---|
175 | |
---|
176 | |
---|
177 | def tearDown(self): |
---|
178 | import os |
---|
179 | for ext in ['_ha.nc', '_ua.nc', '_va.nc', '_e.nc']: |
---|
180 | #print 'Trying to remove', self.test_MOST_file + ext |
---|
181 | os.remove(self.test_MOST_file + ext) |
---|
182 | |
---|
183 | def test_sww_constant(self): |
---|
184 | """Test that constant sww information can be written correctly |
---|
185 | (non smooth) |
---|
186 | """ |
---|
187 | self.domain.set_name('datatest' + str(id(self))) |
---|
188 | self.domain.format = 'sww' #Remove?? |
---|
189 | self.domain.smooth = False |
---|
190 | |
---|
191 | sww = SWW_file(self.domain) |
---|
192 | sww.store_connectivity() |
---|
193 | |
---|
194 | fid = NetCDFFile(sww.filename, netcdf_mode_r) # Open existing file for append |
---|
195 | |
---|
196 | # Get the variables |
---|
197 | x = fid.variables['x'] |
---|
198 | y = fid.variables['y'] |
---|
199 | z = fid.variables['elevation'] |
---|
200 | V = fid.variables['volumes'] |
---|
201 | |
---|
202 | assert num.allclose (x[:], self.X.flatten()) |
---|
203 | assert num.allclose (y[:], self.Y.flatten()) |
---|
204 | assert num.allclose (z[:], self.F.flatten()) |
---|
205 | |
---|
206 | P = len(self.domain) |
---|
207 | for k in range(P): |
---|
208 | assert V[k, 0] == 3*k |
---|
209 | assert V[k, 1] == 3*k+1 |
---|
210 | assert V[k, 2] == 3*k+2 |
---|
211 | |
---|
212 | fid.close() |
---|
213 | os.remove(sww.filename) |
---|
214 | |
---|
215 | def test_sww_header(self): |
---|
216 | """Test that constant sww information can be written correctly |
---|
217 | (non smooth) |
---|
218 | """ |
---|
219 | self.domain.set_name('datatest' + str(id(self))) |
---|
220 | self.domain.format = 'sww' #Remove?? |
---|
221 | self.domain.smooth = False |
---|
222 | |
---|
223 | sww = SWW_file(self.domain) |
---|
224 | sww.store_connectivity() |
---|
225 | |
---|
226 | # Check contents |
---|
227 | # Get NetCDF |
---|
228 | fid = NetCDFFile(sww.filename, netcdf_mode_r) # Open existing file for append |
---|
229 | |
---|
230 | # Get the variables |
---|
231 | sww_revision = fid.revision_number |
---|
232 | try: |
---|
233 | revision_number = get_revision_number() |
---|
234 | except: |
---|
235 | revision_number = None |
---|
236 | |
---|
237 | assert str(revision_number) == sww_revision |
---|
238 | fid.close() |
---|
239 | |
---|
240 | #print "sww.filename", sww.filename |
---|
241 | os.remove(sww.filename) |
---|
242 | |
---|
243 | def test_sww_range(self): |
---|
244 | """Test that constant sww information can be written correctly |
---|
245 | Use non-smooth to be able to compare to quantity values. |
---|
246 | """ |
---|
247 | |
---|
248 | self.domain.set_name('datatest' + str(id(self))) |
---|
249 | self.domain.format = 'sww' |
---|
250 | self.domain.set_store_vertices_uniquely(True) |
---|
251 | |
---|
252 | sww = SWW_file(self.domain) |
---|
253 | |
---|
254 | dqs = self.domain.get_quantity('stage') |
---|
255 | dqx = self.domain.get_quantity('xmomentum') |
---|
256 | dqy = self.domain.get_quantity('ymomentum') |
---|
257 | xmom_min = ymom_min = stage_min = sys.maxint |
---|
258 | xmom_max = ymom_max = stage_max = -stage_min |
---|
259 | for t in self.domain.evolve(yieldstep = 1, finaltime = 1): |
---|
260 | wmax = max(dqs.get_values().flatten()) |
---|
261 | if wmax > stage_max: stage_max = wmax |
---|
262 | wmin = min(dqs.get_values().flatten()) |
---|
263 | if wmin < stage_min: stage_min = wmin |
---|
264 | |
---|
265 | uhmax = max(dqx.get_values().flatten()) |
---|
266 | if uhmax > xmom_max: xmom_max = uhmax |
---|
267 | uhmin = min(dqx.get_values().flatten()) |
---|
268 | if uhmin < xmom_min: xmom_min = uhmin |
---|
269 | |
---|
270 | vhmax = max(dqy.get_values().flatten()) |
---|
271 | if vhmax > ymom_max: ymom_max = vhmax |
---|
272 | vhmin = min(dqy.get_values().flatten()) |
---|
273 | if vhmin < ymom_min: ymom_min = vhmin |
---|
274 | |
---|
275 | |
---|
276 | |
---|
277 | # Get NetCDF |
---|
278 | fid = NetCDFFile(sww.filename, netcdf_mode_r) # Open existing file for append |
---|
279 | |
---|
280 | # Get the variables |
---|
281 | range = fid.variables['stage_range'][:] |
---|
282 | assert num.allclose(range,[stage_min, stage_max]) |
---|
283 | |
---|
284 | range = fid.variables['xmomentum_range'][:] |
---|
285 | #print range |
---|
286 | assert num.allclose(range, [xmom_min, xmom_max]) |
---|
287 | |
---|
288 | range = fid.variables['ymomentum_range'][:] |
---|
289 | #print range |
---|
290 | assert num.allclose(range, [ymom_min, ymom_max]) |
---|
291 | |
---|
292 | |
---|
293 | |
---|
294 | fid.close() |
---|
295 | os.remove(sww.filename) |
---|
296 | |
---|
297 | def test_sww_extrema(self): |
---|
298 | """Test that extrema of quantities can be retrieved at every vertex |
---|
299 | Extrema are updated at every *internal* timestep |
---|
300 | """ |
---|
301 | |
---|
302 | domain = self.domain |
---|
303 | |
---|
304 | domain.set_name('extrema_test' + str(id(self))) |
---|
305 | domain.format = 'sww' |
---|
306 | domain.smooth = True |
---|
307 | |
---|
308 | assert domain.quantities_to_be_monitored is None |
---|
309 | assert domain.monitor_polygon is None |
---|
310 | assert domain.monitor_time_interval is None |
---|
311 | |
---|
312 | domain.set_quantities_to_be_monitored(['xmomentum', |
---|
313 | 'ymomentum', |
---|
314 | 'stage-elevation']) |
---|
315 | |
---|
316 | assert domain.monitor_polygon is None |
---|
317 | assert domain.monitor_time_interval is None |
---|
318 | |
---|
319 | |
---|
320 | domain.set_quantities_to_be_monitored(['xmomentum', |
---|
321 | 'ymomentum', |
---|
322 | 'stage-elevation'], |
---|
323 | polygon=domain.get_boundary_polygon(), |
---|
324 | time_interval=[0,1]) |
---|
325 | |
---|
326 | |
---|
327 | assert len(domain.quantities_to_be_monitored) == 3 |
---|
328 | assert domain.quantities_to_be_monitored.has_key('stage-elevation') |
---|
329 | assert domain.quantities_to_be_monitored.has_key('xmomentum') |
---|
330 | assert domain.quantities_to_be_monitored.has_key('ymomentum') |
---|
331 | |
---|
332 | |
---|
333 | #domain.protect_against_isolated_degenerate_timesteps = True |
---|
334 | #domain.tight_slope_limiters = 1 |
---|
335 | domain.tight_slope_limiters = 0 # Backwards compatibility |
---|
336 | domain.use_centroid_velocities = 0 # Backwards compatibility (7/5/8) |
---|
337 | |
---|
338 | |
---|
339 | sww = SWW_file(domain) |
---|
340 | |
---|
341 | for t in domain.evolve(yieldstep = 1, finaltime = 1): |
---|
342 | pass |
---|
343 | #print domain.timestepping_statistics() |
---|
344 | domain.quantity_statistics(precision = '%.8f') # Silent |
---|
345 | |
---|
346 | |
---|
347 | # Get NetCDF |
---|
348 | fid = NetCDFFile(sww.filename, netcdf_mode_r) # Open existing file for append |
---|
349 | |
---|
350 | # Get the variables |
---|
351 | extrema = fid.variables['stage-elevation.extrema'][:] |
---|
352 | assert num.allclose(extrema, [0.00, 0.30]) |
---|
353 | |
---|
354 | loc = fid.variables['stage-elevation.min_location'][:] |
---|
355 | assert num.allclose(loc, [0.16666667, 0.33333333]) |
---|
356 | |
---|
357 | loc = fid.variables['stage-elevation.max_location'][:] |
---|
358 | assert num.allclose(loc, [0.8333333, 0.16666667]) |
---|
359 | |
---|
360 | time = fid.variables['stage-elevation.max_time'][:] |
---|
361 | assert num.allclose(time, 0.0) |
---|
362 | |
---|
363 | extrema = fid.variables['xmomentum.extrema'][:] |
---|
364 | assert num.allclose(extrema,[-0.06062178, 0.47873023]) or \ |
---|
365 | num.allclose(extrema, [-0.06062178, 0.47847986]) or \ |
---|
366 | num.allclose(extrema, [-0.06062178, 0.47848481]) or \ |
---|
367 | num.allclose(extrema, [-0.06062178, 0.47763887]) # 18/09/09 |
---|
368 | |
---|
369 | extrema = fid.variables['ymomentum.extrema'][:] |
---|
370 | assert num.allclose(extrema,[0.00, 0.0625786]) or num.allclose(extrema,[0.00, 0.06062178]) |
---|
371 | |
---|
372 | time_interval = fid.variables['extrema.time_interval'][:] |
---|
373 | assert num.allclose(time_interval, [0,1]) |
---|
374 | |
---|
375 | polygon = fid.variables['extrema.polygon'][:] |
---|
376 | assert num.allclose(polygon, domain.get_boundary_polygon()) |
---|
377 | |
---|
378 | fid.close() |
---|
379 | #print "sww.filename", sww.filename |
---|
380 | os.remove(sww.filename) |
---|
381 | |
---|
382 | |
---|
383 | |
---|
384 | def test_sww_constant_smooth(self): |
---|
385 | """Test that constant sww information can be written correctly |
---|
386 | (non smooth) |
---|
387 | """ |
---|
388 | self.domain.set_name('datatest' + str(id(self))) |
---|
389 | self.domain.format = 'sww' |
---|
390 | self.domain.smooth = True |
---|
391 | |
---|
392 | sww = SWW_file(self.domain) |
---|
393 | sww.store_connectivity() |
---|
394 | |
---|
395 | # Check contents |
---|
396 | # Get NetCDF |
---|
397 | fid = NetCDFFile(sww.filename, netcdf_mode_r) # Open existing file for append |
---|
398 | |
---|
399 | # Get the variables |
---|
400 | X = fid.variables['x'][:] |
---|
401 | Y = fid.variables['y'][:] |
---|
402 | Z = fid.variables['elevation'][:] |
---|
403 | V = fid.variables['volumes'] |
---|
404 | |
---|
405 | assert num.allclose([X[0], Y[0]], num.array([0.0, 0.0])) |
---|
406 | assert num.allclose([X[1], Y[1]], num.array([0.0, 0.5])) |
---|
407 | assert num.allclose([X[2], Y[2]], num.array([0.0, 1.0])) |
---|
408 | assert num.allclose([X[4], Y[4]], num.array([0.5, 0.5])) |
---|
409 | assert num.allclose([X[7], Y[7]], num.array([1.0, 0.5])) |
---|
410 | |
---|
411 | assert Z[4] == -0.5 |
---|
412 | |
---|
413 | assert V[2,0] == 4 |
---|
414 | assert V[2,1] == 5 |
---|
415 | assert V[2,2] == 1 |
---|
416 | assert V[4,0] == 6 |
---|
417 | assert V[4,1] == 7 |
---|
418 | assert V[4,2] == 3 |
---|
419 | |
---|
420 | fid.close() |
---|
421 | os.remove(sww.filename) |
---|
422 | |
---|
423 | |
---|
424 | def test_sww_variable(self): |
---|
425 | """Test that sww information can be written correctly |
---|
426 | """ |
---|
427 | self.domain.set_name('datatest' + str(id(self))) |
---|
428 | self.domain.format = 'sww' |
---|
429 | self.domain.smooth = True |
---|
430 | self.domain.reduction = mean |
---|
431 | |
---|
432 | sww = SWW_file(self.domain) |
---|
433 | sww.store_connectivity() |
---|
434 | sww.store_timestep() |
---|
435 | |
---|
436 | # Check contents |
---|
437 | # Get NetCDF |
---|
438 | fid = NetCDFFile(sww.filename, netcdf_mode_r) # Open existing file for append |
---|
439 | |
---|
440 | |
---|
441 | # Get the variables |
---|
442 | time = fid.variables['time'] |
---|
443 | stage = fid.variables['stage'] |
---|
444 | |
---|
445 | Q = self.domain.quantities['stage'] |
---|
446 | Q0 = Q.vertex_values[:,0] |
---|
447 | Q1 = Q.vertex_values[:,1] |
---|
448 | Q2 = Q.vertex_values[:,2] |
---|
449 | |
---|
450 | A = stage[0,:] |
---|
451 | #print A[0], (Q2[0,0] + Q1[1,0])/2 |
---|
452 | assert num.allclose(A[0], (Q2[0] + Q1[1])/2) |
---|
453 | assert num.allclose(A[1], (Q0[1] + Q1[3] + Q2[2])/3) |
---|
454 | assert num.allclose(A[2], Q0[3]) |
---|
455 | assert num.allclose(A[3], (Q0[0] + Q1[5] + Q2[4])/3) |
---|
456 | |
---|
457 | #Center point |
---|
458 | assert num.allclose(A[4], (Q1[0] + Q2[1] + Q0[2] +\ |
---|
459 | Q0[5] + Q2[6] + Q1[7])/6) |
---|
460 | |
---|
461 | fid.close() |
---|
462 | os.remove(sww.filename) |
---|
463 | |
---|
464 | |
---|
465 | def test_sww_variable2(self): |
---|
466 | """Test that sww information can be written correctly |
---|
467 | multiple timesteps. Use average as reduction operator |
---|
468 | """ |
---|
469 | |
---|
470 | import time, os |
---|
471 | from Scientific.IO.NetCDF import NetCDFFile |
---|
472 | |
---|
473 | self.domain.set_name('datatest' + str(id(self))) |
---|
474 | self.domain.format = 'sww' |
---|
475 | self.domain.smooth = True |
---|
476 | |
---|
477 | self.domain.reduction = mean |
---|
478 | |
---|
479 | sww = SWW_file(self.domain) |
---|
480 | sww.store_connectivity() |
---|
481 | sww.store_timestep() |
---|
482 | #self.domain.tight_slope_limiters = 1 |
---|
483 | self.domain.evolve_to_end(finaltime = 0.01) |
---|
484 | sww.store_timestep() |
---|
485 | |
---|
486 | |
---|
487 | # Check contents |
---|
488 | # Get NetCDF |
---|
489 | fid = NetCDFFile(sww.filename, netcdf_mode_r) # Open existing file for append |
---|
490 | |
---|
491 | # Get the variables |
---|
492 | x = fid.variables['x'] |
---|
493 | y = fid.variables['y'] |
---|
494 | z = fid.variables['elevation'] |
---|
495 | time = fid.variables['time'] |
---|
496 | stage = fid.variables['stage'] |
---|
497 | |
---|
498 | #Check values |
---|
499 | Q = self.domain.quantities['stage'] |
---|
500 | Q0 = Q.vertex_values[:,0] |
---|
501 | Q1 = Q.vertex_values[:,1] |
---|
502 | Q2 = Q.vertex_values[:,2] |
---|
503 | |
---|
504 | A = stage[1,:] |
---|
505 | assert num.allclose(A[0], (Q2[0] + Q1[1])/2) |
---|
506 | assert num.allclose(A[1], (Q0[1] + Q1[3] + Q2[2])/3) |
---|
507 | assert num.allclose(A[2], Q0[3]) |
---|
508 | assert num.allclose(A[3], (Q0[0] + Q1[5] + Q2[4])/3) |
---|
509 | |
---|
510 | #Center point |
---|
511 | assert num.allclose(A[4], (Q1[0] + Q2[1] + Q0[2] +\ |
---|
512 | Q0[5] + Q2[6] + Q1[7])/6) |
---|
513 | |
---|
514 | |
---|
515 | fid.close() |
---|
516 | |
---|
517 | #Cleanup |
---|
518 | os.remove(sww.filename) |
---|
519 | |
---|
    def no_test_sww_variable3(self):
        """Test that sww information can be written correctly
        multiple timesteps using a different reduction operator (min)

        NOTE: disabled (prefixed 'no_') - different reduction operators
        in sww files have been made obsolete.
        """

        # Different reduction in sww files has been made obsolete.

        import time, os
        from Scientific.IO.NetCDF import NetCDFFile

        self.domain.set_name('datatest' + str(id(self)))
        self.domain.format = 'sww'
        self.domain.smooth = True
        self.domain.reduction = min

        sww = SWW_file(self.domain)
        sww.store_connectivity()
        sww.store_timestep()
        #self.domain.tight_slope_limiters = 1
        self.domain.evolve_to_end(finaltime = 0.01)
        sww.store_timestep()


        #Check contents
        #Get NetCDF
        fid = NetCDFFile(sww.filename, netcdf_mode_r)

        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']
        time = fid.variables['time']
        stage = fid.variables['stage']

        #Check values
        Q = self.domain.quantities['stage']
        Q0 = Q.vertex_values[:,0]
        Q1 = Q.vertex_values[:,1]
        Q2 = Q.vertex_values[:,2]

        # With min-reduction each shared node stores the minimum of the
        # corner values that meet at it.
        A = stage[1,:]
        assert num.allclose(A[0], min(Q2[0], Q1[1]))
        assert num.allclose(A[1], min(Q0[1], Q1[3], Q2[2]))
        assert num.allclose(A[2], Q0[3])
        assert num.allclose(A[3], min(Q0[0], Q1[5], Q2[4]))

        #Center point (shared by six triangles)
        assert num.allclose(A[4], min(Q1[0], Q2[1], Q0[2],
                                      Q0[5], Q2[6], Q1[7]))


        fid.close()

        #Cleanup
        os.remove(sww.filename)
---|
575 | |
---|
576 | |
---|
577 | def test_sync(self): |
---|
578 | """test_sync - Test info stored at each timestep is as expected (incl initial condition) |
---|
579 | """ |
---|
580 | |
---|
581 | import time, os, config |
---|
582 | from Scientific.IO.NetCDF import NetCDFFile |
---|
583 | |
---|
584 | self.domain.set_name('synctest') |
---|
585 | self.domain.format = 'sww' |
---|
586 | self.domain.smooth = False |
---|
587 | self.domain.store = True |
---|
588 | |
---|
589 | self.domain.tight_slope_limiters = True |
---|
590 | self.domain.use_centroid_velocities = True |
---|
591 | |
---|
592 | # In this case tight_slope_limiters as default |
---|
593 | # in conjunction with protection |
---|
594 | # against isolated degenerate timesteps works. |
---|
595 | #self.domain.tight_slope_limiters = 1 |
---|
596 | #self.domain.protect_against_isolated_degenerate_timesteps = True |
---|
597 | |
---|
598 | #print 'tight_sl', self.domain.tight_slope_limiters |
---|
599 | |
---|
600 | |
---|
601 | #Evolution |
---|
602 | for t in self.domain.evolve(yieldstep = 1.0, finaltime = 4.0): |
---|
603 | |
---|
604 | #########self.domain.write_time(track_speeds=True) |
---|
605 | stage = self.domain.quantities['stage'].vertex_values |
---|
606 | |
---|
607 | #Get NetCDF |
---|
608 | fid = NetCDFFile(self.domain.writer.filename, netcdf_mode_r) |
---|
609 | stage_file = fid.variables['stage'] |
---|
610 | |
---|
611 | if t == 0.0: |
---|
612 | assert num.allclose(stage, self.initial_stage) |
---|
613 | assert num.allclose(stage_file[:], stage.flatten()) |
---|
614 | else: |
---|
615 | assert not num.allclose(stage, self.initial_stage) |
---|
616 | assert not num.allclose(stage_file[:], stage.flatten()) |
---|
617 | |
---|
618 | fid.close() |
---|
619 | |
---|
620 | os.remove(self.domain.writer.filename) |
---|
621 | |
---|
622 | |
---|
623 | def test_sww_minimum_storable_height(self): |
---|
624 | """Test that sww information can be written correctly |
---|
625 | multiple timesteps using a different reduction operator (min) |
---|
626 | """ |
---|
627 | |
---|
628 | import time, os |
---|
629 | from Scientific.IO.NetCDF import NetCDFFile |
---|
630 | |
---|
631 | self.domain.set_name('datatest' + str(id(self))) |
---|
632 | self.domain.format = 'sww' |
---|
633 | self.domain.smooth = True |
---|
634 | self.domain.reduction = min |
---|
635 | self.domain.minimum_storable_height = 100 |
---|
636 | |
---|
637 | sww = SWW_file(self.domain) |
---|
638 | sww.store_connectivity() |
---|
639 | sww.store_timestep() |
---|
640 | |
---|
641 | #self.domain.tight_slope_limiters = 1 |
---|
642 | self.domain.evolve_to_end(finaltime = 0.01) |
---|
643 | sww.store_timestep() |
---|
644 | |
---|
645 | |
---|
646 | #Check contents |
---|
647 | #Get NetCDF |
---|
648 | fid = NetCDFFile(sww.filename, netcdf_mode_r) |
---|
649 | |
---|
650 | |
---|
651 | # Get the variables |
---|
652 | x = fid.variables['x'] |
---|
653 | y = fid.variables['y'] |
---|
654 | z = fid.variables['elevation'] |
---|
655 | time = fid.variables['time'] |
---|
656 | stage = fid.variables['stage'] |
---|
657 | xmomentum = fid.variables['xmomentum'] |
---|
658 | ymomentum = fid.variables['ymomentum'] |
---|
659 | |
---|
660 | #Check values |
---|
661 | Q = self.domain.quantities['stage'] |
---|
662 | Q0 = Q.vertex_values[:,0] |
---|
663 | Q1 = Q.vertex_values[:,1] |
---|
664 | Q2 = Q.vertex_values[:,2] |
---|
665 | |
---|
666 | A = stage[1,:] |
---|
667 | assert num.allclose(stage[1,:], z[:]) |
---|
668 | |
---|
669 | |
---|
670 | assert num.allclose(xmomentum, 0.0) |
---|
671 | assert num.allclose(ymomentum, 0.0) |
---|
672 | |
---|
673 | fid.close() |
---|
674 | |
---|
675 | #Cleanup |
---|
676 | os.remove(sww.filename) |
---|
677 | |
---|
678 | |
---|
679 | def Not_a_test_sww_DSG(self): |
---|
680 | """Not a test, rather a look at the sww format |
---|
681 | """ |
---|
682 | |
---|
683 | import time, os |
---|
684 | from Scientific.IO.NetCDF import NetCDFFile |
---|
685 | |
---|
686 | self.domain.set_name('datatest' + str(id(self))) |
---|
687 | self.domain.format = 'sww' |
---|
688 | self.domain.smooth = True |
---|
689 | self.domain.reduction = mean |
---|
690 | |
---|
691 | sww = SWW_file(self.domain) |
---|
692 | sww.store_connectivity() |
---|
693 | sww.store_timestep() |
---|
694 | |
---|
695 | #Check contents |
---|
696 | #Get NetCDF |
---|
697 | fid = NetCDFFile(sww.filename, netcdf_mode_r) |
---|
698 | |
---|
699 | # Get the variables |
---|
700 | x = fid.variables['x'] |
---|
701 | y = fid.variables['y'] |
---|
702 | z = fid.variables['elevation'] |
---|
703 | |
---|
704 | volumes = fid.variables['volumes'] |
---|
705 | time = fid.variables['time'] |
---|
706 | |
---|
707 | # 2D |
---|
708 | stage = fid.variables['stage'] |
---|
709 | |
---|
710 | X = x[:] |
---|
711 | Y = y[:] |
---|
712 | Z = z[:] |
---|
713 | V = volumes[:] |
---|
714 | T = time[:] |
---|
715 | S = stage[:,:] |
---|
716 | |
---|
717 | # print "****************************" |
---|
718 | # print "X ",X |
---|
719 | # print "****************************" |
---|
720 | # print "Y ",Y |
---|
721 | # print "****************************" |
---|
722 | # print "Z ",Z |
---|
723 | # print "****************************" |
---|
724 | # print "V ",V |
---|
725 | # print "****************************" |
---|
726 | # print "Time ",T |
---|
727 | # print "****************************" |
---|
728 | # print "Stage ",S |
---|
729 | # print "****************************" |
---|
730 | |
---|
731 | |
---|
732 | fid.close() |
---|
733 | |
---|
734 | #Cleanup |
---|
735 | os.remove(sww.filename) |
---|
736 | |
---|
737 | |
---|
738 | |
---|
739 | def test_export_grid(self): |
---|
740 | """ |
---|
741 | test_export_grid(self): |
---|
742 | Test that sww information can be converted correctly to asc/prj |
---|
743 | format readable by e.g. ArcView |
---|
744 | """ |
---|
745 | |
---|
746 | import time, os |
---|
747 | from Scientific.IO.NetCDF import NetCDFFile |
---|
748 | |
---|
749 | try: |
---|
750 | os.remove('teg*.sww') |
---|
751 | except: |
---|
752 | pass |
---|
753 | |
---|
754 | #Setup |
---|
755 | self.domain.set_name('teg') |
---|
756 | |
---|
757 | prjfile = self.domain.get_name() + '_elevation.prj' |
---|
758 | ascfile = self.domain.get_name() + '_elevation.asc' |
---|
759 | swwfile = self.domain.get_name() + '.sww' |
---|
760 | |
---|
761 | self.domain.set_datadir('.') |
---|
762 | self.domain.smooth = True |
---|
763 | self.domain.set_quantity('elevation', lambda x,y: -x-y) |
---|
764 | self.domain.set_quantity('stage', 1.0) |
---|
765 | |
---|
766 | self.domain.geo_reference = Geo_reference(56,308500,6189000) |
---|
767 | |
---|
768 | sww = SWW_file(self.domain) |
---|
769 | sww.store_connectivity() |
---|
770 | sww.store_timestep() |
---|
771 | self.domain.evolve_to_end(finaltime = 0.01) |
---|
772 | sww.store_timestep() |
---|
773 | |
---|
774 | cellsize = 0.25 |
---|
775 | #Check contents |
---|
776 | #Get NetCDF |
---|
777 | |
---|
778 | fid = NetCDFFile(sww.filename, netcdf_mode_r) |
---|
779 | |
---|
780 | # Get the variables |
---|
781 | x = fid.variables['x'][:] |
---|
782 | y = fid.variables['y'][:] |
---|
783 | z = fid.variables['elevation'][:] |
---|
784 | time = fid.variables['time'][:] |
---|
785 | stage = fid.variables['stage'][:] |
---|
786 | |
---|
787 | fid.close() |
---|
788 | |
---|
789 | #Export to ascii/prj files |
---|
790 | export_grid(self.domain.get_name(), |
---|
791 | quantities = 'elevation', |
---|
792 | cellsize = cellsize, |
---|
793 | verbose = self.verbose, |
---|
794 | format = 'asc') |
---|
795 | |
---|
796 | #Check asc file |
---|
797 | ascid = open(ascfile) |
---|
798 | lines = ascid.readlines() |
---|
799 | ascid.close() |
---|
800 | |
---|
801 | L = lines[2].strip().split() |
---|
802 | assert L[0].strip().lower() == 'xllcorner' |
---|
803 | assert num.allclose(float(L[1].strip().lower()), 308500) |
---|
804 | |
---|
805 | L = lines[3].strip().split() |
---|
806 | assert L[0].strip().lower() == 'yllcorner' |
---|
807 | assert num.allclose(float(L[1].strip().lower()), 6189000) |
---|
808 | |
---|
809 | #Check grid values |
---|
810 | for j in range(5): |
---|
811 | L = lines[6+j].strip().split() |
---|
812 | y = (4-j) * cellsize |
---|
813 | for i in range(5): |
---|
814 | assert num.allclose(float(L[i]), -i*cellsize - y) |
---|
815 | |
---|
816 | #Cleanup |
---|
817 | os.remove(prjfile) |
---|
818 | os.remove(ascfile) |
---|
819 | os.remove(swwfile) |
---|
820 | |
---|
821 | def test_export_gridII(self): |
---|
822 | """ |
---|
823 | test_export_gridII(self): |
---|
824 | Test that sww information can be converted correctly to asc/prj |
---|
825 | format readable by e.g. ArcView |
---|
826 | """ |
---|
827 | |
---|
828 | import time, os |
---|
829 | from Scientific.IO.NetCDF import NetCDFFile |
---|
830 | |
---|
831 | try: |
---|
832 | os.remove('teg*.sww') |
---|
833 | except: |
---|
834 | pass |
---|
835 | |
---|
836 | #Setup |
---|
837 | self.domain.set_name('tegII') |
---|
838 | |
---|
839 | swwfile = self.domain.get_name() + '.sww' |
---|
840 | |
---|
841 | self.domain.set_datadir('.') |
---|
842 | self.domain.smooth = True |
---|
843 | self.domain.set_quantity('elevation', lambda x,y: -x-y) |
---|
844 | self.domain.set_quantity('stage', 1.0) |
---|
845 | |
---|
846 | self.domain.geo_reference = Geo_reference(56,308500,6189000) |
---|
847 | |
---|
848 | sww = SWW_file(self.domain) |
---|
849 | sww.store_connectivity() |
---|
850 | sww.store_timestep() |
---|
851 | self.domain.evolve_to_end(finaltime = 0.01) |
---|
852 | sww.store_timestep() |
---|
853 | |
---|
854 | cellsize = 0.25 |
---|
855 | #Check contents |
---|
856 | #Get NetCDF |
---|
857 | |
---|
858 | fid = NetCDFFile(sww.filename, netcdf_mode_r) |
---|
859 | |
---|
860 | # Get the variables |
---|
861 | x = fid.variables['x'][:] |
---|
862 | y = fid.variables['y'][:] |
---|
863 | z = fid.variables['elevation'][:] |
---|
864 | time = fid.variables['time'][:] |
---|
865 | stage = fid.variables['stage'][:] |
---|
866 | xmomentum = fid.variables['xmomentum'][:] |
---|
867 | ymomentum = fid.variables['ymomentum'][:] |
---|
868 | |
---|
869 | #print 'stage', stage |
---|
870 | #print 'xmom', xmomentum |
---|
871 | #print 'ymom', ymomentum |
---|
872 | |
---|
873 | fid.close() |
---|
874 | |
---|
875 | #Export to ascii/prj files |
---|
876 | if True: |
---|
877 | export_grid(self.domain.get_name(), |
---|
878 | quantities = ['elevation', 'depth'], |
---|
879 | cellsize = cellsize, |
---|
880 | verbose = self.verbose, |
---|
881 | format = 'asc') |
---|
882 | |
---|
883 | else: |
---|
884 | export_grid(self.domain.get_name(), |
---|
885 | quantities = ['depth'], |
---|
886 | cellsize = cellsize, |
---|
887 | verbose = self.verbose, |
---|
888 | format = 'asc') |
---|
889 | |
---|
890 | |
---|
891 | export_grid(self.domain.get_name(), |
---|
892 | quantities = ['elevation'], |
---|
893 | cellsize = cellsize, |
---|
894 | verbose = self.verbose, |
---|
895 | format = 'asc') |
---|
896 | |
---|
897 | prjfile = self.domain.get_name() + '_elevation.prj' |
---|
898 | ascfile = self.domain.get_name() + '_elevation.asc' |
---|
899 | |
---|
900 | #Check asc file |
---|
901 | ascid = open(ascfile) |
---|
902 | lines = ascid.readlines() |
---|
903 | ascid.close() |
---|
904 | |
---|
905 | L = lines[2].strip().split() |
---|
906 | assert L[0].strip().lower() == 'xllcorner' |
---|
907 | assert num.allclose(float(L[1].strip().lower()), 308500) |
---|
908 | |
---|
909 | L = lines[3].strip().split() |
---|
910 | assert L[0].strip().lower() == 'yllcorner' |
---|
911 | assert num.allclose(float(L[1].strip().lower()), 6189000) |
---|
912 | |
---|
913 | #print "ascfile", ascfile |
---|
914 | #Check grid values |
---|
915 | for j in range(5): |
---|
916 | L = lines[6+j].strip().split() |
---|
917 | y = (4-j) * cellsize |
---|
918 | for i in range(5): |
---|
919 | #print " -i*cellsize - y", -i*cellsize - y |
---|
920 | #print "float(L[i])", float(L[i]) |
---|
921 | assert num.allclose(float(L[i]), -i*cellsize - y) |
---|
922 | |
---|
923 | #Cleanup |
---|
924 | os.remove(prjfile) |
---|
925 | os.remove(ascfile) |
---|
926 | |
---|
927 | #Check asc file |
---|
928 | ascfile = self.domain.get_name() + '_depth.asc' |
---|
929 | prjfile = self.domain.get_name() + '_depth.prj' |
---|
930 | ascid = open(ascfile) |
---|
931 | lines = ascid.readlines() |
---|
932 | ascid.close() |
---|
933 | |
---|
934 | L = lines[2].strip().split() |
---|
935 | assert L[0].strip().lower() == 'xllcorner' |
---|
936 | assert num.allclose(float(L[1].strip().lower()), 308500) |
---|
937 | |
---|
938 | L = lines[3].strip().split() |
---|
939 | assert L[0].strip().lower() == 'yllcorner' |
---|
940 | assert num.allclose(float(L[1].strip().lower()), 6189000) |
---|
941 | |
---|
942 | #Check grid values |
---|
943 | for j in range(5): |
---|
944 | L = lines[6+j].strip().split() |
---|
945 | y = (4-j) * cellsize |
---|
946 | for i in range(5): |
---|
947 | #print " -i*cellsize - y", -i*cellsize - y |
---|
948 | #print "float(L[i])", float(L[i]) |
---|
949 | assert num.allclose(float(L[i]), 1 - (-i*cellsize - y)) |
---|
950 | |
---|
951 | #Cleanup |
---|
952 | os.remove(prjfile) |
---|
953 | os.remove(ascfile) |
---|
954 | os.remove(swwfile) |
---|
955 | |
---|
956 | |
---|
957 | def test_export_gridIII(self): |
---|
958 | """ |
---|
959 | test_export_gridIII |
---|
960 | Test that sww information can be converted correctly to asc/prj |
---|
961 | format readable by e.g. ArcView |
---|
962 | """ |
---|
963 | |
---|
964 | import time, os |
---|
965 | from Scientific.IO.NetCDF import NetCDFFile |
---|
966 | |
---|
967 | try: |
---|
968 | os.remove('teg*.sww') |
---|
969 | except: |
---|
970 | pass |
---|
971 | |
---|
972 | #Setup |
---|
973 | |
---|
974 | self.domain.set_name('tegIII') |
---|
975 | |
---|
976 | swwfile = self.domain.get_name() + '.sww' |
---|
977 | |
---|
978 | self.domain.set_datadir('.') |
---|
979 | self.domain.format = 'sww' |
---|
980 | self.domain.smooth = True |
---|
981 | self.domain.set_quantity('elevation', lambda x,y: -x-y) |
---|
982 | self.domain.set_quantity('stage', 1.0) |
---|
983 | |
---|
984 | self.domain.geo_reference = Geo_reference(56,308500,6189000) |
---|
985 | |
---|
986 | sww = SWW_file(self.domain) |
---|
987 | sww.store_connectivity() |
---|
988 | sww.store_timestep() #'stage') |
---|
989 | self.domain.evolve_to_end(finaltime = 0.01) |
---|
990 | sww.store_timestep() #'stage') |
---|
991 | |
---|
992 | cellsize = 0.25 |
---|
993 | #Check contents |
---|
994 | #Get NetCDF |
---|
995 | |
---|
996 | fid = NetCDFFile(sww.filename, netcdf_mode_r) |
---|
997 | |
---|
998 | # Get the variables |
---|
999 | x = fid.variables['x'][:] |
---|
1000 | y = fid.variables['y'][:] |
---|
1001 | z = fid.variables['elevation'][:] |
---|
1002 | time = fid.variables['time'][:] |
---|
1003 | stage = fid.variables['stage'][:] |
---|
1004 | |
---|
1005 | fid.close() |
---|
1006 | |
---|
1007 | #Export to ascii/prj files |
---|
1008 | extra_name_out = 'yeah' |
---|
1009 | if True: |
---|
1010 | export_grid(self.domain.get_name(), |
---|
1011 | quantities = ['elevation', 'depth'], |
---|
1012 | extra_name_out = extra_name_out, |
---|
1013 | cellsize = cellsize, |
---|
1014 | verbose = self.verbose, |
---|
1015 | format = 'asc') |
---|
1016 | |
---|
1017 | else: |
---|
1018 | export_grid(self.domain.get_name(), |
---|
1019 | quantities = ['depth'], |
---|
1020 | cellsize = cellsize, |
---|
1021 | verbose = self.verbose, |
---|
1022 | format = 'asc') |
---|
1023 | |
---|
1024 | |
---|
1025 | export_grid(self.domain.get_name(), |
---|
1026 | quantities = ['elevation'], |
---|
1027 | cellsize = cellsize, |
---|
1028 | verbose = self.verbose, |
---|
1029 | format = 'asc') |
---|
1030 | |
---|
1031 | prjfile = self.domain.get_name() + '_elevation_yeah.prj' |
---|
1032 | ascfile = self.domain.get_name() + '_elevation_yeah.asc' |
---|
1033 | |
---|
1034 | #Check asc file |
---|
1035 | ascid = open(ascfile) |
---|
1036 | lines = ascid.readlines() |
---|
1037 | ascid.close() |
---|
1038 | |
---|
1039 | L = lines[2].strip().split() |
---|
1040 | assert L[0].strip().lower() == 'xllcorner' |
---|
1041 | assert num.allclose(float(L[1].strip().lower()), 308500) |
---|
1042 | |
---|
1043 | L = lines[3].strip().split() |
---|
1044 | assert L[0].strip().lower() == 'yllcorner' |
---|
1045 | assert num.allclose(float(L[1].strip().lower()), 6189000) |
---|
1046 | |
---|
1047 | #print "ascfile", ascfile |
---|
1048 | #Check grid values |
---|
1049 | for j in range(5): |
---|
1050 | L = lines[6+j].strip().split() |
---|
1051 | y = (4-j) * cellsize |
---|
1052 | for i in range(5): |
---|
1053 | #print " -i*cellsize - y", -i*cellsize - y |
---|
1054 | #print "float(L[i])", float(L[i]) |
---|
1055 | assert num.allclose(float(L[i]), -i*cellsize - y) |
---|
1056 | |
---|
1057 | #Cleanup |
---|
1058 | os.remove(prjfile) |
---|
1059 | os.remove(ascfile) |
---|
1060 | |
---|
1061 | #Check asc file |
---|
1062 | ascfile = self.domain.get_name() + '_depth_yeah.asc' |
---|
1063 | prjfile = self.domain.get_name() + '_depth_yeah.prj' |
---|
1064 | ascid = open(ascfile) |
---|
1065 | lines = ascid.readlines() |
---|
1066 | ascid.close() |
---|
1067 | |
---|
1068 | L = lines[2].strip().split() |
---|
1069 | assert L[0].strip().lower() == 'xllcorner' |
---|
1070 | assert num.allclose(float(L[1].strip().lower()), 308500) |
---|
1071 | |
---|
1072 | L = lines[3].strip().split() |
---|
1073 | assert L[0].strip().lower() == 'yllcorner' |
---|
1074 | assert num.allclose(float(L[1].strip().lower()), 6189000) |
---|
1075 | |
---|
1076 | #Check grid values |
---|
1077 | for j in range(5): |
---|
1078 | L = lines[6+j].strip().split() |
---|
1079 | y = (4-j) * cellsize |
---|
1080 | for i in range(5): |
---|
1081 | assert num.allclose(float(L[i]), 1 - (-i*cellsize - y)) |
---|
1082 | |
---|
1083 | #Cleanup |
---|
1084 | os.remove(prjfile) |
---|
1085 | os.remove(ascfile) |
---|
1086 | os.remove(swwfile) |
---|
1087 | |
---|
1088 | def test_export_grid_bad(self): |
---|
1089 | """Test that sww information can be converted correctly to asc/prj |
---|
1090 | format readable by e.g. ArcView |
---|
1091 | """ |
---|
1092 | |
---|
1093 | try: |
---|
1094 | export_grid('a_small_round-egg', |
---|
1095 | quantities = ['elevation', 'depth'], |
---|
1096 | cellsize = 99, |
---|
1097 | verbose = self.verbose, |
---|
1098 | format = 'asc') |
---|
1099 | except IOError: |
---|
1100 | pass |
---|
1101 | else: |
---|
1102 | self.failUnless(0 ==1, 'Bad input did not throw exception error!') |
---|
1103 | |
---|
1104 | def test_export_grid_parallel(self): |
---|
1105 | """Test that sww information can be converted correctly to asc/prj |
---|
1106 | format readable by e.g. ArcView |
---|
1107 | """ |
---|
1108 | |
---|
1109 | import time, os |
---|
1110 | from Scientific.IO.NetCDF import NetCDFFile |
---|
1111 | |
---|
1112 | base_name = 'tegp' |
---|
1113 | #Setup |
---|
1114 | self.domain.set_name(base_name+'_P0_8') |
---|
1115 | swwfile = self.domain.get_name() + '.sww' |
---|
1116 | |
---|
1117 | self.domain.set_datadir('.') |
---|
1118 | self.domain.format = 'sww' |
---|
1119 | self.domain.smooth = True |
---|
1120 | self.domain.set_quantity('elevation', lambda x,y: -x-y) |
---|
1121 | self.domain.set_quantity('stage', 1.0) |
---|
1122 | |
---|
1123 | self.domain.geo_reference = Geo_reference(56,308500,6189000) |
---|
1124 | |
---|
1125 | sww = SWW_file(self.domain) |
---|
1126 | sww.store_connectivity() |
---|
1127 | sww.store_timestep() |
---|
1128 | self.domain.evolve_to_end(finaltime = 0.0001) |
---|
1129 | #Setup |
---|
1130 | self.domain.set_name(base_name+'_P1_8') |
---|
1131 | swwfile2 = self.domain.get_name() + '.sww' |
---|
1132 | sww = SWW_file(self.domain) |
---|
1133 | sww.store_connectivity() |
---|
1134 | sww.store_timestep() |
---|
1135 | self.domain.evolve_to_end(finaltime = 0.0002) |
---|
1136 | sww.store_timestep() |
---|
1137 | |
---|
1138 | cellsize = 0.25 |
---|
1139 | #Check contents |
---|
1140 | #Get NetCDF |
---|
1141 | |
---|
1142 | fid = NetCDFFile(sww.filename, netcdf_mode_r) |
---|
1143 | |
---|
1144 | # Get the variables |
---|
1145 | x = fid.variables['x'][:] |
---|
1146 | y = fid.variables['y'][:] |
---|
1147 | z = fid.variables['elevation'][:] |
---|
1148 | time = fid.variables['time'][:] |
---|
1149 | stage = fid.variables['stage'][:] |
---|
1150 | |
---|
1151 | fid.close() |
---|
1152 | |
---|
1153 | #Export to ascii/prj files |
---|
1154 | extra_name_out = 'yeah' |
---|
1155 | export_grid(base_name, |
---|
1156 | quantities = ['elevation', 'depth'], |
---|
1157 | extra_name_out = extra_name_out, |
---|
1158 | cellsize = cellsize, |
---|
1159 | verbose = self.verbose, |
---|
1160 | format = 'asc') |
---|
1161 | |
---|
1162 | prjfile = base_name + '_P0_8_elevation_yeah.prj' |
---|
1163 | ascfile = base_name + '_P0_8_elevation_yeah.asc' |
---|
1164 | #Check asc file |
---|
1165 | ascid = open(ascfile) |
---|
1166 | lines = ascid.readlines() |
---|
1167 | ascid.close() |
---|
1168 | #Check grid values |
---|
1169 | for j in range(5): |
---|
1170 | L = lines[6+j].strip().split() |
---|
1171 | y = (4-j) * cellsize |
---|
1172 | for i in range(5): |
---|
1173 | #print " -i*cellsize - y", -i*cellsize - y |
---|
1174 | #print "float(L[i])", float(L[i]) |
---|
1175 | assert num.allclose(float(L[i]), -i*cellsize - y) |
---|
1176 | #Cleanup |
---|
1177 | os.remove(prjfile) |
---|
1178 | os.remove(ascfile) |
---|
1179 | |
---|
1180 | prjfile = base_name + '_P1_8_elevation_yeah.prj' |
---|
1181 | ascfile = base_name + '_P1_8_elevation_yeah.asc' |
---|
1182 | #Check asc file |
---|
1183 | ascid = open(ascfile) |
---|
1184 | lines = ascid.readlines() |
---|
1185 | ascid.close() |
---|
1186 | #Check grid values |
---|
1187 | for j in range(5): |
---|
1188 | L = lines[6+j].strip().split() |
---|
1189 | y = (4-j) * cellsize |
---|
1190 | for i in range(5): |
---|
1191 | #print " -i*cellsize - y", -i*cellsize - y |
---|
1192 | #print "float(L[i])", float(L[i]) |
---|
1193 | assert num.allclose(float(L[i]), -i*cellsize - y) |
---|
1194 | #Cleanup |
---|
1195 | os.remove(prjfile) |
---|
1196 | os.remove(ascfile) |
---|
1197 | os.remove(swwfile) |
---|
1198 | |
---|
1199 | #Check asc file |
---|
1200 | ascfile = base_name + '_P0_8_depth_yeah.asc' |
---|
1201 | prjfile = base_name + '_P0_8_depth_yeah.prj' |
---|
1202 | ascid = open(ascfile) |
---|
1203 | lines = ascid.readlines() |
---|
1204 | ascid.close() |
---|
1205 | #Check grid values |
---|
1206 | for j in range(5): |
---|
1207 | L = lines[6+j].strip().split() |
---|
1208 | y = (4-j) * cellsize |
---|
1209 | for i in range(5): |
---|
1210 | assert num.allclose(float(L[i]), 1 - (-i*cellsize - y)) |
---|
1211 | #Cleanup |
---|
1212 | os.remove(prjfile) |
---|
1213 | os.remove(ascfile) |
---|
1214 | |
---|
1215 | #Check asc file |
---|
1216 | ascfile = base_name + '_P1_8_depth_yeah.asc' |
---|
1217 | prjfile = base_name + '_P1_8_depth_yeah.prj' |
---|
1218 | ascid = open(ascfile) |
---|
1219 | lines = ascid.readlines() |
---|
1220 | ascid.close() |
---|
1221 | #Check grid values |
---|
1222 | for j in range(5): |
---|
1223 | L = lines[6+j].strip().split() |
---|
1224 | y = (4-j) * cellsize |
---|
1225 | for i in range(5): |
---|
1226 | assert num.allclose(float(L[i]), 1 - (-i*cellsize - y)) |
---|
1227 | #Cleanup |
---|
1228 | os.remove(prjfile) |
---|
1229 | os.remove(ascfile) |
---|
1230 | os.remove(swwfile2) |
---|
1231 | |
---|
1232 | |
---|
1233 | |
---|
    def test_sww2domain1(self):
        """Round-trip test for sww2domain.

        Evolves a stored domain, reconstructs a second domain from the
        resulting sww file, verifies the stored quantities match, then
        evolves both domains under identical Dirichlet boundaries and
        checks that they remain numerically close.
        """
        ################################################
        #Create a test domain, and evolve and save it.
        ################################################
        from mesh_factory import rectangular

        #Create basic mesh

        yiel=0.01
        points, vertices, boundary = rectangular(10,10)

        #Create shallow water domain
        domain = Domain(points, vertices, boundary)
        domain.geo_reference = Geo_reference(56,11,11)
        domain.smooth = False
        domain.store = True  # produce the sww file read back below
        domain.set_name('bedslope')
        domain.default_order=2
        #Bed-slope and friction
        domain.set_quantity('elevation', lambda x,y: -x/3)
        domain.set_quantity('friction', 0.1)
        # Boundary conditions
        from math import sin, pi
        Br = Reflective_boundary(domain)
        Bt = Transmissive_boundary(domain)
        Bd = Dirichlet_boundary([0.2,0.,0.])
        Bw = Time_boundary(domain=domain,f=lambda t: [(0.1*sin(t*2*pi)), 0.0, 0.0])

        #domain.set_boundary({'left': Bd, 'right': Br, 'top': Br, 'bottom': Br})
        domain.set_boundary({'left': Bd, 'right': Bd, 'top': Bd, 'bottom': Bd})

        # Store the momenta too so they can be compared after the
        # round trip (presumably 2 selects full storage — TODO confirm)
        domain.quantities_to_be_stored['xmomentum'] = 2
        domain.quantities_to_be_stored['ymomentum'] = 2
        #Initial condition
        h = 0.05
        elevation = domain.quantities['elevation'].vertex_values
        domain.set_quantity('stage', elevation + h)

        domain.check_integrity()
        #Evolution
        #domain.tight_slope_limiters = 1
        for t in domain.evolve(yieldstep = yiel, finaltime = 0.05):
            #domain.write_time()
            pass


        ##########################################
        #Import the example's file as a new domain
        ##########################################
        from data_manager import sww2domain
        import os

        filename = domain.datadir + os.sep + domain.get_name() + '.sww'
        domain2 = sww2domain(filename,None,fail_if_NaN=False,verbose=self.verbose)
        #points, vertices, boundary = rectangular(15,15)
        #domain2.boundary = boundary
        ###################
        ##NOW TEST IT!!!
        ###################

        os.remove(filename)

        # Compare mesh geometry plus the integral and vertex values of
        # every stored quantity; the attribute paths are built as
        # strings and eval'ed against both domain objects.
        bits = ['vertex_coordinates']
        for quantity in domain.quantities_to_be_stored:
            bits.append('get_quantity("%s").get_integral()' % quantity)
            bits.append('get_quantity("%s").get_values()' % quantity)

        for bit in bits:
            #print 'testing that domain.'+bit+' has been restored'
            #print bit
            #print 'done'
            assert num.allclose(eval('domain.'+bit),eval('domain2.'+bit))

        ######################################
        #Now evolve them both, just to be sure
        ######################################x
        domain.time = 0.
        from time import sleep

        final = .1
        domain.set_quantity('friction', 0.1)
        domain.store = False
        domain.set_boundary({'left': Bd, 'right': Bd, 'top': Bd, 'bottom': Bd})


        for t in domain.evolve(yieldstep = yiel, finaltime = final):
            #domain.write_time()
            pass

        # NOTE(review): the next two adjustments cancel each other out
        # (final is unchanged); kept with the original explanation.
        final = final - (domain2.starttime-domain.starttime)
        #BUT since domain1 gets time hacked back to 0:
        final = final + (domain2.starttime-domain.starttime)

        domain2.smooth = False
        domain2.store = False
        domain2.default_order=2
        domain2.set_quantity('friction', 0.1)
        #Bed-slope and friction
        # Boundary conditions
        Bd2=Dirichlet_boundary([0.2,0.,0.])
        domain2.boundary = domain.boundary
        #print 'domain2.boundary'
        #print domain2.boundary
        domain2.set_boundary({'left': Bd, 'right': Bd, 'top': Bd, 'bottom': Bd})
        #domain2.set_boundary({'exterior': Bd})

        domain2.check_integrity()

        for t in domain2.evolve(yieldstep = yiel, finaltime = final):
            #domain2.write_time()
            pass

        ###################
        ##NOW TEST IT!!!
        ##################

        # After evolving both domains identically, their quantities
        # should still agree (to a loose tolerance).
        bits = ['vertex_coordinates']

        for quantity in ['elevation','stage', 'ymomentum','xmomentum']:
            bits.append('get_quantity("%s").get_integral()' %quantity)
            bits.append('get_quantity("%s").get_values()' %quantity)

        #print bits
        for bit in bits:
            #print bit
            #print eval('domain.'+bit)
            #print eval('domain2.'+bit)

            #print eval('domain.'+bit+'-domain2.'+bit)
            msg = 'Values in the two domains are different for ' + bit
            assert num.allclose(eval('domain.'+bit),eval('domain2.'+bit),
                                rtol=1.e-5, atol=3.e-8), msg
1366 | |
---|
1367 | |
---|
    def DISABLEDtest_sww2domain2(self):
        """Same as test_sww2domain1, but checks how NaNs in the sww file
        are handled by sww2domain (fail_if_NaN / NaN_filler).

        NOTE(review): this test is disabled (see FIXME below) and calls
        sys.exit() part-way through, so the code after that call is
        currently unreachable.
        """
        ##################################################################
        #Same as previous test, but this checks how NaNs are handled.
        ##################################################################

        #FIXME: See ticket 223

        from mesh_factory import rectangular

        #Create basic mesh
        points, vertices, boundary = rectangular(2,2)

        #Create shallow water domain
        domain = Domain(points, vertices, boundary)
        domain.smooth = False
        domain.store = True
        domain.set_name('test_file')
        domain.set_datadir('.')
        domain.default_order=2

        domain.set_quantity('elevation', lambda x,y: -x/3)
        domain.set_quantity('friction', 0.1)

        from math import sin, pi
        Br = Reflective_boundary(domain)
        Bt = Transmissive_boundary(domain)
        Bd = Dirichlet_boundary([0.2,0.,0.])
        Bw = Time_boundary(domain=domain,
                           f=lambda t: [(0.1*sin(t*2*pi)), 0.0, 0.0])

        domain.set_boundary({'left': Bd, 'right': Br, 'top': Br, 'bottom': Br})

        h = 0.05
        elevation = domain.quantities['elevation'].vertex_values
        domain.set_quantity('stage', elevation + h)

        domain.check_integrity()

        for t in domain.evolve(yieldstep = 1, finaltime = 2.0):
            pass
            #domain.write_time()



        ##################################
        #Import the file as a new domain
        ##################################
        from data_manager import sww2domain
        import os

        filename = domain.datadir + os.sep + domain.get_name() + '.sww'

        # Fail because NaNs are present
        #domain2 = sww2domain(filename,
        #                     boundary,
        #                     fail_if_NaN=True,
        #                     verbose=self.verbose)
        # Importing with fail_if_NaN=True is expected to raise
        # DataDomainError; if it does, re-import with NaNs filled.
        try:
            domain2 = sww2domain(filename,
                                 boundary,
                                 fail_if_NaN=True,
                                 verbose=self.verbose)
        except DataDomainError:
            # Now import it, filling NaNs to be -9999
            filler = -9999
            domain2 = sww2domain(filename,
                                 None,
                                 fail_if_NaN=False,
                                 NaN_filler=filler,
                                 verbose=self.verbose)
        else:
            raise Exception, 'should have failed'


        # Now import it, filling NaNs to be 0
        filler = -9999
        domain2 = sww2domain(filename,
                             None,
                             fail_if_NaN=False,
                             NaN_filler=filler,
                             verbose=self.verbose)

        # NOTE(review): everything below this sys.exit() is unreachable.
        import sys; sys.exit()

        # Clean up
        os.remove(filename)


        # Compare geo-reference, mesh and stored quantities between the
        # original and the reconstructed domain (eval'ed string paths).
        bits = ['geo_reference.get_xllcorner()',
                'geo_reference.get_yllcorner()',
                'vertex_coordinates']

        for quantity in domain.quantities_to_be_stored:
            bits.append('get_quantity("%s").get_integral()' %quantity)
            bits.append('get_quantity("%s").get_values()' %quantity)

        for bit in bits:
            # print 'testing that domain.'+bit+' has been restored'
            assert num.allclose(eval('domain.'+bit),eval('domain2.'+bit))

        print
        print
        print domain2.get_quantity('xmomentum').get_values()
        print
        print domain2.get_quantity('stage').get_values()
        print

        print 'filler', filler
        print max(domain2.get_quantity('xmomentum').get_values().flat)

        # All momenta should have been replaced by the filler value
        assert max(max(domain2.get_quantity('xmomentum').get_values()))==filler
        assert min(min(domain2.get_quantity('xmomentum').get_values()))==filler
        assert max(max(domain2.get_quantity('ymomentum').get_values()))==filler
        assert min(min(domain2.get_quantity('ymomentum').get_values()))==filler
1482 | |
---|
1483 | |
---|
1484 | |
---|
1485 | def test_weed(self): |
---|
1486 | from data_manager import weed |
---|
1487 | |
---|
1488 | coordinates1 = [[0.,0.],[1.,0.],[1.,1.],[1.,0.],[2.,0.],[1.,1.]] |
---|
1489 | volumes1 = [[0,1,2],[3,4,5]] |
---|
1490 | boundary1= {(0,1): 'external',(1,2): 'not external',(2,0): 'external',(3,4): 'external',(4,5): 'external',(5,3): 'not external'} |
---|
1491 | coordinates2,volumes2,boundary2=weed(coordinates1,volumes1,boundary1) |
---|
1492 | |
---|
1493 | points2 = {(0.,0.):None,(1.,0.):None,(1.,1.):None,(2.,0.):None} |
---|
1494 | |
---|
1495 | assert len(points2)==len(coordinates2) |
---|
1496 | for i in range(len(coordinates2)): |
---|
1497 | coordinate = tuple(coordinates2[i]) |
---|
1498 | assert points2.has_key(coordinate) |
---|
1499 | points2[coordinate]=i |
---|
1500 | |
---|
1501 | for triangle in volumes1: |
---|
1502 | for coordinate in triangle: |
---|
1503 | assert coordinates2[points2[tuple(coordinates1[coordinate])]][0]==coordinates1[coordinate][0] |
---|
1504 | assert coordinates2[points2[tuple(coordinates1[coordinate])]][1]==coordinates1[coordinate][1] |
---|
1505 | |
---|
1506 | |
---|
1507 | #FIXME This fails - smooth makes the comparism too hard for allclose |
---|
    def ztest_sww2domain3(self):
        """As test_sww2domain1 but with domain.smooth = True.

        Deactivated (renamed with a 'z' prefix) because, per the FIXME
        above, smoothing makes the final comparison too hard for
        num.allclose.
        """
        ################################################
        #DOMAIN.SMOOTH = TRUE !!!!!!!!!!!!!!!!!!!!!!!!!!
        ################################################
        from mesh_factory import rectangular
        #Create basic mesh

        yiel=0.01
        points, vertices, boundary = rectangular(10,10)

        #Create shallow water domain
        domain = Domain(points, vertices, boundary)
        domain.geo_reference = Geo_reference(56,11,11)
        domain.smooth = True
        domain.store = True  # produce the sww file read back below
        domain.set_name('bedslope')
        domain.default_order=2
        #Bed-slope and friction
        domain.set_quantity('elevation', lambda x,y: -x/3)
        domain.set_quantity('friction', 0.1)
        # Boundary conditions
        from math import sin, pi
        Br = Reflective_boundary(domain)
        Bt = Transmissive_boundary(domain)
        Bd = Dirichlet_boundary([0.2,0.,0.])
        Bw = Time_boundary(domain=domain,
                           f=lambda t: [(0.1*sin(t*2*pi)), 0.0, 0.0])

        domain.set_boundary({'left': Bd, 'right': Bd, 'top': Bd, 'bottom': Bd})

        # Store the momenta too so their integrals can be compared below
        domain.quantities_to_be_stored['xmomentum'] = 2
        domain.quantities_to_be_stored['ymomentum'] = 2
        #Initial condition
        h = 0.05
        elevation = domain.quantities['elevation'].vertex_values
        domain.set_quantity('stage', elevation + h)


        domain.check_integrity()
        #Evolution
        for t in domain.evolve(yieldstep = yiel, finaltime = 0.05):
            # domain.write_time()
            pass


        ##########################################
        #Import the example's file as a new domain
        ##########################################
        from data_manager import sww2domain
        import os

        filename = domain.datadir + os.sep + domain.get_name() + '.sww'
        domain2 = sww2domain(filename,None,fail_if_NaN=False,verbose=self.verbose)
        #points, vertices, boundary = rectangular(15,15)
        #domain2.boundary = boundary
        ###################
        ##NOW TEST IT!!!
        ###################

        os.remove(domain.get_name() + '.sww')

        #FIXME smooth domain so that they can be compared


        # Only the integrals are compared here: with smoothing the
        # vertex values cannot be compared directly.
        bits = []
        for quantity in domain.quantities_to_be_stored:
            bits.append('quantities["%s"].get_integral()'%quantity)


        for bit in bits:
            #print 'testing that domain.'+bit+' has been restored'
            #print bit
            #print 'done'
            #print ('domain.'+bit), eval('domain.'+bit)
            #print ('domain2.'+bit), eval('domain2.'+bit)
            assert num.allclose(eval('domain.'+bit),eval('domain2.'+bit),rtol=1.0e-1,atol=1.e-3)
            pass

        ######################################
        #Now evolve them both, just to be sure
        ######################################x
        domain.time = 0.
        from time import sleep

        final = .5
        domain.set_quantity('friction', 0.1)
        domain.store = False
        domain.set_boundary({'left': Bd, 'right': Bd, 'top': Bd, 'bottom': Br})

        for t in domain.evolve(yieldstep = yiel, finaltime = final):
            #domain.write_time()
            pass

        domain2.smooth = True
        domain2.store = False
        domain2.default_order=2
        domain2.set_quantity('friction', 0.1)
        #Bed-slope and friction
        # Boundary conditions
        Bd2=Dirichlet_boundary([0.2,0.,0.])
        Br2 = Reflective_boundary(domain2)
        domain2.boundary = domain.boundary
        #print 'domain2.boundary'
        #print domain2.boundary
        domain2.set_boundary({'left': Bd2, 'right': Bd2, 'top': Bd2, 'bottom': Br2})
        #domain2.boundary = domain.boundary
        #domain2.set_boundary({'exterior': Bd})

        domain2.check_integrity()

        for t in domain2.evolve(yieldstep = yiel, finaltime = final):
            #domain2.write_time()
            pass

        ###################
        ##NOW TEST IT!!!
        ##################

        print '><><><><>>'
        bits = [ 'vertex_coordinates']

        for quantity in ['elevation','xmomentum','ymomentum']:
            #bits.append('quantities["%s"].get_integral()'%quantity)
            bits.append('get_quantity("%s").get_values()' %quantity)

        for bit in bits:
            print bit
            assert num.allclose(eval('domain.'+bit),eval('domain2.'+bit))
1636 | |
---|
1637 | |
---|
1638 | def test_decimate_dem(self): |
---|
1639 | """Test decimation of dem file |
---|
1640 | """ |
---|
1641 | |
---|
1642 | import os |
---|
1643 | from Scientific.IO.NetCDF import NetCDFFile |
---|
1644 | |
---|
1645 | #Write test dem file |
---|
1646 | root = 'decdemtest' |
---|
1647 | |
---|
1648 | filename = root + '.dem' |
---|
1649 | fid = NetCDFFile(filename, netcdf_mode_w) |
---|
1650 | |
---|
1651 | fid.institution = 'Geoscience Australia' |
---|
1652 | fid.description = 'NetCDF DEM format for compact and portable ' +\ |
---|
1653 | 'storage of spatial point data' |
---|
1654 | |
---|
1655 | nrows = 15 |
---|
1656 | ncols = 18 |
---|
1657 | |
---|
1658 | fid.ncols = ncols |
---|
1659 | fid.nrows = nrows |
---|
1660 | fid.xllcorner = 2000.5 |
---|
1661 | fid.yllcorner = 3000.5 |
---|
1662 | fid.cellsize = 25 |
---|
1663 | fid.NODATA_value = -9999 |
---|
1664 | |
---|
1665 | fid.zone = 56 |
---|
1666 | fid.false_easting = 0.0 |
---|
1667 | fid.false_northing = 0.0 |
---|
1668 | fid.projection = 'UTM' |
---|
1669 | fid.datum = 'WGS84' |
---|
1670 | fid.units = 'METERS' |
---|
1671 | |
---|
1672 | fid.createDimension('number_of_points', nrows*ncols) |
---|
1673 | |
---|
1674 | fid.createVariable('elevation', netcdf_float, ('number_of_points',)) |
---|
1675 | |
---|
1676 | elevation = fid.variables['elevation'] |
---|
1677 | |
---|
1678 | elevation[:] = (num.arange(nrows*ncols)) |
---|
1679 | |
---|
1680 | fid.close() |
---|
1681 | |
---|
1682 | #generate the elevation values expected in the decimated file |
---|
1683 | ref_elevation = [( 0+ 1+ 2+ 18+ 19+ 20+ 36+ 37+ 38) / 9.0, |
---|
1684 | ( 4+ 5+ 6+ 22+ 23+ 24+ 40+ 41+ 42) / 9.0, |
---|
1685 | ( 8+ 9+ 10+ 26+ 27+ 28+ 44+ 45+ 46) / 9.0, |
---|
1686 | ( 12+ 13+ 14+ 30+ 31+ 32+ 48+ 49+ 50) / 9.0, |
---|
1687 | ( 72+ 73+ 74+ 90+ 91+ 92+108+109+110) / 9.0, |
---|
1688 | ( 76+ 77+ 78+ 94+ 95+ 96+112+113+114) / 9.0, |
---|
1689 | ( 80+ 81+ 82+ 98+ 99+100+116+117+118) / 9.0, |
---|
1690 | ( 84+ 85+ 86+102+103+104+120+121+122) / 9.0, |
---|
1691 | (144+145+146+162+163+164+180+181+182) / 9.0, |
---|
1692 | (148+149+150+166+167+168+184+185+186) / 9.0, |
---|
1693 | (152+153+154+170+171+172+188+189+190) / 9.0, |
---|
1694 | (156+157+158+174+175+176+192+193+194) / 9.0, |
---|
1695 | (216+217+218+234+235+236+252+253+254) / 9.0, |
---|
1696 | (220+221+222+238+239+240+256+257+258) / 9.0, |
---|
1697 | (224+225+226+242+243+244+260+261+262) / 9.0, |
---|
1698 | (228+229+230+246+247+248+264+265+266) / 9.0] |
---|
1699 | |
---|
1700 | # generate a stencil for computing the decimated values |
---|
1701 | stencil = num.ones((3,3), num.float) / 9.0 |
---|
1702 | |
---|
1703 | decimate_dem(root, stencil=stencil, cellsize_new=100) |
---|
1704 | |
---|
1705 | # Open decimated NetCDF file |
---|
1706 | fid = NetCDFFile(root + '_100.dem', netcdf_mode_r) |
---|
1707 | |
---|
1708 | # Get decimated elevation |
---|
1709 | elevation = fid.variables['elevation'] |
---|
1710 | |
---|
1711 | # Check values |
---|
1712 | assert num.allclose(elevation, ref_elevation) |
---|
1713 | |
---|
1714 | # Cleanup |
---|
1715 | fid.close() |
---|
1716 | |
---|
1717 | os.remove(root + '.dem') |
---|
1718 | os.remove(root + '_100.dem') |
---|
1719 | |
---|
1720 | def test_decimate_dem_NODATA(self): |
---|
1721 | """Test decimation of dem file that includes NODATA values |
---|
1722 | """ |
---|
1723 | |
---|
1724 | import os |
---|
1725 | from Scientific.IO.NetCDF import NetCDFFile |
---|
1726 | |
---|
1727 | # Write test dem file |
---|
1728 | root = 'decdemtest' |
---|
1729 | |
---|
1730 | filename = root + '.dem' |
---|
1731 | fid = NetCDFFile(filename, netcdf_mode_w) |
---|
1732 | |
---|
1733 | fid.institution = 'Geoscience Australia' |
---|
1734 | fid.description = 'NetCDF DEM format for compact and portable ' +\ |
---|
1735 | 'storage of spatial point data' |
---|
1736 | |
---|
1737 | nrows = 15 |
---|
1738 | ncols = 18 |
---|
1739 | NODATA_value = -9999 |
---|
1740 | |
---|
1741 | fid.ncols = ncols |
---|
1742 | fid.nrows = nrows |
---|
1743 | fid.xllcorner = 2000.5 |
---|
1744 | fid.yllcorner = 3000.5 |
---|
1745 | fid.cellsize = 25 |
---|
1746 | fid.NODATA_value = NODATA_value |
---|
1747 | |
---|
1748 | fid.zone = 56 |
---|
1749 | fid.false_easting = 0.0 |
---|
1750 | fid.false_northing = 0.0 |
---|
1751 | fid.projection = 'UTM' |
---|
1752 | fid.datum = 'WGS84' |
---|
1753 | fid.units = 'METERS' |
---|
1754 | |
---|
1755 | fid.createDimension('number_of_points', nrows*ncols) |
---|
1756 | |
---|
1757 | fid.createVariable('elevation', netcdf_float, ('number_of_points',)) |
---|
1758 | |
---|
1759 | elevation = fid.variables['elevation'] |
---|
1760 | |
---|
1761 | # Generate initial elevation values |
---|
1762 | elevation_tmp = (num.arange(nrows*ncols)) |
---|
1763 | |
---|
1764 | # Add some NODATA values |
---|
1765 | elevation_tmp[0] = NODATA_value |
---|
1766 | elevation_tmp[95] = NODATA_value |
---|
1767 | elevation_tmp[188] = NODATA_value |
---|
1768 | elevation_tmp[189] = NODATA_value |
---|
1769 | elevation_tmp[190] = NODATA_value |
---|
1770 | elevation_tmp[209] = NODATA_value |
---|
1771 | elevation_tmp[252] = NODATA_value |
---|
1772 | |
---|
1773 | elevation[:] = elevation_tmp |
---|
1774 | |
---|
1775 | fid.close() |
---|
1776 | |
---|
1777 | # Generate the elevation values expected in the decimated file |
---|
1778 | ref_elevation = [NODATA_value, |
---|
1779 | ( 4+ 5+ 6+ 22+ 23+ 24+ 40+ 41+ 42) / 9.0, |
---|
1780 | ( 8+ 9+ 10+ 26+ 27+ 28+ 44+ 45+ 46) / 9.0, |
---|
1781 | ( 12+ 13+ 14+ 30+ 31+ 32+ 48+ 49+ 50) / 9.0, |
---|
1782 | ( 72+ 73+ 74+ 90+ 91+ 92+108+109+110) / 9.0, |
---|
1783 | NODATA_value, |
---|
1784 | ( 80+ 81+ 82+ 98+ 99+100+116+117+118) / 9.0, |
---|
1785 | ( 84+ 85+ 86+102+103+104+120+121+122) / 9.0, |
---|
1786 | (144+145+146+162+163+164+180+181+182) / 9.0, |
---|
1787 | (148+149+150+166+167+168+184+185+186) / 9.0, |
---|
1788 | NODATA_value, |
---|
1789 | (156+157+158+174+175+176+192+193+194) / 9.0, |
---|
1790 | NODATA_value, |
---|
1791 | (220+221+222+238+239+240+256+257+258) / 9.0, |
---|
1792 | (224+225+226+242+243+244+260+261+262) / 9.0, |
---|
1793 | (228+229+230+246+247+248+264+265+266) / 9.0] |
---|
1794 | |
---|
1795 | # Generate a stencil for computing the decimated values |
---|
1796 | stencil = num.ones((3,3), num.float) / 9.0 |
---|
1797 | |
---|
1798 | decimate_dem(root, stencil=stencil, cellsize_new=100) |
---|
1799 | |
---|
1800 | # Open decimated NetCDF file |
---|
1801 | fid = NetCDFFile(root + '_100.dem', netcdf_mode_r) |
---|
1802 | |
---|
1803 | # Get decimated elevation |
---|
1804 | elevation = fid.variables['elevation'] |
---|
1805 | |
---|
1806 | # Check values |
---|
1807 | assert num.allclose(elevation, ref_elevation) |
---|
1808 | |
---|
1809 | # Cleanup |
---|
1810 | fid.close() |
---|
1811 | |
---|
1812 | os.remove(root + '.dem') |
---|
1813 | os.remove(root + '_100.dem') |
---|
1814 | |
---|
    def test_urs2sts0(self):
        """Convert a single synthetic mux2 source to .sts and verify it.

        Writes a mux2 file for four gauges where gauges 0 and 2 start
        recording one step late and gauge 0 also stops one step early,
        runs urs2sts, then checks the .sts file's coordinates, time
        vector, stage, momenta and elevation against the input data.
        """
        tide=0
        time_step_count = 3
        time_step = 2
        lat_long_points =[(-21.5,114.5),(-21,114.5),(-21.5,115), (-21.,115.)]
        n=len(lat_long_points)

        # Gauges 0 and 2 begin recording one time step late ...
        first_tstep=num.ones(n,num.int)
        first_tstep[0]+=1
        first_tstep[2]+=1
        # ... and gauge 0 also stops recording one time step early
        last_tstep=(time_step_count)*num.ones(n,num.int)
        last_tstep[0]-=1

        # Per-gauge water depth, wave height (ha) and easting/northing
        # velocities (ua/va) for each time step
        gauge_depth=20*num.ones(n,num.float)
        ha=2*num.ones((n,time_step_count),num.float)
        ha[0]=num.arange(0,time_step_count)
        ha[1]=num.arange(time_step_count,2*time_step_count)
        ha[2]=num.arange(2*time_step_count,3*time_step_count)
        ha[3]=num.arange(3*time_step_count,4*time_step_count)
        ua=5*num.ones((n,time_step_count),num.float)
        va=-10*num.ones((n,time_step_count),num.float)

        base_name, files = self.write_mux2(lat_long_points,
                                           time_step_count, time_step,
                                           first_tstep, last_tstep,
                                           depth=gauge_depth,
                                           ha=ha,
                                           ua=ua,
                                           va=va)

        urs2sts(base_name,
                basename_out=base_name,
                mean_stage=tide,verbose=False)

        # now I want to check the sts file ...
        sts_file = base_name + '.sts'

        # Let's interrogate the sts file
        # Note, the sts info is not gridded. It is point data.
        fid = NetCDFFile(sts_file)

        # Make x and y absolute
        x = fid.variables['x'][:]
        y = fid.variables['y'][:]

        geo_reference = Geo_reference(NetCDFObject=fid)
        points = geo_reference.get_absolute(map(None, x, y))
        points = ensure_numeric(points)

        x = points[:,0]
        y = points[:,1]

        # Check that each coordinate is correctly represented by working
        # out the UTM coordinates for each point
        # NOTE: the loop target rebinds n (shadows the gauge count above)
        for i in range(4):
            zone, e, n = redfearn(lat_long_points[i][0], lat_long_points[i][1])
            assert num.allclose([x[i],y[i]], [e,n])

        # Check the time vector: expected to be time_step * [0, 1, 2]
        times = fid.variables['time'][:]

        times_actual = []
        for i in range(time_step_count):
            times_actual.append(time_step * i)

        assert num.allclose(ensure_numeric(times),
                            ensure_numeric(times_actual))

        # Check stored quantities
        stage = fid.variables['stage'][:]
        xmomentum = fid.variables['xmomentum'][:]
        ymomentum = fid.variables['ymomentum'][:]
        elevation = fid.variables['elevation'][:]

        # Set original data used to write mux file to be zero when gauges are
        # not recording (matches the truncated first/last time steps above)
        ha[0][0]=0.0
        ha[0][time_step_count-1]=0.0;
        ha[2][0]=0.0;
        ua[0][0]=0.0
        ua[0][time_step_count-1]=0.0;
        ua[2][0]=0.0;
        va[0][0]=0.0
        va[0][time_step_count-1]=0.0;
        va[2][0]=0.0;

        # Stored stage is (time, point); input ha is (point, time)
        assert num.allclose(num.transpose(ha),stage)  #Meters

        #Check the momentums - ua
        #momentum = velocity*(stage-elevation)
        # elevation = - depth
        #momentum = velocity_ua *(stage+depth)

        depth=num.zeros((len(lat_long_points),time_step_count),num.float)
        for i in range(len(lat_long_points)):
            depth[i]=gauge_depth[i]+tide+ha[i]
        assert num.allclose(num.transpose(ua*depth),xmomentum)

        #Check the momentums - va
        #momentum = velocity*(stage-elevation)
        # elevation = - depth
        #momentum = velocity_va *(stage+depth)

        assert num.allclose(num.transpose(va*depth),ymomentum)

        # check the elevation values.
        # -ve since urs measures depth, sww measures height
        assert num.allclose(-elevation, gauge_depth)  #Meters

        # Cleanup
        fid.close()
        self.delete_mux(files)
        os.remove(sts_file)
1930 | def test_urs2sts_nonstandard_meridian(self): |
---|
1931 | """ |
---|
1932 | Test single source using the meridian from zone 50 as a nonstandard meridian |
---|
1933 | """ |
---|
1934 | tide=0 |
---|
1935 | time_step_count = 3 |
---|
1936 | time_step = 2 |
---|
1937 | lat_long_points =[(-21.,114.5),(-21.,113.5),(-21.,114.), (-21.,115.)] |
---|
1938 | n=len(lat_long_points) |
---|
1939 | first_tstep=num.ones(n,num.int) |
---|
1940 | first_tstep[0]+=1 |
---|
1941 | first_tstep[2]+=1 |
---|
1942 | last_tstep=(time_step_count)*num.ones(n,num.int) |
---|
1943 | last_tstep[0]-=1 |
---|
1944 | |
---|
1945 | gauge_depth=20*num.ones(n,num.float) |
---|
1946 | ha=2*num.ones((n,time_step_count),num.float) |
---|
1947 | ha[0]=num.arange(0,time_step_count) |
---|
1948 | ha[1]=num.arange(time_step_count,2*time_step_count) |
---|
1949 | ha[2]=num.arange(2*time_step_count,3*time_step_count) |
---|
1950 | ha[3]=num.arange(3*time_step_count,4*time_step_count) |
---|
1951 | ua=5*num.ones((n,time_step_count),num.float) |
---|
1952 | va=-10*num.ones((n,time_step_count),num.float) |
---|
1953 | |
---|
1954 | base_name, files = self.write_mux2(lat_long_points, |
---|
1955 | time_step_count, time_step, |
---|
1956 | first_tstep, last_tstep, |
---|
1957 | depth=gauge_depth, |
---|
1958 | ha=ha, |
---|
1959 | ua=ua, |
---|
1960 | va=va) |
---|
1961 | |
---|
1962 | urs2sts(base_name, |
---|
1963 | basename_out=base_name, |
---|
1964 | central_meridian=123, |
---|
1965 | mean_stage=tide, |
---|
1966 | verbose=False) |
---|
1967 | |
---|
1968 | # now I want to check the sts file ... |
---|
1969 | sts_file = base_name + '.sts' |
---|
1970 | |
---|
1971 | #Let's interigate the sww file |
---|
1972 | # Note, the sww info is not gridded. It is point data. |
---|
1973 | fid = NetCDFFile(sts_file) |
---|
1974 | |
---|
1975 | # Make x and y absolute |
---|
1976 | x = fid.variables['x'][:] |
---|
1977 | y = fid.variables['y'][:] |
---|
1978 | |
---|
1979 | geo_reference = Geo_reference(NetCDFObject=fid) |
---|
1980 | points = geo_reference.get_absolute(map(None, x, y)) |
---|
1981 | points = ensure_numeric(points) |
---|
1982 | |
---|
1983 | x = points[:,0] |
---|
1984 | y = points[:,1] |
---|
1985 | |
---|
1986 | # Check that all coordinate are correctly represented |
---|
1987 | # Using the non standard projection (50) |
---|
1988 | for i in range(4): |
---|
1989 | zone, e, n = redfearn(lat_long_points[i][0], |
---|
1990 | lat_long_points[i][1], |
---|
1991 | central_meridian=123) |
---|
1992 | assert num.allclose([x[i],y[i]], [e,n]) |
---|
1993 | assert zone==-1 |
---|
1994 | |
---|
1995 | self.delete_mux(files) |
---|
1996 | |
---|
1997 | |
---|
1998 | def test_urs2sts_nonstandard_projection_reverse(self): |
---|
1999 | """ |
---|
2000 | Test that a point not in the specified zone can occur first |
---|
2001 | """ |
---|
2002 | tide=0 |
---|
2003 | time_step_count = 3 |
---|
2004 | time_step = 2 |
---|
2005 | lat_long_points =[(-21.,113.5),(-21.,114.5),(-21.,114.), (-21.,115.)] |
---|
2006 | n=len(lat_long_points) |
---|
2007 | first_tstep=num.ones(n,num.int) |
---|
2008 | first_tstep[0]+=1 |
---|
2009 | first_tstep[2]+=1 |
---|
2010 | last_tstep=(time_step_count)*num.ones(n,num.int) |
---|
2011 | last_tstep[0]-=1 |
---|
2012 | |
---|
2013 | gauge_depth=20*num.ones(n,num.float) |
---|
2014 | ha=2*num.ones((n,time_step_count),num.float) |
---|
2015 | ha[0]=num.arange(0,time_step_count) |
---|
2016 | ha[1]=num.arange(time_step_count,2*time_step_count) |
---|
2017 | ha[2]=num.arange(2*time_step_count,3*time_step_count) |
---|
2018 | ha[3]=num.arange(3*time_step_count,4*time_step_count) |
---|
2019 | ua=5*num.ones((n,time_step_count),num.float) |
---|
2020 | va=-10*num.ones((n,time_step_count),num.float) |
---|
2021 | |
---|
2022 | base_name, files = self.write_mux2(lat_long_points, |
---|
2023 | time_step_count, time_step, |
---|
2024 | first_tstep, last_tstep, |
---|
2025 | depth=gauge_depth, |
---|
2026 | ha=ha, |
---|
2027 | ua=ua, |
---|
2028 | va=va) |
---|
2029 | |
---|
2030 | urs2sts(base_name, |
---|
2031 | basename_out=base_name, |
---|
2032 | zone=50, |
---|
2033 | mean_stage=tide,verbose=False) |
---|
2034 | |
---|
2035 | # now I want to check the sts file ... |
---|
2036 | sts_file = base_name + '.sts' |
---|
2037 | |
---|
2038 | #Let's interigate the sww file |
---|
2039 | # Note, the sww info is not gridded. It is point data. |
---|
2040 | fid = NetCDFFile(sts_file) |
---|
2041 | |
---|
2042 | # Make x and y absolute |
---|
2043 | x = fid.variables['x'][:] |
---|
2044 | y = fid.variables['y'][:] |
---|
2045 | |
---|
2046 | geo_reference = Geo_reference(NetCDFObject=fid) |
---|
2047 | points = geo_reference.get_absolute(map(None, x, y)) |
---|
2048 | points = ensure_numeric(points) |
---|
2049 | |
---|
2050 | x = points[:,0] |
---|
2051 | y = points[:,1] |
---|
2052 | |
---|
2053 | # Check that all coordinate are correctly represented |
---|
2054 | # Using the non standard projection (50) |
---|
2055 | for i in range(4): |
---|
2056 | zone, e, n = redfearn(lat_long_points[i][0], lat_long_points[i][1], |
---|
2057 | zone=50) |
---|
2058 | assert num.allclose([x[i],y[i]], [e,n]) |
---|
2059 | assert zone==geo_reference.zone |
---|
2060 | |
---|
2061 | self.delete_mux(files) |
---|
2062 | |
---|
2063 | |
---|
    def test_urs2stsII(self):
        """Combine two identical mux2 sources with urs2sts and verify.

        Both sources get weight 1.0, so the stored stage should equal
        2*ha and the momenta 2*velocity*depth, where depth itself uses
        the doubled stage.
        """
        tide=0
        time_step_count = 3
        time_step = 2
        lat_long_points =[(-21.5,114.5),(-21,114.5),(-21.5,115), (-21.,115.)]
        n=len(lat_long_points)

        # Gauges 0 and 2 begin recording one step late; gauge 0 stops early
        first_tstep=num.ones(n,num.int)
        first_tstep[0]+=1
        first_tstep[2]+=1
        last_tstep=(time_step_count)*num.ones(n,num.int)
        last_tstep[0]-=1

        # Per-gauge depth, wave height (ha) and velocities (ua/va)
        gauge_depth=20*num.ones(n,num.float)
        ha=2*num.ones((n,time_step_count),num.float)
        ha[0]=num.arange(0,time_step_count)
        ha[1]=num.arange(time_step_count,2*time_step_count)
        ha[2]=num.arange(2*time_step_count,3*time_step_count)
        ha[3]=num.arange(3*time_step_count,4*time_step_count)
        ua=5*num.ones((n,time_step_count),num.float)
        va=-10*num.ones((n,time_step_count),num.float)

        # Create two identical mux files to be combined by urs2sts
        base_nameI, filesI = self.write_mux2(lat_long_points,
                                             time_step_count, time_step,
                                             first_tstep, last_tstep,
                                             depth=gauge_depth,
                                             ha=ha,
                                             ua=ua,
                                             va=va)

        base_nameII, filesII = self.write_mux2(lat_long_points,
                                               time_step_count, time_step,
                                               first_tstep, last_tstep,
                                               depth=gauge_depth,
                                               ha=ha,
                                               ua=ua,
                                               va=va)

        # Call urs2sts with multiple mux files
        urs2sts([base_nameI, base_nameII],
                basename_out=base_nameI,
                weights=[1.0, 1.0],
                mean_stage=tide,
                verbose=False)

        # now I want to check the sts file ...
        sts_file = base_nameI + '.sts'

        #Let's interrogate the sts file
        # Note, the sts info is not gridded. It is point data.
        fid = NetCDFFile(sts_file)

        # Make x and y absolute
        x = fid.variables['x'][:]
        y = fid.variables['y'][:]

        geo_reference = Geo_reference(NetCDFObject=fid)
        points = geo_reference.get_absolute(map(None, x, y))
        points = ensure_numeric(points)

        x = points[:,0]
        y = points[:,1]

        #Check that first coordinate is correctly represented
        #Work out the UTM coordinates for first point
        zone, e, n = redfearn(lat_long_points[0][0], lat_long_points[0][1])
        assert num.allclose([x[0],y[0]], [e,n])

        # Check the time vector: expected to be time_step * [0, 1, 2]
        times = fid.variables['time'][:]

        times_actual = []
        for i in range(time_step_count):
            times_actual.append(time_step * i)

        assert num.allclose(ensure_numeric(times),
                            ensure_numeric(times_actual))

        # Check stored quantities
        stage = fid.variables['stage'][:]
        xmomentum = fid.variables['xmomentum'][:]
        ymomentum = fid.variables['ymomentum'][:]
        elevation = fid.variables['elevation'][:]

        # Set original data used to write mux file to be zero when gauges are
        # not recording (matches the truncated first/last time steps above)

        ha[0][0]=0.0
        ha[0][time_step_count-1]=0.0
        ha[2][0]=0.0
        ua[0][0]=0.0
        ua[0][time_step_count-1]=0.0
        ua[2][0]=0.0
        va[0][0]=0.0
        va[0][time_step_count-1]=0.0
        va[2][0]=0.0;

        # The stage stored in the .sts file should be the sum of the stage
        # in the two mux2 files because both have weights = 1. In this case
        # the mux2 files are the same so stage == 2.0 * ha
        #print 2.0*num.transpose(ha) - stage
        assert num.allclose(2.0*num.transpose(ha), stage)  #Meters

        #Check the momentums - ua
        #momentum = velocity*(stage-elevation)
        # elevation = - depth
        #momentum = velocity_ua *(stage+depth)

        depth=num.zeros((len(lat_long_points),time_step_count),num.float)
        for i in range(len(lat_long_points)):
            depth[i]=gauge_depth[i]+tide+2.0*ha[i]
            #2.0*ha necessary because using two files with weights=1 are used

        # The xmomentum stored in the .sts file should be the sum of the ua
        # in the two mux2 files multiplied by the depth.
        assert num.allclose(2.0*num.transpose(ua*depth), xmomentum)

        #Check the momentums - va
        #momentum = velocity*(stage-elevation)
        # elevation = - depth
        #momentum = velocity_va *(stage+depth)

        # The ymomentum stored in the .sts file should be the sum of the va
        # in the two mux2 files multiplied by the depth.
        assert num.allclose(2.0*num.transpose(va*depth), ymomentum)

        # check the elevation values.
        # -ve since urs measures depth, sww measures height
        assert num.allclose(-elevation, gauge_depth)  #Meters

        # Cleanup
        fid.close()
        self.delete_mux(filesI)
        self.delete_mux(filesII)
        os.remove(sts_file)
    def test_urs2sts_individual_sources(self):
        """Test that individual sources compare to actual urs output.

        For each of three urs sources, converts the source to .sts and
        compares the stored stage/momenta against reference urs CSV data
        per station.  Also checks that the .sts start time equals the
        minimum station start time for that source.
        """

        # Get path where this test is run
        path = get_pathname_from_package('anuga.shallow_water')

        testdir = os.path.join(path, 'urs_test_data')
        ordering_filename=os.path.join(testdir, 'thinned_bound_order_test.txt')

        sources = ['1-z.grd','2-z.grd','3-z.grd']

        # Start times by source and station taken manually from urs header files
        time_start_z = num.array([[10.0,11.5,13,14.5,17.7],
                                  [9.8,11.2,12.7,14.2,17.4],
                                  [9.5,10.9,12.4,13.9,17.1]])

        # Easting/northing velocity start times mirror the stage start times
        time_start_e = time_start_n = time_start_z

        # time step in urs output
        delta_t = 0.1

        # Make sts file for each source
        for k, source_filename in enumerate(sources):
            source_number = k + 1 # Source numbering starts at 1

            urs_filenames = os.path.join(testdir, source_filename)
            weights = [1.]
            sts_name_out = 'test'

            urs2sts(urs_filenames,
                    basename_out=sts_name_out,
                    ordering_filename=ordering_filename,
                    weights=weights,
                    mean_stage=0.0,
                    verbose=False)

            # Read in sts file for this source file
            fid = NetCDFFile(sts_name_out+'.sts', netcdf_mode_r) # Open existing file for read
            x = fid.variables['x'][:]+fid.xllcorner    # x-coordinates of vertices
            y = fid.variables['y'][:]+fid.yllcorner    # y-coordinates of vertices
            elevation = fid.variables['elevation'][:]
            time=fid.variables['time'][:]+fid.starttime

            # Get quantity data from sts file
            quantity_names=['stage','xmomentum','ymomentum']
            quantities = {}
            for i, name in enumerate(quantity_names):
                quantities[name] = fid.variables[name][:]

            # Make sure start time from sts file is the minimum starttime
            # across all stations (for this source)
            #print k, time_start_z[k,:]
            starttime = min(time_start_z[k, :])
            sts_starttime = fid.starttime[0]
            msg = 'sts starttime for source %d was %f. Should have been %f'\
                  %(source_number, sts_starttime, starttime)
            assert num.allclose(sts_starttime, starttime), msg

            # For each station, compare urs2sts output to known urs output
            for j in range(len(x)):
                # Index bookkeeping for the recording window of this station
                index_start_urs = 0
                index_end_urs = 0
                index_start = 0
                index_end = 0
                count = 0

                # read in urs test data for stage, e and n velocity
                urs_file_name_z = 'z_'+str(source_number)+'_'+str(j)+'.csv'
                dict = load_csv_as_array(os.path.join(testdir, urs_file_name_z))
                urs_stage = dict['urs_stage']
                urs_file_name_e = 'e_'+str(source_number)+'_'+str(j)+'.csv'
                dict = load_csv_as_array(os.path.join(testdir, urs_file_name_e))
                urs_e = dict['urs_e']
                urs_file_name_n = 'n_'+str(source_number)+'_'+str(j)+'.csv'
                dict = load_csv_as_array(os.path.join(testdir, urs_file_name_n))
                urs_n = dict['urs_n']

                # find start and end time for stage: recording starts after
                # the last leading 0.0; a value of 99 marks the end sentinel
                # (NOTE: '<>' is the Python 2 inequality operator)
                for i in range(len(urs_stage)):
                    if urs_stage[i] == 0.0:
                        index_start_urs_z = i+1
                    if int(urs_stage[i]) == 99 and count <> 1:
                        count +=1
                        index_end_urs_z = i

                # No sentinel found: the station records to the end
                if count == 0: index_end_urs_z = len(urs_stage)

                start_times_z = time_start_z[source_number-1]

                # start times for easting velocities should be the same as stage
                start_times_e = time_start_e[source_number-1]
                index_start_urs_e = index_start_urs_z
                index_end_urs_e = index_end_urs_z

                # start times for northing velocities should be the same as stage
                start_times_n = time_start_n[source_number-1]
                index_start_urs_n = index_start_urs_z
                index_end_urs_n = index_end_urs_z

                # Check that actual start time matches header information for stage
                msg = 'stage start time from urs file is not the same as the '
                msg += 'header file for source %i and station %i' %(source_number,j)
                assert num.allclose(index_start_urs_z,start_times_z[j]/delta_t), msg

                msg = 'e velocity start time from urs file is not the same as the '
                msg += 'header file for source %i and station %i' %(source_number,j)
                assert num.allclose(index_start_urs_e,start_times_e[j]/delta_t), msg

                msg = 'n velocity start time from urs file is not the same as the '
                msg += 'header file for source %i and station %i' %(source_number,j)
                assert num.allclose(index_start_urs_n,start_times_n[j]/delta_t), msg

                # get index for start and end time for sts quantities
                index_start_stage = 0
                index_end_stage = 0
                count = 0
                sts_stage = quantities['stage'][:,j]
                for i in range(len(sts_stage)):
                    if sts_stage[i] <> 0.0 and count <> 1:
                        count += 1
                        index_start_stage = i
                    if int(sts_stage[i]) == 99 and count <> 1:
                        count += 1
                        index_end_stage = i

                # End index is derived from the urs window length so both
                # slices compare the same number of samples
                index_end_stage = index_start_stage + len(urs_stage[index_start_urs_z:index_end_urs_z])

                sts_xmom = quantities['xmomentum'][:,j]
                index_start_x = index_start_stage
                index_end_x = index_start_x + len(urs_e[index_start_urs_e:index_end_urs_e])

                sts_ymom = quantities['ymomentum'][:,j]
                index_start_y = index_start_stage
                index_end_y = index_start_y + len(urs_n[index_start_urs_n:index_end_urs_n])

                # check that urs stage and sts stage are the same
                msg = 'urs stage is not equal to sts stage for for source %i and station %i' %(source_number,j)
                assert num.allclose(urs_stage[index_start_urs_z:index_end_urs_z],
                                    sts_stage[index_start_stage:index_end_stage],
                                    rtol=1.0e-6, atol=1.0e-5 ), msg

                # check that urs e velocity and sts xmomentum are the same
                # (momentum = velocity * (stage - elevation))
                msg = 'urs e velocity is not equivalent to sts x momentum for for source %i and station %i' %(source_number,j)
                assert num.allclose(urs_e[index_start_urs_e:index_end_urs_e]*(urs_stage[index_start_urs_e:index_end_urs_e]-elevation[j]),
                                    sts_xmom[index_start_x:index_end_x],
                                    rtol=1.0e-5, atol=1.0e-4 ), msg

                # check that urs n velocity and sts ymomentum are the same
                # NOTE(review): the sts ymomentum is negated here —
                # presumably the urs northing velocity sign convention is
                # opposite to sts; confirm against urs2sts
                #print 'urs n velocity', urs_n[index_start_urs_n:index_end_urs_n]*(urs_stage[index_start_urs_n:index_end_urs_n]-elevation[j])
                #print 'sts momentum', sts_ymom[index_start_y:index_end_y]
                msg = 'urs n velocity is not equivalent to sts y momentum for source %i and station %i' %(source_number,j)
                assert num.allclose(urs_n[index_start_urs_n:index_end_urs_n]*(urs_stage[index_start_urs_n:index_end_urs_n]-elevation[j]),
                                    -sts_ymom[index_start_y:index_end_y],
                                    rtol=1.0e-5, atol=1.0e-4 ), msg


            # Cleanup per source
            fid.close()

            os.remove(sts_name_out+'.sts')
2365 | def test_urs2sts_combined_sources(self): |
---|
2366 | """Test that combined sources compare to actual urs output |
---|
2367 | Test that the first recording time is the smallest |
---|
2368 | over waveheight, easting and northing velocity |
---|
2369 | """ |
---|
2370 | |
---|
2371 | # combined |
---|
2372 | time_start_z = num.array([9.5,10.9,12.4,13.9,17.1]) |
---|
2373 | time_start_e = time_start_n = time_start_z |
---|
2374 | |
---|
2375 | # make sts file for combined sources |
---|
2376 | weights = [1., 2., 3.] |
---|
2377 | |
---|
2378 | path = get_pathname_from_package('anuga.shallow_water') |
---|
2379 | |
---|
2380 | testdir = os.path.join(path, 'urs_test_data') |
---|
2381 | ordering_filename=os.path.join(testdir, 'thinned_bound_order_test.txt') |
---|
2382 | |
---|
2383 | urs_filenames = [os.path.join(testdir,'1-z.grd'), |
---|
2384 | os.path.join(testdir,'2-z.grd'), |
---|
2385 | os.path.join(testdir,'3-z.grd')] |
---|
2386 | sts_name_out = 'test' |
---|
2387 | |
---|
2388 | urs2sts(urs_filenames, |
---|
2389 | basename_out=sts_name_out, |
---|
2390 | ordering_filename=ordering_filename, |
---|
2391 | weights=weights, |
---|
2392 | mean_stage=0.0, |
---|
2393 | verbose=False) |
---|
2394 | |
---|
2395 | # read in sts file for combined source |
---|
2396 | fid = NetCDFFile(sts_name_out+'.sts', netcdf_mode_r) # Open existing file for read |
---|
2397 | x = fid.variables['x'][:]+fid.xllcorner # x-coordinates of vertices |
---|
2398 | y = fid.variables['y'][:]+fid.yllcorner # y-coordinates of vertices |
---|
2399 | elevation = fid.variables['elevation'][:] |
---|
2400 | time=fid.variables['time'][:]+fid.starttime |
---|
2401 | |
---|
2402 | |
---|
2403 | # Check that stored permutation is as per default |
---|
2404 | permutation = range(len(x)) |
---|
2405 | stored_permutation = fid.variables['permutation'][:] |
---|
2406 | msg = 'Permutation was not stored correctly. I got ' |
---|
2407 | msg += str(stored_permutation) |
---|
2408 | assert num.allclose(stored_permutation, permutation), msg |
---|
2409 | |
---|
2410 | # Get quantity data from sts file |
---|
2411 | quantity_names=['stage','xmomentum','ymomentum'] |
---|
2412 | quantities = {} |
---|
2413 | for i, name in enumerate(quantity_names): |
---|
2414 | quantities[name] = fid.variables[name][:] |
---|
2415 | |
---|
2416 | # For each station, compare urs2sts output to known urs output |
---|
2417 | delta_t = 0.1 |
---|
2418 | |
---|
2419 | # Make sure start time from sts file is the minimum starttime |
---|
2420 | # across all stations (for this source) |
---|
2421 | starttime = min(time_start_z[:]) |
---|
2422 | sts_starttime = fid.starttime[0] |
---|
2423 | msg = 'sts starttime was %f. Should have been %f'\ |
---|
2424 | %(sts_starttime, starttime) |
---|
2425 | assert num.allclose(sts_starttime, starttime), msg |
---|
2426 | |
---|
2427 | #stations = [1,2,3] |
---|
2428 | #for j in stations: |
---|
2429 | for j in range(len(x)): |
---|
2430 | index_start_urs_z = 0 |
---|
2431 | index_end_urs_z = 0 |
---|
2432 | index_start_urs_e = 0 |
---|
2433 | index_end_urs_e = 0 |
---|
2434 | index_start_urs_n = 0 |
---|
2435 | index_end_urs_n = 0 |
---|
2436 | count = 0 |
---|
2437 | |
---|
2438 | # read in urs test data for stage, e and n velocity |
---|
2439 | urs_file_name_z = 'z_combined_'+str(j)+'.csv' |
---|
2440 | dict = load_csv_as_array(os.path.join(testdir, urs_file_name_z)) |
---|
2441 | urs_stage = dict['urs_stage'] |
---|
2442 | urs_file_name_e = 'e_combined_'+str(j)+'.csv' |
---|
2443 | dict = load_csv_as_array(os.path.join(testdir, urs_file_name_e)) |
---|
2444 | urs_e = dict['urs_e'] |
---|
2445 | urs_file_name_n = 'n_combined_'+str(j)+'.csv' |
---|
2446 | dict = load_csv_as_array(os.path.join(testdir, urs_file_name_n)) |
---|
2447 | urs_n = dict['urs_n'] |
---|
2448 | |
---|
2449 | # find start and end time for stage |
---|
2450 | for i in range(len(urs_stage)): |
---|
2451 | if urs_stage[i] == 0.0: |
---|
2452 | index_start_urs_z = i+1 |
---|
2453 | if int(urs_stage[i]) == 99 and count <> 1: |
---|
2454 | count +=1 |
---|
2455 | index_end_urs_z = i |
---|
2456 | |
---|
2457 | if count == 0: index_end_urs_z = len(urs_stage) |
---|
2458 | |
---|
2459 | start_times_z = time_start_z[j] |
---|
2460 | |
---|
2461 | start_times_e = time_start_e[j] |
---|
2462 | index_start_urs_e = index_start_urs_z |
---|
2463 | |
---|
2464 | start_times_n = time_start_n[j] |
---|
2465 | index_start_urs_n = index_start_urs_z |
---|
2466 | |
---|
2467 | # Check that actual start time matches header information for stage |
---|
2468 | msg = 'stage start time from urs file is not the same as the ' |
---|
2469 | msg += 'header file at station %i' %(j) |
---|
2470 | assert num.allclose(index_start_urs_z,start_times_z/delta_t), msg |
---|
2471 | |
---|
2472 | msg = 'e velocity start time from urs file is not the same as the ' |
---|
2473 | msg += 'header file at station %i' %(j) |
---|
2474 | assert num.allclose(index_start_urs_e,start_times_e/delta_t), msg |
---|
2475 | |
---|
2476 | msg = 'n velocity start time from urs file is not the same as the ' |
---|
2477 | msg += 'header file at station %i' %(j) |
---|
2478 | assert num.allclose(index_start_urs_n,start_times_n/delta_t), msg |
---|
2479 | |
---|
2480 | # get index for start and end time for sts quantities |
---|
2481 | index_start_stage = 0 |
---|
2482 | index_end_stage = 0 |
---|
2483 | index_start_x = 0 |
---|
2484 | index_end_x = 0 |
---|
2485 | index_start_y = 0 |
---|
2486 | index_end_y = 0 |
---|
2487 | count = 0 |
---|
2488 | count1 = 0 |
---|
2489 | sts_stage = quantities['stage'][:,j] |
---|
2490 | for i in range(len(sts_stage)): |
---|
2491 | if sts_stage[i] <> 0.0 and count <> 1: |
---|
2492 | count += 1 |
---|
2493 | index_start_stage = i |
---|
2494 | if int(urs_stage[i]) == 99 and count <> 1: |
---|
2495 | count +=1 |
---|
2496 | index_end_stage = i |
---|
2497 | |
---|
2498 | index_end_stage = index_start_stage + len(urs_stage[index_start_urs_z:index_end_urs_z]) |
---|
2499 | |
---|
2500 | index_start_x = index_start_stage |
---|
2501 | index_end_x = index_start_x + len(urs_stage[index_start_urs_e:index_end_urs_e]) |
---|
2502 | sts_xmom = quantities['ymomentum'][:,j] |
---|
2503 | |
---|
2504 | index_start_y = index_start_stage |
---|
2505 | index_end_y = index_start_y + len(urs_stage[index_start_urs_n:index_end_urs_n]) |
---|
2506 | sts_ymom = quantities['ymomentum'][:,j] |
---|
2507 | |
---|
2508 | # check that urs stage and sts stage are the same |
---|
2509 | msg = 'urs stage is not equal to sts stage for station %i' %j |
---|
2510 | #print 'urs stage', urs_stage[index_start_urs_z:index_end_urs_z] |
---|
2511 | #print 'sts stage', sts_stage[index_start_stage:index_end_stage] |
---|
2512 | #print 'diff', max(urs_stage[index_start_urs_z:index_end_urs_z]-sts_stage[index_start_stage:index_end_stage]) |
---|
2513 | #print 'index', index_start_stage, index_end_stage, len(sts_stage) |
---|
2514 | assert num.allclose(urs_stage[index_start_urs_z:index_end_urs_z], |
---|
2515 | sts_stage[index_start_stage:index_end_stage], |
---|
2516 | rtol=1.0e-5, atol=1.0e-4 ), msg |
---|
2517 | |
---|
2518 | # check that urs e velocity and sts xmomentum are the same |
---|
2519 | msg = 'urs e velocity is not equivalent to sts xmomentum for station %i' %j |
---|
2520 | assert num.allclose(urs_e[index_start_urs_e:index_end_urs_e]*(urs_stage[index_start_urs_e:index_end_urs_e]-elevation[j]), |
---|
2521 | sts_xmom[index_start_x:index_end_x], |
---|
2522 | rtol=1.0e-5, atol=1.0e-4 ), msg |
---|
2523 | |
---|
2524 | # check that urs n velocity and sts ymomentum are the same |
---|
2525 | msg = 'urs n velocity is not equivalent to sts ymomentum for station %i' %j |
---|
2526 | assert num.allclose(urs_n[index_start_urs_n:index_end_urs_n]*(urs_stage[index_start_urs_n:index_end_urs_n]-elevation[j]), |
---|
2527 | sts_ymom[index_start_y:index_end_y], |
---|
2528 | rtol=1.0e-5, atol=1.0e-4 ), msg |
---|
2529 | |
---|
2530 | fid.close() |
---|
2531 | |
---|
2532 | os.remove(sts_name_out+'.sts') |
---|
2533 | |
---|
2534 | |
---|
2535 | |
---|
2536 | def test_urs2sts_ordering(self): |
---|
2537 | """Test multiple sources with ordering file |
---|
2538 | """ |
---|
2539 | |
---|
2540 | tide = 0.35 |
---|
2541 | time_step_count = 6 # I made this a little longer (Ole) |
---|
2542 | time_step = 2 |
---|
2543 | lat_long_points =[(-21.5,114.5),(-21,114.5),(-21.5,115), (-21.,115.)] |
---|
2544 | n=len(lat_long_points) |
---|
2545 | first_tstep=num.ones(n,num.int) |
---|
2546 | first_tstep[0]+=1 |
---|
2547 | first_tstep[2]+=1 |
---|
2548 | last_tstep=(time_step_count)*num.ones(n,num.int) |
---|
2549 | last_tstep[0]-=1 |
---|
2550 | |
---|
2551 | gauge_depth=20*num.ones(n,num.float) |
---|
2552 | ha=2*num.ones((n,time_step_count),num.float) |
---|
2553 | ha[0]=num.arange(0,time_step_count) |
---|
2554 | ha[1]=num.arange(time_step_count,2*time_step_count) |
---|
2555 | ha[2]=num.arange(2*time_step_count,3*time_step_count) |
---|
2556 | ha[3]=num.arange(3*time_step_count,4*time_step_count) |
---|
2557 | ua=5*num.ones((n,time_step_count),num.float) |
---|
2558 | va=-10*num.ones((n,time_step_count),num.float) |
---|
2559 | |
---|
2560 | # Create two identical mux files to be combined by urs2sts |
---|
2561 | base_nameI, filesI = self.write_mux2(lat_long_points, |
---|
2562 | time_step_count, time_step, |
---|
2563 | first_tstep, last_tstep, |
---|
2564 | depth=gauge_depth, |
---|
2565 | ha=ha, |
---|
2566 | ua=ua, |
---|
2567 | va=va) |
---|
2568 | |
---|
2569 | base_nameII, filesII = self.write_mux2(lat_long_points, |
---|
2570 | time_step_count, time_step, |
---|
2571 | first_tstep, last_tstep, |
---|
2572 | depth=gauge_depth, |
---|
2573 | ha=ha, |
---|
2574 | ua=ua, |
---|
2575 | va=va) |
---|
2576 | |
---|
2577 | |
---|
2578 | # Create ordering file |
---|
2579 | permutation = [3,0,2] |
---|
2580 | |
---|
2581 | _, ordering_filename = tempfile.mkstemp('') |
---|
2582 | order_fid = open(ordering_filename, 'w') |
---|
2583 | order_fid.write('index, longitude, latitude\n') |
---|
2584 | for index in permutation: |
---|
2585 | order_fid.write('%d, %f, %f\n' %(index, |
---|
2586 | lat_long_points[index][1], |
---|
2587 | lat_long_points[index][0])) |
---|
2588 | order_fid.close() |
---|
2589 | |
---|
2590 | |
---|
2591 | |
---|
2592 | |
---|
2593 | # Call urs2sts with multiple mux files |
---|
2594 | urs2sts([base_nameI, base_nameII], |
---|
2595 | basename_out=base_nameI, |
---|
2596 | ordering_filename=ordering_filename, |
---|
2597 | weights=[1.0, 1.0], |
---|
2598 | mean_stage=tide, |
---|
2599 | verbose=False) |
---|
2600 | |
---|
2601 | # now I want to check the sts file ... |
---|
2602 | sts_file = base_nameI + '.sts' |
---|
2603 | |
---|
2604 | #Let's interrogate the sts file |
---|
2605 | # Note, the sts info is not gridded. It is point data. |
---|
2606 | fid = NetCDFFile(sts_file) |
---|
2607 | |
---|
2608 | # Check that original indices have been stored |
---|
2609 | stored_permutation = fid.variables['permutation'][:] |
---|
2610 | msg = 'Permutation was not stored correctly. I got ' |
---|
2611 | msg += str(stored_permutation) |
---|
2612 | assert num.allclose(stored_permutation, permutation), msg |
---|
2613 | |
---|
2614 | |
---|
2615 | # Make x and y absolute |
---|
2616 | x = fid.variables['x'][:] |
---|
2617 | y = fid.variables['y'][:] |
---|
2618 | |
---|
2619 | geo_reference = Geo_reference(NetCDFObject=fid) |
---|
2620 | points = geo_reference.get_absolute(map(None, x, y)) |
---|
2621 | points = ensure_numeric(points) |
---|
2622 | |
---|
2623 | x = points[:,0] |
---|
2624 | y = points[:,1] |
---|
2625 | |
---|
2626 | #print |
---|
2627 | #print x |
---|
2628 | #print y |
---|
2629 | for i, index in enumerate(permutation): |
---|
2630 | # Check that STS points are stored in the correct order |
---|
2631 | |
---|
2632 | # Work out the UTM coordinates sts point i |
---|
2633 | zone, e, n = redfearn(lat_long_points[index][0], |
---|
2634 | lat_long_points[index][1]) |
---|
2635 | |
---|
2636 | #print i, [x[i],y[i]], [e,n] |
---|
2637 | assert num.allclose([x[i],y[i]], [e,n]) |
---|
2638 | |
---|
2639 | |
---|
2640 | # Check the time vector |
---|
2641 | times = fid.variables['time'][:] |
---|
2642 | |
---|
2643 | times_actual = [] |
---|
2644 | for i in range(time_step_count): |
---|
2645 | times_actual.append(time_step * i) |
---|
2646 | |
---|
2647 | assert num.allclose(ensure_numeric(times), |
---|
2648 | ensure_numeric(times_actual)) |
---|
2649 | |
---|
2650 | |
---|
2651 | # Check sts values |
---|
2652 | stage = fid.variables['stage'][:] |
---|
2653 | xmomentum = fid.variables['xmomentum'][:] |
---|
2654 | ymomentum = fid.variables['ymomentum'][:] |
---|
2655 | elevation = fid.variables['elevation'][:] |
---|
2656 | |
---|
2657 | # Set original data used to write mux file to be zero when gauges are |
---|
2658 | # not recdoring |
---|
2659 | |
---|
2660 | ha[0][0]=0.0 |
---|
2661 | ha[0][time_step_count-1]=0.0 |
---|
2662 | ha[2][0]=0.0 |
---|
2663 | ua[0][0]=0.0 |
---|
2664 | ua[0][time_step_count-1]=0.0 |
---|
2665 | ua[2][0]=0.0 |
---|
2666 | va[0][0]=0.0 |
---|
2667 | va[0][time_step_count-1]=0.0 |
---|
2668 | va[2][0]=0.0; |
---|
2669 | |
---|
2670 | |
---|
2671 | # The stage stored in the .sts file should be the sum of the stage |
---|
2672 | # in the two mux2 files because both have weights = 1. In this case |
---|
2673 | # the mux2 files are the same so stage == 2.0 * ha |
---|
2674 | #print 2.0*num.transpose(ha) - stage |
---|
2675 | |
---|
2676 | ha_permutation = num.take(ha, permutation, axis=0) |
---|
2677 | ua_permutation = num.take(ua, permutation, axis=0) |
---|
2678 | va_permutation = num.take(va, permutation, axis=0) |
---|
2679 | gauge_depth_permutation = num.take(gauge_depth, permutation, axis=0) |
---|
2680 | |
---|
2681 | |
---|
2682 | assert num.allclose(2.0*num.transpose(ha_permutation)+tide, stage) # Meters |
---|
2683 | |
---|
2684 | #Check the momentums - ua |
---|
2685 | #momentum = velocity*(stage-elevation) |
---|
2686 | # elevation = - depth |
---|
2687 | #momentum = velocity_ua *(stage+depth) |
---|
2688 | |
---|
2689 | depth=num.zeros((len(lat_long_points),time_step_count),num.float) |
---|
2690 | for i in range(len(lat_long_points)): |
---|
2691 | depth[i]=gauge_depth[i]+tide+2.0*ha[i] |
---|
2692 | #2.0*ha necessary because using two files with weights=1 are used |
---|
2693 | |
---|
2694 | depth_permutation = num.take(depth, permutation, axis=0) |
---|
2695 | |
---|
2696 | |
---|
2697 | # The xmomentum stored in the .sts file should be the sum of the ua |
---|
2698 | # in the two mux2 files multiplied by the depth. |
---|
2699 | assert num.allclose(2.0*num.transpose(ua_permutation*depth_permutation), xmomentum) |
---|
2700 | |
---|
2701 | #Check the momentums - va |
---|
2702 | #momentum = velocity*(stage-elevation) |
---|
2703 | # elevation = - depth |
---|
2704 | #momentum = velocity_va *(stage+depth) |
---|
2705 | |
---|
2706 | # The ymomentum stored in the .sts file should be the sum of the va |
---|
2707 | # in the two mux2 files multiplied by the depth. |
---|
2708 | assert num.allclose(2.0*num.transpose(va_permutation*depth_permutation), ymomentum) |
---|
2709 | |
---|
2710 | # check the elevation values. |
---|
2711 | # -ve since urs measures depth, sww meshers height, |
---|
2712 | assert num.allclose(-gauge_depth_permutation, elevation) #Meters |
---|
2713 | |
---|
2714 | fid.close() |
---|
2715 | self.delete_mux(filesI) |
---|
2716 | self.delete_mux(filesII) |
---|
2717 | os.remove(sts_file) |
---|
2718 | |
---|
2719 | |
---|
2720 | |
---|
2721 | |
---|
2722 | |
---|
2723 | def Xtest_urs2sts_ordering_exception(self): |
---|
2724 | """Test that inconsistent lats and lons in ordering file are caught. |
---|
2725 | """ |
---|
2726 | |
---|
2727 | tide=0 |
---|
2728 | time_step_count = 3 |
---|
2729 | time_step = 2 |
---|
2730 | lat_long_points =[(-21.5,114.5),(-21,114.5),(-21.5,115), (-21.,115.)] |
---|
2731 | n=len(lat_long_points) |
---|
2732 | first_tstep=num.ones(n,num.int) |
---|
2733 | first_tstep[0]+=1 |
---|
2734 | first_tstep[2]+=1 |
---|
2735 | last_tstep=(time_step_count)*num.ones(n,num.int) |
---|
2736 | last_tstep[0]-=1 |
---|
2737 | |
---|
2738 | gauge_depth=20*num.ones(n,num.float) |
---|
2739 | ha=2*num.ones((n,time_step_count),num.float) |
---|
2740 | ha[0]=num.arange(0,time_step_count) |
---|
2741 | ha[1]=num.arange(time_step_count,2*time_step_count) |
---|
2742 | ha[2]=num.arange(2*time_step_count,3*time_step_count) |
---|
2743 | ha[3]=num.arange(3*time_step_count,4*time_step_count) |
---|
2744 | ua=5*num.ones((n,time_step_count),num.float) |
---|
2745 | va=-10*num.ones((n,time_step_count),num.float) |
---|
2746 | |
---|
2747 | # Create two identical mux files to be combined by urs2sts |
---|
2748 | base_nameI, filesI = self.write_mux2(lat_long_points, |
---|
2749 | time_step_count, time_step, |
---|
2750 | first_tstep, last_tstep, |
---|
2751 | depth=gauge_depth, |
---|
2752 | ha=ha, |
---|
2753 | ua=ua, |
---|
2754 | va=va) |
---|
2755 | |
---|
2756 | base_nameII, filesII = self.write_mux2(lat_long_points, |
---|
2757 | time_step_count, time_step, |
---|
2758 | first_tstep, last_tstep, |
---|
2759 | depth=gauge_depth, |
---|
2760 | ha=ha, |
---|
2761 | ua=ua, |
---|
2762 | va=va) |
---|
2763 | |
---|
2764 | |
---|
2765 | # Create ordering file |
---|
2766 | permutation = [3,0,2] |
---|
2767 | |
---|
2768 | # Do it wrongly and check that exception is being raised |
---|
2769 | _, ordering_filename = tempfile.mkstemp('') |
---|
2770 | order_fid = open(ordering_filename, 'w') |
---|
2771 | order_fid.write('index, longitude, latitude\n') |
---|
2772 | for index in permutation: |
---|
2773 | order_fid.write('%d, %f, %f\n' %(index, |
---|
2774 | lat_long_points[index][0], |
---|
2775 | lat_long_points[index][1])) |
---|
2776 | order_fid.close() |
---|
2777 | |
---|
2778 | try: |
---|
2779 | urs2sts([base_nameI, base_nameII], |
---|
2780 | basename_out=base_nameI, |
---|
2781 | ordering_filename=ordering_filename, |
---|
2782 | weights=[1.0, 1.0], |
---|
2783 | mean_stage=tide, |
---|
2784 | verbose=False) |
---|
2785 | os.remove(ordering_filename) |
---|
2786 | except: |
---|
2787 | pass |
---|
2788 | else: |
---|
2789 | msg = 'Should have caught wrong lat longs' |
---|
2790 | raise Exception, msg |
---|
2791 | |
---|
2792 | |
---|
2793 | self.delete_mux(filesI) |
---|
2794 | self.delete_mux(filesII) |
---|
2795 | |
---|
2796 | |
---|
2797 | |
---|
2798 | |
---|
2799 | def test_urs2sts_ordering_different_sources(self): |
---|
2800 | """Test multiple sources with ordering file, different source files and weights. |
---|
2801 | This test also has more variable values than the previous ones |
---|
2802 | """ |
---|
2803 | |
---|
2804 | tide = 1.5 |
---|
2805 | time_step_count = 10 |
---|
2806 | time_step = 0.2 |
---|
2807 | |
---|
2808 | times_ref = num.arange(0, time_step_count*time_step, time_step) |
---|
2809 | #print 'time vector', times_ref |
---|
2810 | |
---|
2811 | lat_long_points = [(-21.5,114.5), (-21,114.5), (-21.5,115), (-21.,115.), (-22., 117.)] |
---|
2812 | n = len(lat_long_points) |
---|
2813 | |
---|
2814 | # Create non-trivial weights |
---|
2815 | #weights = [0.8, 1.5] # OK |
---|
2816 | #weights = [0.8, 10.5] # Fail (up to allclose tolerance) |
---|
2817 | #weights = [10.5, 10.5] # OK |
---|
2818 | #weights = [0.0, 10.5] # OK |
---|
2819 | #weights = [0.8, 0.] # OK |
---|
2820 | #weights = [8, 0.1] # OK |
---|
2821 | #weights = [0.8, 10.0] # OK |
---|
2822 | #weights = [0.8, 10.6] # OK |
---|
2823 | weights = [3.8, 7.6] # OK |
---|
2824 | #weights = [0.5, 0.5] # OK |
---|
2825 | #weights = [2., 2.] # OK |
---|
2826 | #weights = [0.0, 0.5] # OK |
---|
2827 | #weights = [1.0, 1.0] # OK |
---|
2828 | |
---|
2829 | |
---|
2830 | # Create different timeseries starting and ending at different times |
---|
2831 | first_tstep=num.ones(n,num.int) |
---|
2832 | first_tstep[0]+=2 # Point 0 starts at 2 |
---|
2833 | first_tstep[1]+=4 # Point 1 starts at 4 |
---|
2834 | first_tstep[2]+=3 # Point 2 starts at 3 |
---|
2835 | |
---|
2836 | last_tstep=(time_step_count)*num.ones(n,num.int) |
---|
2837 | last_tstep[0]-=1 # Point 0 ends 1 step early |
---|
2838 | last_tstep[1]-=2 # Point 1 ends 2 steps early |
---|
2839 | last_tstep[4]-=3 # Point 4 ends 3 steps early |
---|
2840 | |
---|
2841 | #print |
---|
2842 | #print 'time_step_count', time_step_count |
---|
2843 | #print 'time_step', time_step |
---|
2844 | #print 'first_tstep', first_tstep |
---|
2845 | #print 'last_tstep', last_tstep |
---|
2846 | |
---|
2847 | |
---|
2848 | # Create varying elevation data (positive values for seafloor) |
---|
2849 | gauge_depth=20*num.ones(n,num.float) |
---|
2850 | for i in range(n): |
---|
2851 | gauge_depth[i] += i**2 |
---|
2852 | |
---|
2853 | #print 'gauge_depth', gauge_depth |
---|
2854 | |
---|
2855 | # Create data to be written to first mux file |
---|
2856 | ha0=2*num.ones((n,time_step_count),num.float) |
---|
2857 | ha0[0]=num.arange(0,time_step_count) |
---|
2858 | ha0[1]=num.arange(time_step_count,2*time_step_count) |
---|
2859 | ha0[2]=num.arange(2*time_step_count,3*time_step_count) |
---|
2860 | ha0[3]=num.arange(3*time_step_count,4*time_step_count) |
---|
2861 | ua0=5*num.ones((n,time_step_count),num.float) |
---|
2862 | va0=-10*num.ones((n,time_step_count),num.float) |
---|
2863 | |
---|
2864 | # Ensure data used to write mux file to be zero when gauges are |
---|
2865 | # not recording |
---|
2866 | for i in range(n): |
---|
2867 | # For each point |
---|
2868 | |
---|
2869 | for j in range(0, first_tstep[i]-1) + range(last_tstep[i], time_step_count): |
---|
2870 | # For timesteps before and after recording range |
---|
2871 | ha0[i][j] = ua0[i][j] = va0[i][j] = 0.0 |
---|
2872 | |
---|
2873 | |
---|
2874 | |
---|
2875 | #print |
---|
2876 | #print 'using varying start and end time' |
---|
2877 | #print 'ha0', ha0[4] |
---|
2878 | #print 'ua0', ua0 |
---|
2879 | #print 'va0', va0 |
---|
2880 | |
---|
2881 | # Write first mux file to be combined by urs2sts |
---|
2882 | base_nameI, filesI = self.write_mux2(lat_long_points, |
---|
2883 | time_step_count, time_step, |
---|
2884 | first_tstep, last_tstep, |
---|
2885 | depth=gauge_depth, |
---|
2886 | ha=ha0, |
---|
2887 | ua=ua0, |
---|
2888 | va=va0) |
---|
2889 | |
---|
2890 | |
---|
2891 | |
---|
2892 | # Create data to be written to second mux file |
---|
2893 | ha1=num.ones((n,time_step_count),num.float) |
---|
2894 | ha1[0]=num.sin(times_ref) |
---|
2895 | ha1[1]=2*num.sin(times_ref - 3) |
---|
2896 | ha1[2]=5*num.sin(4*times_ref) |
---|
2897 | ha1[3]=num.sin(times_ref) |
---|
2898 | ha1[4]=num.sin(2*times_ref-0.7) |
---|
2899 | |
---|
2900 | ua1=num.zeros((n,time_step_count),num.float) |
---|
2901 | ua1[0]=3*num.cos(times_ref) |
---|
2902 | ua1[1]=2*num.sin(times_ref-0.7) |
---|
2903 | ua1[2]=num.arange(3*time_step_count,4*time_step_count) |
---|
2904 | ua1[4]=2*num.ones(time_step_count) |
---|
2905 | |
---|
2906 | va1=num.zeros((n,time_step_count),num.float) |
---|
2907 | va1[0]=2*num.cos(times_ref-0.87) |
---|
2908 | va1[1]=3*num.ones(time_step_count, num.int) #array default# |
---|
2909 | va1[3]=2*num.sin(times_ref-0.71) |
---|
2910 | |
---|
2911 | |
---|
2912 | # Ensure data used to write mux file to be zero when gauges are |
---|
2913 | # not recording |
---|
2914 | for i in range(n): |
---|
2915 | # For each point |
---|
2916 | |
---|
2917 | for j in range(0, first_tstep[i]-1) + range(last_tstep[i], time_step_count): |
---|
2918 | # For timesteps before and after recording range |
---|
2919 | ha1[i][j] = ua1[i][j] = va1[i][j] = 0.0 |
---|
2920 | |
---|
2921 | |
---|
2922 | #print |
---|
2923 | #print 'using varying start and end time' |
---|
2924 | #print 'ha1', ha1[4] |
---|
2925 | #print 'ua1', ua1 |
---|
2926 | #print 'va1', va1 |
---|
2927 | |
---|
2928 | |
---|
2929 | # Write second mux file to be combined by urs2sts |
---|
2930 | base_nameII, filesII = self.write_mux2(lat_long_points, |
---|
2931 | time_step_count, time_step, |
---|
2932 | first_tstep, last_tstep, |
---|
2933 | depth=gauge_depth, |
---|
2934 | ha=ha1, |
---|
2935 | ua=ua1, |
---|
2936 | va=va1) |
---|
2937 | |
---|
2938 | |
---|
2939 | # Create ordering file |
---|
2940 | permutation = [4,0,2] |
---|
2941 | |
---|
2942 | _, ordering_filename = tempfile.mkstemp('') |
---|
2943 | order_fid = open(ordering_filename, 'w') |
---|
2944 | order_fid.write('index, longitude, latitude\n') |
---|
2945 | for index in permutation: |
---|
2946 | order_fid.write('%d, %f, %f\n' %(index, |
---|
2947 | lat_long_points[index][1], |
---|
2948 | lat_long_points[index][0])) |
---|
2949 | order_fid.close() |
---|
2950 | |
---|
2951 | |
---|
2952 | |
---|
2953 | #------------------------------------------------------------ |
---|
2954 | # Now read the mux files one by one without weights and test |
---|
2955 | |
---|
2956 | # Call urs2sts with mux file #0 |
---|
2957 | urs2sts([base_nameI], |
---|
2958 | basename_out=base_nameI, |
---|
2959 | ordering_filename=ordering_filename, |
---|
2960 | mean_stage=tide, |
---|
2961 | verbose=False) |
---|
2962 | |
---|
2963 | # Now read the sts file and check that values have been stored correctly. |
---|
2964 | sts_file = base_nameI + '.sts' |
---|
2965 | fid = NetCDFFile(sts_file) |
---|
2966 | |
---|
2967 | |
---|
2968 | # Check that original indices have been stored |
---|
2969 | stored_permutation = fid.variables['permutation'][:] |
---|
2970 | msg = 'Permutation was not stored correctly. I got ' |
---|
2971 | msg += str(stored_permutation) |
---|
2972 | assert num.allclose(stored_permutation, permutation), msg |
---|
2973 | |
---|
2974 | |
---|
2975 | |
---|
2976 | |
---|
2977 | # Make x and y absolute |
---|
2978 | x = fid.variables['x'][:] |
---|
2979 | y = fid.variables['y'][:] |
---|
2980 | |
---|
2981 | geo_reference = Geo_reference(NetCDFObject=fid) |
---|
2982 | points = geo_reference.get_absolute(map(None, x, y)) |
---|
2983 | points = ensure_numeric(points) |
---|
2984 | |
---|
2985 | x = points[:,0] |
---|
2986 | y = points[:,1] |
---|
2987 | |
---|
2988 | for i, index in enumerate(permutation): |
---|
2989 | # Check that STS points are stored in the correct order |
---|
2990 | |
---|
2991 | # Work out the UTM coordinates sts point i |
---|
2992 | zone, e, n = redfearn(lat_long_points[index][0], |
---|
2993 | lat_long_points[index][1]) |
---|
2994 | |
---|
2995 | #print i, [x[i],y[i]], [e,n] |
---|
2996 | assert num.allclose([x[i],y[i]], [e,n]) |
---|
2997 | |
---|
2998 | |
---|
2999 | # Check the time vector |
---|
3000 | times = fid.variables['time'][:] |
---|
3001 | assert num.allclose(ensure_numeric(times), |
---|
3002 | ensure_numeric(times_ref)) |
---|
3003 | |
---|
3004 | |
---|
3005 | # Check sts values for mux #0 |
---|
3006 | stage0 = fid.variables['stage'][:].copy() |
---|
3007 | xmomentum0 = fid.variables['xmomentum'][:].copy() |
---|
3008 | ymomentum0 = fid.variables['ymomentum'][:].copy() |
---|
3009 | elevation0 = fid.variables['elevation'][:].copy() |
---|
3010 | |
---|
3011 | |
---|
3012 | #print 'stage', stage0 |
---|
3013 | #print 'xmomentum', xmomentum0 |
---|
3014 | #print 'ymomentum', ymomentum0 |
---|
3015 | #print 'elevation', elevation0 |
---|
3016 | |
---|
3017 | # The quantities stored in the .sts file should be the weighted sum of the |
---|
3018 | # quantities written to the mux2 files subject to the permutation vector. |
---|
3019 | |
---|
3020 | ha_ref = num.take(ha0, permutation, axis=0) |
---|
3021 | ua_ref = num.take(ua0, permutation, axis=0) |
---|
3022 | va_ref = num.take(va0, permutation, axis=0) |
---|
3023 | |
---|
3024 | gauge_depth_ref = num.take(gauge_depth, permutation, axis=0) |
---|
3025 | |
---|
3026 | assert num.allclose(num.transpose(ha_ref)+tide, stage0) # Meters |
---|
3027 | |
---|
3028 | |
---|
3029 | |
---|
3030 | #Check the momentums - ua |
---|
3031 | #momentum = velocity*(stage-elevation) |
---|
3032 | # elevation = - depth |
---|
3033 | #momentum = velocity_ua *(stage+depth) |
---|
3034 | |
---|
3035 | depth_ref = num.zeros((len(permutation), time_step_count), num.float) |
---|
3036 | for i in range(len(permutation)): |
---|
3037 | depth_ref[i]=gauge_depth_ref[i]+tide+ha_ref[i] |
---|
3038 | |
---|
3039 | |
---|
3040 | # The xmomentum stored in the .sts file should be the sum of the ua |
---|
3041 | # in the two mux2 files multiplied by the depth. |
---|
3042 | assert num.allclose(num.transpose(ua_ref*depth_ref), xmomentum0) |
---|
3043 | |
---|
3044 | #Check the momentums - va |
---|
3045 | #momentum = velocity*(stage-elevation) |
---|
3046 | # elevation = - depth |
---|
3047 | #momentum = velocity_va *(stage+depth) |
---|
3048 | |
---|
3049 | # The ymomentum stored in the .sts file should be the sum of the va |
---|
3050 | # in the two mux2 files multiplied by the depth. |
---|
3051 | |
---|
3052 | |
---|
3053 | #print transpose(va_ref*depth_ref) |
---|
3054 | #print ymomentum |
---|
3055 | assert num.allclose(num.transpose(va_ref*depth_ref), ymomentum0) |
---|
3056 | |
---|
3057 | # check the elevation values. |
---|
3058 | # -ve since urs measures depth, sww meshers height, |
---|
3059 | assert num.allclose(-gauge_depth_ref, elevation0) |
---|
3060 | |
---|
3061 | fid.close() |
---|
3062 | os.remove(sts_file) |
---|
3063 | |
---|
3064 | |
---|
3065 | |
---|
3066 | |
---|
3067 | # Call urs2sts with mux file #1 |
---|
3068 | urs2sts([base_nameII], |
---|
3069 | basename_out=base_nameI, |
---|
3070 | ordering_filename=ordering_filename, |
---|
3071 | mean_stage=tide, |
---|
3072 | verbose=False) |
---|
3073 | |
---|
3074 | # Now read the sts file and check that values have been stored correctly. |
---|
3075 | sts_file = base_nameI + '.sts' |
---|
3076 | fid = NetCDFFile(sts_file) |
---|
3077 | |
---|
3078 | |
---|
3079 | # Check that original indices have been stored |
---|
3080 | stored_permutation = fid.variables['permutation'][:] |
---|
3081 | msg = 'Permutation was not stored correctly. I got ' |
---|
3082 | msg += str(stored_permutation) |
---|
3083 | assert num.allclose(stored_permutation, permutation), msg |
---|
3084 | |
---|
3085 | # Make x and y absolute |
---|
3086 | x = fid.variables['x'][:] |
---|
3087 | y = fid.variables['y'][:] |
---|
3088 | |
---|
3089 | geo_reference = Geo_reference(NetCDFObject=fid) |
---|
3090 | points = geo_reference.get_absolute(map(None, x, y)) |
---|
3091 | points = ensure_numeric(points) |
---|
3092 | |
---|
3093 | x = points[:,0] |
---|
3094 | y = points[:,1] |
---|
3095 | |
---|
3096 | for i, index in enumerate(permutation): |
---|
3097 | # Check that STS points are stored in the correct order |
---|
3098 | |
---|
3099 | # Work out the UTM coordinates sts point i |
---|
3100 | zone, e, n = redfearn(lat_long_points[index][0], |
---|
3101 | lat_long_points[index][1]) |
---|
3102 | |
---|
3103 | #print i, [x[i],y[i]], [e,n] |
---|
3104 | assert num.allclose([x[i],y[i]], [e,n]) |
---|
3105 | |
---|
3106 | |
---|
3107 | # Check the time vector |
---|
3108 | times = fid.variables['time'][:] |
---|
3109 | assert num.allclose(ensure_numeric(times), |
---|
3110 | ensure_numeric(times_ref)) |
---|
3111 | |
---|
3112 | |
---|
3113 | # Check sts values for mux #1 |
---|
3114 | stage1 = fid.variables['stage'][:].copy() |
---|
3115 | xmomentum1 = fid.variables['xmomentum'][:].copy() |
---|
3116 | ymomentum1 = fid.variables['ymomentum'][:].copy() |
---|
3117 | elevation1 = fid.variables['elevation'][:].copy() |
---|
3118 | |
---|
3119 | |
---|
3120 | #print 'stage', stage1 |
---|
3121 | #print 'xmomentum', xmomentum1 |
---|
3122 | #print 'ymomentum', ymomentum1 |
---|
3123 | #print 'elevation', elevation1 |
---|
3124 | |
---|
3125 | # The quantities stored in the .sts file should be the weighted sum of the |
---|
3126 | # quantities written to the mux2 files subject to the permutation vector. |
---|
3127 | |
---|
3128 | ha_ref = num.take(ha1, permutation, axis=0) |
---|
3129 | ua_ref = num.take(ua1, permutation, axis=0) |
---|
3130 | va_ref = num.take(va1, permutation, axis=0) |
---|
3131 | |
---|
3132 | gauge_depth_ref = num.take(gauge_depth, permutation, axis=0) |
---|
3133 | |
---|
3134 | |
---|
3135 | #print |
---|
3136 | #print stage1 |
---|
3137 | #print transpose(ha_ref)+tide - stage1 |
---|
3138 | |
---|
3139 | |
---|
3140 | assert num.allclose(num.transpose(ha_ref)+tide, stage1) # Meters |
---|
3141 | #import sys; sys.exit() |
---|
3142 | |
---|
3143 | #Check the momentums - ua |
---|
3144 | #momentum = velocity*(stage-elevation) |
---|
3145 | # elevation = - depth |
---|
3146 | #momentum = velocity_ua *(stage+depth) |
---|
3147 | |
---|
3148 | depth_ref = num.zeros((len(permutation), time_step_count), num.float) |
---|
3149 | for i in range(len(permutation)): |
---|
3150 | depth_ref[i]=gauge_depth_ref[i]+tide+ha_ref[i] |
---|
3151 | |
---|
3152 | |
---|
3153 | # The xmomentum stored in the .sts file should be the sum of the ua |
---|
3154 | # in the two mux2 files multiplied by the depth. |
---|
3155 | assert num.allclose(num.transpose(ua_ref*depth_ref), xmomentum1) |
---|
3156 | |
---|
3157 | #Check the momentums - va |
---|
3158 | #momentum = velocity*(stage-elevation) |
---|
3159 | # elevation = - depth |
---|
3160 | #momentum = velocity_va *(stage+depth) |
---|
3161 | |
---|
3162 | # The ymomentum stored in the .sts file should be the sum of the va |
---|
3163 | # in the two mux2 files multiplied by the depth. |
---|
3164 | |
---|
3165 | |
---|
3166 | #print transpose(va_ref*depth_ref) |
---|
3167 | #print ymomentum |
---|
3168 | assert num.allclose(num.transpose(va_ref*depth_ref), ymomentum1) |
---|
3169 | |
---|
3170 | # check the elevation values. |
---|
3171 | # -ve since urs measures depth, sww meshers height, |
---|
3172 | assert num.allclose(-gauge_depth_ref, elevation1) |
---|
3173 | |
---|
3174 | fid.close() |
---|
3175 | os.remove(sts_file) |
---|
3176 | |
---|
3177 | #---------------------- |
---|
3178 | # Then read the mux files together and test |
---|
3179 | |
---|
3180 | |
---|
3181 | # Call urs2sts with multiple mux files |
---|
3182 | urs2sts([base_nameI, base_nameII], |
---|
3183 | basename_out=base_nameI, |
---|
3184 | ordering_filename=ordering_filename, |
---|
3185 | weights=weights, |
---|
3186 | mean_stage=tide, |
---|
3187 | verbose=False) |
---|
3188 | |
---|
3189 | # Now read the sts file and check that values have been stored correctly. |
---|
3190 | sts_file = base_nameI + '.sts' |
---|
3191 | fid = NetCDFFile(sts_file) |
---|
3192 | |
---|
3193 | # Make x and y absolute |
---|
3194 | x = fid.variables['x'][:] |
---|
3195 | y = fid.variables['y'][:] |
---|
3196 | |
---|
3197 | geo_reference = Geo_reference(NetCDFObject=fid) |
---|
3198 | points = geo_reference.get_absolute(map(None, x, y)) |
---|
3199 | points = ensure_numeric(points) |
---|
3200 | |
---|
3201 | x = points[:,0] |
---|
3202 | y = points[:,1] |
---|
3203 | |
---|
3204 | for i, index in enumerate(permutation): |
---|
3205 | # Check that STS points are stored in the correct order |
---|
3206 | |
---|
3207 | # Work out the UTM coordinates sts point i |
---|
3208 | zone, e, n = redfearn(lat_long_points[index][0], |
---|
3209 | lat_long_points[index][1]) |
---|
3210 | |
---|
3211 | #print i, [x[i],y[i]], [e,n] |
---|
3212 | assert num.allclose([x[i],y[i]], [e,n]) |
---|
3213 | |
---|
3214 | |
---|
3215 | # Check the time vector |
---|
3216 | times = fid.variables['time'][:] |
---|
3217 | assert num.allclose(ensure_numeric(times), |
---|
3218 | ensure_numeric(times_ref)) |
---|
3219 | |
---|
3220 | |
---|
3221 | # Check sts values |
---|
3222 | stage = fid.variables['stage'][:].copy() |
---|
3223 | xmomentum = fid.variables['xmomentum'][:].copy() |
---|
3224 | ymomentum = fid.variables['ymomentum'][:].copy() |
---|
3225 | elevation = fid.variables['elevation'][:].copy() |
---|
3226 | |
---|
3227 | |
---|
3228 | #print 'stage', stage |
---|
3229 | #print 'elevation', elevation |
---|
3230 | |
---|
3231 | # The quantities stored in the .sts file should be the weighted sum of the |
---|
3232 | # quantities written to the mux2 files subject to the permutation vector. |
---|
3233 | |
---|
3234 | ha_ref = (weights[0]*num.take(ha0, permutation, axis=0) |
---|
3235 | + weights[1]*num.take(ha1, permutation, axis=0)) |
---|
3236 | ua_ref = (weights[0]*num.take(ua0, permutation, axis=0) |
---|
3237 | + weights[1]*num.take(ua1, permutation, axis=0)) |
---|
3238 | va_ref = (weights[0]*num.take(va0, permutation, axis=0) |
---|
3239 | + weights[1]*num.take(va1, permutation, axis=0)) |
---|
3240 | |
---|
3241 | gauge_depth_ref = num.take(gauge_depth, permutation, axis=0) |
---|
3242 | |
---|
3243 | |
---|
3244 | #print |
---|
3245 | #print stage |
---|
3246 | #print transpose(ha_ref)+tide - stage |
---|
3247 | |
---|
3248 | assert num.allclose(num.transpose(ha_ref)+tide, stage) # Meters |
---|
3249 | |
---|
3250 | #Check the momentums - ua |
---|
3251 | #momentum = velocity*(stage-elevation) |
---|
3252 | # elevation = - depth |
---|
3253 | #momentum = velocity_ua *(stage+depth) |
---|
3254 | |
---|
3255 | depth_ref = num.zeros((len(permutation), time_step_count), num.float) |
---|
3256 | for i in range(len(permutation)): |
---|
3257 | depth_ref[i]=gauge_depth_ref[i]+tide+ha_ref[i] |
---|
3258 | |
---|
3259 | |
---|
3260 | |
---|
3261 | |
---|
3262 | # The xmomentum stored in the .sts file should be the sum of the ua |
---|
3263 | # in the two mux2 files multiplied by the depth. |
---|
3264 | assert num.allclose(num.transpose(ua_ref*depth_ref), xmomentum) |
---|
3265 | |
---|
3266 | #Check the momentums - va |
---|
3267 | #momentum = velocity*(stage-elevation) |
---|
3268 | # elevation = - depth |
---|
3269 | #momentum = velocity_va *(stage+depth) |
---|
3270 | |
---|
3271 | # The ymomentum stored in the .sts file should be the sum of the va |
---|
3272 | # in the two mux2 files multiplied by the depth. |
---|
3273 | |
---|
3274 | |
---|
3275 | #print transpose(va_ref*depth_ref) |
---|
3276 | #print ymomentum |
---|
3277 | |
---|
3278 | assert num.allclose(num.transpose(va_ref*depth_ref), ymomentum) |
---|
3279 | |
---|
3280 | # check the elevation values. |
---|
3281 | # -ve since urs measures depth, sww meshers height, |
---|
3282 | assert num.allclose(-gauge_depth_ref, elevation) #Meters |
---|
3283 | |
---|
3284 | fid.close() |
---|
3285 | self.delete_mux(filesI) |
---|
3286 | self.delete_mux(filesII) |
---|
3287 | os.remove(sts_file) |
---|
3288 | |
---|
3289 | #--------------- |
---|
3290 | # "Manually" add the timeseries up with weights and test |
---|
3291 | # Tide is discounted from individual results and added back in |
---|
3292 | # |
---|
3293 | |
---|
3294 | stage_man = weights[0]*(stage0-tide) + weights[1]*(stage1-tide) + tide |
---|
3295 | assert num.allclose(stage_man, stage) |
---|
3296 | |
---|
3297 | |
---|
    def test_file_boundary_stsI(self):
        """Check File_boundary driven by an STS file against an equivalent
        Dirichlet boundary.

        A mux2 dataset with constant depth h, stage w and velocities (u, v)
        is written, converted to .sts with urs2sts (adding the tide as
        mean_stage), and applied as a File_boundary on the 'ocean' segment
        of a small mesh.  The run must reproduce a second run that uses the
        analytically equivalent Dirichlet_boundary
        [w+tide, u*(w+h+tide), v*(w+h+tide)].
        """

        # FIXME (Ole): These tests should really move to
        # test_generic_boundaries.py

        from anuga.shallow_water import Domain
        from anuga.shallow_water import Reflective_boundary
        from anuga.shallow_water import Dirichlet_boundary
        from anuga.shallow_water import File_boundary
        from anuga.pmesh.mesh_interface import create_mesh_from_regions

        # Polygon in (lat, lon); first three vertices double as mux gauges.
        bounding_polygon=[[6.0,97.0],[6.01,97.0],[6.02,97.0],[6.02,97.02],[6.00,97.02]]
        tide = 0.37
        time_step_count = 5
        time_step = 2
        lat_long_points =bounding_polygon[0:3]
        n=len(lat_long_points)
        first_tstep=num.ones(n,num.int)
        last_tstep=(time_step_count)*num.ones(n,num.int)

        # Constant synthetic field: depth h, stage w, velocities u and v.
        h = 20
        w = 2
        u = 10
        v = -10
        gauge_depth=h*num.ones(n,num.float)
        ha=w*num.ones((n,time_step_count),num.float)
        ua=u*num.ones((n,time_step_count),num.float)
        va=v*num.ones((n,time_step_count),num.float)
        base_name, files = self.write_mux2(lat_long_points,
                                           time_step_count, time_step,
                                           first_tstep, last_tstep,
                                           depth=gauge_depth,
                                           ha=ha,
                                           ua=ua,
                                           va=va)

        # Convert mux2 to .sts, folding the tide in as mean_stage.
        sts_file=base_name
        urs2sts(base_name,
                sts_file,
                mean_stage=tide,
                verbose=False)
        self.delete_mux(files)

        # Convert the boundary polygon in place from (lat, lon) to UTM.
        #print 'start create mesh from regions'
        for i in range(len(bounding_polygon)):
            zone,bounding_polygon[i][0],bounding_polygon[i][1]=redfearn(bounding_polygon[i][0],bounding_polygon[i][1])
        extent_res=1000000
        meshname = 'urs_test_mesh' + '.tsh'
        interior_regions=None
        boundary_tags={'ocean': [0,1], 'otherocean': [2,3,4]}
        create_mesh_from_regions(bounding_polygon,
                                 boundary_tags=boundary_tags,
                                 maximum_triangle_area=extent_res,
                                 filename=meshname,
                                 interior_regions=interior_regions,
                                 verbose=False)

        # Run 1: File_boundary on the 'ocean' edges, reflective elsewhere.
        domain_fbound = Domain(meshname)
        domain_fbound.set_quantity('stage', tide)
        Bf = File_boundary(sts_file+'.sts',
                           domain_fbound,
                           boundary_polygon=bounding_polygon)
        Br = Reflective_boundary(domain_fbound)
        # Reference condition: momenta are velocity*(stage-elevation)
        # = velocity*(w+h+tide).
        Bd = Dirichlet_boundary([w+tide, u*(w+h+tide), v*(w+h+tide)])

        domain_fbound.set_boundary({'ocean': Bf,'otherocean': Br})

        # Check boundary object evaluates as it should
        for i, ((vol_id, edge_id), B) in enumerate(domain_fbound.boundary_objects):
            if B is Bf:

                qf = B.evaluate(vol_id, edge_id)  # File boundary
                qd = Bd.evaluate(vol_id, edge_id) # Dirichlet boundary

                assert num.allclose(qf, qd)


        # Evolve
        finaltime=time_step*(time_step_count-1)
        yieldstep=time_step
        temp_fbound=num.zeros(int(finaltime/yieldstep)+1,num.float)

        for i, t in enumerate(domain_fbound.evolve(yieldstep=yieldstep,
                                                   finaltime=finaltime,
                                                   skip_initial_step=False)):

            D = domain_fbound
            temp_fbound[i]=D.quantities['stage'].centroid_values[2]

            # Check that file boundary object has populated
            # boundary array correctly
            # FIXME (Ole): Do this for the other tests too!
            for j, val in enumerate(D.get_quantity('stage').boundary_values):

                (vol_id, edge_id), B = D.boundary_objects[j]
                if isinstance(B, File_boundary):
                    #print j, val
                    assert num.allclose(val, w + tide)



        # Run 2: same mesh with the equivalent Dirichlet boundary.
        domain_drchlt = Domain(meshname)
        domain_drchlt.set_quantity('stage', tide)
        Br = Reflective_boundary(domain_drchlt)

        domain_drchlt.set_boundary({'ocean': Bd,'otherocean': Br})
        temp_drchlt=num.zeros(int(finaltime/yieldstep)+1,num.float)

        for i, t in enumerate(domain_drchlt.evolve(yieldstep=yieldstep,
                                                   finaltime=finaltime,
                                                   skip_initial_step=False)):
            temp_drchlt[i]=domain_drchlt.quantities['stage'].centroid_values[2]

        #print domain_fbound.quantities['stage'].vertex_values
        #print domain_drchlt.quantities['stage'].vertex_values

        # The two runs must agree in the sampled time series and in all
        # conserved quantities at the vertices.
        assert num.allclose(temp_fbound,temp_drchlt)

        assert num.allclose(domain_fbound.quantities['stage'].vertex_values,
                            domain_drchlt.quantities['stage'].vertex_values)

        assert num.allclose(domain_fbound.quantities['xmomentum'].vertex_values,
                            domain_drchlt.quantities['xmomentum'].vertex_values)

        assert num.allclose(domain_fbound.quantities['ymomentum'].vertex_values,
                            domain_drchlt.quantities['ymomentum'].vertex_values)


        # Clean up generated files.
        os.remove(sts_file+'.sts')
        os.remove(meshname)
---|
3430 | |
---|
3431 | |
---|
    def test_file_boundary_stsI_beyond_model_time(self):
        """test_file_boundary_stsI_beyond_model_time(self):

        Test that file_boundary can work when model time
        exceeds available data.
        This is optional and requires the user to specify a default
        boundary object (here the equivalent Dirichlet condition, so the
        results must still match a pure Dirichlet run).
        """

        # Don't do warnings in unit test
        import warnings
        warnings.simplefilter('ignore')

        from anuga.shallow_water import Domain
        from anuga.shallow_water import Reflective_boundary
        from anuga.shallow_water import Dirichlet_boundary
        from anuga.shallow_water import File_boundary
        from anuga.pmesh.mesh_interface import create_mesh_from_regions

        # Polygon in (lat, lon); first three vertices double as mux gauges.
        bounding_polygon=[[6.0,97.0],[6.01,97.0],[6.02,97.0],
                          [6.02,97.02],[6.00,97.02]]
        tide = 0.37
        time_step_count = 5
        time_step = 2
        lat_long_points = bounding_polygon[0:3]
        n=len(lat_long_points)
        first_tstep=num.ones(n,num.int)
        last_tstep=(time_step_count)*num.ones(n,num.int)

        # Constant synthetic field: depth h, stage w, velocities u and v.
        h = 20
        w = 2
        u = 10
        v = -10
        gauge_depth=h*num.ones(n,num.float)
        ha=w*num.ones((n,time_step_count),num.float)
        ua=u*num.ones((n,time_step_count),num.float)
        va=v*num.ones((n,time_step_count),num.float)
        base_name, files = self.write_mux2(lat_long_points,
                                           time_step_count, time_step,
                                           first_tstep, last_tstep,
                                           depth=gauge_depth,
                                           ha=ha,
                                           ua=ua,
                                           va=va)

        # Convert mux2 to .sts, folding the tide in as mean_stage.
        sts_file=base_name
        urs2sts(base_name,
                sts_file,
                mean_stage=tide,
                verbose=False)
        self.delete_mux(files)

        # Convert the boundary polygon in place from (lat, lon) to UTM.
        #print 'start create mesh from regions'
        for i in range(len(bounding_polygon)):
            zone,\
            bounding_polygon[i][0],\
            bounding_polygon[i][1]=redfearn(bounding_polygon[i][0],
                                            bounding_polygon[i][1])

        extent_res=1000000
        meshname = 'urs_test_mesh' + '.tsh'
        interior_regions=None
        boundary_tags={'ocean': [0,1], 'otherocean': [2,3,4]}
        create_mesh_from_regions(bounding_polygon,
                                 boundary_tags=boundary_tags,
                                 maximum_triangle_area=extent_res,
                                 filename=meshname,
                                 interior_regions=interior_regions,
                                 verbose=False)

        # Run 1: File_boundary with Bd as fallback once data runs out.
        domain_fbound = Domain(meshname)
        domain_fbound.set_quantity('stage', tide)

        Br = Reflective_boundary(domain_fbound)
        Bd = Dirichlet_boundary([w+tide, u*(w+h+tide), v*(w+h+tide)])
        Bf = File_boundary(sts_file+'.sts',
                           domain_fbound,
                           boundary_polygon=bounding_polygon,
                           default_boundary=Bd) # Condition to be used
                                                # if model time exceeds
                                                # available data

        domain_fbound.set_boundary({'ocean': Bf,'otherocean': Br})

        # Check boundary object evaluates as it should
        for i, ((vol_id, edge_id), B) in enumerate(domain_fbound.boundary_objects):
            if B is Bf:

                qf = B.evaluate(vol_id, edge_id)  # File boundary
                qd = Bd.evaluate(vol_id, edge_id) # Dirichlet boundary

                assert num.allclose(qf, qd)


        # Evolve
        data_finaltime = time_step*(time_step_count-1)
        finaltime = data_finaltime + 10 # Let model time exceed available data
        yieldstep = time_step
        temp_fbound=num.zeros(int(finaltime/yieldstep)+1, num.float)

        for i, t in enumerate(domain_fbound.evolve(yieldstep=yieldstep,
                                                   finaltime=finaltime,
                                                   skip_initial_step=False)):

            D = domain_fbound
            temp_fbound[i]=D.quantities['stage'].centroid_values[2]

            # Check that file boundary object has populated
            # boundary array correctly
            # FIXME (Ole): Do this for the other tests too!
            for j, val in enumerate(D.get_quantity('stage').boundary_values):

                (vol_id, edge_id), B = D.boundary_objects[j]
                if isinstance(B, File_boundary):
                    #print j, val
                    assert num.allclose(val, w + tide)


        # Run 2: pure Dirichlet run over the same (extended) time window.
        domain_drchlt = Domain(meshname)
        domain_drchlt.set_quantity('stage', tide)
        Br = Reflective_boundary(domain_drchlt)

        domain_drchlt.set_boundary({'ocean': Bd,'otherocean': Br})
        temp_drchlt=num.zeros(int(finaltime/yieldstep)+1,num.float)

        for i, t in enumerate(domain_drchlt.evolve(yieldstep=yieldstep,
                                                   finaltime=finaltime,
                                                   skip_initial_step=False)):
            temp_drchlt[i]=domain_drchlt.quantities['stage'].centroid_values[2]

        #print domain_fbound.quantities['stage'].vertex_values
        #print domain_drchlt.quantities['stage'].vertex_values

        # Both runs must agree even beyond the data window, because the
        # default boundary is the same Dirichlet condition.
        assert num.allclose(temp_fbound,temp_drchlt)

        assert num.allclose(domain_fbound.quantities['stage'].vertex_values,
                            domain_drchlt.quantities['stage'].vertex_values)

        assert num.allclose(domain_fbound.quantities['xmomentum'].vertex_values,
                            domain_drchlt.quantities['xmomentum'].vertex_values)

        assert num.allclose(domain_fbound.quantities['ymomentum'].vertex_values,
                            domain_drchlt.quantities['ymomentum'].vertex_values)

        # Clean up generated files.
        os.remove(sts_file+'.sts')
        os.remove(meshname)
---|
3578 | |
---|
3579 | |
---|
3580 | def test_field_boundary_stsI_beyond_model_time(self): |
---|
3581 | """test_field_boundary(self): |
---|
3582 | |
---|
3583 | Test that field_boundary can work when model time |
---|
3584 | exceeds available data whilst adjusting mean_stage. |
---|
3585 | |
---|
3586 | """ |
---|
3587 | |
---|
3588 | # Don't do warnings in unit test |
---|
3589 | import warnings |
---|
3590 | warnings.simplefilter('ignore') |
---|
3591 | |
---|
3592 | from anuga.shallow_water import Domain |
---|
3593 | from anuga.shallow_water import Reflective_boundary |
---|
3594 | from anuga.shallow_water import Dirichlet_boundary |
---|
3595 | from anuga.shallow_water import File_boundary |
---|
3596 | from anuga.pmesh.mesh_interface import create_mesh_from_regions |
---|
3597 | |
---|
3598 | bounding_polygon=[[6.0,97.0],[6.01,97.0],[6.02,97.0], |
---|
3599 | [6.02,97.02],[6.00,97.02]] |
---|
3600 | tide = 0.37 |
---|
3601 | time_step_count = 5 |
---|
3602 | time_step = 2 |
---|
3603 | lat_long_points = bounding_polygon[0:3] |
---|
3604 | n=len(lat_long_points) |
---|
3605 | first_tstep=num.ones(n,num.int) |
---|
3606 | last_tstep=(time_step_count)*num.ones(n,num.int) |
---|
3607 | |
---|
3608 | h = 20 |
---|
3609 | w = 2 |
---|
3610 | u = 10 |
---|
3611 | v = -10 |
---|
3612 | gauge_depth=h*num.ones(n,num.float) |
---|
3613 | ha=w*num.ones((n,time_step_count),num.float) |
---|
3614 | ua=u*num.ones((n,time_step_count),num.float) |
---|
3615 | va=v*num.ones((n,time_step_count),num.float) |
---|
3616 | base_name, files = self.write_mux2(lat_long_points, |
---|
3617 | time_step_count, time_step, |
---|
3618 | first_tstep, last_tstep, |
---|
3619 | depth=gauge_depth, |
---|
3620 | ha=ha, |
---|
3621 | ua=ua, |
---|
3622 | va=va) |
---|
3623 | |
---|
3624 | sts_file=base_name |
---|
3625 | urs2sts(base_name, |
---|
3626 | sts_file, |
---|
3627 | mean_stage=0.0, # Deliberately let Field_boundary do the adjustment |
---|
3628 | verbose=False) |
---|
3629 | self.delete_mux(files) |
---|
3630 | |
---|
3631 | #print 'start create mesh from regions' |
---|
3632 | for i in range(len(bounding_polygon)): |
---|
3633 | zone,\ |
---|
3634 | bounding_polygon[i][0],\ |
---|
3635 | bounding_polygon[i][1]=redfearn(bounding_polygon[i][0], |
---|
3636 | bounding_polygon[i][1]) |
---|
3637 | |
---|
3638 | extent_res=1000000 |
---|
3639 | meshname = 'urs_test_mesh' + '.tsh' |
---|
3640 | interior_regions=None |
---|
3641 | boundary_tags={'ocean': [0,1], 'otherocean': [2,3,4]} |
---|
3642 | create_mesh_from_regions(bounding_polygon, |
---|
3643 | boundary_tags=boundary_tags, |
---|
3644 | maximum_triangle_area=extent_res, |
---|
3645 | filename=meshname, |
---|
3646 | interior_regions=interior_regions, |
---|
3647 | verbose=False) |
---|
3648 | |
---|
3649 | domain_fbound = Domain(meshname) |
---|
3650 | domain_fbound.set_quantity('stage', tide) |
---|
3651 | |
---|
3652 | Br = Reflective_boundary(domain_fbound) |
---|
3653 | Bd = Dirichlet_boundary([w+tide, u*(w+h), v*(w+h)]) |
---|
3654 | Bdefault = Dirichlet_boundary([w, u*(w+h), v*(w+h)]) |
---|
3655 | |
---|
3656 | Bf = Field_boundary(sts_file+'.sts', |
---|
3657 | domain_fbound, |
---|
3658 | mean_stage=tide, # Field boundary to adjust for tide |
---|
3659 | boundary_polygon=bounding_polygon, |
---|
3660 | default_boundary=Bdefault) # Condition to be used |
---|
3661 | # if model time exceeds |
---|
3662 | # available data |
---|
3663 | |
---|
3664 | domain_fbound.set_boundary({'ocean': Bf,'otherocean': Br}) |
---|
3665 | |
---|
3666 | # Check boundary object evaluates as it should |
---|
3667 | for i, ((vol_id, edge_id), B) in enumerate(domain_fbound.boundary_objects): |
---|
3668 | if B is Bf: |
---|
3669 | |
---|
3670 | qf = B.evaluate(vol_id, edge_id) # Field boundary |
---|
3671 | qd = Bd.evaluate(vol_id, edge_id) # Dirichlet boundary |
---|
3672 | |
---|
3673 | msg = 'Got %s, should have been %s' %(qf, qd) |
---|
3674 | assert num.allclose(qf, qd), msg |
---|
3675 | |
---|
3676 | # Evolve |
---|
3677 | data_finaltime = time_step*(time_step_count-1) |
---|
3678 | finaltime = data_finaltime + 10 # Let model time exceed available data |
---|
3679 | yieldstep = time_step |
---|
3680 | temp_fbound=num.zeros(int(finaltime/yieldstep)+1, num.float) |
---|
3681 | |
---|
3682 | for i, t in enumerate(domain_fbound.evolve(yieldstep=yieldstep, |
---|
3683 | finaltime=finaltime, |
---|
3684 | skip_initial_step=False)): |
---|
3685 | |
---|
3686 | D = domain_fbound |
---|
3687 | temp_fbound[i]=D.quantities['stage'].centroid_values[2] |
---|
3688 | |
---|
3689 | # Check that file boundary object has populated |
---|
3690 | # boundary array correctly |
---|
3691 | # FIXME (Ole): Do this for the other tests too! |
---|
3692 | for j, val in enumerate(D.get_quantity('stage').boundary_values): |
---|
3693 | |
---|
3694 | (vol_id, edge_id), B = D.boundary_objects[j] |
---|
3695 | if isinstance(B, Field_boundary): |
---|
3696 | msg = 'Got %f should have been %f' %(val, w+tide) |
---|
3697 | assert num.allclose(val, w + tide), msg |
---|
3698 | |
---|
3699 | |
---|
    def test_file_boundary_stsII(self):
        """test_file_boundary_stsII(self):

        mux2 file has points not included in boundary
        mux2 gauges are not stored with the same order as they are
        found in bounding_polygon. This does not matter as long as bounding
        polygon passed to file_function contains the mux2 points needed (in
        the correct order).
        """

        from anuga.shallow_water import Domain
        from anuga.shallow_water import Reflective_boundary
        from anuga.shallow_water import Dirichlet_boundary
        from anuga.shallow_water import File_boundary
        from anuga.pmesh.mesh_interface import create_mesh_from_regions

        # Polygon in (lat, lon); starts at a different vertex than in the
        # previous tests.
        bounding_polygon=[[6.01,97.0],[6.02,97.0],[6.02,97.02],[6.00,97.02],[6.0,97.0]]
        tide = -2.20
        time_step_count = 20
        time_step = 2
        # Gauges: two polygon vertices, the last polygon vertex, plus one
        # extra point [6.0, 97.01] that is NOT part of the boundary.
        lat_long_points=bounding_polygon[0:2]
        lat_long_points.insert(0,bounding_polygon[len(bounding_polygon)-1])
        lat_long_points.insert(0,[6.0,97.01])
        n=len(lat_long_points)
        first_tstep=num.ones(n,num.int)
        last_tstep=(time_step_count)*num.ones(n,num.int)
        # Constant synthetic field: depth 20, stage 2, velocities (10, -10).
        gauge_depth=20*num.ones(n,num.float)
        ha=2*num.ones((n,time_step_count),num.float)
        ua=10*num.ones((n,time_step_count),num.float)
        va=-10*num.ones((n,time_step_count),num.float)
        base_name, files = self.write_mux2(lat_long_points,
                                           time_step_count,
                                           time_step,
                                           first_tstep,
                                           last_tstep,
                                           depth=gauge_depth,
                                           ha=ha,
                                           ua=ua,
                                           va=va)

        # Convert mux2 to .sts, folding the tide in as mean_stage.
        sts_file=base_name
        urs2sts(base_name,sts_file,mean_stage=tide,verbose=False)
        self.delete_mux(files)

        # Convert the boundary polygon in place from (lat, lon) to UTM.
        #print 'start create mesh from regions'
        for i in range(len(bounding_polygon)):
            zone,\
            bounding_polygon[i][0],\
            bounding_polygon[i][1]=redfearn(bounding_polygon[i][0],
                                            bounding_polygon[i][1])

        extent_res=1000000
        meshname = 'urs_test_mesh' + '.tsh'
        interior_regions=None
        boundary_tags={'ocean': [0,1], 'otherocean': [2,3,4]}
        # have to change boundary tags from last example because now bounding
        # polygon starts in different place.
        create_mesh_from_regions(bounding_polygon,boundary_tags=boundary_tags,
                         maximum_triangle_area=extent_res,filename=meshname,
                         interior_regions=interior_regions,verbose=False)

        # Run 1: File_boundary on the 'ocean' edges, reflective elsewhere.
        domain_fbound = Domain(meshname)
        domain_fbound.set_quantity('stage', tide)
        Bf = File_boundary(sts_file+'.sts',
                           domain_fbound,
                           boundary_polygon=bounding_polygon)
        Br = Reflective_boundary(domain_fbound)

        domain_fbound.set_boundary({'ocean': Bf,'otherocean': Br})
        finaltime=time_step*(time_step_count-1)
        yieldstep=time_step
        temp_fbound=num.zeros(int(finaltime/yieldstep)+1,num.float)

        for i, t in enumerate(domain_fbound.evolve(yieldstep=yieldstep,
                                                   finaltime=finaltime,
                                                   skip_initial_step = False)):
            temp_fbound[i]=domain_fbound.quantities['stage'].centroid_values[2]

        # Run 2: equivalent Dirichlet boundary
        # [2+tide, 10*(2+20+tide), -10*(2+20+tide)].
        domain_drchlt = Domain(meshname)
        domain_drchlt.set_quantity('stage', tide)
        Br = Reflective_boundary(domain_drchlt)
        Bd = Dirichlet_boundary([2.0+tide,220+10*tide,-220-10*tide])
        domain_drchlt.set_boundary({'ocean': Bd,'otherocean': Br})
        temp_drchlt=num.zeros(int(finaltime/yieldstep)+1,num.float)

        for i, t in enumerate(domain_drchlt.evolve(yieldstep=yieldstep,
                                                   finaltime=finaltime,
                                                   skip_initial_step = False)):
            temp_drchlt[i]=domain_drchlt.quantities['stage'].centroid_values[2]


        # The two runs must agree in the sampled time series and in all
        # conserved quantities at the vertices.
        assert num.allclose(temp_fbound,temp_drchlt)

        #print domain_fbound.quantities['stage'].vertex_values
        #print domain_drchlt.quantities['stage'].vertex_values


        assert num.allclose(domain_fbound.quantities['stage'].vertex_values,
                            domain_drchlt.quantities['stage'].vertex_values)

        assert num.allclose(domain_fbound.quantities['xmomentum'].vertex_values,
                            domain_drchlt.quantities['xmomentum'].vertex_values)

        assert num.allclose(domain_fbound.quantities['ymomentum'].vertex_values,
                            domain_drchlt.quantities['ymomentum'].vertex_values)



        # Clean up generated files.
        os.remove(sts_file+'.sts')
        os.remove(meshname)
---|
3810 | |
---|
3811 | |
---|
3812 | |
---|
3813 | def test_file_boundary_stsIII_ordering(self): |
---|
3814 | """test_file_boundary_stsIII_ordering(self): |
---|
3815 | Read correct points from ordering file and apply sts to boundary |
---|
3816 | """ |
---|
3817 | from anuga.shallow_water import Domain |
---|
3818 | from anuga.shallow_water import Reflective_boundary |
---|
3819 | from anuga.shallow_water import Dirichlet_boundary |
---|
3820 | from anuga.shallow_water import File_boundary |
---|
3821 | from anuga.pmesh.mesh_interface import create_mesh_from_regions |
---|
3822 | |
---|
3823 | lat_long_points=[[6.01,97.0],[6.02,97.0],[6.05,96.9],[6.0,97.0]] |
---|
3824 | bounding_polygon=[[6.0,97.0],[6.01,97.0],[6.02,97.0], |
---|
3825 | [6.02,97.02],[6.00,97.02]] |
---|
3826 | tide = 3.0 |
---|
3827 | time_step_count = 50 |
---|
3828 | time_step = 2 |
---|
3829 | n=len(lat_long_points) |
---|
3830 | first_tstep=num.ones(n,num.int) |
---|
3831 | last_tstep=(time_step_count)*num.ones(n,num.int) |
---|
3832 | gauge_depth=20*num.ones(n,num.float) |
---|
3833 | ha=2*num.ones((n,time_step_count),num.float) |
---|
3834 | ua=10*num.ones((n,time_step_count),num.float) |
---|
3835 | va=-10*num.ones((n,time_step_count),num.float) |
---|
3836 | base_name, files = self.write_mux2(lat_long_points, |
---|
3837 | time_step_count, |
---|
3838 | time_step, |
---|
3839 | first_tstep, |
---|
3840 | last_tstep, |
---|
3841 | depth=gauge_depth, |
---|
3842 | ha=ha, |
---|
3843 | ua=ua, |
---|
3844 | va=va) |
---|
3845 | |
---|
3846 | # Write order file |
---|
3847 | file_handle, order_base_name = tempfile.mkstemp("") |
---|
3848 | os.close(file_handle) |
---|
3849 | os.remove(order_base_name) |
---|
3850 | d="," |
---|
3851 | order_file=order_base_name+'order.txt' |
---|
3852 | fid=open(order_file,'w') |
---|
3853 | |
---|
3854 | # Write Header |
---|
3855 | header='index, longitude, latitude\n' |
---|
3856 | fid.write(header) |
---|
3857 | indices=[3,0,1] |
---|
3858 | for i in indices: |
---|
3859 | line=str(i)+d+str(lat_long_points[i][1])+d+\ |
---|
3860 | str(lat_long_points[i][0])+"\n" |
---|
3861 | fid.write(line) |
---|
3862 | fid.close() |
---|
3863 | |
---|
3864 | sts_file=base_name |
---|
3865 | urs2sts(base_name, |
---|
3866 | basename_out=sts_file, |
---|
3867 | ordering_filename=order_file, |
---|
3868 | mean_stage=tide, |
---|
3869 | verbose=False) |
---|
3870 | self.delete_mux(files) |
---|
3871 | |
---|
3872 | boundary_polygon = create_sts_boundary(base_name) |
---|
3873 | |
---|
3874 | os.remove(order_file) |
---|
3875 | |
---|
3876 | # Append the remaining part of the boundary polygon to be defined by |
---|
3877 | # the user |
---|
3878 | bounding_polygon_utm=[] |
---|
3879 | for point in bounding_polygon: |
---|
3880 | zone,easting,northing=redfearn(point[0],point[1]) |
---|
3881 | bounding_polygon_utm.append([easting,northing]) |
---|
3882 | |
---|
3883 | boundary_polygon.append(bounding_polygon_utm[3]) |
---|
3884 | boundary_polygon.append(bounding_polygon_utm[4]) |
---|
3885 | |
---|
3886 | plot=False |
---|
3887 | if plot: |
---|
3888 | from pylab import plot,show,axis |
---|
3889 | boundary_polygon=ensure_numeric(boundary_polygon) |
---|
3890 | bounding_polygon_utm=ensure_numeric(bounding_polygon_utm) |
---|
3891 | #lat_long_points=ensure_numeric(lat_long_points) |
---|
3892 | #plot(lat_long_points[:,0],lat_long_points[:,1],'o') |
---|
3893 | plot(boundary_polygon[:,0], boundary_polygon[:,1],'d') |
---|
3894 | plot(bounding_polygon_utm[:,0],bounding_polygon_utm[:,1],'o') |
---|
3895 | show() |
---|
3896 | |
---|
3897 | assert num.allclose(bounding_polygon_utm,boundary_polygon) |
---|
3898 | |
---|
3899 | |
---|
3900 | extent_res=1000000 |
---|
3901 | meshname = 'urs_test_mesh' + '.tsh' |
---|
3902 | interior_regions=None |
---|
3903 | boundary_tags={'ocean': [0,1], 'otherocean': [2,3,4]} |
---|
3904 | |
---|
3905 | # have to change boundary tags from last example because now bounding |
---|
3906 | # polygon starts in different place. |
---|
3907 | create_mesh_from_regions(boundary_polygon,boundary_tags=boundary_tags, |
---|
3908 | maximum_triangle_area=extent_res,filename=meshname, |
---|
3909 | interior_regions=interior_regions,verbose=False) |
---|
3910 | |
---|
3911 | domain_fbound = Domain(meshname) |
---|
3912 | domain_fbound.set_quantity('stage', tide) |
---|
3913 | Bf = File_boundary(sts_file+'.sts', |
---|
3914 | domain_fbound, |
---|
3915 | boundary_polygon=boundary_polygon) |
---|
3916 | Br = Reflective_boundary(domain_fbound) |
---|
3917 | |
---|
3918 | domain_fbound.set_boundary({'ocean': Bf,'otherocean': Br}) |
---|
3919 | finaltime=time_step*(time_step_count-1) |
---|
3920 | yieldstep=time_step |
---|
3921 | temp_fbound=num.zeros(int(finaltime/yieldstep)+1,num.float) |
---|
3922 | |
---|
3923 | for i, t in enumerate(domain_fbound.evolve(yieldstep=yieldstep, |
---|
3924 | finaltime=finaltime, |
---|
3925 | skip_initial_step = False)): |
---|
3926 | temp_fbound[i]=domain_fbound.quantities['stage'].centroid_values[2] |
---|
3927 | |
---|
3928 | |
---|
3929 | domain_drchlt = Domain(meshname) |
---|
3930 | domain_drchlt.set_quantity('stage', tide) |
---|
3931 | Br = Reflective_boundary(domain_drchlt) |
---|
3932 | Bd = Dirichlet_boundary([2.0+tide,220+10*tide,-220-10*tide]) |
---|
3933 | domain_drchlt.set_boundary({'ocean': Bd,'otherocean': Br}) |
---|
3934 | temp_drchlt=num.zeros(int(finaltime/yieldstep)+1,num.float) |
---|
3935 | |
---|
3936 | for i, t in enumerate(domain_drchlt.evolve(yieldstep=yieldstep, |
---|
3937 | finaltime=finaltime, |
---|
3938 | skip_initial_step = False)): |
---|
3939 | temp_drchlt[i]=domain_drchlt.quantities['stage'].centroid_values[2] |
---|
3940 | |
---|
3941 | |
---|
3942 | #print domain_fbound.quantities['stage'].vertex_values |
---|
3943 | #print domain_drchlt.quantities['stage'].vertex_values |
---|
3944 | |
---|
3945 | assert num.allclose(temp_fbound,temp_drchlt) |
---|
3946 | |
---|
3947 | |
---|
3948 | assert num.allclose(domain_fbound.quantities['stage'].vertex_values, |
---|
3949 | domain_drchlt.quantities['stage'].vertex_values) |
---|
3950 | |
---|
3951 | assert num.allclose(domain_fbound.quantities['xmomentum'].vertex_values, |
---|
3952 | domain_drchlt.quantities['xmomentum'].vertex_values) |
---|
3953 | |
---|
3954 | assert num.allclose(domain_fbound.quantities['ymomentum'].vertex_values, |
---|
3955 | domain_drchlt.quantities['ymomentum'].vertex_values) |
---|
3956 | |
---|
3957 | # Use known Dirichlet condition (if sufficient timesteps have been taken) |
---|
3958 | |
---|
3959 | #FIXME: What do these assertions test? Also do they assume tide =0 |
---|
3960 | #print domain_fbound.quantities['stage'].vertex_values |
---|
3961 | #assert allclose(domain_drchlt.quantities['stage'].vertex_values[6], 2) |
---|
3962 | #assert allclose(domain_fbound.quantities['stage'].vertex_values[6], 2) |
---|
3963 | |
---|
3964 | |
---|
3965 | |
---|
3966 | try: |
---|
3967 | os.remove(sts_file+'.sts') |
---|
3968 | except: |
---|
3969 | # Windoze can't remove this file for some reason |
---|
3970 | pass |
---|
3971 | |
---|
3972 | os.remove(meshname) |
---|
3973 | |
---|
3974 | |
---|
3975 | |
---|
def test_file_boundary_stsIV_sinewave_ordering(self):
    """test_file_boundary_stsIV_sinewave_ordering(self):
    Read correct points from ordering file and apply sts to boundary
    This one uses a sine wave and compares to time boundary
    """

    from anuga.shallow_water import Domain
    from anuga.shallow_water import Reflective_boundary
    from anuga.shallow_water import Dirichlet_boundary
    from anuga.shallow_water import File_boundary
    from anuga.pmesh.mesh_interface import create_mesh_from_regions

    # Gauge locations as (lat, long) and the remainder of the bounding
    # polygon that the user supplies (in lat/long as well).
    lat_long_points=[[6.01,97.0],[6.02,97.0],[6.05,96.9],[6.0,97.0]]
    bounding_polygon=[[6.0,97.0],[6.01,97.0],[6.02,97.0],[6.02,97.02],[6.00,97.02]]
    tide = 0.35
    time_step_count = 50
    time_step = 0.1
    times_ref = num.arange(0, time_step_count*time_step, time_step)

    n=len(lat_long_points)
    first_tstep=num.ones(n,num.int)
    last_tstep=(time_step_count)*num.ones(n,num.int)

    gauge_depth=20*num.ones(n,num.float)

    # Stage is a sine wave of the reference times; velocities are the
    # constants ua=3, va=2 at every gauge and time step.
    ha1=num.ones((n,time_step_count),num.float)
    ua1=3.*num.ones((n,time_step_count),num.float)
    va1=2.*num.ones((n,time_step_count),num.float)
    for i in range(n):
        ha1[i]=num.sin(times_ref)


    base_name, files = self.write_mux2(lat_long_points,
                                       time_step_count, time_step,
                                       first_tstep, last_tstep,
                                       depth=gauge_depth,
                                       ha=ha1,
                                       ua=ua1,
                                       va=va1)

    # Write order file
    file_handle, order_base_name = tempfile.mkstemp("")
    os.close(file_handle)
    os.remove(order_base_name)
    d=","
    order_file=order_base_name+'order.txt'
    fid=open(order_file,'w')

    # Write Header
    header='index, longitude, latitude\n'
    fid.write(header)
    # Gauge 2 is deliberately left out of the ordering file; urs2sts
    # should only use gauges 3, 0 and 1 (in that order).
    indices=[3,0,1]
    for i in indices:
        line=str(i)+d+str(lat_long_points[i][1])+d+\
            str(lat_long_points[i][0])+"\n"
        fid.write(line)
    fid.close()

    sts_file=base_name
    urs2sts(base_name, basename_out=sts_file,
            ordering_filename=order_file,
            mean_stage=tide,
            verbose=False)
    self.delete_mux(files)



    # Now read the sts file and check that values have been stored correctly.
    fid = NetCDFFile(sts_file + '.sts')

    # Check the time vector
    times = fid.variables['time'][:]

    #print times

    # Check sts quantities
    stage = fid.variables['stage'][:]
    xmomentum = fid.variables['xmomentum'][:]
    ymomentum = fid.variables['ymomentum'][:]
    elevation = fid.variables['elevation'][:]

    #print stage
    #print xmomentum
    #print ymomentum
    #print elevation



    # Create beginnings of boundary polygon based on sts_boundary
    boundary_polygon = create_sts_boundary(base_name)

    os.remove(order_file)

    # Append the remaining part of the boundary polygon to be defined by
    # the user
    bounding_polygon_utm=[]
    for point in bounding_polygon:
        zone,easting,northing=redfearn(point[0],point[1])
        bounding_polygon_utm.append([easting,northing])

    boundary_polygon.append(bounding_polygon_utm[3])
    boundary_polygon.append(bounding_polygon_utm[4])

    #print 'boundary_polygon', boundary_polygon

    # Optional visual debugging aid; disabled by default.
    plot=False
    if plot:
        from pylab import plot,show,axis
        boundary_polygon=ensure_numeric(boundary_polygon)
        bounding_polygon_utm=ensure_numeric(bounding_polygon_utm)
        #plot(lat_long_points[:,0],lat_long_points[:,1],'o')
        plot(boundary_polygon[:,0], boundary_polygon[:,1])
        plot(bounding_polygon_utm[:,0],bounding_polygon_utm[:,1])
        show()

    assert num.allclose(bounding_polygon_utm,boundary_polygon)


    extent_res=1000000
    meshname = 'urs_test_mesh' + '.tsh'
    interior_regions=None
    boundary_tags={'ocean': [0,1], 'otherocean': [2,3,4]}

    # have to change boundary tags from last example because now bounding
    # polygon starts in different place.
    create_mesh_from_regions(boundary_polygon,
                             boundary_tags=boundary_tags,
                             maximum_triangle_area=extent_res,
                             filename=meshname,
                             interior_regions=interior_regions,
                             verbose=False)

    # Run 1: boundary driven by the sts file via File_boundary.
    domain_fbound = Domain(meshname)
    domain_fbound.set_quantity('stage', tide)
    Bf = File_boundary(sts_file+'.sts',
                       domain_fbound,
                       boundary_polygon=boundary_polygon)
    Br = Reflective_boundary(domain_fbound)

    domain_fbound.set_boundary({'ocean': Bf,'otherocean': Br})
    finaltime=time_step*(time_step_count-1)
    yieldstep=time_step
    temp_fbound=num.zeros(int(finaltime/yieldstep)+1,num.float)

    for i, t in enumerate(domain_fbound.evolve(yieldstep=yieldstep,
                                               finaltime=finaltime,
                                               skip_initial_step=False)):
        temp_fbound[i]=domain_fbound.quantities['stage'].centroid_values[2]


    # Run 2: reference domain with a Time_boundary prescribing the same
    # sine wave (stage and momenta) analytically.
    domain_time = Domain(meshname)
    domain_time.set_quantity('stage', tide)
    Br = Reflective_boundary(domain_time)
    Bw = Time_boundary(domain=domain_time,
                       f=lambda t: [num.sin(t)+tide,3.*(20.+num.sin(t)+tide),2.*(20.+num.sin(t)+tide)])
    domain_time.set_boundary({'ocean': Bw,'otherocean': Br})

    temp_time=num.zeros(int(finaltime/yieldstep)+1,num.float)
    for i, t in enumerate(domain_time.evolve(yieldstep=yieldstep,
                                             finaltime=finaltime,
                                             skip_initial_step=False)):
        temp_time[i]=domain_time.quantities['stage'].centroid_values[2]



    #print temp_fbound
    #print temp_time

    #print domain_fbound.quantities['stage'].vertex_values
    #print domain_time.quantities['stage'].vertex_values

    # The two runs must agree: sampled stage history at centroid 2 and
    # all final vertex values.
    assert num.allclose(temp_fbound, temp_time)
    assert num.allclose(domain_fbound.quantities['stage'].vertex_values,
                        domain_time.quantities['stage'].vertex_values)

    assert num.allclose(domain_fbound.quantities['xmomentum'].vertex_values,
                        domain_time.quantities['xmomentum'].vertex_values)

    assert num.allclose(domain_fbound.quantities['ymomentum'].vertex_values,
                        domain_time.quantities['ymomentum'].vertex_values)


    try:
        os.remove(sts_file+'.sts')
    except:
        # Windoze can't remove this file for some reason
        pass

    os.remove(meshname)
---|
4165 | |
---|
4166 | |
---|
4167 | |
---|
4168 | |
---|
4169 | |
---|
def test_file_boundary_sts_time_limit(self):
    """test_file_boundary_sts_time_limit(self):
    Read correct points from ordering file and apply sts to boundary.
    This one uses a sine wave and compares to time boundary.

    This one tests that times used can be limited by an upper limit,
    and that a time_limit earlier than the start time is rejected
    with an AssertionError.
    """

    from anuga.shallow_water import Domain
    from anuga.shallow_water import Reflective_boundary
    from anuga.shallow_water import Dirichlet_boundary
    from anuga.shallow_water import File_boundary
    from anuga.pmesh.mesh_interface import create_mesh_from_regions

    # Gauge locations as (lat, long) and the remainder of the bounding
    # polygon supplied by the user.
    lat_long_points=[[6.01,97.0],[6.02,97.0],[6.05,96.9],[6.0,97.0]]
    bounding_polygon=[[6.0,97.0],[6.01,97.0],[6.02,97.0],[6.02,97.02],[6.00,97.02]]
    tide = 0.35
    time_step_count = 50
    time_step = 0.1
    times_ref = num.arange(0, time_step_count*time_step, time_step)

    n=len(lat_long_points)
    first_tstep=num.ones(n,num.int)
    last_tstep=(time_step_count)*num.ones(n,num.int)

    gauge_depth=20*num.ones(n,num.float)

    # Sinusoidal stage; constant velocities ua=3, va=2.
    ha1=num.ones((n,time_step_count),num.float)
    ua1=3.*num.ones((n,time_step_count),num.float)
    va1=2.*num.ones((n,time_step_count),num.float)
    for i in range(n):
        ha1[i]=num.sin(times_ref)


    base_name, files = self.write_mux2(lat_long_points,
                                       time_step_count, time_step,
                                       first_tstep, last_tstep,
                                       depth=gauge_depth,
                                       ha=ha1,
                                       ua=ua1,
                                       va=va1)

    # Write order file (gauge 2 deliberately omitted).
    file_handle, order_base_name = tempfile.mkstemp("")
    os.close(file_handle)
    os.remove(order_base_name)
    d=","
    order_file=order_base_name+'order.txt'
    fid=open(order_file,'w')

    # Write Header
    header='index, longitude, latitude\n'
    fid.write(header)
    indices=[3,0,1]
    for i in indices:
        line=str(i)+d+str(lat_long_points[i][1])+d+\
            str(lat_long_points[i][0])+"\n"
        fid.write(line)
    fid.close()

    sts_file=base_name
    urs2sts(base_name, basename_out=sts_file,
            ordering_filename=order_file,
            mean_stage=tide,
            verbose=False)
    self.delete_mux(files)



    # Now read the sts file and check that values have been stored correctly.
    fid = NetCDFFile(sts_file + '.sts')

    # Check the time vector and remember the recorded start time.
    times = fid.variables['time'][:]
    starttime = fid.starttime[0]

    # Check sts quantities
    stage = fid.variables['stage'][:]
    xmomentum = fid.variables['xmomentum'][:]
    ymomentum = fid.variables['ymomentum'][:]
    elevation = fid.variables['elevation'][:]



    # Create beginnings of boundary polygon based on sts_boundary
    boundary_polygon = create_sts_boundary(base_name)

    os.remove(order_file)

    # Append the remaining part of the boundary polygon to be defined by
    # the user
    bounding_polygon_utm=[]
    for point in bounding_polygon:
        zone,easting,northing=redfearn(point[0],point[1])
        bounding_polygon_utm.append([easting,northing])

    boundary_polygon.append(bounding_polygon_utm[3])
    boundary_polygon.append(bounding_polygon_utm[4])


    assert num.allclose(bounding_polygon_utm,boundary_polygon)


    extent_res=1000000
    meshname = 'urs_test_mesh' + '.tsh'
    interior_regions=None
    boundary_tags={'ocean': [0,1], 'otherocean': [2,3,4]}

    # have to change boundary tags from last example because now bounding
    # polygon starts in different place.
    create_mesh_from_regions(boundary_polygon,
                             boundary_tags=boundary_tags,
                             maximum_triangle_area=extent_res,
                             filename=meshname,
                             interior_regions=interior_regions,
                             verbose=False)

    domain_fbound = Domain(meshname)
    domain_fbound.set_quantity('stage', tide)


    # Without a time_limit all reference times must be available.
    Bf = File_boundary(sts_file+'.sts',
                       domain_fbound,
                       boundary_polygon=boundary_polygon)
    time_vec = Bf.F.get_time()
    assert num.allclose(Bf.F.starttime, starttime)
    assert num.allclose(time_vec, times_ref)

    # With a time_limit, no returned time may reach the limit.
    for time_limit in [0.1, 0.2, 0.5, 1.0, 2.2, 3.0, 4.3, 6.0, 10.0]:
        Bf = File_boundary(sts_file+'.sts',
                           domain_fbound,
                           time_limit=time_limit+starttime,
                           boundary_polygon=boundary_polygon)

        time_vec = Bf.F.get_time()
        assert num.allclose(Bf.F.starttime, starttime)
        assert num.alltrue(time_vec < time_limit)


    # A time_limit before the start time must raise AssertionError.
    # (Removed stray debug 'print time_vec' that ran before the failure
    # was reported.)
    try:
        Bf = File_boundary(sts_file+'.sts',
                           domain_fbound,
                           time_limit=-1+starttime,
                           boundary_polygon=boundary_polygon)
        time_vec = Bf.F.get_time()
    except AssertionError:
        pass
    else:
        # Modernised from the removed 'raise Exception, msg' form.
        raise Exception('Should have raised Exception here')

    # Clean up files created by this test, consistent with the sibling
    # boundary tests above.
    try:
        os.remove(sts_file+'.sts')
    except OSError:
        # Windoze can't remove this file for some reason
        pass
    os.remove(meshname)
---|
4324 | |
---|
def test_lon_lat2grid(self):
    """Check that lon_lat2grid unpacks a flat (lon, lat, value) point
    list into unique longitude and latitude axes plus a quantity vector
    (reordered into grid layout: 1, 2, 3, 4 for this input).
    """
    lonlatdep = [
        [ 113.06700134 , -26.06669998 , 1. ] ,
        [ 113.06700134 , -26.33329964 , 3. ] ,
        [ 113.19999695 , -26.06669998 , 2. ] ,
        [ 113.19999695 , -26.33329964 , 4. ] ]

    # 'lon' rather than 'long' so the builtin is not shadowed.
    lon, lat, quantity = lon_lat2grid(lonlatdep)

    # Latitudes are taken from the first block of rows (lat varies fastest).
    for i, result in enumerate(lat):
        assert lonlatdep[i][1] == result
    assert len(lat) == 2

    # Longitudes repeat every len(lat) rows.
    for i, result in enumerate(lon):
        assert lonlatdep[i*2][0] == result
    assert len(lon) == 2

    # Quantity values come back in grid order.
    for i, q in enumerate(quantity):
        assert q == i+1
---|
4344 | |
---|
def test_lon_lat2grid_bad(self):
    """lon_lat2grid must reject points that are not laid out in the
    expected lon-major grid order (this input is lat-major).
    """
    lonlatdep = [
        [ -26.06669998, 113.06700134, 1. ],
        [ -26.06669998 , 113.19999695 , 2. ],
        [ -26.06669998 , 113.33300018, 3. ],
        [ -26.06669998 , 113.43299866 , 4. ],
        [ -26.20000076 , 113.06700134, 5. ],
        [ -26.20000076 , 113.19999695 , 6. ],
        [ -26.20000076 , 113.33300018 , 7. ],
        [ -26.20000076 , 113.43299866 , 8. ],
        [ -26.33329964 , 113.06700134, 9. ],
        [ -26.33329964 , 113.19999695 , 10. ],
        [ -26.33329964 , 113.33300018 , 11. ],
        [ -26.33329964 , 113.43299866 , 12. ],
        [ -26.43330002 , 113.06700134 , 13 ],
        [ -26.43330002 , 113.19999695 , 14. ],
        [ -26.43330002 , 113.33300018, 15. ],
        [ -26.43330002 , 113.43299866, 16. ]]
    try:
        lon, lat, quantity = lon_lat2grid(lonlatdep)
    except AssertionError:
        pass
    else:
        # Was 'raise msg': raising a plain string is a TypeError on
        # Python >= 2.6 and hides the real test failure.
        raise Exception('Should have raised exception')
---|
4370 | |
---|
def test_lon_lat2gridII(self):
    """lon_lat2grid must reject an irregular grid: the last latitude
    (-26.344329964) does not match the other rows' -26.33329964.
    """
    lonlatdep = [
        [ 113.06700134 , -26.06669998 , 1. ] ,
        [ 113.06700134 , -26.33329964 , 2. ] ,
        [ 113.19999695 , -26.06669998 , 3. ] ,
        [ 113.19999695 , -26.344329964 , 4. ] ]
    try:
        lon, lat, quantity = lon_lat2grid(lonlatdep)
    except AssertionError:
        pass
    else:
        # Was 'raise msg': raising a plain string is a TypeError on
        # Python >= 2.6 and hides the real test failure.
        raise Exception('Should have raised exception')
---|
4384 | |
---|
4385 | #### END TESTS FOR URS 2 SWW ### |
---|
4386 | |
---|
4387 | #### TESTS URS UNGRIDDED 2 SWW ### |
---|
def test_URS_points_needed(self):
    """URS_points_needed should return every URS grid point required to
    cover the given UTM boundary polygon.

    Membership of known reference points is checked with num.allclose
    rather than set operations because the returned lat/longs differ
    from the analytic values in the last decimals (see note below).
    """
    ll_lat = -21.5
    ll_long = 114.5
    grid_spacing = 1./60.
    lat_amount = 30
    long_amount = 30
    zone = 50

    boundary_polygon = [[250000,7660000],[280000,7660000],
                        [280000,7630000],[250000,7630000]]
    geo=URS_points_needed(boundary_polygon, zone,
                          ll_lat, ll_long, grid_spacing,
                          lat_amount, long_amount,
                          verbose=self.verbose)
    # to test this geo, can info from it be transfered onto the boundary
    # poly?
    #Maybe see if I can fit the data to the polygon - have to represent
    # the poly as points though.
    #geo.export_points_file("results.txt", as_lat_long=True)
    results = frozenset(geo.get_data_points(as_lat_long=True))
    #print 'results',results

    # These are a set of points that have to be in results
    points = []
    for i in range(18):
        lat = -21.0 - 8./60 - grid_spacing * i
        points.append((lat,degminsec2decimal_degrees(114,35,0)))
        points.append((lat,degminsec2decimal_degrees(114,36,0)))
        points.append((lat,degminsec2decimal_degrees(114,52,0)))
        points.append((lat,degminsec2decimal_degrees(114,53,0)))
    geo_answer = Geospatial_data(data_points=points,
                                 points_are_lats_longs=True)
    #geo_answer.export_points_file("answer.txt", as_lat_long=True)
    answer = frozenset(points)

    outs = answer.difference(results)
    #print "outs", outs
    # This doesn't work.  Though visualising the results shows that
    # it is correct.
    #assert answer.issubset(results)
    # this is why;
    #point (-21.133333333333333, 114.58333333333333)
    #result (-21.133333332232368, 114.58333333300342)

    for point in points:
        found = False
        for result in results:
            if num.allclose(point, result):
                found = True
                break
        # Was 'if not found: assert False' -- a named assertion reports
        # which point is actually missing.
        assert found, 'point %s not found in URS results' % str(point)
---|
4441 | |
---|
4442 | |
---|
def dave_test_URS_points_needed(self):
    """Smoke-run URS_points_needed_to_file on a small rectangular UTM
    polygon; output is written under the basename 'a_test_example'.
    No assertions -- only checks that the call completes.
    """
    lower_left_lat = -21.51667
    lower_left_long = 114.51667
    spacing = 2.0 / 60.0
    n_lat = 15
    n_long = 15

    # Rectangle in UTM coordinates (easting, northing).
    rect = [[250000, 7660000], [280000, 7660000],
            [280000, 7630000], [250000, 7630000]]

    URS_points_needed_to_file('a_test_example', rect,
                              lower_left_lat, lower_left_long, spacing,
                              n_lat, n_long,
                              verbose=self.verbose)
---|
4457 | |
---|
def X_test_URS_points_neededII(self):
    # Disabled test (X_ prefix).  NOTE(review): if re-enabled as-is this
    # raises NameError -- 'boundary_polygon' is only defined in the
    # commented-out lines below -- and URS_points_needed is called
    # without the zone argument used elsewhere; confirm the intended
    # signature before reviving.
    ll_lat = -21.5
    ll_long = 114.5
    grid_spacing = 1./60.
    lat_amount = 30
    long_amount = 30

    # change this so lats and longs are inputed, then converted

    #boundary_polygon = [[7660000,250000],[7660000,280000],
    #                    [7630000,280000],[7630000,250000]]
    URS_points_needed(boundary_polygon, ll_lat, ll_long, grid_spacing,
                      lat_amount, long_amount,
                      verbose=self.verbose)
---|
4472 | |
---|
def test_URS_points_northern_hemisphere(self):
    """Exercise URS_points_needed with isSouthHemisphere=False on a
    small 2x2 grid in UTM zone 47 and check that the expected grid
    points are present in the result."""

    LL_LAT = 8.0
    LL_LONG = 97.0
    GRID_SPACING = 2.0/60.0
    LAT_AMOUNT = 2
    LONG_AMOUNT = 2
    ZONE = 47

    #
    points = []
    for i in range(2):
        for j in range(2):
            # NOTE(review): 'j' is unused -- both coordinates step with
            # 'i', so each point is appended twice per outer iteration.
            # Looks like an i/j mix-up; confirm the intended point set
            # before changing, since the assertions below pass as-is.
            points.append((degminsec2decimal_degrees(8,1+i*2,0),
                           degminsec2decimal_degrees(97,1+i*2,0)))
    #print "points", points
    geo_poly = Geospatial_data(data_points=points,
                               points_are_lats_longs=True)
    poly_lat_long = geo_poly.get_data_points(as_lat_long=False,
                                             isSouthHemisphere=False)
    #print "seg_lat_long", poly_lat_long

    # geo=URS_points_needed_to_file('test_example_poly3', poly_lat_long,
    #                               ZONE,
    #                               LL_LAT, LL_LONG,
    #                               GRID_SPACING,
    #                               LAT_AMOUNT, LONG_AMOUNT,
    #                               isSouthernHemisphere=False,
    #                               export_csv=True,
    #                               verbose=self.verbose)

    geo=URS_points_needed(poly_lat_long,
                          ZONE,
                          LL_LAT, LL_LONG,
                          GRID_SPACING,
                          LAT_AMOUNT, LONG_AMOUNT,
                          isSouthHemisphere=False,
                          verbose=self.verbose)

    results = frozenset(geo.get_data_points(as_lat_long=True,
                                            isSouthHemisphere=False))
    #print 'results',results

    # These are a set of points that have to be in results
    points = []
    for i in range(2):
        for j in range(2):
            # NOTE(review): same unused-'j' pattern as above.
            points.append((degminsec2decimal_degrees(8,i*2,0),
                           degminsec2decimal_degrees(97,i*2,0)))
    #print "answer points", points
    answer = frozenset(points)

    # Membership is checked with allclose rather than set operations
    # because the returned coordinates differ in the last decimals.
    for point in points:
        found = False
        for result in results:
            if num.allclose(point, result):
                found = True
                break
        if not found:
            assert False
---|
4533 | |
---|
4534 | |
---|
def covered_in_other_tests_test_URS_points_needed_poly1(self):
    """Smoke-run URS_points_needed_to_file with the FESA 2007
    parameters (southern hemisphere, UTM zone 51).  Kept for reference;
    the behaviour is covered by other tests.
    """
    zone = 51
    lower_left_lat = -50.0
    lower_left_long = 80.0
    spacing = 2.0 / 60.0
    lat_count = 4800
    long_count = 3600

    # Model domain outline in UTM (easting, northing).
    outline = [[296361.89, 8091928.62],
               [429495.07, 8028278.82],
               [447230.56, 8000674.05],
               [429661.2, 7982177.6],
               [236945.9, 7897453.16],
               [183493.44, 7942782.27],
               [226583.04, 8008058.96]]

    URS_points_needed_to_file('test_example_poly2', outline,
                              zone,
                              lower_left_lat, lower_left_long,
                              spacing,
                              lat_count, long_count,
                              verbose=self.verbose)
---|
4559 | |
---|
4560 | |
---|
4561 | |
---|
def covered_in_other_tests_test_URS_points_needed_poly2(self):
    """Smoke-run URS_points_needed_to_file with the 2004 validation
    parameters (northern hemisphere, UTM zone 47).  Kept for reference;
    the behaviour is covered by other tests.
    """
    zone = 47
    lower_left_lat = 0.0
    lower_left_long = 90.0
    spacing = 2.0/60.0
    # Grid counts derived from the 15N / 100E upper corner.
    lat_count = (15 - lower_left_lat) / spacing
    long_count = (100 - lower_left_long) / spacing

    # Model domain outline in UTM (easting, northing).
    outline = [[419336.424, 810100.845],
               [342405.0281, 711455.8026],
               [274649.9152, 723352.9603],
               [272089.092, 972393.0131],
               [347633.3754, 968551.7784],
               [427979.2022, 885965.2313],
               [427659.0993, 875721.9386],
               [429259.6138, 861317.3083],
               [436301.8775, 840830.723]]

    URS_points_needed_to_file('test_example_poly2', outline,
                              zone,
                              lower_left_lat, lower_left_long,
                              spacing,
                              lat_count, long_count,
                              isSouthernHemisphere=False,
                              verbose=self.verbose)
---|
4589 | |
---|
4590 | #### END TESTS URS UNGRIDDED 2 SWW ### |
---|
def test_Urs_points(self):
    """Check Urs_points iteration over each written mux file: header
    metadata, per-point geometry, and one quantity slice per time step.
    """
    time_step_count = 3
    time_step = 2
    lat_long_points =[(-21.5,114.5),(-21.5,115),(-21.,115)]
    base_name, files = self.write_mux(lat_long_points,
                                      time_step_count, time_step)
    for filename in files:   # renamed from 'file' (shadowed builtin)
        urs = Urs_points(filename)
        assert time_step_count == urs.time_step_count
        assert time_step == urs.time_step

        # write_mux stores the point's northing in the depth slot.
        for lat_lon, dep in map(None, lat_long_points, urs.lonlatdep):
            _ , e, n = redfearn(lat_lon[0], lat_lon[1])
            assert num.allclose(n, dep[2])

        count = 0
        for urs_slice in urs:   # renamed from 'slice' (shadowed builtin)
            count += 1
            #print urs_slice
            for lat_lon, quantity in map(None, lat_long_points, urs_slice):
                _ , e, n = redfearn(lat_lon[0], lat_lon[1])
                # write_mux uses the easting for the waveheight and
                # north-velocity files, and the northing for the
                # east-velocity file (identified by filename suffix).
                if filename[-5:] == WAVEHEIGHT_MUX_LABEL[-5:] or \
                   filename[-5:] == NORTH_VELOCITY_LABEL[-5:] :
                    assert num.allclose(e, quantity)
                if filename[-5:] == EAST_VELOCITY_LABEL[-5:]:
                    assert num.allclose(n, quantity)
        assert count == time_step_count

    self.delete_mux(files)
---|
4623 | |
---|
def test_urs_ungridded2sww (self):
    """Convert a set of mux files to sww via urs_ungridded2sww and
    verify coordinates, times, stage, momenta and elevation.

    The expected values rely on how self.write_mux encodes the gauge
    position into the quantities (see the momentum derivations below).
    """

    #Zone:   50
    #Easting:  240992.578  Northing: 7620442.472
    #Latitude:   -21  30 ' 0.00000 ''  Longitude: 114  30 ' 0.00000 ''
    lat_long = [[-21.5,114.5],[-21,114.5],[-21,115]]
    time_step_count = 2
    time_step = 400
    tide = 9000000
    base_name, files = self.write_mux(lat_long,
                                      time_step_count, time_step)
    urs_ungridded2sww(base_name, mean_stage=tide,
                      verbose=self.verbose)

    # now I want to check the sww file ...
    sww_file = base_name + '.sww'

    #Let's interigate the sww file
    # Note, the sww info is not gridded.  It is point data.
    fid = NetCDFFile(sww_file)

    # Make x and y absolute
    x = fid.variables['x'][:]
    y = fid.variables['y'][:]
    geo_reference = Geo_reference(NetCDFObject=fid)
    points = geo_reference.get_absolute(map(None, x, y))
    points = ensure_numeric(points)
    x = points[:,0]
    y = points[:,1]

    #Check that first coordinate is correctly represented
    #Work out the UTM coordinates for first point
    zone, e, n = redfearn(lat_long[0][0], lat_long[0][1])
    assert num.allclose([x[0],y[0]], [e,n])

    #Check the time vector
    times = fid.variables['time'][:]

    times_actual = []
    for i in range(time_step_count):
        times_actual.append(time_step * i)

    assert num.allclose(ensure_numeric(times),
                        ensure_numeric(times_actual))

    #Check first value
    stage = fid.variables['stage'][:]
    xmomentum = fid.variables['xmomentum'][:]
    ymomentum = fid.variables['ymomentum'][:]
    elevation = fid.variables['elevation'][:]
    # write_mux used the easting as the raw stage, so the stored stage
    # is easting plus the mean_stage offset.
    assert num.allclose(stage[0,0], e +tide) #Meters


    #Check the momentums - ua
    #momentum = velocity*(stage-elevation)
    # elevation = - depth
    #momentum = velocity_ua *(stage+depth)
    # = n*(e+tide+n) based on how I'm writing these files
    #
    answer_x = n*(e+tide+n)
    actual_x = xmomentum[0,0]
    #print "answer_x",answer_x
    #print "actual_x",actual_x
    assert num.allclose(answer_x, actual_x)  #Meters

    #Check the momentums - va
    #momentum = velocity*(stage-elevation)
    # elevation = - depth
    #momentum = velocity_va *(stage+depth)
    # = e*(e+tide+n) based on how I'm writing these files
    #
    answer_y = -1*e*(e+tide+n)
    actual_y = ymomentum[0,0]
    #print "answer_y",answer_y
    #print "actual_y",actual_y
    assert num.allclose(answer_y, actual_y)  #Meters

    # check the stage values, first time step.
    # These arrays are equal since the Easting values were used as
    # the stage
    assert num.allclose(stage[0], x +tide)  #Meters
    # check the elevation values.
    # -ve since urs measures depth, sww meshers height,
    # these arrays are equal since the northing values were used as
    # the elevation
    assert num.allclose(-elevation, y)  #Meters

    fid.close()
    self.delete_mux(files)
    os.remove(sww_file)
---|
4714 | |
---|
    def test_urs_ungridded2swwII (self):
        """Convert ungridded URS mux data to an sww file, passing an
        explicit Geo_reference object as ``origin``, then verify the
        coordinates, time vector, stage, momenta and elevation stored
        in the resulting file.

        The mux files written by self.write_mux encode easting as stage,
        northing as depth, northing as x-velocity and -easting as
        y-velocity (see the momentum comments below), which makes the
        expected values computable from redfearn() alone.
        """
        #Zone:   50
        #Easting:  240992.578  Northing: 7620442.472
        #Latitude:   -21  30 ' 0.00000 ''  Longitude: 114  30 ' 0.00000 ''
        lat_long = [[-21.5,114.5],[-21,114.5],[-21,115]]
        time_step_count = 2
        time_step = 400
        tide = 9000000
        geo_reference = Geo_reference(50, 3434543,34534543)
        base_name, files = self.write_mux(lat_long,
                                          time_step_count, time_step)
        urs_ungridded2sww(base_name, mean_stage=tide, origin = geo_reference,
                          verbose=self.verbose)

        # now I want to check the sww file ...
        sww_file = base_name + '.sww'

        #Let's interrogate the sww file
        # Note, the sww info is not gridded.  It is point data.
        fid = NetCDFFile(sww_file)

        # Make x and y absolute (they are stored relative to the
        # georeference written into the sww file).
        x = fid.variables['x'][:]
        y = fid.variables['y'][:]
        geo_reference = Geo_reference(NetCDFObject=fid)
        points = geo_reference.get_absolute(map(None, x, y))
        points = ensure_numeric(points)
        x = points[:,0]
        y = points[:,1]

        #Check that first coordinate is correctly represented
        #Work out the UTM coordinates for first point
        zone, e, n = redfearn(lat_long[0][0], lat_long[0][1])
        assert num.allclose([x[0],y[0]], [e,n])

        #Check the time vector
        times = fid.variables['time'][:]

        times_actual = []
        for i in range(time_step_count):
            times_actual.append(time_step * i)

        assert num.allclose(ensure_numeric(times),
                            ensure_numeric(times_actual))

        #Check first value
        stage = fid.variables['stage'][:]
        xmomentum = fid.variables['xmomentum'][:]
        ymomentum = fid.variables['ymomentum'][:]
        elevation = fid.variables['elevation'][:]
        assert num.allclose(stage[0,0], e +tide)  #Meters

        #Check the momentums - ua
        #momentum = velocity*(stage-elevation)
        # elevation = - depth
        #momentum = velocity_ua *(stage+depth)
        # = n*(e+tide+n) based on how I'm writing these files
        #
        answer_x = n*(e+tide+n)
        actual_x = xmomentum[0,0]
        #print "answer_x",answer_x
        #print "actual_x",actual_x
        assert num.allclose(answer_x, actual_x)  #Meters

        #Check the momentums - va
        #momentum = velocity*(stage-elevation)
        # elevation = - depth
        #momentum = velocity_va *(stage+depth)
        # = e*(e+tide+n) based on how I'm writing these files
        #
        answer_y = -1*e*(e+tide+n)
        actual_y = ymomentum[0,0]
        #print "answer_y",answer_y
        #print "actual_y",actual_y
        assert num.allclose(answer_y, actual_y)  #Meters

        # check the stage values, first time step.
        # These arrays are equal since the Easting values were used as
        # the stage
        assert num.allclose(stage[0], x +tide)  #Meters
        # check the elevation values.
        # -ve since urs measures depth, sww measures height,
        # these arrays are equal since the northing values were used as
        # the elevation
        assert num.allclose(-elevation, y)  #Meters

        fid.close()
        self.delete_mux(files)
        os.remove(sww_file)
    def test_urs_ungridded2swwIII (self):
        """Same scenario as test_urs_ungridded2swwII, but passing
        ``origin`` as a plain (zone, easting, northing) tuple instead of
        a Geo_reference object; the sww contents must be identical.
        """
        #Zone:   50
        #Easting:  240992.578  Northing: 7620442.472
        #Latitude:   -21  30 ' 0.00000 ''  Longitude: 114  30 ' 0.00000 ''
        lat_long = [[-21.5,114.5],[-21,114.5],[-21,115]]
        time_step_count = 2
        time_step = 400
        tide = 9000000
        base_name, files = self.write_mux(lat_long,
                                          time_step_count, time_step)
        urs_ungridded2sww(base_name, mean_stage=tide, origin =(50,23432,4343),
                          verbose=self.verbose)

        # now I want to check the sww file ...
        sww_file = base_name + '.sww'

        #Let's interrogate the sww file
        # Note, the sww info is not gridded.  It is point data.
        fid = NetCDFFile(sww_file)

        # Make x and y absolute (stored relative to the sww georeference)
        x = fid.variables['x'][:]
        y = fid.variables['y'][:]
        geo_reference = Geo_reference(NetCDFObject=fid)
        points = geo_reference.get_absolute(map(None, x, y))
        points = ensure_numeric(points)
        x = points[:,0]
        y = points[:,1]

        #Check that first coordinate is correctly represented
        #Work out the UTM coordinates for first point
        zone, e, n = redfearn(lat_long[0][0], lat_long[0][1])
        assert num.allclose([x[0],y[0]], [e,n])

        #Check the time vector
        times = fid.variables['time'][:]

        times_actual = []
        for i in range(time_step_count):
            times_actual.append(time_step * i)

        assert num.allclose(ensure_numeric(times),
                            ensure_numeric(times_actual))

        #Check first value
        stage = fid.variables['stage'][:]
        xmomentum = fid.variables['xmomentum'][:]
        ymomentum = fid.variables['ymomentum'][:]
        elevation = fid.variables['elevation'][:]
        assert num.allclose(stage[0,0], e +tide)  #Meters

        #Check the momentums - ua
        #momentum = velocity*(stage-elevation)
        # elevation = - depth
        #momentum = velocity_ua *(stage+depth)
        # = n*(e+tide+n) based on how I'm writing these files
        #
        answer_x = n*(e+tide+n)
        actual_x = xmomentum[0,0]
        #print "answer_x",answer_x
        #print "actual_x",actual_x
        assert num.allclose(answer_x, actual_x)  #Meters

        #Check the momentums - va
        #momentum = velocity*(stage-elevation)
        # elevation = - depth
        #momentum = velocity_va *(stage+depth)
        # = e*(e+tide+n) based on how I'm writing these files
        #
        answer_y = -1*e*(e+tide+n)
        actual_y = ymomentum[0,0]
        #print "answer_y",answer_y
        #print "actual_y",actual_y
        assert num.allclose(answer_y, actual_y)  #Meters

        # check the stage values, first time step.
        # These arrays are equal since the Easting values were used as
        # the stage
        assert num.allclose(stage[0], x +tide)  #Meters
        # check the elevation values.
        # -ve since urs measures depth, sww measures height,
        # these arrays are equal since the northing values were used as
        # the elevation
        assert num.allclose(-elevation, y)  #Meters

        fid.close()
        self.delete_mux(files)
        os.remove(sww_file)
    def test_urs_ungridded_hole (self):
        """Convert ungridded URS data with a hole point supplied in UTM.

        16 points are laid out around (-21, 115); the hole point is the
        UTM coordinate of that centre, so triangles covering it are
        removed.  The test only asserts the resulting triangle count
        (16 volumes).
        """
        #Zone:   50
        #Easting:  240992.578  Northing: 7620442.472
        #Latitude:   -21  30 ' 0.00000 ''  Longitude: 114  30 ' 0.00000 ''
        lat_long = [[-20.5, 114.5],
                    [-20.6, 114.6],
                    [-20.5, 115.],
                    [-20.6, 115.],
                    [-20.5, 115.5],
                    [-20.6, 115.4],

                    [-21., 114.5],
                    [-21., 114.6],
                    [-21., 115.5],
                    [-21., 115.4],

                    [-21.5, 114.5],
                    [-21.4, 114.6],
                    [-21.5, 115.],
                    [-21.4, 115.],
                    [-21.5, 115.5],
                    [-21.4, 115.4]
                    ]
        time_step_count = 6
        time_step = 100
        tide = 9000000
        base_name, files = self.write_mux(lat_long,
                                          time_step_count, time_step)
        #Easting:  292110.784  Northing: 7676551.710
        #Latitude:   -21  0 ' 0.00000 ''  Longitude: 115  0 ' 0.00000 ''

        # hole_points_UTM marks the interior point whose surrounding
        # triangles are to be excluded from the triangulation.
        urs_ungridded2sww(base_name, mean_stage=-240992.0,
                          hole_points_UTM=[ 292110.784, 7676551.710 ],
                          verbose=self.verbose)

        # now I want to check the sww file ...
        sww_file = base_name + '.sww'

        #Let's interrogate the sww file
        # Note, the sww info is not gridded.  It is point data.
        fid = NetCDFFile(sww_file)

        number_of_volumes = fid.variables['volumes']
        #print "number_of_volumes",len(number_of_volumes)
        assert num.allclose(16, len(number_of_volumes))

        fid.close()
        self.delete_mux(files)
        #print "sww_file", sww_file
        os.remove(sww_file)
4949 | def test_urs_ungridded_holeII(self): |
---|
4950 | |
---|
4951 | # Check that if using a hole that returns no triangles, |
---|
4952 | # urs_ungridded2sww removes the hole label. |
---|
4953 | |
---|
4954 | lat_long = [[-20.5, 114.5], |
---|
4955 | [-20.6, 114.6], |
---|
4956 | [-20.5, 115.5], |
---|
4957 | [-20.6, 115.4], |
---|
4958 | |
---|
4959 | |
---|
4960 | [-21.5, 114.5], |
---|
4961 | [-21.4, 114.6], |
---|
4962 | [-21.5, 115.5], |
---|
4963 | [-21.4, 115.4] |
---|
4964 | ] |
---|
4965 | time_step_count = 6 |
---|
4966 | time_step = 100 |
---|
4967 | tide = 9000000 |
---|
4968 | base_name, files = self.write_mux(lat_long, |
---|
4969 | time_step_count, time_step) |
---|
4970 | #Easting: 292110.784 Northing: 7676551.710 |
---|
4971 | #Latitude: -21 0 ' 0.00000 '' Longitude: 115 0 ' 0.00000 '' |
---|
4972 | |
---|
4973 | urs_ungridded2sww(base_name, mean_stage=-240992.0, |
---|
4974 | hole_points_UTM=[ 292110.784, 7676551.710 ], |
---|
4975 | verbose=self.verbose) |
---|
4976 | |
---|
4977 | # now I want to check the sww file ... |
---|
4978 | sww_file = base_name + '.sww' |
---|
4979 | fid = NetCDFFile(sww_file) |
---|
4980 | |
---|
4981 | volumes = fid.variables['volumes'] |
---|
4982 | #print "number_of_volumes",len(volumes) |
---|
4983 | |
---|
4984 | fid.close() |
---|
4985 | os.remove(sww_file) |
---|
4986 | |
---|
4987 | urs_ungridded2sww(base_name, mean_stage=-240992.0) |
---|
4988 | |
---|
4989 | # now I want to check the sww file ... |
---|
4990 | sww_file = base_name + '.sww' |
---|
4991 | fid = NetCDFFile(sww_file) |
---|
4992 | |
---|
4993 | volumes_again = fid.variables['volumes'] |
---|
4994 | #print "number_of_volumes",len(volumes_again) |
---|
4995 | assert num.allclose(len(volumes_again), |
---|
4996 | len(volumes)) |
---|
4997 | fid.close() |
---|
4998 | os.remove(sww_file) |
---|
4999 | self.delete_mux(files) |
---|
5000 | |
---|
    def test_urs_ungridded2sww_mint_maxt (self):
        """Convert with a time window mint=101, maxt=500 applied to six
        mux steps at 0,100,...,500.

        The expected time vector [0,100,200,300] shows that four steps
        survive and are apparently re-based to start at 0 -- confirm
        against urs_ungridded2sww if this changes.  Spatial and quantity
        checks mirror test_urs_ungridded2swwII.
        """
        #Zone:   50
        #Easting:  240992.578  Northing: 7620442.472
        #Latitude:   -21  30 ' 0.00000 ''  Longitude: 114  30 ' 0.00000 ''
        lat_long = [[-21.5,114.5],[-21,114.5],[-21,115]]
        time_step_count = 6
        time_step = 100
        tide = 9000000
        base_name, files = self.write_mux(lat_long,
                                          time_step_count, time_step)
        urs_ungridded2sww(base_name, mean_stage=tide, origin =(50,23432,4343),
                          mint=101, maxt=500,
                          verbose=self.verbose)

        # now I want to check the sww file ...
        sww_file = base_name + '.sww'

        #Let's interrogate the sww file
        # Note, the sww info is not gridded.  It is point data.
        fid = NetCDFFile(sww_file)

        # Make x and y absolute (stored relative to the sww georeference)
        x = fid.variables['x'][:]
        y = fid.variables['y'][:]
        geo_reference = Geo_reference(NetCDFObject=fid)
        points = geo_reference.get_absolute(map(None, x, y))
        points = ensure_numeric(points)
        x = points[:,0]
        y = points[:,1]

        #Check that first coordinate is correctly represented
        #Work out the UTM coordinates for first point
        zone, e, n = redfearn(lat_long[0][0], lat_long[0][1])
        assert num.allclose([x[0],y[0]], [e,n])

        #Check the time vector
        times = fid.variables['time'][:]

        times_actual = [0,100,200,300]

        assert num.allclose(ensure_numeric(times),
                            ensure_numeric(times_actual))

        #Check first value
        stage = fid.variables['stage'][:]
        xmomentum = fid.variables['xmomentum'][:]
        ymomentum = fid.variables['ymomentum'][:]
        elevation = fid.variables['elevation'][:]
        assert num.allclose(stage[0,0], e +tide)  #Meters

        #Check the momentums - ua
        #momentum = velocity*(stage-elevation)
        # elevation = - depth
        #momentum = velocity_ua *(stage+depth)
        # = n*(e+tide+n) based on how I'm writing these files
        #
        answer_x = n*(e+tide+n)
        actual_x = xmomentum[0,0]
        #print "answer_x",answer_x
        #print "actual_x",actual_x
        assert num.allclose(answer_x, actual_x)  #Meters

        #Check the momentums - va
        #momentum = velocity*(stage-elevation)
        # elevation = - depth
        #momentum = velocity_va *(stage+depth)
        # = e*(e+tide+n) based on how I'm writing these files
        #
        answer_y = -1*e*(e+tide+n)
        actual_y = ymomentum[0,0]
        #print "answer_y",answer_y
        #print "actual_y",actual_y
        assert num.allclose(answer_y, actual_y)  #Meters

        # check the stage values, first time step.
        # These arrays are equal since the Easting values were used as
        # the stage
        assert num.allclose(stage[0], x +tide)  #Meters
        # check the elevation values.
        # -ve since urs measures depth, sww measures height,
        # these arrays are equal since the northing values were used as
        # the elevation
        assert num.allclose(-elevation, y)  #Meters

        fid.close()
        self.delete_mux(files)
        os.remove(sww_file)
    def test_urs_ungridded2sww_mint_maxtII (self):
        """A time window (mint=0, maxt=100000) that covers every mux
        step must keep all six times 0..500 and leave stage values
        (easting + tide) intact at the first time step.
        """
        #Zone:   50
        #Easting:  240992.578  Northing: 7620442.472
        #Latitude:   -21  30 ' 0.00000 ''  Longitude: 114  30 ' 0.00000 ''
        lat_long = [[-21.5,114.5],[-21,114.5],[-21,115]]
        time_step_count = 6
        time_step = 100
        tide = 9000000
        base_name, files = self.write_mux(lat_long,
                                          time_step_count, time_step)
        urs_ungridded2sww(base_name, mean_stage=tide, origin =(50,23432,4343),
                          mint=0, maxt=100000,
                          verbose=self.verbose)

        # now I want to check the sww file ...
        sww_file = base_name + '.sww'

        #Let's interrogate the sww file
        # Note, the sww info is not gridded.  It is point data.
        fid = NetCDFFile(sww_file)

        # Make x and y absolute (stored relative to the sww georeference)
        geo_reference = Geo_reference(NetCDFObject=fid)
        points = geo_reference.get_absolute(map(None, fid.variables['x'][:],
                                                fid.variables['y'][:]))
        points = ensure_numeric(points)
        x = points[:,0]

        #Check the time vector
        times = fid.variables['time'][:]

        times_actual = [0,100,200,300,400,500]
        assert num.allclose(ensure_numeric(times),
                            ensure_numeric(times_actual))

        #Check first value
        stage = fid.variables['stage'][:]
        assert num.allclose(stage[0], x +tide)

        fid.close()
        self.delete_mux(files)
        os.remove(sww_file)
5134 | def test_urs_ungridded2sww_mint_maxtIII (self): |
---|
5135 | |
---|
5136 | #Zone: 50 |
---|
5137 | #Easting: 240992.578 Northing: 7620442.472 |
---|
5138 | #Latitude: -21 30 ' 0.00000 '' Longitude: 114 30 ' 0.00000 '' |
---|
5139 | lat_long = [[-21.5,114.5],[-21,114.5],[-21,115]] |
---|
5140 | time_step_count = 6 |
---|
5141 | time_step = 100 |
---|
5142 | tide = 9000000 |
---|
5143 | base_name, files = self.write_mux(lat_long, |
---|
5144 | time_step_count, time_step) |
---|
5145 | try: |
---|
5146 | urs_ungridded2sww(base_name, mean_stage=tide, |
---|
5147 | origin =(50,23432,4343), |
---|
5148 | mint=301, maxt=399, |
---|
5149 | verbose=self.verbose) |
---|
5150 | except: |
---|
5151 | pass |
---|
5152 | else: |
---|
5153 | self.failUnless(0 ==1, 'Bad input did not throw exception error!') |
---|
5154 | |
---|
5155 | self.delete_mux(files) |
---|
5156 | |
---|
5157 | def test_urs_ungridded2sww_mint_maxt_bad (self): |
---|
5158 | #Zone: 50 |
---|
5159 | #Easting: 240992.578 Northing: 7620442.472 |
---|
5160 | #Latitude: -21 30 ' 0.00000 '' Longitude: 114 30 ' 0.00000 '' |
---|
5161 | lat_long = [[-21.5,114.5],[-21,114.5],[-21,115]] |
---|
5162 | time_step_count = 6 |
---|
5163 | time_step = 100 |
---|
5164 | tide = 9000000 |
---|
5165 | base_name, files = self.write_mux(lat_long, |
---|
5166 | time_step_count, time_step) |
---|
5167 | try: |
---|
5168 | urs_ungridded2sww(base_name, mean_stage=tide, |
---|
5169 | origin =(50,23432,4343), |
---|
5170 | mint=301, maxt=301, |
---|
5171 | verbose=self.verbose) |
---|
5172 | except: |
---|
5173 | pass |
---|
5174 | else: |
---|
5175 | self.failUnless(0 ==1, 'Bad input did not throw exception error!') |
---|
5176 | |
---|
5177 | self.delete_mux(files) |
---|
5178 | |
---|
5179 | |
---|
    def test_URS_points_needed_and_urs_ungridded2sww(self):
        """Smoke test: chain URS_points_needed -> write_mux ->
        urs_ungridded2sww over a rectangular boundary polygon.

        This doesn't actually check anything -- it only verifies the
        pipeline runs without raising, then cleans up its files.
        """
        ll_lat = -21.5
        ll_long = 114.5
        grid_spacing = 1./60.
        lat_amount = 30
        long_amount = 30
        time_step_count = 2
        time_step = 400
        tide = -200000
        zone = 50

        boundary_polygon = [[250000,7660000],[280000,7660000],
                            [280000,7630000],[250000,7630000]]
        geo=URS_points_needed(boundary_polygon, zone,
                              ll_lat, ll_long, grid_spacing,
                              lat_amount, long_amount,
                              verbose=self.verbose)
        lat_long = geo.get_data_points(as_lat_long=True)
        base_name, files = self.write_mux(lat_long,
                                          time_step_count, time_step)
        urs_ungridded2sww(base_name, mean_stage=tide,
                          verbose=self.verbose)
        self.delete_mux(files)
        os.remove( base_name + '.sww')
    def cache_test_URS_points_needed_and_urs_ungridded2sww(self):
        """Manual exercise of URS_points_needed with use_cache=True.

        NOTE: the name does not start with 'test_', so the unittest
        runner skips it by default.  No assertions are made and nothing
        is cleaned up.
        """
        ll_lat = -21.5
        ll_long = 114.5
        grid_spacing = 1./60.
        lat_amount = 30
        long_amount = 30
        time_step_count = 2
        time_step = 400
        tide = -200000
        zone = 50

        boundary_polygon = [[250000,7660000],[270000,7650000],
                            [280000,7630000],[250000,7630000]]
        geo=URS_points_needed(boundary_polygon, zone,
                              ll_lat, ll_long, grid_spacing,
                              lat_amount, long_amount, use_cache=True,
                              verbose=True)
    def visual_test_URS_points_needed_and_urs_ungridded2sww(self):
        """Manual/visual exercise of the URS_points_needed ->
        urs_ungridded2sww pipeline over a non-rectangular polygon.

        NOTE: the name does not start with 'test_', so the unittest
        runner skips it by default.  No assertions are made.
        """
        ll_lat = -21.5
        ll_long = 114.5
        grid_spacing = 1./60.
        lat_amount = 30
        long_amount = 30
        time_step_count = 2
        time_step = 400
        tide = -200000
        zone = 50

        boundary_polygon = [[250000,7660000],[270000,7650000],
                            [280000,7630000],[250000,7630000]]
        geo=URS_points_needed(boundary_polygon, zone,
                              ll_lat, ll_long, grid_spacing,
                              lat_amount, long_amount)
        lat_long = geo.get_data_points(as_lat_long=True)
        base_name, files = self.write_mux(lat_long,
                                          time_step_count, time_step)
        urs_ungridded2sww(base_name, mean_stage=tide)
        self.delete_mux(files)
        os.remove( base_name + '.sww')
        # TODO: extend this so it interpolates onto the boundary.
        # have it fail if there is NaN
    def test_triangulation(self):
        """Write a minimal sww (header + triangulation with elevation)
        via Write_sww using an explicit new_origin of (56, 0, 0), then
        read x/y back and check they equal the original UTM points.
        """
        # NOTE(review): tempfile.mktemp is race-prone; kept for
        # consistency with the rest of this suite.
        filename = tempfile.mktemp("_data_manager.sww")
        outfile = NetCDFFile(filename, netcdf_mode_w)
        points_utm = num.array([[0.,0.],[1.,1.], [0.,1.]])
        volumes = (0,1,2)
        elevation = [0,1,2]
        new_origin = None
        new_origin = Geo_reference(56, 0, 0)
        times = [0, 10]
        number_of_volumes = len(volumes)
        number_of_points = len(points_utm)
        sww = Write_sww(['elevation'], ['stage', 'xmomentum', 'ymomentum'])
        sww.store_header(outfile, times, number_of_volumes,
                         number_of_points, description='fully sick testing',
                         verbose=self.verbose,sww_precision=netcdf_float)
        sww.store_triangulation(outfile, points_utm, volumes,
                                elevation, new_origin=new_origin,
                                verbose=self.verbose)
        outfile.close()
        fid = NetCDFFile(filename)

        x = fid.variables['x'][:]
        y = fid.variables['y'][:]
        fid.close()

        # With origin (56, 0, 0) the stored (x, y) pairs should equal
        # the absolute UTM points unchanged.
        assert num.allclose(num.array(map(None, x,y)), points_utm)
        os.remove(filename)
    def test_triangulationII(self):
        """Store a triangulation with new_origin=None plus static
        elevation via store_static_quantities; the sww should then carry
        the default georeference Geo_reference(DEFAULT_ZONE, 0, 0) and
        unchanged point coordinates.
        """
        filename = tempfile.mktemp("_data_manager.sww")
        outfile = NetCDFFile(filename, netcdf_mode_w)
        points_utm = num.array([[0.,0.],[1.,1.], [0.,1.]])
        volumes = (0,1,2)
        elevation = [0,1,2]
        new_origin = None
        #new_origin = Geo_reference(56, 0, 0)
        times = [0, 10]
        number_of_volumes = len(volumes)
        number_of_points = len(points_utm)
        sww = Write_sww(['elevation'], ['stage', 'xmomentum', 'ymomentum'])
        sww.store_header(outfile, times, number_of_volumes,
                         number_of_points, description='fully sick testing',
                         verbose=self.verbose,sww_precision=netcdf_float)
        # Elevation is stored separately here, via store_static_quantities,
        # rather than through store_triangulation.
        sww.store_triangulation(outfile, points_utm, volumes,
                                new_origin=new_origin,
                                verbose=self.verbose)
        sww.store_static_quantities(outfile, elevation=elevation)

        outfile.close()
        fid = NetCDFFile(filename)

        x = fid.variables['x'][:]
        y = fid.variables['y'][:]
        results_georef = Geo_reference()
        results_georef.read_NetCDF(fid)
        assert results_georef == Geo_reference(DEFAULT_ZONE, 0, 0)
        fid.close()

        assert num.allclose(num.array(map(None, x,y)), points_utm)
        os.remove(filename)
    def test_triangulation_new_origin(self):
        """Store a triangulation whose points are already relative to a
        non-trivial new_origin; the sww must record that georeference,
        and converting the stored x/y back to coordinates relative to
        new_origin must recover points_utm.
        """
        filename = tempfile.mktemp("_data_manager.sww")
        outfile = NetCDFFile(filename, netcdf_mode_w)
        points_utm = num.array([[0.,0.],[1.,1.], [0.,1.]])
        volumes = (0,1,2)
        elevation = [0,1,2]
        new_origin = None
        new_origin = Geo_reference(56, 1, 554354)
        # Shift the points so they are expressed relative to new_origin.
        points_utm = new_origin.change_points_geo_ref(points_utm)
        times = [0, 10]
        number_of_volumes = len(volumes)
        number_of_points = len(points_utm)
        sww = Write_sww(['elevation'], ['stage', 'xmomentum', 'ymomentum'])
        sww.store_header(outfile, times, number_of_volumes,
                         number_of_points, description='fully sick testing',
                         verbose=self.verbose,sww_precision=netcdf_float)
        sww.store_triangulation(outfile, points_utm, volumes,
                                elevation, new_origin=new_origin,
                                verbose=self.verbose)
        outfile.close()
        fid = NetCDFFile(filename)

        x = fid.variables['x'][:]
        y = fid.variables['y'][:]
        results_georef = Geo_reference()
        results_georef.read_NetCDF(fid)
        assert results_georef == new_origin
        fid.close()

        # Re-reference the stored points from absolute (56, 0, 0) to
        # new_origin; this must reproduce the input points.
        absolute = Geo_reference(56, 0,0)
        assert num.allclose(num.array(
            absolute.change_points_geo_ref(map(None, x,y),
                                           new_origin)),points_utm)

        os.remove(filename)
    def test_triangulation_points_georeference(self):
        """With new_origin=None but a points_georeference supplied,
        store_triangulation should adopt the points' georeference and
        store the coordinates unchanged.
        """
        filename = tempfile.mktemp("_data_manager.sww")
        outfile = NetCDFFile(filename, netcdf_mode_w)
        points_utm = num.array([[0.,0.],[1.,1.], [0.,1.]])
        volumes = (0,1,2)
        elevation = [0,1,2]
        new_origin = None
        points_georeference = Geo_reference(56, 1, 554354)
        # Express the points relative to points_georeference.
        points_utm = points_georeference.change_points_geo_ref(points_utm)
        times = [0, 10]
        number_of_volumes = len(volumes)
        number_of_points = len(points_utm)
        sww = Write_sww(['elevation'], ['stage', 'xmomentum', 'ymomentum'])
        sww.store_header(outfile, times, number_of_volumes,
                         number_of_points, description='fully sick testing',
                         verbose=self.verbose,sww_precision=netcdf_float)
        sww.store_triangulation(outfile, points_utm, volumes,
                                elevation, new_origin=new_origin,
                                points_georeference=points_georeference,
                                verbose=self.verbose)
        outfile.close()
        fid = NetCDFFile(filename)

        x = fid.variables['x'][:]
        y = fid.variables['y'][:]
        results_georef = Geo_reference()
        results_georef.read_NetCDF(fid)
        assert results_georef == points_georeference
        fid.close()

        # Coordinates should be stored exactly as given (no re-referencing
        # happened, since new_origin was None).
        assert num.allclose(num.array(map(None, x,y)), points_utm)
        os.remove(filename)
    def test_triangulation_2_geo_refs(self):
        """With BOTH new_origin and points_georeference supplied,
        new_origin must win: the sww records new_origin, and converting
        the stored x/y back relative to new_origin recovers the points
        as expressed in points_georeference.
        """
        filename = tempfile.mktemp("_data_manager.sww")
        outfile = NetCDFFile(filename, netcdf_mode_w)
        points_utm = num.array([[0.,0.],[1.,1.], [0.,1.]])
        volumes = (0,1,2)
        elevation = [0,1,2]
        new_origin = Geo_reference(56, 1, 1)
        points_georeference = Geo_reference(56, 0, 0)
        # With origin (56, 0, 0) this leaves the points unchanged.
        points_utm = points_georeference.change_points_geo_ref(points_utm)
        times = [0, 10]
        number_of_volumes = len(volumes)
        number_of_points = len(points_utm)
        sww = Write_sww(['elevation'], ['stage', 'xmomentum', 'ymomentum'])
        sww.store_header(outfile, times, number_of_volumes,
                         number_of_points, description='fully sick testing',
                         verbose=self.verbose,sww_precision=netcdf_float)
        sww.store_triangulation(outfile, points_utm, volumes,
                                elevation, new_origin=new_origin,
                                points_georeference=points_georeference,
                                verbose=self.verbose)
        outfile.close()
        fid = NetCDFFile(filename)

        x = fid.variables['x'][:]
        y = fid.variables['y'][:]
        results_georef = Geo_reference()
        results_georef.read_NetCDF(fid)
        assert results_georef == new_origin
        fid.close()


        # Undo the new_origin referencing; this must recover the inputs.
        absolute = Geo_reference(56, 0,0)
        assert num.allclose(num.array(
            absolute.change_points_geo_ref(map(None, x,y),
                                           new_origin)),points_utm)
        os.remove(filename)
5436 | def test_get_data_from_file(self): |
---|
5437 | # from anuga.abstract_2d_finite_volumes.util import get_data_from_file |
---|
5438 | |
---|
5439 | import os |
---|
5440 | |
---|
5441 | fileName = tempfile.mktemp(".txt") |
---|
5442 | # print"filename",fileName |
---|
5443 | file = open(fileName,"w") |
---|
5444 | file.write("elevation, stage\n\ |
---|
5445 | 1.0, 3 \n\ |
---|
5446 | 0.0, 4 \n\ |
---|
5447 | 4.0, 3 \n\ |
---|
5448 | 1.0, 6 \n") |
---|
5449 | file.close() |
---|
5450 | |
---|
5451 | header,x = get_data_from_file(fileName) |
---|
5452 | # print 'x',x |
---|
5453 | os.remove(fileName) |
---|
5454 | |
---|
5455 | assert num.allclose(x[:,0], [1.0, 0.0,4.0, 1.0]) |
---|
5456 | |
---|
5457 | def test_get_data_from_file1(self): |
---|
5458 | # from anuga.abstract_2d_finite_volumes.util import get_data_from_file |
---|
5459 | |
---|
5460 | import os |
---|
5461 | |
---|
5462 | fileName = tempfile.mktemp(".txt") |
---|
5463 | # print"filename",fileName |
---|
5464 | file = open(fileName,"w") |
---|
5465 | file.write("elevation stage\n\ |
---|
5466 | 1.3 3 \n\ |
---|
5467 | 0.0 4 \n\ |
---|
5468 | 4.5 3.5 \n\ |
---|
5469 | 1.0 6 \n") |
---|
5470 | file.close() |
---|
5471 | |
---|
5472 | header, x = get_data_from_file(fileName,separator_value=' ') |
---|
5473 | os.remove(fileName) |
---|
5474 | # x = get_data_from_file(fileName) |
---|
5475 | # print '1x',x[:,0] |
---|
5476 | |
---|
5477 | assert num.allclose(x[:,0], [1.3, 0.0,4.5, 1.0]) |
---|
5478 | |
---|
5479 | def test_store_parameters(self): |
---|
5480 | """tests store temporary file |
---|
5481 | """ |
---|
5482 | |
---|
5483 | from os import sep, getenv |
---|
5484 | |
---|
5485 | output_dir='' |
---|
5486 | file_name='details.csv' |
---|
5487 | |
---|
5488 | kwargs = {'file_name':'new2.txt', |
---|
5489 | 'output_dir':output_dir, |
---|
5490 | 'file_name':file_name, |
---|
5491 | 'who':'me', |
---|
5492 | 'what':'detail', |
---|
5493 | 'how':2, |
---|
5494 | 'why':241, |
---|
5495 | # 'completed':345 |
---|
5496 | } |
---|
5497 | store_parameters(verbose=False,**kwargs) |
---|
5498 | |
---|
5499 | temp='detail_temp.csv' |
---|
5500 | fid = open(temp) |
---|
5501 | file_header = fid.readline() |
---|
5502 | file_line = fid.readline() |
---|
5503 | fid.close() |
---|
5504 | |
---|
5505 | |
---|
5506 | keys = kwargs.keys() |
---|
5507 | keys.sort() |
---|
5508 | line='' |
---|
5509 | header='' |
---|
5510 | count=0 |
---|
5511 | #used the sorted keys to create the header and line data |
---|
5512 | for k in keys: |
---|
5513 | # print "%s = %s" %(k, kwargs[k]) |
---|
5514 | header = header+str(k) |
---|
5515 | line = line+str(kwargs[k]) |
---|
5516 | count+=1 |
---|
5517 | if count <len(kwargs): |
---|
5518 | header = header+',' |
---|
5519 | line = line+',' |
---|
5520 | header+='\n' |
---|
5521 | line+='\n' |
---|
5522 | |
---|
5523 | |
---|
5524 | #file exists |
---|
5525 | assert access(temp,F_OK) |
---|
5526 | assert header == file_header |
---|
5527 | assert line == file_line |
---|
5528 | |
---|
5529 | os.remove(temp) |
---|
5530 | |
---|
5531 | def test_store_parameters1(self): |
---|
5532 | """tests store in temporary file and other file |
---|
5533 | """ |
---|
5534 | |
---|
5535 | from os import sep, getenv |
---|
5536 | |
---|
5537 | output_dir='' |
---|
5538 | file_name='details.csv' |
---|
5539 | |
---|
5540 | kwargs = {'file_name':'new2.txt', |
---|
5541 | 'output_dir':output_dir, |
---|
5542 | 'file_name':file_name, |
---|
5543 | 'who':'me', |
---|
5544 | 'what':'detail', |
---|
5545 | 'how':2, |
---|
5546 | 'why':241, |
---|
5547 | # 'completed':345 |
---|
5548 | } |
---|
5549 | store_parameters(verbose=False,**kwargs) |
---|
5550 | |
---|
5551 | kwargs['how']=55 |
---|
5552 | kwargs['completed']=345 |
---|
5553 | |
---|
5554 | keys = kwargs.keys() |
---|
5555 | keys.sort() |
---|
5556 | line='' |
---|
5557 | header='' |
---|
5558 | count=0 |
---|
5559 | #used the sorted keys to create the header and line data |
---|
5560 | for k in keys: |
---|
5561 | # print "%s = %s" %(k, kwargs[k]) |
---|
5562 | header = header+str(k) |
---|
5563 | line = line+str(kwargs[k]) |
---|
5564 | count+=1 |
---|
5565 | if count <len(kwargs): |
---|
5566 | header = header+',' |
---|
5567 | line = line+',' |
---|
5568 | header+='\n' |
---|
5569 | line+='\n' |
---|
5570 | |
---|
5571 | kwargs['how']=55 |
---|
5572 | kwargs['completed']=345 |
---|
5573 | |
---|
5574 | store_parameters(verbose=False,**kwargs) |
---|
5575 | |
---|
5576 | # temp='detail_temp.csv' |
---|
5577 | fid = open(file_name) |
---|
5578 | file_header = fid.readline() |
---|
5579 | file_line1 = fid.readline() |
---|
5580 | file_line2 = fid.readline() |
---|
5581 | fid.close() |
---|
5582 | |
---|
5583 | |
---|
5584 | #file exists |
---|
5585 | # print 'header',header,'line',line |
---|
5586 | # print 'file_header',file_header,'file_line1',file_line1,'file_line2',file_line2 |
---|
5587 | assert access(file_name,F_OK) |
---|
5588 | assert header == file_header |
---|
5589 | assert line == file_line1 |
---|
5590 | |
---|
5591 | temp='detail_temp.csv' |
---|
5592 | os.remove(temp) |
---|
5593 | os.remove(file_name) |
---|
5594 | |
---|
5595 | def test_store_parameters2(self): |
---|
5596 | """tests appending the data to the end of an existing file |
---|
5597 | """ |
---|
5598 | |
---|
5599 | from os import sep, getenv |
---|
5600 | |
---|
5601 | output_dir='' |
---|
5602 | file_name='details.csv' |
---|
5603 | |
---|
5604 | kwargs = {'file_name':'new2.txt', |
---|
5605 | 'output_dir':output_dir, |
---|
5606 | 'file_name':file_name, |
---|
5607 | 'who':'me', |
---|
5608 | 'what':'detail', |
---|
5609 | 'how':2, |
---|
5610 | 'why':241, |
---|
5611 | 'completed':345 |
---|
5612 | } |
---|
5613 | store_parameters(verbose=False,**kwargs) |
---|
5614 | |
---|
5615 | kwargs['how']=55 |
---|
5616 | kwargs['completed']=23.54532 |
---|
5617 | |
---|
5618 | store_parameters(verbose=False,**kwargs) |
---|
5619 | |
---|
5620 | keys = kwargs.keys() |
---|
5621 | keys.sort() |
---|
5622 | line='' |
---|
5623 | header='' |
---|
5624 | count=0 |
---|
5625 | #used the sorted keys to create the header and line data |
---|
5626 | for k in keys: |
---|
5627 | # print "%s = %s" %(k, kwargs[k]) |
---|
5628 | header = header+str(k) |
---|
5629 | line = line+str(kwargs[k]) |
---|
5630 | count+=1 |
---|
5631 | if count <len(kwargs): |
---|
5632 | header = header+',' |
---|
5633 | line = line+',' |
---|
5634 | header+='\n' |
---|
5635 | line+='\n' |
---|
5636 | |
---|
5637 | fid = open(file_name) |
---|
5638 | file_header = fid.readline() |
---|
5639 | file_line1 = fid.readline() |
---|
5640 | file_line2 = fid.readline() |
---|
5641 | fid.close() |
---|
5642 | |
---|
5643 | assert access(file_name,F_OK) |
---|
5644 | assert header == file_header |
---|
5645 | assert line == file_line2 |
---|
5646 | |
---|
5647 | os.remove(file_name) |
---|
5648 | |
---|
5649 | |
---|
    def test_get_maximum_inundation(self):
        """Test that sww information can be converted correctly to maximum
        runup elevation and location (without and with georeferencing)

        This test creates a slope and a runup which is maximal (~11m) at around 10s
        and levels out to the boundary condition (1m) at about 30s.

        The same scenario is run twice: first in local coordinates, then
        with a UTM georeference, and the reported runup locations must be
        offset by the easting accordingly.
        """

        import time, os
        from Scientific.IO.NetCDF import NetCDFFile

        # Setup

        from mesh_factory import rectangular

        # Create basic mesh
        # NOTE(review): original comment said 100m x 100m, but the arguments
        # (20, 5, 100, 50) presumably give a 100m x 50m extent — TODO confirm
        # against mesh_factory.rectangular's signature.
        points, vertices, boundary = rectangular(20, 5, 100, 50)

        # Create shallow water domain
        domain = Domain(points, vertices, boundary)
        domain.default_order = 2
        domain.set_minimum_storable_height(0.01)

        domain.set_name('runuptest')
        swwfile = domain.get_name() + '.sww'

        domain.set_datadir('.')
        domain.format = 'sww'
        domain.smooth = True

        # FIXME (Ole): Backwards compatibility
        # Look at sww file and see what happens when
        # domain.tight_slope_limiters = 1
        domain.tight_slope_limiters = 0
        domain.use_centroid_velocities = 0    # Backwards compatibility (7/5/8)

        Br = Reflective_boundary(domain)
        Bd = Dirichlet_boundary([1.0, 0, 0])


        #---------- First run without geo referencing

        domain.set_quantity('elevation', lambda x,y: -0.2*x + 14)    # Slope
        domain.set_quantity('stage', -6)
        domain.set_boundary( {'left': Br, 'right': Bd, 'top': Br, 'bottom': Br})

        for t in domain.evolve(yieldstep=1, finaltime = 50):
            pass


        # Check maximal runup
        runup = get_maximum_inundation_elevation(swwfile)
        location = get_maximum_inundation_location(swwfile)
        #print 'Runup, location', runup, location
        assert num.allclose(runup, 11) or num.allclose(runup, 12)    # old limiters
        assert num.allclose(location[0], 15) or num.allclose(location[0], 10)

        # Check final runup (water has levelled out to the 1m boundary)
        runup = get_maximum_inundation_elevation(swwfile, time_interval=[45,50])
        location = get_maximum_inundation_location(swwfile, time_interval=[45,50])
        # print 'Runup, location:',runup, location
        assert num.allclose(runup, 1)
        assert num.allclose(location[0], 65)

        # Check runup restricted to a polygon (right half of the domain)
        p = [[50,1], [99,1], [99,49], [50,49]]
        runup = get_maximum_inundation_elevation(swwfile, polygon=p)
        location = get_maximum_inundation_location(swwfile, polygon=p)
        #print runup, location
        assert num.allclose(runup, 4)
        assert num.allclose(location[0], 50)

        # Check that mimimum_storable_height works
        fid = NetCDFFile(swwfile, netcdf_mode_r)    # Open existing file

        stage = fid.variables['stage'][:]
        z = fid.variables['elevation'][:]
        xmomentum = fid.variables['xmomentum'][:]
        ymomentum = fid.variables['ymomentum'][:]

        for i in range(stage.shape[0]):
            h = stage[i]-z    # depth vector at time step i

            # Check every node location
            for j in range(stage.shape[1]):
                # Depth being either exactly zero implies
                # momentum being zero.
                # Or else depth must be greater than or equal to
                # the minimal storable height
                if h[j] == 0.0:
                    assert xmomentum[i,j] == 0.0
                    assert ymomentum[i,j] == 0.0
                else:
                    assert h[j] >= domain.minimum_storable_height

        fid.close()

        # Cleanup
        os.remove(swwfile)



        #------------- Now the same with georeferencing

        domain.time=0.0    # Rewind the (already evolved) domain
        E = 308500
        N = 6189000
        #E = N = 0
        domain.geo_reference = Geo_reference(56, E, N)

        domain.set_quantity('elevation', lambda x,y: -0.2*x + 14)    # Slope
        domain.set_quantity('stage', -6)
        domain.set_boundary( {'left': Br, 'right': Bd, 'top': Br, 'bottom': Br})

        for t in domain.evolve(yieldstep=1, finaltime = 50):
            pass

        # Check maximal runup — locations now carry the easting offset E
        runup = get_maximum_inundation_elevation(swwfile)
        location = get_maximum_inundation_location(swwfile)
        assert num.allclose(runup, 11) or num.allclose(runup, 12)    # old limiters
        assert num.allclose(location[0], 15+E) or num.allclose(location[0], 10+E)

        # Check final runup
        runup = get_maximum_inundation_elevation(swwfile, time_interval=[45,50])
        location = get_maximum_inundation_location(swwfile, time_interval=[45,50])
        assert num.allclose(runup, 1)
        assert num.allclose(location[0], 65+E)

        # Check runup restricted to a polygon, given in absolute coordinates
        p = num.array([[50,1], [99,1], [99,49], [50,49]], num.int) + num.array([E, N], num.int)    #array default#

        runup = get_maximum_inundation_elevation(swwfile, polygon=p)
        location = get_maximum_inundation_location(swwfile, polygon=p)
        assert num.allclose(runup, 4)
        assert num.allclose(location[0], 50+E)


        # Cleanup
        os.remove(swwfile)
    def test_get_mesh_and_quantities_from_sww_file(self):
        """Test get_mesh_and_quantities_from_file.

        Runs a short simulation with a non-trivial georeference, then checks
        that the mesh (triangles and nodes), the time vector and all stored
        quantities can be recovered from the resulting sww file.
        """

        # Generate a test sww file with non trivial georeference

        import time, os
        from Scientific.IO.NetCDF import NetCDFFile

        # Setup
        from mesh_factory import rectangular

        # Create basic mesh
        # NOTE(review): original comment said 100m x 5m; with length=50 the
        # extent is presumably 50m x 5m — TODO confirm against
        # mesh_factory.rectangular's signature.
        width = 5
        length = 50
        t_end = 10
        points, vertices, boundary = rectangular(length, width, 50, 5)

        # Create shallow water domain
        domain = Domain(points, vertices, boundary,
                        geo_reference = Geo_reference(56,308500,6189000))

        domain.set_name('flowtest')
        swwfile = domain.get_name() + '.sww'
        domain.set_datadir('.')

        Br = Reflective_boundary(domain)      # Side walls
        Bd = Dirichlet_boundary([1, 0, 0])    # inflow

        domain.set_boundary( {'left': Bd, 'right': Bd, 'top': Br, 'bottom': Br})

        for t in domain.evolve(yieldstep=1, finaltime = t_end):
            pass


        # Read it

        # Get mesh and quantities from sww file
        X = get_mesh_and_quantities_from_file(swwfile,
                                              quantities=['elevation',
                                                          'stage',
                                                          'xmomentum',
                                                          'ymomentum'],
                                              verbose=False)
        mesh, quantities, time = X


        # Check that mesh has been recovered
        assert num.alltrue(mesh.triangles == domain.get_triangles())
        assert num.allclose(mesh.nodes, domain.get_nodes())

        # Check that time has been recovered (one entry per yieldstep)
        assert num.allclose(time, range(t_end+1))

        # Check that quantities have been recovered
        # (sww files use single precision)
        z=domain.get_quantity('elevation').get_values(location='unique vertices')
        assert num.allclose(quantities['elevation'], z)

        for q in ['stage', 'xmomentum', 'ymomentum']:
            # Get quantity at last timestep
            q_ref=domain.get_quantity(q).get_values(location='unique vertices')

            #print q,quantities[q]
            q_sww=quantities[q][-1,:]    # Last stored timestep

            msg = 'Quantity %s failed to be recovered' %q
            assert num.allclose(q_ref, q_sww, atol=1.0e-6), msg

        # Cleanup
        os.remove(swwfile)
    def test_get_flow_through_cross_section(self):
        """test_get_flow_through_cross_section(self):

        Test that the total flow through a cross section can be
        correctly obtained from an sww file.

        This test creates a flat bed with a known flow through it and tests
        that the function correctly returns the expected flow.

        The specifics are
            u = 2 m/s
            h = 1 m
            w = 3 m (width of channel)

            q = u*h*w = 6 m^3/s

        #---------- First run without geo referencing

        """

        import time, os
        from Scientific.IO.NetCDF import NetCDFFile

        # Setup
        from mesh_factory import rectangular

        # Create basic mesh (20m x 3m)
        width = 3
        length = 20
        t_end = 3
        points, vertices, boundary = rectangular(length, width,
                                                 length, width)

        # Create shallow water domain
        domain = Domain(points, vertices, boundary)
        domain.default_order = 2
        domain.set_minimum_storable_height(0.01)

        domain.set_name('flowtest')
        swwfile = domain.get_name() + '.sww'

        domain.set_datadir('.')
        domain.format = 'sww'
        domain.smooth = True

        h = 1.0      # Water depth [m]
        u = 2.0      # Flow speed [m/s]
        uh = u*h     # x-momentum (discharge per unit width)

        Br = Reflective_boundary(domain)       # Side walls
        Bd = Dirichlet_boundary([h, uh, 0])    # 2 m/s across the 3 m inlet:



        # Start from a steady uniform flow state
        domain.set_quantity('elevation', 0.0)
        domain.set_quantity('stage', h)
        domain.set_quantity('xmomentum', uh)
        domain.set_boundary( {'left': Bd, 'right': Bd, 'top': Br, 'bottom': Br})

        for t in domain.evolve(yieldstep=1, finaltime = t_end):
            pass

        # Check that momentum is as it should be in the interior

        I = [[0, width/2.],
             [length/2., width/2.],
             [length, width/2.]]

        f = file_function(swwfile,
                          quantities=['stage', 'xmomentum', 'ymomentum'],
                          interpolation_points=I,
                          verbose=False)
        for t in range(t_end+1):
            for i in range(3):
                assert num.allclose(f(t, i), [1, 2, 0], atol=1.0e-6)


        # Check flows through the middle: full-width sections must carry
        # the full discharge q = uh*width
        for i in range(5):
            x = length/2. + i*0.23674563    # Arbitrary
            cross_section = [[x, 0], [x, width]]
            time, Q = get_flow_through_cross_section(swwfile,
                                                     cross_section,
                                                     verbose=False)

            assert num.allclose(Q, uh*width)



        # Try the same with partial lines: flow scales with covered width
        x = length/2.
        for i in range(5):
            start_point = [length/2., i*width/5.]
            #print start_point

            cross_section = [start_point, [length/2., width]]
            time, Q = get_flow_through_cross_section(swwfile,
                                                     cross_section,
                                                     verbose=False)

            #print i, Q, (width-start_point[1])
            assert num.allclose(Q, uh*(width-start_point[1]))


        # Verify no flow when line is parallel to flow
        cross_section = [[length/2.-10, width/2.], [length/2.+10, width/2.]]
        time, Q = get_flow_through_cross_section(swwfile,
                                                 cross_section,
                                                 verbose=False)

        #print i, Q
        assert num.allclose(Q, 0, atol=1.0e-5)


        # Try with lines on an angle (all flow still runs through here)
        cross_section = [[length/2., 0], [length/2.+width, width]]
        time, Q = get_flow_through_cross_section(swwfile,
                                                 cross_section,
                                                 verbose=False)

        assert num.allclose(Q, uh*width)

        # NOTE(review): swwfile is not removed here — 'flowtest.sww' is left
        # behind in the current directory.
    def test_get_flow_through_cross_section_with_geo(self):
        """test_get_flow_through_cross_section(self):

        Test that the total flow through a cross section can be
        correctly obtained from an sww file.

        This test creates a flat bed with a known flow through it and tests
        that the function correctly returns the expected flow.

        The specifics are
            u = 2 m/s
            h = 2 m
            w = 3 m (width of channel)

            q = u*h*w = 12 m^3/s


        This run tries it with georeferencing and with elevation = -1

        All interpolation points and cross sections are therefore converted
        to absolute (UTM) coordinates before use.
        """

        import time, os
        from Scientific.IO.NetCDF import NetCDFFile

        # Setup
        from mesh_factory import rectangular

        # Create basic mesh (20m x 3m)
        width = 3
        length = 20
        t_end = 1
        points, vertices, boundary = rectangular(length, width,
                                                 length, width)

        # Create shallow water domain
        domain = Domain(points, vertices, boundary,
                        geo_reference = Geo_reference(56,308500,6189000))

        domain.default_order = 2
        domain.set_minimum_storable_height(0.01)

        domain.set_name('flowtest')
        swwfile = domain.get_name() + '.sww'

        domain.set_datadir('.')
        domain.format = 'sww'
        domain.smooth = True

        e = -1.0     # Bed elevation [m]
        w = 1.0      # Stage (water surface) [m]
        h = w-e      # Water depth = 2 m
        u = 2.0      # Flow speed [m/s]
        uh = u*h     # x-momentum

        Br = Reflective_boundary(domain)       # Side walls
        Bd = Dirichlet_boundary([w, uh, 0])    # 2 m/s across the 3 m inlet:




        # Start from a steady uniform flow state
        domain.set_quantity('elevation', e)
        domain.set_quantity('stage', w)
        domain.set_quantity('xmomentum', uh)
        domain.set_boundary( {'left': Bd, 'right': Bd, 'top': Br, 'bottom': Br})

        for t in domain.evolve(yieldstep=1, finaltime = t_end):
            pass

        # Check that momentum is as it should be in the interior

        I = [[0, width/2.],
             [length/2., width/2.],
             [length, width/2.]]

        # Interpolation points must be in absolute coordinates
        I = domain.geo_reference.get_absolute(I)
        f = file_function(swwfile,
                          quantities=['stage', 'xmomentum', 'ymomentum'],
                          interpolation_points=I,
                          verbose=False)

        for t in range(t_end+1):
            for i in range(3):
                #print i, t, f(t, i)
                assert num.allclose(f(t, i), [w, uh, 0], atol=1.0e-6)


        # Check flows through the middle: full-width sections must carry
        # the full discharge q = uh*width
        for i in range(5):
            x = length/2. + i*0.23674563    # Arbitrary
            cross_section = [[x, 0], [x, width]]

            # Cross sections must also be given in absolute coordinates
            cross_section = domain.geo_reference.get_absolute(cross_section)
            time, Q = get_flow_through_cross_section(swwfile,
                                                     cross_section,
                                                     verbose=False)

            assert num.allclose(Q, uh*width)
    def test_get_energy_through_cross_section(self):
        """test_get_energy_through_cross_section(self):

        Test that the specific and total energy through a cross section can be
        correctly obtained from an sww file.

        This test creates a flat bed with a known flow through it and tests
        that the function correctly returns the expected energies.

        The specifics are
            u = 2 m/s
            h = 1 m
            w = 3 m (width of channel)

            q = u*h*w = 6 m^3/s
            Es = h + 0.5*v*v/g  # Specific energy head [m]
            Et = w + 0.5*v*v/g  # Total energy head [m]


        This test uses georeferencing

        NOTE(review): the docstring above says h = 1 m, but the body uses
        e = -1, w = 1 so h = w - e = 2 m; the assertions below are written
        in terms of h and w so they are consistent with the code.
        """

        import time, os
        from Scientific.IO.NetCDF import NetCDFFile

        # Setup
        from mesh_factory import rectangular

        # Create basic mesh (20m x 3m)
        width = 3
        length = 20
        t_end = 1
        points, vertices, boundary = rectangular(length, width,
                                                 length, width)

        # Create shallow water domain
        domain = Domain(points, vertices, boundary,
                        geo_reference = Geo_reference(56,308500,6189000))

        domain.default_order = 2
        domain.set_minimum_storable_height(0.01)

        domain.set_name('flowtest')
        swwfile = domain.get_name() + '.sww'

        domain.set_datadir('.')
        domain.format = 'sww'
        domain.smooth = True

        e = -1.0     # Bed elevation [m]
        w = 1.0      # Stage (water surface) [m]
        h = w-e      # Water depth [m]
        u = 2.0      # Flow speed [m/s]
        uh = u*h     # x-momentum

        Br = Reflective_boundary(domain)       # Side walls
        Bd = Dirichlet_boundary([w, uh, 0])    # 2 m/s across the 3 m inlet:


        # Start from a steady uniform flow state
        domain.set_quantity('elevation', e)
        domain.set_quantity('stage', w)
        domain.set_quantity('xmomentum', uh)
        domain.set_boundary( {'left': Bd, 'right': Bd, 'top': Br, 'bottom': Br})

        for t in domain.evolve(yieldstep=1, finaltime = t_end):
            pass

        # Check that momentum is as it should be in the interior

        I = [[0, width/2.],
             [length/2., width/2.],
             [length, width/2.]]

        # Interpolation points must be in absolute coordinates
        I = domain.geo_reference.get_absolute(I)
        f = file_function(swwfile,
                          quantities=['stage', 'xmomentum', 'ymomentum'],
                          interpolation_points=I,
                          verbose=False)

        for t in range(t_end+1):
            for i in range(3):
                #print i, t, f(t, i)
                assert num.allclose(f(t, i), [w, uh, 0], atol=1.0e-6)


        # Check energies through the middle
        for i in range(5):
            x = length/2. + i*0.23674563    # Arbitrary
            cross_section = [[x, 0], [x, width]]

            # Cross sections must be given in absolute coordinates
            cross_section = domain.geo_reference.get_absolute(cross_section)

            # Specific energy head: measured from the bed
            time, Es = get_energy_through_cross_section(swwfile,
                                                        cross_section,
                                                        kind='specific',
                                                        verbose=False)
            assert num.allclose(Es, h + 0.5*u*u/g)

            # Total energy head: measured from the datum (stage-based)
            time, Et = get_energy_through_cross_section(swwfile,
                                                        cross_section,
                                                        kind='total',
                                                        verbose=False)
            assert num.allclose(Et, w + 0.5*u*u/g)
6199 | def test_get_all_swwfiles(self): |
---|
6200 | try: |
---|
6201 | swwfiles = get_all_swwfiles('','test.txt') #Invalid |
---|
6202 | except IOError: |
---|
6203 | pass |
---|
6204 | else: |
---|
6205 | raise 'Should have raised exception' |
---|
6206 | |
---|
6207 | def test_get_all_swwfiles1(self): |
---|
6208 | |
---|
6209 | temp_dir = tempfile.mkdtemp('','sww_test') |
---|
6210 | filename0 = tempfile.mktemp('.sww','test',temp_dir) |
---|
6211 | filename1 = tempfile.mktemp('.sww','test',temp_dir) |
---|
6212 | filename2 = tempfile.mktemp('.sww','test',temp_dir) |
---|
6213 | filename3 = tempfile.mktemp('.sww','test',temp_dir) |
---|
6214 | |
---|
6215 | #print'filename', filename0,filename1,filename2,filename3 |
---|
6216 | |
---|
6217 | fid0 = open(filename0, 'w') |
---|
6218 | fid1 = open(filename1, 'w') |
---|
6219 | fid2 = open(filename2, 'w') |
---|
6220 | fid3 = open(filename3, 'w') |
---|
6221 | |
---|
6222 | fid0.write('hello') |
---|
6223 | fid1.write('hello') |
---|
6224 | fid2.write('hello') |
---|
6225 | fid3.write('hello') |
---|
6226 | |
---|
6227 | fid0.close() |
---|
6228 | fid1.close() |
---|
6229 | fid2.close() |
---|
6230 | fid3.close() |
---|
6231 | |
---|
6232 | |
---|
6233 | dir, name0 = os.path.split(filename0) |
---|
6234 | #print 'dir',dir,name0 |
---|
6235 | |
---|
6236 | iterate=get_all_swwfiles(dir,'test') |
---|
6237 | |
---|
6238 | del_dir(temp_dir) |
---|
6239 | # removeall(temp_dir) |
---|
6240 | |
---|
6241 | _, name0 = os.path.split(filename0) |
---|
6242 | #print'name0',name0[:-4],iterate[0] |
---|
6243 | _, name1 = os.path.split(filename1) |
---|
6244 | _, name2 = os.path.split(filename2) |
---|
6245 | _, name3 = os.path.split(filename3) |
---|
6246 | |
---|
6247 | assert name0[:-4] in iterate |
---|
6248 | assert name1[:-4] in iterate |
---|
6249 | assert name2[:-4] in iterate |
---|
6250 | assert name3[:-4] in iterate |
---|
6251 | |
---|
6252 | assert len(iterate)==4 |
---|
6253 | |
---|
6254 | |
---|
6255 | def test_points2polygon(self): |
---|
6256 | att_dict = {} |
---|
6257 | pointlist = num.array([[1.0, 0.0],[0.0, 1.0],[0.0, 0.0]]) |
---|
6258 | att_dict['elevation'] = num.array([10.1, 0.0, 10.4]) |
---|
6259 | att_dict['brightness'] = num.array([10.0, 1.0, 10.4]) |
---|
6260 | |
---|
6261 | fileName = tempfile.mktemp(".csv") |
---|
6262 | |
---|
6263 | G = Geospatial_data(pointlist, att_dict) |
---|
6264 | |
---|
6265 | G.export_points_file(fileName) |
---|
6266 | |
---|
6267 | polygon = points2polygon(fileName) |
---|
6268 | |
---|
6269 | # This test may fail if the order changes |
---|
6270 | assert (polygon == [[0.0, 0.0],[1.0, 0.0],[0.0, 1.0]]) |
---|
6271 | |
---|
6272 | |
---|
    def test_csv2polygons(self):
        """test_csv2polygons

        Read polygon_values_example.csv (shipped with anuga.shallow_water)
        and check that csv2polygons returns one polygon and one 'floors'
        value for each of the seven building ids, matching the known data.
        """

        path = get_pathname_from_package('anuga.shallow_water')
        testfile = os.path.join(path, 'polygon_values_example.csv')

        polygons, values = csv2polygons(testfile,
                                        value_name='floors')

        assert len(polygons) == 7, 'Must have seven polygons'
        assert len(values) == 7, 'Must have seven values'

        # Known floor values (keyed by building id)
        floors = {'1': 2, '2': 0, '3': 1, '4': 2, '5': 0, '8': 1, '9': 1}

        # Known polygon values (UTM coordinates, keyed by building id)
        known_polys = {'1': [[422681.61,871117.55],
                             [422691.02,871117.60],
                             [422690.87,871084.23],
                             [422649.36,871081.85],
                             [422649.36,871080.39],
                             [422631.86,871079.50],
                             [422631.72,871086.75],
                             [422636.75,871087.20],
                             [422636.75,871091.50],
                             [422649.66,871092.09],
                             [422649.83,871084.91],
                             [422652.94,871084.90],
                             [422652.84,871092.39],
                             [422681.83,871093.73],
                             [422681.61,871117.55]],
                       '2': [[422664.22,870785.46],
                             [422672.48,870780.14],
                             [422668.17,870772.62],
                             [422660.35,870777.17],
                             [422664.22,870785.46]],
                       '3': [[422661.30,871215.06],
                             [422667.50,871215.70],
                             [422668.30,871204.86],
                             [422662.21,871204.33],
                             [422661.30,871215.06]],
                       '4': [[422473.44,871191.22],
                             [422478.33,871192.26],
                             [422479.52,871186.03],
                             [422474.78,871185.14],
                             [422473.44,871191.22]],
                       '5': [[422369.69,871049.29],
                             [422378.63,871053.58],
                             [422383.91,871044.51],
                             [422374.97,871040.32],
                             [422369.69,871049.29]],
                       '8': [[422730.56,871203.13],
                             [422734.10,871204.90],
                             [422735.26,871202.18],
                             [422731.87,871200.58],
                             [422730.56,871203.13]],
                       '9': [[422659.85,871213.80],
                             [422660.91,871210.97],
                             [422655.42,871208.85],
                             [422654.36,871211.68],
                             [422659.85,871213.80]]
                       }

        # Every known id must be present, with matching value and polygon
        for id in ['1', '2', '3', '4', '5' ,'8' ,'9']:
            assert id in polygons.keys()
            assert id in values.keys()

            assert int(values[id]) == int(floors[id])
            assert len(polygons[id]) == len(known_polys[id])
            assert num.allclose(polygons[id], known_polys[id])
    def test_csv2polygons_with_clipping(self):
        """test_csv2polygons with optional clipping

        Same data and expectations as test_csv2polygons; here the optional
        clipping_polygons argument is passed explicitly as None, which must
        leave the result unchanged.
        """
        #FIXME(Ole): Not Done!!

        path = get_pathname_from_package('anuga.shallow_water')
        testfile = os.path.join(path, 'polygon_values_example.csv')

        polygons, values = csv2polygons(testfile,
                                        value_name='floors',
                                        clipping_polygons=None)

        assert len(polygons) == 7, 'Must have seven polygons'
        assert len(values) == 7, 'Must have seven values'

        # Known floor values (keyed by building id)
        floors = {'1': 2, '2': 0, '3': 1, '4': 2, '5': 0, '8': 1, '9': 1}

        # Known polygon values (UTM coordinates, keyed by building id)
        known_polys = {'1': [[422681.61,871117.55],
                             [422691.02,871117.60],
                             [422690.87,871084.23],
                             [422649.36,871081.85],
                             [422649.36,871080.39],
                             [422631.86,871079.50],
                             [422631.72,871086.75],
                             [422636.75,871087.20],
                             [422636.75,871091.50],
                             [422649.66,871092.09],
                             [422649.83,871084.91],
                             [422652.94,871084.90],
                             [422652.84,871092.39],
                             [422681.83,871093.73],
                             [422681.61,871117.55]],
                       '2': [[422664.22,870785.46],
                             [422672.48,870780.14],
                             [422668.17,870772.62],
                             [422660.35,870777.17],
                             [422664.22,870785.46]],
                       '3': [[422661.30,871215.06],
                             [422667.50,871215.70],
                             [422668.30,871204.86],
                             [422662.21,871204.33],
                             [422661.30,871215.06]],
                       '4': [[422473.44,871191.22],
                             [422478.33,871192.26],
                             [422479.52,871186.03],
                             [422474.78,871185.14],
                             [422473.44,871191.22]],
                       '5': [[422369.69,871049.29],
                             [422378.63,871053.58],
                             [422383.91,871044.51],
                             [422374.97,871040.32],
                             [422369.69,871049.29]],
                       '8': [[422730.56,871203.13],
                             [422734.10,871204.90],
                             [422735.26,871202.18],
                             [422731.87,871200.58],
                             [422730.56,871203.13]],
                       '9': [[422659.85,871213.80],
                             [422660.91,871210.97],
                             [422655.42,871208.85],
                             [422654.36,871211.68],
                             [422659.85,871213.80]]
                       }

        # Every known id must be present, with matching value and polygon
        for id in ['1', '2', '3', '4', '5' ,'8' ,'9']:
            assert id in polygons.keys()
            assert id in values.keys()

            assert int(values[id]) == int(floors[id])
            assert len(polygons[id]) == len(known_polys[id])
            assert num.allclose(polygons[id], known_polys[id])
6430 | def test_csv2building_polygons(self): |
---|
6431 | """test_csv2building_polygons |
---|
6432 | """ |
---|
6433 | |
---|
6434 | path = get_pathname_from_package('anuga.shallow_water') |
---|
6435 | testfile = os.path.join(path, 'polygon_values_example.csv') |
---|
6436 | |
---|
6437 | polygons, values = csv2building_polygons(testfile, |
---|
6438 | floor_height=3) |
---|
6439 | |
---|
6440 | assert len(polygons) == 7, 'Must have seven polygons' |
---|
6441 | assert len(values) == 7, 'Must have seven values' |
---|
6442 | |
---|
6443 | # Known floor values |
---|
6444 | floors = {'1': 6, '2': 0, '3': 3, '4': 6, '5': 0, '8': 3, '9': 3} |
---|
6445 | |
---|
6446 | |
---|
6447 | for id in ['1', '2', '3', '4', '5' ,'8' ,'9']: |
---|
6448 | assert id in polygons.keys() |
---|
6449 | assert id in values.keys() |
---|
6450 | |
---|
6451 | assert float(values[id]) == float(floors[id]) |
---|
6452 | |
---|
6453 | |
---|
#-------------------------------------------------------------

if __name__ == "__main__":
    #suite = unittest.makeSuite(Test_Data_Manager, 'test_sww2domain2')
    suite = unittest.makeSuite(Test_Data_Manager, 'test_sww')

    # FIXME(Ole): When Ross has implemented logging, we can
    # probably get rid of all this:
    # Evaluate the verbosity flag once instead of repeating the
    # identical sys.argv test for both setup and cleanup.
    verbose = len(sys.argv) > 1 and sys.argv[1][0].upper() == 'V'
    if verbose:
        # Redirect all test chatter to a temporary file.
        Test_Data_Manager.verbose = True
        saveout = sys.stdout
        filename = ".temp_verbose"
        fid = open(filename, 'w')
        sys.stdout = fid

    try:
        runner = unittest.TextTestRunner() #verbosity=2)
        runner.run(suite)
    finally:
        # Cleaning up - restore stdout and remove the temporary file
        # even if the test run raised an exception.
        if verbose:
            sys.stdout = saveout
            fid.close()
            os.remove(filename)
---|