from struct import pack, unpack
import array as p_array
import numpy as num


from anuga.coordinate_transforms.geo_reference import Geo_reference

from anuga.geospatial_data.geospatial_data import ensure_absolute, \
                                                  Geospatial_data

from anuga.coordinate_transforms.redfearn import redfearn

# ANUGAError is raised in Read_urs.__init__ below but was not imported.
# The module path used here is an assumption and may differ between
# ANUGA versions.
from anuga.utilities.anuga_exceptions import ANUGAError

class Read_urs:
    """
    Read the info in URS mux files.

    For the quantities, here is how the file names relate to their contents:
        z-mux is height above sea level, m
        e-mux is velocity in the Eastern direction, m/s
        n-mux is velocity in the Northern direction, m/s
    """

    def __init__(self, urs_file):
        self.iterated = False
        columns = 3    # long, lat, depth
        mux_file = open(urs_file, 'rb')

        # Number of points/stations
        (self.points_num,) = unpack('i', mux_file.read(4))

        # nt, int - Number of time steps
        (self.time_step_count,) = unpack('i', mux_file.read(4))
        # dt, float - time step, seconds
        (self.time_step,) = unpack('f', mux_file.read(4))
        msg = "Bad data in the urs file."
        if self.points_num < 0:
            mux_file.close()
            raise ANUGAError(msg)
        if self.time_step_count < 0:
            mux_file.close()
            raise ANUGAError(msg)
        if self.time_step < 0:
            mux_file.close()
            raise ANUGAError(msg)

        # The depth is in meters; it is the distance from the ocean
        # surface to the sea bottom.
        lonlatdep = p_array.array('f')
        lonlatdep.read(mux_file, columns * self.points_num)
        lonlatdep = num.array(lonlatdep, dtype=num.float)
        lonlatdep = num.reshape(lonlatdep, (self.points_num, columns))
        self.lonlatdep = lonlatdep

        self.mux_file = mux_file
        # check this array

    def __iter__(self):
        """
        Iterate over the quantity data, which is organised by time.

        Note: You can only iterate once over an object.

        Returns quantity information for each time slice.
        """

        msg = "You can only iterate once over a urs file."
        assert not self.iterated, msg

        self.iter_time_step = 0
        self.iterated = True

        return self

    def next(self):
        if self.time_step_count == self.iter_time_step:
            self.close()
            raise StopIteration

        # Read in a time slice from the mux file
        hz_p_array = p_array.array('f')
        hz_p_array.read(self.mux_file, self.points_num)
        hz_p = num.array(hz_p_array, dtype=num.float)
        self.iter_time_step += 1

        return hz_p

    def close(self):
        self.mux_file.close()


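# Illustrative usage sketch (not part of the original module): how Read_urs
# is meant to be driven.  The helper name and the use made of the yielded
# arrays are assumptions; the class itself only promises one pass of
# iteration, yielding one array of station values per time step.
def _example_read_mux(urs_file):
    reader = Read_urs(urs_file)

    # Header values unpacked in __init__
    header = (reader.points_num, reader.time_step_count, reader.time_step)

    # One numeric array per time step, with one value per station.
    # The mux file is closed automatically when iteration finishes.
    maxima = [max(time_slice) for time_slice in reader]

    # lonlatdep is a (points_num, 3) array of long, lat, depth
    return header, reader.lonlatdep, maxima

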
### PRODUCING THE POINTS NEEDED FILE ###

# Ones used for FESA 2007 results
#LL_LAT = -50.0
#LL_LONG = 80.0
#GRID_SPACING = 1.0/60.0
#LAT_AMOUNT = 4800
#LONG_AMOUNT = 3600


def save_boundary_as_urs(file_name, boundary_polygon, zone,
                         ll_lat, ll_long,
                         grid_spacing,
                         lat_amount, long_amount,
                         isSouthernHemisphere=True,
                         export_csv=False, use_cache=False,
                         verbose=False):
    """
    Given the info needed to replicate the URS grid and a polygon, output
    a file that specifies the cloud of boundary points for URS.

    This creates a .urs file in the format used by URS:
    the first line is the number of points;
    each subsequent line represents a point, as a latitude and longitude.

    Note: The polygon cannot cross zones or hemispheres.

    A work-around for different zones or hemispheres is to run this twice,
    once for each zone, and then combine the output.

    file_name - name of the urs file produced.
    boundary_polygon - a list of points that describes a polygon.
                       The last point is assumed to join the first point.
                       This is in UTM (lat long would be better though).

    The following arguments describe the URS model grid:

    ll_lat - lower left latitude, in decimal degrees
    ll_long - lower left longitude, in decimal degrees
    grid_spacing - grid spacing, in decimal degrees
    lat_amount - number of latitudes
    long_amount - number of longitudes

    Don't add the file extension. It will be added.
    """

    geo = calculate_boundary_points(boundary_polygon, zone, ll_lat, ll_long,
                                    grid_spacing,
                                    lat_amount, long_amount, isSouthernHemisphere,
                                    use_cache, verbose)

    if not file_name[-4:] == ".urs":
        file_name += ".urs"

    geo.export_points_file(file_name, isSouthHemisphere=isSouthernHemisphere)

    if export_csv:
        if file_name[-4:] == ".urs":
            file_name = file_name[:-4] + ".csv"
        geo.export_points_file(file_name)

    return geo

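
# Illustrative sketch only (not from the original module).  The polygon and
# zone below are hypothetical example values; the grid parameters mirror the
# commented FESA 2007 constants above.
def _example_save_boundary_as_urs():
    # A small, made-up UTM rectangle in zone 50 (southern hemisphere)
    example_polygon = [[340000., 6380000.], [360000., 6380000.],
                       [360000., 6400000.], [340000., 6400000.]]

    # Writes 'example_boundary.urs' (and a .csv copy) listing the URS grid
    # points needed along the polygon's edges.
    return save_boundary_as_urs('example_boundary', example_polygon, zone=50,
                                ll_lat=-50.0, ll_long=80.0,
                                grid_spacing=1.0/60.0,
                                lat_amount=4800, long_amount=3600,
                                export_csv=True)
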

def calculate_boundary_points(boundary_polygon, zone, ll_lat,
                              ll_long, grid_spacing,
                              lat_amount, long_amount, isSouthHemisphere=True,
                              use_cache=False, verbose=False):
    args = (boundary_polygon,
            zone, ll_lat,
            ll_long, grid_spacing,
            lat_amount, long_amount, isSouthHemisphere)
    kwargs = {}

    if use_cache is True:
        try:
            from anuga.caching import cache
        except ImportError:
            msg = 'Caching was requested, but the caching module ' \
                  'could not be imported'
            raise Exception(msg)

        geo = cache(_calculate_boundary_points,
                    args, kwargs,
                    verbose=verbose,
                    compression=False)
    else:
        geo = _calculate_boundary_points(*args, **kwargs)

    return geo


def _calculate_boundary_points(boundary_polygon,
                               zone, ll_lat,
                               ll_long, grid_spacing,
                               lat_amount, long_amount,
                               isSouthHemisphere):
    """
    boundary_polygon - a list of points that describes a polygon.
                       The last point is assumed to join the first point.
                       This is in UTM (lat long would be better though).

    ll_lat - lower left latitude, in decimal degrees
    ll_long - lower left longitude, in decimal degrees
    grid_spacing - grid spacing, in decimal degrees
    """

    msg = "grid_spacing cannot be zero"
    assert not grid_spacing == 0, msg

    a = boundary_polygon

    # List of segments. Each segment is two points.
    # (a[-1] pairs the last point with the first, closing the polygon.)
    segs = [[a[i-1], a[i]] for i in range(len(a))]

    # Convert the segments to lats and longs.
    # Don't assume the zone of the segments is the same as the lower left
    # corner of the lat long data!! They can easily be in different zones.
    lat_long_set = frozenset()
    for seg in segs:
        points_lat_long = points_needed(seg, ll_lat, ll_long, grid_spacing,
                                        lat_amount, long_amount, zone,
                                        isSouthHemisphere)
        lat_long_set |= frozenset(points_lat_long)

    if lat_long_set == frozenset([]):
        msg = "The specified URS region and the polygon do not overlap."
        raise ValueError(msg)

    # Warning: there is no info in geospatial saying which hemisphere
    # these points are in. There should be.
    geo = Geospatial_data(data_points=list(lat_long_set),
                          points_are_lats_longs=True)

    return geo


def points_needed(seg, ll_lat, ll_long, grid_spacing,
                  lat_amount, long_amount, zone,
                  isSouthHemisphere):
    """
    seg is two points, in UTM.
    Return a list of the points, as lats and longs, that are needed to
    interpolate any point on the segment.
    """

    geo_reference = Geo_reference(zone=zone)
    geo = Geospatial_data(seg, geo_reference=geo_reference)
    seg_lat_long = geo.get_data_points(as_lat_long=True,
                                       isSouthHemisphere=isSouthHemisphere)

    # 1.415 = 2^0.5, rounded up....
    sqrt_2_rounded_up = 1.415
    buffer = sqrt_2_rounded_up * grid_spacing

    max_lat = max(seg_lat_long[0][0], seg_lat_long[1][0]) + buffer
    max_long = max(seg_lat_long[0][1], seg_lat_long[1][1]) + buffer
    min_lat = min(seg_lat_long[0][0], seg_lat_long[1][0]) - buffer
    min_long = min(seg_lat_long[0][1], seg_lat_long[1][1]) - buffer

    first_row = (min_long - ll_long) / grid_spacing

    # To round up
    first_row_long = int(round(first_row + 0.5))

    last_row = (max_long - ll_long) / grid_spacing    # round down
    last_row_long = int(round(last_row))

    first_row = (min_lat - ll_lat) / grid_spacing
    # To round up
    first_row_lat = int(round(first_row + 0.5))

    last_row = (max_lat - ll_lat) / grid_spacing    # round down
    last_row_lat = int(round(last_row))

    # 157147.4112 m is roughly sqrt(2) * 111120 m (one degree of latitude),
    # so this is roughly the diagonal of one grid cell, in metres.
    max_distance = 157147.4112 * grid_spacing
    points_lat_long = []

    # Create a list of the lat long points to include, keeping only grid
    # points that are close enough to the segment.
    for index_lat in range(first_row_lat, last_row_lat + 1):
        for index_long in range(first_row_long, last_row_long + 1):
            lat = ll_lat + index_lat * grid_spacing
            long = ll_long + index_long * grid_spacing

            # Filter here to keep good points
            if keep_point(lat, long, seg, max_distance):
                points_lat_long.append((lat, long))    # must be hashable

    return points_lat_long

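
# Illustrative sketch only (made-up numbers, not from the original module):
# the row/column index arithmetic used in points_needed() above.  For a
# buffered bound lying about 3.2 cells from the lower-left corner, the lower
# index is bumped up to 4 by int(round(x + 0.5)), while the upper index
# rounds to 3.
def _example_row_rounding():
    grid_spacing = 1.0 / 60.0                  # one arc-minute grid
    ll_long = 80.0
    min_long = ll_long + 3.2 * grid_spacing

    x = (min_long - ll_long) / grid_spacing    # about 3.2
    first_col = int(round(x + 0.5))            # 4  (used for the lower bound)
    last_col = int(round(x))                   # 3  (used for the upper bound)
    return first_col, last_col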


def keep_point(lat, long, seg, max_distance):
    """
    seg is two points, in UTM.
    Return True if (lat, long) lies within max_distance of the line through
    the segment.
    """

    from math import sqrt

    _, x0, y0 = redfearn(lat, long)
    x1 = seg[0][0]
    y1 = seg[0][1]
    x2 = seg[1][0]
    y2 = seg[1][1]
    x2_1 = x2 - x1
    y2_1 = y2 - y1

    # Squared length of the segment
    seg_len_sq = x2_1*x2_1 + y2_1*y2_1
    if seg_len_sq == 0:
        # Degenerate segment: both end points coincide, so keep the point
        return True
    else:
        # Perpendicular distance from (x0, y0) to the line through the segment
        d = abs(x2_1*(y1 - y0) - (x1 - x0)*y2_1) / sqrt(seg_len_sq)
        return d <= max_distance
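

# Illustrative sketch only (made-up coordinates, not from the original
# module): the same perpendicular-distance test used by keep_point(), but on
# plain UTM numbers with no redfearn() call, to show the geometry.
def _example_keep_point_geometry():
    from math import sqrt

    seg = [[0.0, 0.0], [1000.0, 0.0]]    # horizontal 1 km segment
    x0, y0 = 500.0, 30.0                 # a point 30 m off the segment

    x2_1 = seg[1][0] - seg[0][0]
    y2_1 = seg[1][1] - seg[0][1]
    d = abs(x2_1*(seg[0][1] - y0) - (seg[0][0] - x0)*y2_1) / \
        sqrt(x2_1*x2_1 + y2_1*y2_1)

    # d is 30.0 m, so the point is kept whenever max_distance >= 30 m
    return d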