source: trunk/anuga_core/source/anuga/file_conversion/urs2nc.py @ 7800

Last change on this file since 7800 was 7800, checked in by hudson, 12 years ago

urs2sww has an extra urs_ungridded2sww function.

import os
from struct import pack, unpack
import array as p_array
import numpy as num

from anuga.utilities.numerical_tools import ensure_numeric
from anuga.caching.caching import myhash

# Needed for log.critical and ANUGAError below; these import paths are
# assumed to match the rest of the ANUGA source tree.
import anuga.utilities.log as log
from anuga.anuga_exceptions import ANUGAError

from anuga.file.netcdf import Write_nc, write_elevation_nc

from mux import WAVEHEIGHT_MUX_LABEL, EAST_VELOCITY_LABEL, \
                NORTH_VELOCITY_LABEL


def urs2nc(basename_in='o', basename_out='urs'):
    """Convert the 3 urs (mux) files to 4 nc files.

    The urs file names must be:
    [basename_in]-z-mux
    [basename_in]-e-mux
    [basename_in]-n-mux
    """

    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
                basename_in + EAST_VELOCITY_LABEL,
                basename_in + NORTH_VELOCITY_LABEL]
    files_out = [basename_out + '_ha.nc',
                 basename_out + '_ua.nc',
                 basename_out + '_va.nc']
    quantities = ['HA', 'UA', 'VA']

    for i, file_name in enumerate(files_in):
        if not os.access(file_name, os.F_OK):
            if not os.access(file_name + '.mux', os.F_OK):
                msg = 'File %s does not exist or is not accessible' % file_name
                raise IOError(msg)
            else:
                files_in[i] += '.mux'
                log.critical("file_name %s" % file_name)

    hashed_elevation = None
    for file_in, file_out, quantity in map(None, files_in,
                                           files_out,
                                           quantities):
        lonlatdep, lon, lat, depth = _binary_c2nc(file_in,
                                                  file_out,
                                                  quantity)
        if hashed_elevation is None:
            elevation_file = basename_out + '_e.nc'
            write_elevation_nc(elevation_file,
                               lon,
                               lat,
                               depth)
            hashed_elevation = myhash(lonlatdep)
        else:
            msg = "The elevation information in the mux files is inconsistent"
            assert hashed_elevation == myhash(lonlatdep), msg

    files_out.append(elevation_file)

    return files_out

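
# Illustrative usage sketch: assuming mux files 'o-z-mux', 'o-e-mux' and
# 'o-n-mux' exist in the working directory, the call below writes
# 'urs_ha.nc', 'urs_ua.nc', 'urs_va.nc' and 'urs_e.nc' and returns their
# names.  The basenames are hypothetical.
def _example_urs2nc():
    nc_files = urs2nc(basename_in='o', basename_out='urs')
    # nc_files == ['urs_ha.nc', 'urs_ua.nc', 'urs_va.nc', 'urs_e.nc']
    return nc_files
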

##
# @brief Convert a quantity URS file to a NetCDF file.
# @param file_in Path to input URS file.
# @param file_out Path to the output file.
# @param quantity Name of the quantity to be written to the output file.
# @return A tuple (lonlatdep, lon, lat, depth).
def _binary_c2nc(file_in, file_out, quantity):
    """Read a quantity urs file and write a quantity nc file.

    Additionally, return the depth and lat, long info,
    so it can be written to a file.
    """

    columns = 3                           # long, lat, depth
    mux_file = open(file_in, 'rb')

    # Number of points/stations
    (points_num,) = unpack('i', mux_file.read(4))

    # nt, int - number of time steps
    (time_step_count,) = unpack('i', mux_file.read(4))

    # dt, float - time step, seconds
    (time_step,) = unpack('f', mux_file.read(4))

    msg = "Bad data in the mux file."
    if points_num < 0:
        mux_file.close()
        raise ANUGAError(msg)
    if time_step_count < 0:
        mux_file.close()
        raise ANUGAError(msg)
    if time_step < 0:
        mux_file.close()
        raise ANUGAError(msg)

    lonlatdep = p_array.array('f')
    lonlatdep.read(mux_file, columns * points_num)
    lonlatdep = num.array(lonlatdep, dtype=num.float)
    lonlatdep = num.reshape(lonlatdep, (points_num, columns))

    lon, lat, depth = lon_lat2grid(lonlatdep)
    lon_sorted = list(lon)
    lon_sorted.sort()

    if not num.alltrue(lon == lon_sorted):
        msg = "Longitudes in mux file are not in ascending order"
        raise IOError(msg)

    lat_sorted = list(lat)
    lat_sorted.sort()

    nc_file = Write_nc(quantity,
                       file_out,
                       time_step_count,
                       time_step,
                       lon,
                       lat)

    for i in range(time_step_count):
        # Read in a time slice from the mux file
        hz_p_array = p_array.array('f')
        hz_p_array.read(mux_file, points_num)
        hz_p = num.array(hz_p_array, dtype=num.float)
        hz_p = num.reshape(hz_p, (len(lon), len(lat)))
        hz_p = num.transpose(hz_p)  # mux has lat varying fastest, nc has long varying fastest

        # Write time slice to nc file
        nc_file.store_timestep(hz_p)

    mux_file.close()
    nc_file.close()

    return lonlatdep, lon, lat, depth

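
# Illustrative sketch of the mux layout that _binary_c2nc expects: a 4-byte
# int with the number of points, a 4-byte int with the number of time steps,
# a 4-byte float with the time step in seconds, then 3 floats
# (long, lat, depth) per point with latitude varying fastest, followed by one
# float per point for every time step.  The file name and grid values are
# hypothetical.
def _write_example_mux(file_name='example-z-mux'):
    longs = [150.0, 151.0]                  # two longitudes, ascending
    lats = [-34.0, -33.5, -33.0]            # three latitudes (fastest varying)
    depth = 100.0
    points_num = len(longs) * len(lats)
    time_step_count = 2
    time_step = 0.5

    mux_file = open(file_name, 'wb')
    mux_file.write(pack('i', points_num))
    mux_file.write(pack('i', time_step_count))
    mux_file.write(pack('f', time_step))
    for lo in longs:                        # latitude varies fastest
        for la in lats:
            mux_file.write(pack('fff', lo, la, depth))
    for _ in range(time_step_count):        # one value per point per time step
        for _ in range(points_num):
            mux_file.write(pack('f', 0.0))
    mux_file.close()
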

##
# @brief Convert a list of gridded points to grid axes and a reordered quantity.
# @param long_lat_dep Array where each row is (longitude, latitude, quantity).
# @return A tuple (long, lat, quantity).
# @note The latitude is the fastest varying dimension - in mux files.
def lon_lat2grid(long_lat_dep):
    """
    Given a list of points that are assumed to lie on a grid,
    return the longs and lats of the grid.
    long_lat_dep is an array where each row is a position.
    The first column is longitudes.
    The second column is latitudes.
    The third column is the quantity (e.g. depth).

    The latitude is the fastest varying dimension - in mux files.
    """

    LONG = 0
    LAT = 1
    QUANTITY = 2

    long_lat_dep = ensure_numeric(long_lat_dep, num.float)

    num_points = long_lat_dep.shape[0]
    this_rows_long = long_lat_dep[0, LONG]

    # Count the number of unique latitudes
    # (check the bound before indexing so the scan stops at the end of the array)
    i = 0
    while i < num_points and long_lat_dep[i, LONG] == this_rows_long:
        i += 1

    # Determine the lats and longs from the grid
    lat = long_lat_dep[:i, LAT]
    long = long_lat_dep[::i, LONG]

    lenlong = len(long)
    lenlat = len(lat)

    msg = 'Input data is not gridded'
    assert num_points % lenlat == 0, msg
    assert num_points % lenlong == 0, msg

    # Test that data is gridded
    for i in range(lenlong):
        msg = 'Data is not gridded.  It must be for this operation'
        first = i * lenlat
        last = first + lenlat

        assert num.allclose(long_lat_dep[first:last, LAT], lat), msg
        assert num.allclose(long_lat_dep[first:last, LONG], long[i]), msg

    msg = 'Out of range latitudes/longitudes'
    for l in lat:
        assert -90 < l < 90, msg
    for l in long:
        assert -180 < l < 180, msg

    # Change quantity from lat being the fastest varying dimension to
    # long being the fastest varying dimension
    # FIXME - make this faster/do this a better way
    # use numeric transpose, after reshaping the quantity vector
    quantity = num.zeros(num_points, num.float)

    for lat_i, _ in enumerate(lat):
        for long_i, _ in enumerate(long):
            q_index = lat_i * lenlong + long_i
            lld_index = long_i * lenlat + lat_i
            quantity[q_index] = long_lat_dep[lld_index, QUANTITY]

    return long, lat, quantity

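
# Illustrative worked example of the reordering performed by lon_lat2grid:
# with 2 longitudes and 3 latitudes, latitude varying fastest on input, the
# returned quantity has longitude varying fastest.  The values are
# hypothetical.
def _example_lon_lat2grid():
    points = [[150.0, -34.0, 1.0],          # long 150.0, lats -34.0, -33.5, -33.0
              [150.0, -33.5, 2.0],
              [150.0, -33.0, 3.0],
              [151.0, -34.0, 4.0],          # long 151.0, same three lats
              [151.0, -33.5, 5.0],
              [151.0, -33.0, 6.0]]
    long, lat, quantity = lon_lat2grid(points)
    # long     -> [150.0, 151.0]
    # lat      -> [-34.0, -33.5, -33.0]
    # quantity -> [1.0, 4.0, 2.0, 5.0, 3.0, 6.0]   (longitude now fastest varying)
    return long, lat, quantity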