source: trunk/anuga_core/source/anuga/file_conversion/file_conversion.py @ 7800

Last change on this file since 7800 was 7800, checked in by hudson, 12 years ago

urs2sww has an extra urs_ungridded2sww function.

File size: 12.9 KB
Line 
1""" Conversion routines.
2    ANUGA needs to deal with many different file formats, and this
3    module provides routines for easily converting between them.
4   
5    These routines are necessarily high level, sitting above the various
6    ANUGA modules. They take a file as input, and output a file.
7"""
8
9#non ANUGA imports
10from Scientific.IO.NetCDF import NetCDFFile
11import numpy as num
12import os.path
13import os
14
15#ANUGA imports
16from anuga.coordinate_transforms.geo_reference import Geo_reference, \
17     write_NetCDF_georeference, ensure_geo_reference
18from anuga.abstract_2d_finite_volumes.pmesh2domain import \
19     pmesh_to_domain_instance
20from anuga.utilities.numerical_tools import ensure_numeric
21from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a, \
22                            netcdf_float
23
24
25#shallow water imports
26from anuga.file.sww import Read_sww, Write_sww
27from anuga.shallow_water.shallow_water_domain import Domain
28from anuga.shallow_water.shallow_water_domain import Domain
29
30
def sww2obj(basefilename, size):
    """Convert netcdf based SWW data output to OBJ files.

    basefilename  Stem of filename; size and extension are appended.
    size          The number of lines (elements) to write.

    Writes one .obj file for the bathymetry plus one .obj file per
    timestep for the stage data.
    """

    from Scientific.IO.NetCDF import NetCDFFile

    # NOTE(review): create_filename and log are not imported in this
    # module - assumed to be provided by the enclosing package; verify.
    FN = create_filename('.', basefilename, 'sww', size)
    log.critical('Reading from %s' % FN)
    fid = NetCDFFile(FN, netcdf_mode_r)  # Open existing file for read

    try:
        # Get the variables
        x = fid.variables['x']
        y = fid.variables['y']
        z = fid.variables['elevation']
        time = fid.variables['time']
        stage = fid.variables['stage']

        M = size  # Number of lines
        xx = num.zeros((M, 3), num.float)
        yy = num.zeros((M, 3), num.float)
        zz = num.zeros((M, 3), num.float)

        # Vertex j of element i is stored at flat index i + j*M.
        for i in range(M):
            for j in range(3):
                xx[i, j] = x[i + j*M]
                yy[i, j] = y[i + j*M]
                zz[i, j] = z[i + j*M]

        # Write obj for bathymetry
        FN = create_filename('.', basefilename, 'obj', size)
        write_obj(FN, xx, yy, zz)

        # Now read all the data with variable information, combine with
        # x,y info and store as obj
        for k in range(len(time)):
            t = time[k]
            log.critical('Processing timestep %f' % t)

            for i in range(M):
                for j in range(3):
                    zz[i, j] = stage[k, i + j*M]

            # Write obj for variable data
            FN = create_filename('.', basefilename[:5], 'obj', size, time=t)
            write_obj(FN, xx, yy, zz)
    finally:
        # Always release the NetCDF handle (previously never closed).
        fid.close()
82
83
84##
85# @brief
86# @param basefilename Stem of filename, needs size and extension added.
def dat2obj(basefilename):
    """Convert line based .dat data output to OBJ files.

    basefilename  Stem of filename (files live in anuga's data_dir).

    FIXME: Obsolete?
    """

    import glob, os
    from anuga.config import data_dir

    # Get bathymetry and x,y's
    fid = open(data_dir + os.sep + basefilename + '_geometry.dat', 'r')
    lines = fid.readlines()
    fid.close()

    M = len(lines)  # Number of lines
    x = num.zeros((M, 3), num.float)
    y = num.zeros((M, 3), num.float)
    z = num.zeros((M, 3), num.float)

    for i, line in enumerate(lines):
        tokens = line.split()
        values = [float(tok) for tok in tokens]

        # Each line holds three (x, y, z) triples, one per vertex.
        for j in range(3):
            x[i, j] = values[j*3]
            y[i, j] = values[j*3 + 1]
            z[i, j] = values[j*3 + 2]

    # Write obj for bathymetry
    write_obj(data_dir + os.sep + basefilename + '_geometry', x, y, z)

    # Now read all the data files with variable information, combine with
    # x,y info and store as obj.
    files = glob.glob(data_dir + os.sep + basefilename + '*.dat')
    for filename in files:
        # NOTE(review): log is not imported in this module - assumed to be
        # provided by the enclosing package; verify.
        log.critical('Processing %s' % filename)

        # glob already returns paths that include data_dir, so open the
        # filename directly (previously data_dir was prepended twice).
        fid = open(filename, 'r')
        lines = fid.readlines()
        fid.close()
        assert len(lines) == M
        root, ext = os.path.splitext(filename)

        # Get time from filename
        i0 = filename.find('_time=')
        if i0 == -1:
            # Skip bathymetry file
            continue

        i0 += 6  # Position where time starts
        i1 = filename.find('.dat')

        if i1 > i0:
            t = float(filename[i0:i1])
        else:
            # NOTE(review): DataTimeError is not imported in this module -
            # assumed to come from the enclosing package; verify.
            raise DataTimeError('Hmmmm')

        for i, line in enumerate(lines):
            tokens = line.split()
            values = [float(tok) for tok in tokens]

            for j in range(3):
                z[i, j] = values[j]

        # Write obj for variable data
        write_obj(data_dir + os.sep + basefilename + '_time=%.4f' % t, x, y, z)
149
150##
151# @brief Convert time-series text file to TMS file.
152# @param filename
153# @param quantity_names
154# @param time_as_seconds
def timefile2netcdf(filename, quantity_names=None, time_as_seconds=False):
    """Template for converting typical text files with time series to
    NetCDF tms file.

    The file format is assumed to be either two fields separated by a comma:

        time [DD/MM/YY hh:mm:ss], value0 value1 value2 ...

    E.g

      31/08/04 00:00:00, 1.328223 0 0
      31/08/04 00:15:00, 1.292912 0 0

    or time (seconds), value0 value1 value2 ...

      0.0, 1.328223 0 0
      0.1, 1.292912 0 0

    will provide a time dependent function f(t) with three attributes

    filename is assumed to be the rootname; '.txt' is read and '.tms'
    is written.
    """

    import time, calendar
    from anuga.config import time_format
    from anuga.utilities.numerical_tools import ensure_numeric

    # Peek at the first line to validate the format and get the start time.
    file_text = filename + '.txt'
    fid = open(file_text)
    line = fid.readline()
    fid.close()

    fields = line.split(',')
    msg = "File %s must have the format 'datetime, value0 value1 value2 ...'" \
          % file_text
    assert len(fields) == 2, msg

    if not time_as_seconds:
        try:
            starttime = calendar.timegm(time.strptime(fields[0], time_format))
        except ValueError:
            msg = 'First field in file %s must be' % file_text
            msg += ' date-time with format %s.\n' % time_format
            msg += 'I got %s instead.' % fields[0]
            # NOTE(review): DataTimeError is not imported in this module -
            # assumed to come from the enclosing package; verify.
            raise DataTimeError(msg)
    else:
        try:
            starttime = float(fields[0])
        except ValueError:
            # float() raises ValueError on bad input; the original caught
            # an undefined name 'Error', which would itself raise NameError.
            msg = "Bad time format"
            raise DataTimeError(msg)

    # Split values
    values = [float(value) for value in fields[1].split()]

    q = ensure_numeric(values)

    msg = 'ERROR: File must contain at least one independent value'
    assert len(q.shape) == 1, msg

    # Read times proper.  (Duplicate imports of time/calendar/time_format
    # removed - they are already in scope from the top of this function.)
    fid = open(file_text)
    lines = fid.readlines()
    fid.close()

    N = len(lines)
    d = len(q)

    T = num.zeros(N, num.float)       # Time
    Q = num.zeros((N, d), num.float)  # Values

    for i, line in enumerate(lines):
        fields = line.split(',')
        if not time_as_seconds:
            realtime = calendar.timegm(time.strptime(fields[0], time_format))
        else:
            realtime = float(fields[0])
        # Times are stored relative to the start time.
        T[i] = realtime - starttime

        for j, value in enumerate(fields[1].split()):
            Q[i, j] = float(value)

    msg = 'File %s must list time as a monotonically ' % filename
    msg += 'increasing sequence'
    assert num.alltrue(T[1:] - T[:-1] > 0), msg

    # Create NetCDF file
    from Scientific.IO.NetCDF import NetCDFFile

    fid = NetCDFFile(filename + '.tms', netcdf_mode_w)

    fid.institution = 'Geoscience Australia'
    fid.description = 'Time series'

    # Reference point
    # Start time in seconds since the epoch (midnight 1/1/1970)
    # FIXME: Use Georef
    fid.starttime = starttime

    # dimension definitions
    fid.createDimension('number_of_timesteps', len(T))

    fid.createVariable('time', netcdf_float, ('number_of_timesteps',))

    fid.variables['time'][:] = T

    for i in range(Q.shape[1]):
        try:
            name = quantity_names[i]
        except (TypeError, IndexError):
            # quantity_names is None (TypeError) or too short (IndexError):
            # fall back to a generated attribute name.
            name = 'Attribute%d' % i

        fid.createVariable(name, netcdf_float, ('number_of_timesteps',))
        fid.variables[name][:] = Q[:, i]

    fid.close()
279   
280   
281
282##
283# @brief
284# @param filename
285# @param verbose
def tsh2sww(filename, verbose=False):
    """Convert a tsh/msh mesh file to an sww file - also a quick way
    to check if a tsh/msh file 'looks' good.

    filename  Path to the .tsh/.msh mesh file.
    verbose   If True, log progress information.
    """

    if verbose == True: log.critical('Creating domain from %s' % filename)

    domain = pmesh_to_domain_instance(filename, Domain)

    if verbose == True: log.critical("Number of triangles = %s" % len(domain))

    domain.smooth = True
    domain.format = 'sww'   # Native netcdf visualisation format

    # Use os.path explicitly - the original referenced an undefined bare
    # name 'path' (this module imports os and os.path, not 'path').
    file_path, filename = os.path.split(filename)
    filename, ext = os.path.splitext(filename)
    domain.set_name(filename)
    # NOTE(review): 'mean' is not defined/imported in this module -
    # presumably a numerical reduction helper from anuga; verify.
    domain.reduction = mean

    if verbose == True: log.critical("file_path = %s" % file_path)

    if file_path == "":
        file_path = "."
    domain.set_datadir(file_path)

    if verbose == True:
        # 'sep' was undefined here too; os.sep is the intended separator.
        log.critical("Output written to %s%s%s.%s"
                     % (domain.get_datadir(), os.sep, domain.get_name(),
                        domain.format))

    # NOTE(review): SWW_file and log are not imported in this module -
    # assumed to be provided by the enclosing package; verify.
    sww = SWW_file(domain)
    sww.store_connectivity()
    sww.store_timestep()
318
319
320
321##
322# @brief Return max&min indexes (for slicing) of area specified.
323# @param latitudes_ref ??
324# @param longitudes_ref ??
325# @param minlat Minimum latitude of specified area.
326# @param maxlat Maximum latitude of specified area.
327# @param minlon Minimum longitude of specified area.
328# @param maxlon Maximum longitude of specified area.
329# @return Tuple (lat_min_index, lat_max_index, lon_min_index, lon_max_index)
def get_min_max_indices(latitudes_ref, longitudes_ref,
                         minlat=None, maxlat=None,
                         minlon=None, maxlon=None):
    """
    Return max, min indexes (for slicing) of the lat and long arrays to cover
    the area specified with min/max lat/long.

    Think of the latitudes and longitudes describing a 2d surface.
    The area returned is, if possible, just big enough to cover the
    inputed max/min area. (This will not be possible if the max/min area
    has a section outside of the latitudes/longitudes area.)

    Assumes longitudes are sorted from low to high (west to east,
    eg 148 - 151) and latitudes are sorted, ascending or descending.

    Returns (lat_min_index, lat_max_index, lon_min_index, lon_max_index)
    where the max indexes already account for slicing (exclusive upper
    bound).
    """

    # Copy so the caller's arrays are not modified below.
    latitudes = latitudes_ref[:]
    longitudes = longitudes_ref[:]

    latitudes = ensure_numeric(latitudes)
    longitudes = ensure_numeric(longitudes)

    assert num.allclose(num.sort(longitudes), longitudes)

    # If latitudes are descending, flip them so searchsorted works on an
    # ascending array; the indexes are mapped back at the end.
    lat_ascending = True
    if not num.allclose(num.sort(latitudes), latitudes):
        lat_ascending = False
        # reverse order of lat, so it's in ascending order
        latitudes = latitudes[::-1]
        assert num.allclose(num.sort(latitudes), latitudes)

    largest_lat_index = len(latitudes)-1

    # Cut out a smaller extent.  ('is None' replaces the original
    # '== None', which is unreliable for numpy arrays and non-idiomatic.)
    if minlat is None:
        lat_min_index = 0
    else:
        lat_min_index = num.searchsorted(latitudes, minlat)-1
        if lat_min_index < 0:
            lat_min_index = 0

    if maxlat is None:
        lat_max_index = largest_lat_index #len(latitudes)
    else:
        lat_max_index = num.searchsorted(latitudes, maxlat)
        if lat_max_index > largest_lat_index:
            lat_max_index = largest_lat_index

    if minlon is None:
        lon_min_index = 0
    else:
        lon_min_index = num.searchsorted(longitudes, minlon)-1
        if lon_min_index < 0:
            lon_min_index = 0

    if maxlon is None:
        lon_max_index = len(longitudes)
    else:
        lon_max_index = num.searchsorted(longitudes, maxlon)

    # Reversing the indexes, if the lat array is descending
    if lat_ascending is False:
        lat_min_index, lat_max_index = largest_lat_index - lat_max_index, \
                                       largest_lat_index - lat_min_index
    lat_max_index = lat_max_index + 1 # taking into account how slicing works
    lon_max_index = lon_max_index + 1 # taking into account how slicing works

    return lat_min_index, lat_max_index, lon_min_index, lon_max_index
403
404
405##
406# @brief
407# @param filename
408# @param x
409# @param y
410# @param z
def write_obj(filename, x, y, z):
    """Store x,y,z vectors into filename (obj format).

    Vectors are assumed to have dimension (M,3) where
    M corresponds to the number of elements.
    Triangles are assumed to be disconnected.

    The three numbers in each vector correspond to three vertices,
    e.g. the x coordinate of vertex 1 of element i is in x[i,1].
    """

    import os.path

    # Only append the '.obj' extension if it is not already present.
    root, ext = os.path.splitext(filename)
    if ext == '.obj':
        FN = filename
    else:
        FN = filename + '.obj'

    M, N = x.shape
    assert N == 3  # Assuming three vertices per element

    # Open in text mode ('w'): the function writes str data, so binary
    # mode ('wb', as used previously) fails under Python 3; the 'with'
    # block guarantees the file is closed even if a write raises.
    with open(FN, 'w') as outfile:
        outfile.write("# Triangulation as an obj file\n")

        # One 'v x y z' line per vertex, element by element.
        for i in range(M):
            for j in range(N):
                outfile.write("v %f %f %f\n" % (x[i, j], y[i, j], z[i, j]))

        # OBJ vertex indices are 1-based; element i references the three
        # consecutive vertices written for it above.
        for i in range(M):
            base = i * N
            outfile.write("f %d %d %d\n" % (base + 1, base + 2, base + 3))
446
Note: See TracBrowser for help on using the repository browser.