Coverage for src/thunderfish/collectfish.py: 0% of 448 statements (coverage.py v7.5.0, created at 2024-04-29 16:21 +0000)
"""
Collect data generated by thunderfish in a wavefish and a pulsefish table.
"""

import os
import glob
import io
import zipfile
import sys
import argparse
import numpy as np
from thunderlab.configfile import ConfigFile
from thunderlab.tabledata import TableData, add_write_table_config, write_table_args
from .version import __version__, __year__
from .harmonics import add_harmonic_groups_config
from .eodanalysis import wave_similarity, pulse_similarity
from .eodanalysis import load_species_waveforms, add_species_config
from .eodanalysis import wave_quality, wave_quality_args, add_eod_quality_config
from .eodanalysis import pulse_quality, pulse_quality_args
from .eodanalysis import adjust_eodf
from .eodanalysis import parse_filename
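
# Typical invocation (a sketch; the exact file names are assumptions and depend
# on how thunderfish was run). The module can always be executed with
# `python -m thunderfish.collectfish`; an installed `collectfish` script, if
# available, takes the same arguments:
#
#   python -m thunderfish.collectfish -o summary -w 3 -p 1:3 \
#       -i fishmeta.csv:recording:temperature data/*-wavefish.csv data/*-pulsefish.csv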

def collect_fish(files, simplify_file=False,
                 meta_data=None, meta_recordings=None, skip_recordings=False,
                 temp_col=None, q10=1.62, max_fish=0, harmonics=None,
                 peaks0=None, peaks1=None, cfg=None, verbose=0):
    """Combine all *-wavefish.* and/or *-pulsefish.* files into respective summary tables.

    Data from the *-wavespectrum-*.* and the *-pulsepeaks-*.* files can be
    added as specified by `harmonics`, `peaks0`, and `peaks1`.

    Meta data of the recordings can also be added via `meta_data` and
    `meta_recordings`. If `meta_data` contains a temperature column, it can
    be specified by the `temp_col` parameter. In this case, a 'T_adjust' and
    an 'EODf_adjust' column are inserted into the resulting tables,
    containing the mean temperature and the EOD frequencies adjusted to this
    temperature, respectively. For the temperature adjustment of the EOD
    frequency a Q10 value can be supplied by the `q10` parameter.

    Parameters
    ----------
    files: list of strings
        Files to be combined.
    simplify_file: boolean
        Remove the initial common directories from the input files.
    meta_data: TableData or None
        Table with additional data for each of the recordings.
        The meta data are inserted into the summary table according to
        the name of the recording as specified in `meta_recordings`.
    meta_recordings: array of strings
        For each row in `meta_data` the name of the recording.
        This name is matched against the basename of the input `files`.
    skip_recordings: bool
        If True, skip recordings that are not found in `meta_recordings`.
    temp_col: string or None
        A column in `meta_data` with temperatures to which the EOD
        frequencies should be adjusted.
    q10: float
        Q10 value describing the temperature dependence of EOD
        frequencies. The default of 1.62 is from Dunlap, Smith, Yekta
        (2000) Brain Behav Evol, measured for Apteronotus
        leptorhynchus in the lab.
    max_fish: int
        Maximum number of fish to be taken from each recording; if 0, take all.
    harmonics: int
        Number of harmonics to be added to the wave-type fish table
        (amplitude, relampl, phase). These data are read in from the
        corresponding *-wavespectrum-*.* files.
    peaks0: int
        Index of the first peak of an EOD pulse to be added to the
        pulse-type fish table. These data are read in from the
        corresponding *-pulsepeaks-*.* files.
    peaks1: int
        Index of the last peak of an EOD pulse to be added to the
        pulse-type fish table. These data are read in from the
        corresponding *-pulsepeaks-*.* files.
    cfg: ConfigFile
        Configuration parameters for EOD quality assessment and species
        assignment.
    verbose: int
        Verbose output:

        1: print infos on meta data coverage.
        2: print additional infos on discarded recordings.

    Returns
    -------
    wave_table: TableData
        Summary table for all wave-type fish.
    pulse_table: TableData
        Summary table for all pulse-type fish.
    all_table: TableData
        Summary table for all wave-type and pulse-type fish.
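
    Examples
    --------
    A minimal sketch (the configuration setup mirrors what `main()` does
    below; the file names are assumptions):

    >>> cfg = ConfigFile()
    >>> add_harmonic_groups_config(cfg)
    >>> add_eod_quality_config(cfg)
    >>> add_species_config(cfg)
    >>> files = sorted(glob.glob('data/*fish.csv'))
    >>> wave_table, pulse_table, all_table = collect_fish(files, harmonics=3,
    ...                                                   peaks0=1, peaks1=3,
    ...                                                   cfg=cfg)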
    """
    def file_iter(files):
        """ Iterate over analysis files.

        Parameters
        ----------
        files: list of str
            Input files.

        Yields
        ------
        zf: ZipFile or None
            In case an input file is a zip archive, the open archive.
        file_path: str
            The full path of a single file to be processed,
            i.e. a '*-wavefish.*' or '*-pulsefish.*' file.
        fish_type: str
            Either 'wave' or 'pulse'.
        """
        for file_path in files:
            _, _, _, _, ftype, _, ext = parse_filename(file_path)
            if ext == 'zip':
                zf = zipfile.ZipFile(file_path)
                file_pathes = sorted(zf.namelist())
                for zfile in file_pathes:
                    _, _, _, _, ftype, _, _ = parse_filename(zfile)
                    if ftype in ['wavefish', 'pulsefish']:
                        yield zf, zfile, ftype[:-4]
            elif ftype in ['wavefish', 'pulsefish']:
                yield None, file_path, ftype[:-4]
            else:
                continue

    def find_recording(recording, meta_recordings):
        """ Find row of a recording in meta data.

        Parameters
        ----------
        recording: string
            Path and base name of a recording.
        meta_recordings: list of string
            List of meta data recordings where to find `recording`.
        """
        if meta_data is not None:
            rec = os.path.splitext(os.path.basename(recording))[0]
            for i in range(len(meta_recordings)):
                # TODO: strip extension!
                if rec == meta_recordings[i]:
                    return i
        return -1

    # prepare meta recording names:
    meta_recordings_used = None
    if meta_recordings is not None:
        meta_recordings_used = np.zeros(len(meta_recordings), dtype=bool)
        for r in range(len(meta_recordings)):
            meta_recordings[r] = os.path.splitext(os.path.basename(meta_recordings[r]))[0]
    # prepare adjusted temperatures:
    if meta_data is not None and temp_col is not None:
        temp_idx = meta_data.index(temp_col)
        temp = meta_data[:,temp_idx]
        mean_tmp = np.round(np.nanmean(temp)/0.1)*0.1
        meta_data.insert(temp_idx+1, 'T_adjust', 'C', '%.1f')
        meta_data.append_data_column([mean_tmp]*meta_data.rows(), temp_idx+1)
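    # every row of 'T_adjust' holds the same mean temperature (rounded to
    # 0.1 C); at the end of this function the EOD frequencies of each table
    # are adjusted to this common temperature using the Q10 value.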
    # prepare species distances:
    wave_names, wave_eods, pulse_names, pulse_eods = \
        load_species_waveforms(cfg.value('speciesFile'))
    wave_max_rms = cfg.value('maximumWaveSpeciesRMS')
    pulse_max_rms = cfg.value('maximumPulseSpeciesRMS')
    # load data:
    wave_table = None
    pulse_table = None
    all_table = None
    file_pathes = []
    for zf, file_name, fish_type in file_iter(files):
        # file name:
        table = None
        window_time = None
        recording, base_path, channel, start_time, _, _, file_ext = \
            parse_filename(file_name)
        file_ext = os.extsep + file_ext
        file_pathes.append(os.path.normpath(recording).split(os.path.sep))
        if verbose > 2:
            print('processing %s (%s):' % (file_name, recording))
        # find row in meta_data:
        mr = -1
        if meta_data is not None:
            mr = find_recording(recording, meta_recordings)
            if mr < 0:
                if skip_recordings:
                    if verbose > 0:
                        print('skip recording %s: no metadata found' % recording)
                    continue
                elif verbose > 0:
                    print('no metadata found for recording %s' % recording)
            else:
                meta_recordings_used[mr] = True
        # data:
        if zf is not None:
            file_name = io.TextIOWrapper(zf.open(file_name, 'r'))
        data = TableData(file_name)
        if 'twin' in data:
            start_time = data[0, 'twin']
            window_time = data[0, 'window']
            data.remove(['twin', 'window'])
        table = wave_table if fish_type == 'wave' else pulse_table
        # prepare tables:
        if not table:
            df = TableData(data)
            df.clear_data()
            if meta_data is not None:
                if data.nsecs > 0:
                    df.insert_section(0, 'metadata')
                for c in range(meta_data.columns()):
                    df.insert(c, *meta_data.column_head(c))
            df.insert(0, ['recording']*data.nsecs + ['file'], '', '%-s')
            if window_time is not None:
                df.insert(1, 'window', 's', '%.2f')
            if start_time is not None:
                df.insert(1, 'time', 's', '%.2f')
            if channel >= 0:
                df.insert(1, 'channel', '', '%d')
            if fish_type == 'wave':
                if harmonics is not None:
                    fn = base_path + '-wavespectrum-0' + file_ext
                    if zf is not None:
                        fn = io.TextIOWrapper(zf.open(fn, 'r'))
                    wave_spec = TableData(fn)
                    if data.nsecs > 0:
                        df.append_section('harmonics')
                    for h in range(min(harmonics, wave_spec.rows())+1):
                        df.append('ampl%d' % h, wave_spec.unit('amplitude'),
                                  wave_spec.format('amplitude'))
                        if h > 0:
                            df.append('relampl%d' % h, '%', '%.2f')
                            df.append('relpower%d' % h, '%', '%.2f')
                            df.append('phase%d' % h, 'rad', '%.3f')
                if len(wave_names) > 0:
                    if data.nsecs > 0:
                        df.append_section('species')
                    for species in wave_names:
                        df.append(species, '%', '%.0f')
                    df.append('species', '', '%-s')
            else:
                if peaks0 is not None:
                    fn = base_path + '-pulsepeaks-0' + file_ext
                    if zf is not None:
                        fn = io.TextIOWrapper(zf.open(fn, 'r'))
                    pulse_peaks = TableData(fn)
                    if data.nsecs > 0:
                        df.append_section('peaks')
                    for p in range(peaks0, peaks1+1):
                        if p != 1:
                            df.append('P%dtime' % p, 'ms', '%.3f')
                        df.append('P%dampl' % p, pulse_peaks.unit('amplitude'),
                                  pulse_peaks.format('amplitude'))
                        if p != 1:
                            df.append('P%drelampl' % p, '%', '%.2f')
                        df.append('P%dwidth' % p, 'ms', '%.3f')
                if len(pulse_names) > 0:
                    if data.nsecs > 0:
                        df.append_section('species')
                    for species in pulse_names:
                        df.append(species, '%', '%.0f')
                    df.append('species', '', '%-s')
            if fish_type == 'wave':
                wave_table = df
                table = wave_table
            else:
                pulse_table = df
                table = pulse_table
        if not all_table:
            df = TableData()
            df.append('file', '', '%-s')
            if channel >= 0:
                df.append('channel', '', '%d')
            if start_time is not None:
                df.append('time', 's', '%.1f')
            if window_time is not None:
                df.append('window', 's', '%.1f')
            if meta_data is not None:
                for c in range(meta_data.columns()):
                    df.append(*meta_data.column_head(c))
            df.append('index', '', '%d')
            df.append('EODf', 'Hz', '%.1f')
            df.append('type', '', '%-5s')
            if len(wave_names) + len(pulse_names) > 0:
                df.append('species', '', '%-s')
            all_table = df
        # fill tables:
        n = data.rows() if not max_fish or max_fish > data.rows() else max_fish
        for r in range(n):
            # fish index:
            idx = r
            if 'index' in data:
                idx = data[r,'index']
            # check quality:
            skips = ''
            if fish_type == 'wave':
                fn = base_path + '-wavespectrum-%d' % idx + file_ext
                if zf is not None:
                    fn = io.TextIOWrapper(zf.open(fn, 'r'))
                wave_spec = TableData(fn)
                if cfg is not None:
                    spec_data = wave_spec.array()
                    props = data.row_dict(r)
                    if 'clipped' in props:
                        props['clipped'] *= 0.01
                    if 'noise' in props:
                        props['noise'] *= 0.01
                    if 'rmserror' in props:
                        props['rmserror'] *= 0.01
                    if 'thd' in props:
                        props['thd'] *= 0.01
                    _, skips, msg = wave_quality(props, 0.01*spec_data[1:,3],
                                                 **wave_quality_args(cfg))
            else:
                if cfg is not None:
                    props = data.row_dict(r)
                    if 'clipped' in props:
                        props['clipped'] *= 0.01
                    if 'noise' in props:
                        props['noise'] *= 0.01
                    skips, msg, _ = pulse_quality(props, **pulse_quality_args(cfg))
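            # note: the *fish tables store clipped, noise, rmserror, and thd
            # in percent, whereas wave_quality() and pulse_quality() apparently
            # expect fractions, hence the 0.01 factors above.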
            if len(skips) > 0:
                if verbose > 1:
                    print('skip fish %2d from %s: %s' % (idx, recording, skips))
                continue
            # fill in data:
            data_col = 0
            table.append_data(recording, data_col)
            all_table.append_data(recording, data_col)
            data_col += 1
            if channel >= 0:
                table.append_data(channel, data_col)
                all_table.append_data(channel, data_col)
                data_col += 1
            if start_time is not None:
                table.append_data(start_time, data_col)
                all_table.append_data(start_time, data_col)
                data_col += 1
            if window_time is not None:
                table.append_data(window_time, data_col)
                all_table.append_data(window_time, data_col)
                data_col += 1
            # meta data:
            if mr >= 0:
                for c in range(meta_data.columns()):
                    table.append_data(meta_data[mr,c], data_col)
                    all_table.append_data(meta_data[mr,c], data_col)
                    data_col += 1
            elif meta_data is not None:
                data_col += meta_data.columns()
            table.append_data(data[r,:].array(), data_col)
            eodf = data[r,'EODf']
            all_table.append_data(data[r,'index'], data_col)
            all_table.append_data(eodf)
            all_table.append_data(fish_type)
            species_name = 'unknown'
            species_rms = 1.0e12
            if fish_type == 'wave':
                if harmonics is not None:
                    for h in range(min(harmonics, wave_spec.rows())+1):
                        table.append_data(wave_spec[h,'amplitude'])
                        if h > 0:
                            table.append_data(wave_spec[h,'relampl'])
                            table.append_data(wave_spec[h,'relpower'])
                            table.append_data(wave_spec[h,'phase'])
                if len(wave_names) > 0:
                    fn = base_path + '-eodwaveform-%d' % idx + file_ext
                    if zf is not None:
                        fn = io.TextIOWrapper(zf.open(fn, 'r'))
                    wave_eod = TableData(fn).array()
                    wave_eod[:,0] *= 0.001
                    for species, eod in zip(wave_names, wave_eods):
                        rms = wave_similarity(eod, wave_eod, 1.0, eodf)
                        if rms < species_rms and rms < wave_max_rms:
                            species_name = species
                            species_rms = rms
                        table.append_data(100.0*rms)
                    table.append_data(species_name)
            else:
                if peaks0 is not None:
                    fn = base_path + '-pulsepeaks-%d' % idx + file_ext
                    if zf is not None:
                        fn = io.TextIOWrapper(zf.open(fn, 'r'))
                    pulse_peaks = TableData(fn)
                    for p in range(peaks0, peaks1+1):
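                        # find the row of peak p in the pulsepeaks table; the
                        # for-else skips peaks that are not listed: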
                        for pr in range(pulse_peaks.rows()):
                            if pulse_peaks[pr,'P'] == p:
                                break
                        else:
                            continue
                        if p != 1:
                            table.append_data(pulse_peaks[pr,'time'], 'P%dtime' % p)
                        table.append_data(pulse_peaks[pr,'amplitude'], 'P%dampl' % p)
                        if p != 1:
                            table.append_data(pulse_peaks[pr,'relampl'], 'P%drelampl' % p)
                        table.append_data(pulse_peaks[pr,'width'], 'P%dwidth' % p)
                if len(pulse_names) > 0:
                    fn = base_path + '-eodwaveform-%d' % idx + file_ext
                    if zf is not None:
                        fn = io.TextIOWrapper(zf.open(fn, 'r'))
                    pulse_eod = TableData(fn).array()
                    pulse_eod[:,0] *= 0.001
                    for species, eod in zip(pulse_names, pulse_eods):
                        rms = pulse_similarity(eod, pulse_eod)
                        if rms < species_rms and rms < pulse_max_rms:
                            species_name = species
                            species_rms = rms
                        table.append_data(100.0*rms)
                    table.append_data(species_name)
            #if len(wave_names) + len(pulse_names) > 0:
            #    all_table.append_data(species_name)
            table.fill_data()
            all_table.fill_data()
    # check coverage of meta data:
    if meta_recordings_used is not None:
        if np.all(meta_recordings_used):
            if verbose > 0:
                print('found recordings for all meta data')
        else:
            if verbose > 0:
                print('no recordings found for:')
            for mr in range(len(meta_recordings)):
                recording = meta_recordings[mr]
                if not meta_recordings_used[mr]:
                    if verbose > 0:
                        print(recording)
                    all_table.set_column(0)
                    all_table.append_data(recording)
                    for c in range(meta_data.columns()):
                        all_table.append_data(meta_data[mr,c])
                    all_table.append_data(np.nan)   # index
                    all_table.append_data(np.nan)   # EODf
                    all_table.append_data('none')   # type
    # adjust EODf to mean temperature:
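    # (adjust_eodf() presumably applies the standard Q10 scaling,
    # EODf_adjust = EODf * q10**((T_adjust - T)/10); see eodanalysis.adjust_eodf)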
    for table in [wave_table, pulse_table, all_table]:
        if table is not None and temp_col is not None:
            eodf_idx = table.index('EODf')
            table.insert(eodf_idx+1, 'EODf_adjust', 'Hz', '%.1f')
            table.fill_data()
            temp_idx = table.index(temp_col)
            tadjust_idx = table.index('T_adjust')
            for r in range(table.rows()):
                eodf = table[r,eodf_idx]
                if np.isfinite(table[r,temp_col]) and np.isfinite(table[r,tadjust_idx]):
                    eodf = adjust_eodf(eodf, table[r,temp_col], table[r,tadjust_idx], q10)
                table[r,eodf_idx+1] = eodf
    # add wavefish species (experimental):
    # simplify paths:
    if simplify_file and len(file_pathes) > 1:
        fp0 = file_pathes[0]
        for fi in range(len(fp0)):
            is_same = True
            for fp in file_pathes[1:]:
                if fi >= len(fp) or fp[fi] != fp0[fi]:
                    is_same = False
                    break
            if not is_same:
                break
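        # fi now indexes the first path component in which the recordings
        # differ; keep only the components from fi on: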
        for table in [wave_table, pulse_table, all_table]:
            if table is not None:
                for k in range(table.rows()):
                    idx = table.index('file')
                    fps = os.path.normpath(table[k,idx]).split(os.path.sep)
                    table[k,idx] = os.path.sep.join(fps[fi:])
    return wave_table, pulse_table, all_table


def rangestr(string):
    """
    Parse a string of the form N:M.
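
    A leading '=' is interpreted as a minus sign (see the handling of the
    '-p' option in main() below). Returns the tuple (N, M); a plain number
    N yields (N, N). A sketch of its behavior:

    >>> rangestr('1:3')
    (1, 3)
    >>> rangestr('2')
    (2, 2)
    >>> rangestr('=1:3')
    (-1, 3)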
    """
    if string[0] == '=':
        string = '-' + string[1:]
    ss = string.split(':')
    v0 = v1 = None
    if len(ss) == 1:
        v0 = int(string)
        v1 = v0
    else:
        v0 = int(ss[0])
        v1 = int(ss[1])
    return (v0, v1)


def main(cargs=None):
    # command line arguments:
    if cargs is None:
        cargs = sys.argv[1:]
    parser = argparse.ArgumentParser(add_help=True,
        description='Collect data generated by thunderfish in a wavefish and a pulsefish table.',
        epilog='version %s by Benda-Lab (2019-%s)' % (__version__, __year__))
    parser.add_argument('--version', action='version', version=__version__)
    parser.add_argument('-v', action='count', dest='verbose', default=0,
                        help='verbosity level: -v for meta data coverage, -vv for additional info on discarded recordings.')
    parser.add_argument('-t', dest='table_type', default=None, choices=['wave', 'pulse'],
                        help='wave-type or pulse-type fish')
    parser.add_argument('-c', dest='simplify_file', action='store_true',
                        help='remove initial common directories from input files')
    parser.add_argument('-m', dest='max_fish', type=int, metavar='N',
                        help='maximum number of fish to be taken from each recording')
    parser.add_argument('-p', dest='pulse_peaks', type=rangestr,
                        default=(0, 1), metavar='N:M',
                        help='add properties of peaks N to M of pulse-type EODs to the table')
    parser.add_argument('-w', dest='harmonics', type=int, default=3, metavar='N',
                        help='add properties of the first N harmonics of wave-type EODs to the table')
    parser.add_argument('-r', dest='remove_cols', action='append', default=[], metavar='COLUMN',
                        help='columns to be removed from output table')
    parser.add_argument('-s', dest='statistics', action='store_true',
                        help='also write table with statistics')
    parser.add_argument('-i', dest='meta_file', metavar='FILE:REC:TEMP', default='', type=str,
                        help='insert rows from metadata table in FILE matching recording in column REC. The optional TEMP specifies a column with temperatures to which EOD frequencies should be adjusted')
    parser.add_argument('-q', dest='q10', metavar='Q10', default=1.62, type=float,
                        help='Q10 value for adjusting EOD frequencies to a common temperature')
    parser.add_argument('-S', dest='skip', action='store_true',
                        help='skip recordings that are not contained in metadata table')
    parser.add_argument('-n', dest='file_suffix', metavar='NAME', default='', type=str,
                        help='name for summary files that is appended to "wavefish" or "pulsefish"')
    parser.add_argument('-o', dest='out_path', metavar='PATH', default='.', type=str,
                        help='path where to store summary tables')
    parser.add_argument('-f', dest='format', default='auto', type=str,
                        choices=TableData.formats + ['same'],
                        help='file format used for saving summary tables ("same" uses same format as input files)')
    parser.add_argument('file', nargs='+', default='', type=str,
                        help='a *-wavefish.* or *-pulsefish.* file as generated by thunderfish')
    # fix minus sign issue:
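    # (argparse would interpret a negative peak index following '-p' as
    # another option; replace its leading '-' with '=' here and let
    # rangestr() convert it back)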
    ca = []
    pa = False
    for a in cargs:
        if pa and a[0] == '-':
            a = '=' + a[1:]
        pa = False
        if a == '-p':
            pa = True
        ca.append(a)
    # read in command line arguments:
    args = parser.parse_args(ca)
    verbose = args.verbose
    table_type = args.table_type
    remove_cols = args.remove_cols
    statistics = args.statistics
    meta_file = args.meta_file
    file_suffix = args.file_suffix
    out_path = args.out_path
    data_format = args.format

    # expand wildcard patterns:
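    # (on Windows the shell does not expand wildcards, so do it here)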
    files = []
    if os.name == 'nt':
        for fn in args.file:
            files.extend(glob.glob(fn))
    else:
        files = args.file

    # read configuration:
    cfgfile = __package__ + '.cfg'
    cfg = ConfigFile()
    add_harmonic_groups_config(cfg)
    add_eod_quality_config(cfg)
    add_species_config(cfg)
    add_write_table_config(cfg, table_format='csv', unit_style='row',
                           align_columns=True, shrink_width=False)
    cfg.load_files(cfgfile, files[0], 3)
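    # (load_files() presumably merges any 'thunderfish.cfg' files found along
    # the path of the first input file into the defaults set above)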
    # output format:
    if data_format == 'same':
        ext = os.path.splitext(files[0])[1][1:]
        if ext in TableData.ext_formats:
            data_format = TableData.ext_formats[ext]
        else:
            data_format = 'dat'
    if data_format != 'auto':
        cfg.set('fileFormat', data_format)
    # create output folder:
    if not os.path.exists(out_path):
        os.makedirs(out_path)
    # read in meta file:
    md = None
    rec_data = None
    temp_col = None
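    # e.g. '-i fishmeta.csv:recording:temperature' (file name hypothetical)
    # reads the table from fishmeta.csv, takes the recording names from its
    # 'recording' column, and adjusts EOD frequencies using its
    # 'temperature' column: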
    if len(meta_file) > 0:
        mds = meta_file.split(':')
        meta_data = mds[0]
        if not os.path.isfile(meta_data):
            print('meta data file "%s" not found.' % meta_data)
            exit()
        md = TableData(meta_data)
        if len(mds) < 2:
            print('no recording column specified for the table in %s. Choose one of' % meta_data)
            for k in md.keys():
                print(' ', k)
            exit()
        rec_col = mds[1]
        if rec_col not in md:
            print('%s is not a valid key for the table in %s. Choose one of' % (rec_col, meta_data))
            for k in md.keys():
                print(' ', k)
            exit()
        else:
            rec_data = md[:,rec_col]
            del md[:,rec_col]
        if len(mds) > 2:
            temp_col = mds[2]
            if temp_col not in md:
                print('%s is not a valid key for the table in %s. Choose one of' % (temp_col, meta_data))
                for k in md.keys():
                    print(' ', k)
                exit()
    # collect files:
    wave_table, pulse_table, all_table = collect_fish(files, args.simplify_file,
                                                      md, rec_data, args.skip,
                                                      temp_col, args.q10,
                                                      args.max_fish, args.harmonics,
                                                      args.pulse_peaks[0], args.pulse_peaks[1],
                                                      cfg, verbose)
    # write tables:
    if len(file_suffix) > 0 and file_suffix[0] != '-':
        file_suffix = '-' + file_suffix
    tables = []
    table_names = []
    if pulse_table and (not table_type or table_type == 'pulse'):
        tables.append(pulse_table)
        table_names.append('pulse')
    if wave_table and (not table_type or table_type == 'wave'):
        tables.append(wave_table)
        table_names.append('wave')
    if all_table and not table_type:
        tables.append(all_table)
        table_names.append('all')
    for table, name in zip(tables, table_names):
        for rc in remove_cols:
            if rc in table:
                table.remove(rc)
        table.write(os.path.join(out_path, '%sfish%s' % (name, file_suffix)),
                    **write_table_args(cfg))
        if statistics:
            s = table.statistics()
            s.write(os.path.join(out_path, '%sfish%s-statistics' % (name, file_suffix)),
                    **write_table_args(cfg))


if __name__ == '__main__':
    main()