"""Data file structure and i/o.

This is the (qubx.tree) format of a simulation scratch file::

    Sampling
    SignalCount
    Signals
      [] Signal
         Name
         Units
    Segments
      [] Segment
         SampleCount
         Signals
           [] Signal
              Samples [(float[SampleCount])]
    IdealSignals
      [] IdealSignal
         SignalIndex
         Segments
           [] Segment [f, l] {amp, sd, DwellCount, Classes, Firsts, Lasts}


Copyright 2008-2014 Research Foundation State University of New York
This file is part of QUB Express.

QUB Express is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

QUB Express is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License,
named LICENSE.txt, in the QUB Express program directory.  If not, see
<http://www.gnu.org/licenses/>.

"""

import bisect
import collections
import itertools
import mmap
import os
import numpy
import re
import tempfile
import traceback
import weakref
from itertools import izip, count

import qubx.fast.filter
from qubx.fast.baseline import BaselineSeg
import qubx.tree
import qubx.trialset
from qubx.ideal import Idealization, IdlFits
from qubx.util_types import *
from qubx.table import *
from qubx.table_tree import *
from qubx.undo import UndoStack

from .sorted_collection import SortedCollection

from ctypes import c_int, c_ushort, c_short, c_double, c_float, POINTER, byref
c_int_p = POINTER(c_int)
c_float_p = POINTER(c_float)

DATASOURCE_FILE, DATASOURCE_SCREEN, DATASOURCE_LIST = (0, 1, 2)

DOWNSAMPLE_ALL_LOGCOUNT = 2.0
DOWNSAMPLE_LOGSLOPE = 3.0
DOWNSAMPLES_PER_PIX = lambda Nsigseg, Nsamp: exp( (log(Nsigseg*Nsamp) - DOWNSAMPLE_ALL_LOGCOUNT) / DOWNSAMPLE_LOGSLOPE + DOWNSAMPLE_ALL_LOGCOUNT )
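# DOWNSAMPLES_PER_PIX is the target number of samples to draw per screen pixel:
# it grows roughly with the cube root of the total sample count Nsigseg*Nsamp,
# so small files are drawn point-for-point while huge files are skimmed.
# For example, one signal/segment of 10**6 samples allows about
# exp((log(1e6) - 2.0)/3.0 + 2.0) ~= 380 samples per pixel;
# generate_chunk_samples() divides the actual samples-per-pixel by this figure
# to choose its skip factor.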

SAVEBUFLEN = 2**18

Readers = {}

def SetReader(ext, description, create):  # assumed signature; the def line is elided in this excerpt
    rtn = None
    if ext.lower() in Readers:
        rtn = Readers[ext.lower()].create
    Readers[ext.lower()] = Anon(ext=ext, pattern='*%s'%ext, description=description, create=create)
    return rtn

def GetReader(ext):  # assumed signature; the def line is elided in this excerpt
    try:
        return Readers[ext.lower()]
    except KeyError:
        return Readers['.txt']

def Open(path, progressf=lambda frac: True):
    try:
        data = GetReader(os.path.splitext(path)[1]).create(path, progressf)
    except:
        traceback.print_exc()
        data = GetReader('.txt').create(path, progressf)
    setup_segments_list(data)
    return data

def setup_segments_list(data):  # assumed signature; the def line is elided in this excerpt
    lseg = data.lists.by_name('Segments')
    if lseg.size == len(data.segmentation.segments):
        i = 0
        for f, l, n in data.segmentation.segments:
            if (lseg[i, 'From'] != f) or (lseg[i, 'To'] != l):
                break
            i += 1
        else:
            return
    lseg.clear()
    lseg.insert_selections([(fln[0], fln[1],
                             {'Duration':fln[2]*1e3*data.sampling, 'SampleCount':fln[2], 'Start':data.segmentation.starts[i]})
                            for i, fln in enumerate(data.segmentation.segments)])
    lseg.user_can_edit = False


Writers = {}

def SetWriter(ext, description, save):  # assumed signature; the def line is elided in this excerpt
    Writers[ext.lower()] = Anon(ext=ext, pattern='*%s'%ext, description=description, save=save)

def Save(path, *args, **kw):
    ext = os.path.splitext(path)[1].lower()
    try:
        save = Writers[ext].save
    except KeyError:
        raise IOError('Unknown data format: %s'%ext)
    return save(path, *args, **kw)


RequestSampling = lambda s: s
RequestBinaryParams = lambda sampling, scaling, floating, bytes, signals: (sampling, scaling, floating, bytes, signals)


class Segmentation(object):
    """Describes how a contiguous range of samples is divided into segments.

    @ivar segments: list of (first, last, sample_count) of each segment
    @ivar starts: list of the start time [ms] of each segment
    @ivar idl: L{qubx.ideal.Idealization} with one dwell per segment, for quick index_at()
    @ivar OnAdd: L{WeakEvent}(Segmentation, first, last, start) when add_seg() is called
    @ivar OnClear: L{WeakEvent}(Segmentation) when clear() is called
    """
    def __init__(self):  # assumed: the original __init__ is elided in this excerpt
        self.OnAdd = WeakEvent()
        self.OnClear = WeakEvent()
        self.clear()
    def clear(self):
        """Removes all segments."""
        self.segments = []
        self.starts = []
        self.last = -1
        self.idl = Idealization(1.0)
        self.OnClear(self)
    def add_seg(self, f, l, start):
        """Appends a segment."""
        if l > self.last:
            self.idl.add_seg(self.last+1, l)
            self.last = l
        seg_ix = len(self.segments)
        self.segments.append( (f, l, l-f+1) )
        self.starts.append(start)
        self.idl.set_dwell(f, l, seg_ix)
        self.OnAdd(self, f, l, start)
    def index_at(self, i):
        """Returns the index of the segment containing sample i."""
        return self.idl[i]
    def split(self, f, l):
        """Returns segmented list of (iseg, f, l) with local segment indices."""
        chunks = []
        while f <= l:
            iseg = self.index_at(f)
            segf, segl, segn = self.segments[iseg]
            take = min(l, segl) - f + 1
            if take <= 0:
                if qubx.global_namespace.DEBUG:
                    print 'segmentation split ended early', f, l, segl, iseg
                break
            chunks.append((iseg, f-segf, f-segf+take-1))
            f += take
        return chunks
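
# A minimal usage sketch (illustrative only; sample indices are global across
# the file, and each segment records its start time in ms):
#
#     segm = Segmentation()
#     segm.add_seg(0, 999, 0.0)        # samples 0..999, starting at 0 ms
#     segm.add_seg(1000, 2499, 500.0)  # samples 1000..2499, starting at 500 ms
#     segm.index_at(1500)              # -> 1
#     segm.split(900, 1100)            # -> [(0, 900, 999), (1, 0, 100)]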


class SigBaseline(object):  # assumed base; the class def line is elided in this excerpt
    """Represents piecewise-linear baseline of one signal."""
    # ... (__init__ and other members elided) ...
    def seg_at(self, f):  # assumed name; returns the BaselineSeg containing sample f
        return self.segs[self.segm.index_at(f)]
    # ...
    def add_nodes(self, points, values):  # assumed signature
        if not len(points): return
        p = 0
        for iseg, f, l in self.segm.split(points[0], points[-1]):
            segf = self.segm.segments[iseg][0]
            q = p
            while (q < len(points)) and (points[q] <= (segf+l)):
                q += 1
            self.segs[iseg].add_nodes(points[p:q], values[p:q])
            p = q  # advance past the points handed to this segment
        self.OnChange()
    def clear_nodes(self, first, last):  # assumed signature
        p = 0
        for iseg, f, l in self.segm.split(first, last):
            segf = self.segm.segments[iseg][0]
            self.segs[iseg].clear_nodes(segf+f, segf+l)
        self.OnChange()
    def clear(self):  # assumed name
        for i, fln in enumerate(self.segm.segments):
            f, l, n = fln
            self.segs[i].clear_nodes(f, l)
        self.OnChange()
    def get_sampled_nodes(self, first, last):
        samples = numpy.zeros(shape=(last-first+1,), dtype='float32')
        at = 0
        for iseg, f, l in self.segm.split(first, last):
            segf = self.segm.segments[iseg][0]
            brange = self.segs[iseg].range_at(segf+f)
            while brange and (brange.node_a.point <= (segf+l)):
                x0 = max(first, brange.node_a.point)
                x1 = min(last, brange.node_b.point)
                y0 = brange.node_a.value
                y1 = brange.node_b.value
                n = x1 - x0
                slope = (y1 - y0) / (brange.node_b.point - brange.node_a.point)
                y0 += slope * (x0 - brange.node_a.point)
                if x1 == segf + l:
                    n += 1
                samples_range = numpy.arange(n, dtype='float32')
                samples_range *= slope
                samples_range += y0
                samples[at:at+n] = samples_range
                at += n
                brange = brange.next()
        return samples


class SigIdl(object):  # assumed base; the class def line is elided in this excerpt
    """Represents one idealized signal; resizes with L{Segmentation}; stores per-segment info.

    @ivar sampling: sampling interval, in seconds; please keep synchronized with data.sampling
    @ivar idl: L{qubx.ideal.Idealization}
    @ivar seg: list of L{Anon}(f=first, l=last, ix=index, amp=[amp_of_class], std=[std_of_class])
    """
    # ... (__init__ and set_sampling elided) ...
    sampling = property(lambda self: self._sampling, lambda self, x: self.set_sampling(x))
    def add_seg(self, f, l):  # assumed signature; appends a segment, padding any gap with a spacer
        if f > (self.last+1):
            self.idl.add_seg(self.last+1, f-1)
            self.segsAndSpacers += 1
        self.last = l
        self.idl.add_seg(f, l)
        self.segix.append(self.segsAndSpacers)
        self.segsAndSpacers += 1
        self.seg.append(Anon(f=f, l=l, ix=len(self.seg), amp=[], std=[]))
    def clear(self):  # assumed name
        self.seg = []
        self.idl = self.Idealization(self._sampling*1e3)
        self.segix = []
        self.segsAndSpacers = 0
        self.last = -1


class SigOverlays(object):  # assumed base; the class def line is elided in this excerpt
    def __init__(self, sampling, segmentation):
        # ... (initialization elided) ...
    def pop_sel(self):  # assumed name; drops the most recent overlay selection
        if self.sels:
            del self.sels[-1]
            del self.data[-1]
        if self.sels:
            self.f, self.l = self.sels[-1]
        else:
            self.f = self.l = None
        self.cls -= 1
        self.OnChangeSource()
    def pop_dest(self, n, ff, ll, cc):
        self.idl.set_dwells(n, ff, ll, cc)
        self.OnChangeOverlays()
    def add_dest(self, f, l):
        ff, ll, cc = self.idl.get_dwells_and_gaps(f, l)
        self.idl.set_dwell(f, l, self.cls)
        self.undo.push_undo(bind(self.pop_dest, len(ff), ff, ll, cc), bind(self.add_dest, f, l))
        self.undo.seal_undo('Select Destination')
        self.OnChangeOverlays()
    def restore(self, f, l):
        ff, ll, cc = self.idl.get_dwells_and_gaps(f, l)
        self.idl.set_dwell(f, l, -1)
        self.undo.push_undo(bind(self.pop_dest, len(ff), ff, ll, cc), bind(self.restore, f, l))
        self.undo.seal_undo('Restore')
        self.OnChangeOverlays()
    def clear(self):  # assumed name
        for i, fln in enumerate(self.segm.segments):
            f, l, n = fln
            self.idl.set_dwell(f, l, -1)
        self.data[:] = []
        self.sels[:] = []
        self.f = self.l = None
        self.cls = -1
        self.undo.clear()
        self.OnChangeSource()
        self.OnChangeOverlays()

356 """Represents excluded and included regions as an idealization with class 0 and 1 resp.
357
358 @ivar OnChange: L{WeakEvent}(Exclusion)
359 """
360 - def __init__(self, sampling, segmentation):
373 for f, l, n in self.segm.segments:
374 self.idl.set_dwell(f, l, 1)
375 self.OnChange(self)
377 for f, l, n in self.segm.segments:
378 self.idl.set_dwell(f, l, 0)
379 self.OnChange(self)
380
381

class SelectionList(qubx.table.SimpleTable):  # assumed base; the class def line is elided in this excerpt
    def __init__(self):
        qubx.table.SimpleTable.__init__(self, 'List', auto_add_fields=True, global_name='QubX.Data.file.list', sortable=True)
        self.__ref = Reffer()
        self.OnRemoved += self.__ref(self.__onRemoved)
        self.OnSet += self.__ref(self.__onSet)
        self.add_field('From', 0, acceptIntGreaterThanOrEqualTo(0), str, '')
        self.add_field('To', 0, acceptIntGreaterThanOrEqualTo(0), str, '')
        self.add_field('Label', "", str, str, "")
        self.OnChangeListName = WeakEvent()
        self.__list_name = ""
        self.__order = []
        self.__lasts_order = None
        self.__building = False
        self.fits = []
        self.user_can_edit = True
    def set_list_name(self, x):
        if self.__list_name == x:
            return
        self.__list_name = x
        self.OnChangeListName(self, x)
    list_name = property(lambda self: self.__list_name, lambda self, x: self.set_list_name(x))
    def insert(self, i, entry, undoing=False):
        if self.__building:
            qubx.table.SimpleTable.insert(self, i, entry)
            self.__order.insert(i, entry['From'])
            self.__lasts_order = None
            self.fits.insert(i, None)
        elif ('From' in entry) and ('To' in entry):
            self.insert_selection(**entry)
        else:
            raise Exception('sorted table: use SelectionList.insert_selection instead')
    def insert_selection(self, From=0, To=0, Label='', **kw):  # assumed defaults; the def line is elided
        ix = bisect.bisect_left(self.__order, From)
        kw['From'] = From
        kw['To'] = To
        kw['Label'] = Label
        qubx.table.SimpleTable.insert(self, ix, kw)
        self.__order.insert(ix, From)
        self.__lasts_order = None
        self.fits.insert(ix, None)
        return ix
    # ... (insert_selections etc. elided) ...
    def append_selection(self, From=0, To=0, Label='', **kw):  # assumed name and defaults
        kw['From'] = From
        kw['To'] = To
        kw['Label'] = Label
        qubx.table.SimpleTable.append(self, kw)
        self.__order.append(From)
        self.__lasts_order = None
        self.fits.append(None)
        return self.size - 1
    def find_selections(self, f, l, trimmed=False):  # assumed name; returns (From, To, Label, Index) of selections intersecting [f, l]
        if self.__lasts_order is None:
            self.__lasts_order = SortedCollection((self.get(i, 'To'), self.get(i, 'From'), self.get(i, 'Label'), i) for i in xrange(self.size))
        try:
            start = self.__lasts_order.index_ge((f,))
        except:
            return []

        sels = []
        for ix in xrange(start, self.size):
            To, From, Label, Index = self.__lasts_order[ix]
            if From > l:
                break
            if trimmed:
                if From < f: From = f
                if To > l: To = l
            sels.append((From, To, Label, Index))
        return sels
    def __onRemoved(self, index, undoing=False):  # assumed signature
        del self.__order[index]
        self.__lasts_order = None
        del self.fits[index]
    def __onSet(self, index, field_name, val, prev, undoing=False):
        # ... (body and further methods elided) ...
    def from_text(self, txt):
        self.__building = True
        qubx.table.SimpleTable.from_text(self, txt, keep_fields=['From', 'To', 'Label'])
        self.__building = False
    def find_selection(self, From, To):  # assumed name; the def line is elided
        for i in xrange(self.size):
            if (self[i,'From'] == From) and (self[i,'To'] == To):
                return i
        return -1


class SelectionLists(object):  # assumed base; the class def line is elided in this excerpt
    """

    @ivar lists: list of L{SelectionList}
    """
    # ... (__init__ and other members elided) ...
    index = property(lambda self: self.__index, lambda self, x: self.set_index(x), doc="index of onscreen list")
    item = property(lambda self: self.__list, doc="onscreen L{SelectionList}")
    def set_index(self, x):  # assumed signature; body partially elided
        # ...
        self.OnChangeList(self, self.__list)
    def __onChangeListName(self, lst, name):  # assumed name/signature; body partially elided
        # ...
        self.OnChangeListName(self, lst, name)
    def index_of(self, name):
        for i, lst in enumerate(self.lists):
            if lst.list_name == name:
                return i
        raise KeyError(name)
    # ... (further methods elided) ...
    def del_list(self, name_or_ix):
        try:
            ix = int(name_or_ix)
        except:
            ix = self.index_of(name_or_ix)
        if ix == self.__index:
            if len(self.lists) == 1:
                self.index = -1
            elif ix == (len(self.lists) - 1):
                self.index = self.index - 1
            else:
                self.index = self.index + 1
        del self.lists[ix]
        if self.__index > ix:
            self.__index -= 1
        elif self.__index == ix:
            if ix < len(self.lists):
                self.index = self.index
            else:
                self.index = self.__index - 1
    # ...
    def read_from_tree(self, lists_node):
        if lists_node:
            for i in reversed(xrange(len(self.lists))):
                self.del_list(i)
            for list_node in qubx.tree.children(lists_node, 'List'):
                self.show_list(list_node.name)
                self.item.read_from_tree(list_node)
                if self.item.list_name == 'Segments':
                    self.item.user_can_edit = False
            try:
                self.index = lists_node['ListIndex'].data[0]
            except:
                pass
    # ... (show_list, by_name, read_from_classic_tree, etc. elided) ...


class IdlUpdater(object):  # assumed class name; the def line is elided in this excerpt
    """Manages updates to idealization to make them undoable and to emit the OnChangeIdealization event; make sure to call done()."""
    # ... (__init__ elided) ...
    def set_dwells(self, n, ff, ll, cc, aa=None, ss=None, event=False):
        if n <= 0:
            return
        idl = self.data.ideal[self.signal].idl
        idlmeta = self.data.ideal[self.signal].seg[self.data.segmentation.index_at(ff[0])]
        def make_set(ff, ll, cc, aa, ss, ev):
            aaa = aa if (aa is None) else aa[:]
            sss = ss if (ss is None) else ss[:]
            def do():
                idl.set_dwells(len(ff), ff, ll, cc)
                if not ((idlmeta.amp is None) or (aaa is None)) and (len(idlmeta.amp) >= len(aaa)):
                    idlmeta.amp[:len(aaa)] = aaa
                    if not (sss is None):
                        idlmeta.std[:len(sss)] = sss
                elif not (aaa is None):
                    idlmeta.amp = aaa
                    if not (sss is None):
                        idlmeta.std = sss
                    else:
                        idlmeta.std = [0.0] * len(aaa)
                if ev:
                    self.data.OnChangeIdealization(self.signal)
            return do

        uff, ull, ucc = idl.get_dwells_and_gaps(ff[0], ll[-1])
        rff, rll, rcc = [numpy.array(zz, copy=True) for zz in (ff, ll, cc)]
        raa = aa if (aa is None) else aa[:]
        rss = ss if (ss is None) else ss[:]
        self.data.undoIdl.push_undo(make_set(uff, ull, ucc, idlmeta.amp, idlmeta.std, True),
                                    make_set(rff, rll, rcc, raa, rss, True))
        make_set(ff, ll, cc, aa, ss, event)()
        if not event:
            self.defer_event = True
    def done(self):
        self.data.undoIdl.seal_undo('Idealize')
        if self.defer_event:
            self.data.OnChangeIdealization(self.signal)

STIM_TYPES = (0, 1, 2)
(STIM_TYPE_CONST, STIM_TYPE_SIGNAL, STIM_TYPE_EXPR) = STIM_TYPES
STIM_LABELS = ('Constant', 'Signal', 'Expression')

# ... (acceptStimType, formatStimType, etc. elided) ...

class StimulusRecord(Anon):  # the class def line is elided in this excerpt; Anon base inferred from __init__
    """
    Describes how to find and idealize the time course of a model variable.
    It can be constant, an analog signal, or an expression in terms of segment index,
    time since seg-start, and named analog signals.  Read-only, please.  You are more
    likely to encounter a L{qubx.table.TableRow} with the same fields.

    @ivar Name: the model variable, e.g. 'Voltage'
    @ivar Type: STIM_TYPE_CONST, STIM_TYPE_SIGNAL, or STIM_TYPE_EXPR
    @ivar Signal_index: index in dataview.signals, if stim_type == STIM_TYPE_SIGNAL
    @ivar Expr: f(seg, t[, signalname[0], signalname[1], ...]) -> float, if stim_type == STIM_TYPE_EXPR
    @ivar Latency: [ms] milliseconds of delay to add
    @ivar Shaping: name of a shaping plugin
    @ivar Known_amps: list of known stimulus levels for idealization
    @ivar Add_deltas: whether to add more stimulus levels when idealizing
    @ivar Delta_amp: minimum difference between added levels
    @ivar Min_dur: [ms] events any shorter are subsumed by their neighbor
    """
    def __init__(self, name, stim_type=STIM_TYPE_CONST, signal_index=None, expr="0", latency_ms=0.0,
                 shaping="", known_amps=[0.0], add_deltas=True, delta=0.5, min_dur_ms=1.0, expr_index=None, active=True):
        Anon.__init__(self, Name=name, Type=stim_type, Signal_index=signal_index, Expr=expr, Latency=latency_ms,
                      Shaping=shaping, Known_amps=known_amps, Add_deltas=add_deltas, Delta_amp=delta, Min_dur=min_dur_ms)
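
# For instance, a constant 60 mV 'Voltage' variable could be described as
# (an illustrative sketch; the name and level are made up):
#
#     stim = StimulusRecord('Voltage', STIM_TYPE_CONST, known_amps=[60.0])
#     assert stim.Type == STIM_TYPE_CONST and stim.Known_amps == [60.0]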


class QubData(object):  # the class def line is elided in this excerpt
    """Base class for an open data file.

    @ivar sampling: interval between data points, in seconds
    @ivar segmentation: L{Segmentation}
    @ivar seg: list of objects to hold per-segment attributes
    @ivar path: folder/filename or ""
    @ivar exclusion: L{Exclusion} -- idealization of erased regions
    @ivar analog: list of L{QubData_Analog} for each signal
    @ivar overlays: list of L{SigOverlays} (or None) for each signal
    @ivar baseline: list of L{SigBaseline} for each signal
    @ivar fits: list of L{SigIdl<IdlFits>} for each signal
    @ivar ideal: list of L{SigIdl<Idealization>} for each signal
    @ivar qsf: L{qubx.tree.Node} with metadata, saved as <whatever>.qsf
    @ivar constants: L{qubx.table.SimpleTable} with Name, Value
    @ivar signals: L{qubx.table.SimpleTable} with Name, Units, Scale, Offset, Filter, Filter Freq
    @ivar stimuli: L{qubx.table.SimpleTable} with the same fields as L{StimulusRecord}.
    @ivar segments: L{qubx.table.SimpleTable} of stats etc; auto-adds fields
    @ivar lists: L{SelectionLists}
    @ivar list: currently showing L{SelectionList}
    @ivar notes: comment string for the end user
    @ivar trials: L{qubx.trialset.TrialSets}
    @ivar store_fit_curves: True to save qsf['FitCurves']; default False, to save space
    @ivar OnChangePath: L{WeakEvent}(QubData, path or "") when filename or path change
    @ivar OnChangeField: L{WeakEvent}(QubData, 'Name' or 'In Folder') when filename or path change, for L{qubx.table.ObjectTable}
    @ivar OnChangeSampling: L{WeakEvent}(QubData, sampling) when the sampling rate changes
    @ivar OnChangeSamples: L{WeakEvent}(QubData, signal_index or -1, *reserved) when sampled data changes
    @ivar OnChangeIdealization: L{WeakEvent}(QubData, signal_index or -1, *reserved) when idealized data changes
    @ivar OnChangeFits: L{WeakEvent}(QubData, signal_index or -1, *reserved) when fit curve changes
    @ivar OnChangeBaselineNodes: L{WeakEvent}(QubData, signal_index or -1)
    @ivar OnChangeOverlaySource: L{WeakEvent}(QubData, signal_index)
    @ivar OnChangeOverlays: L{WeakEvent}(QubData, signal_index)
    @ivar OnSaving: L{WeakEvent}(QubData, qsf) when qsf (L{qubx.tree.Node}) needs updating prior to save
    """
    def __init__(self):  # the def line is elided in this excerpt
        self.__ref = Reffer()
        self.__sampling = 1.0
        self.scaling = 1.0
        self.OnChangePath = WeakEvent()
        self.OnChangeField = WeakEvent()
        self.OnChangeSampling = WeakEvent()
        self.OnChangeSamples = WeakEvent()
        self.OnChangeIdealization = WeakEvent()
        self.OnChangeFits = WeakEvent()
        self.OnChangeBaselineNodes = WeakEvent()
        self.OnChangeOverlaySource = WeakEvent()
        self.OnChangeOverlays = WeakEvent()
        self.OnChangeOverlays += self.OnChangeSamples
        self.OnSaving = WeakEvent()
        self.constants = SimpleTable('Constants', global_name='QubX.Data.file.constants')
        self.constants.add_field('Name', '', acceptString, str, '')
        self.constants.add_field('Value', 1.0, acceptFloat, str, '')
        self.signals = SimpleTable('Signals', global_name='QubX.Data.file.signals')
        self.signals.add_field('Name', '', acceptNothing, str, '')
        self.signals.add_field('Units', '', acceptNothing, str, '')
        self.signals.add_field('Scale', 1.0, acceptFloat, str, '')
        self.signals.add_field('Offset', 0.0, acceptFloat, str, '')
        self.signals.add_field('Filter', False, acceptBool, str, '')
        self.signals.add_field('Filter Freq', 1.0, acceptFloat, '%.3g', 'kHz')
        self.signals.add_field('Computed as', '', str, str, '')
        self.signals.OnInsert += self.__ref(self.__onInsertSignal)
        self.signals.OnRemoving += self.__ref(self.__onRemovingSignal)
        self.signals.OnSet += self.__ref(self.__onSetSignal)
        self.analog = []
        self.overlays = []
        self.analog_sources = weakref.WeakValueDictionary()
        self.baseline = []
        self.fits = []
        self.store_fit_curves = False
        self.ideal = []
        self.stim_expr_ideal = collections.defaultdict(lambda: SigIdl(self.sampling, self.segmentation))
        self.segmentation = Segmentation()
        self.segmentation.OnAdd += self.__ref(self.__onAddSeg)
        self.segmentation.OnClear += self.__ref(self.__onClearSegs)
        self.segments = SimpleTable('Segments', auto_add_fields=True, global_name='QubX.Data.file.segments')
        self.segments.add_field('SampleCount', 0, acceptNothing, str, '')
        self.segments.add_field('Start', 0.0, acceptNothing, '%.5g', 'ms', independent=True)
        self.segments.add_field('Duration', 0.0, acceptNothing, '%.5g', 'ms')
        self.segments.add_field('Label', '', acceptString, str, '')
        self.segments.OnSet += self.__ref(self.__onSetSegments)
        self.seg_baseline = []
        self.seg_bl_undo = []
        self.seg = []
        self.exclusion = Exclusion(self.__sampling, self.segmentation)
        self.exclusion.OnChange += self.__ref(self.__onExclusionChange)
        self.stimuli = SimpleTable('Stimuli', global_name='QubX.Data.file.stimuli')
        self.stimuli.add_field('Name', '', acceptNothing, str, '')
        self.stimuli.add_field('Type', STIM_TYPE_CONST, acceptStimType, formatStimType, '', STIM_TYPES)
        self.stimuli.add_field('Signal_index', 0, acceptIntGreaterThan(-1), str, '')
        acceptExpr = acceptF(static=[], custom=True)
        self.stimuli.add_field('Expr', acceptExpr('0'), acceptExpr, str, '')
        self.stimuli.add_field('Latency', 0.0, acceptFloat, '%.3g', 'ms')
        self.stimuli.add_field('Shaping', '', str, str, '')
        self.stimuli.add_field('Known_amps', [], acceptFloatList(), formatList('%.3g'), '')
        self.stimuli.add_field('Add_deltas', True, acceptBool, str, '')
        self.stimuli.add_field('Delta_amp', 0.5, acceptFloat, '%.3g', '')
        self.stimuli.add_field('Min_dur', 0.0, acceptFloatGreaterThanOrEqualTo(0.0), '%.3g', 'ms')
        self.stimuli.OnInsert += self.__ref(self.__onInsertStimulus)
        self.stimuli.OnSet += self.__ref(self.__onSetStimulus)
        self.lists = SelectionLists()
        self.path = ""
        self.qsf = qubx.tree.Node('SessionFile')
        self.group = 1
        self.notes = ""
        self.trials = qubx.trialset.TrialSets()
        self.undoIdl = qubx.undo.UndoStack(depth=2)
        self.__undoIdlSignal = None
    # ... (path/name properties etc. elided) ...
    list = property(lambda self: self.lists.item)
    # ... (set_sampling etc. elided) ...
    sampling = property(lambda self: self.__sampling, lambda self, x: self.set_sampling(x), "delta t (seconds)")
    def get_stimulus(self, name, def_latency=0.0, def_known_amps=[0.0], def_add_deltas=True, def_delta=0.5, def_min_dur=1.0, def_value=1.0):
        """Returns the L{StimulusRecord}, creating it with defaults if needed."""
        try:
            i = self.stimuli.index(name)
            return self.stimuli.get_row_by_name(name)
        except KeyError:
            sig_names = [self.signals.get(i, 'Name') for i in xrange(self.signals.size)]
            try:
                signal_index = sig_names.index(name)
                stim_type = STIM_TYPE_SIGNAL
            except:
                signal_index = 0
                stim_type = STIM_TYPE_CONST
            if stim_type == STIM_TYPE_CONST:
                const_names = [self.constants.get(i, 'Name') for i in xrange(self.constants.size)]
                if not (name in const_names):
                    self.constants.append({'Name':name, 'Value':def_value})
            self.stimuli.append({'Name' : name,
                                 'Type' : stim_type,
                                 'Signal_index' : signal_index,
                                 'Latency' : def_latency,
                                 'Known_amps' : def_known_amps,
                                 'Add_deltas' : def_add_deltas,
                                 'Delta_amp' : def_delta,
                                 'Min_dur' : def_min_dur})
            return self.stimuli.get_row_by_name(name)
900 """Returns the value of a stimulus variable at the sample index f."""
901 stim = self.get_stimulus(name)
902 try:
903 stim.Type
904 except:
905 print stim, stim.fields
906 if stim.Type == STIM_TYPE_CONST:
907 for i in xrange(self.constants.size):
908 if name == self.constants.get(i, 'Name'):
909 return self.constants.get(i, 'Value')
910 elif stim.Type == STIM_TYPE_SIGNAL:
911 ff, ll, cc, dd, aa = self.get_stimulus_idl(stim.Name, f, f)
912 if len(cc) and (cc[0] >= 0):
913 return aa[cc[0]]
914 else:
915 return self.analog[stim.Signal_index].read_overlaid(self, stim.Signal_index, f, f, int(round(stim.Latency*1e-3/self.sampling)))[0]
916 elif stim.Type == STIM_TYPE_EXPR:
917 ff, ll, cc, dd, aa = self.get_stimulus_idl(stim.Name, f, f)
918 return aa[cc[0]]
920 """Returns the idealized stimulus (firsts, lasts, classes, durations, amps) for named variable in sample range (f, l)."""
921 stim = self.get_stimulus(name)
922 if stim.Type == STIM_TYPE_CONST:
923 return (numpy.array([f], dtype='int32'),
924 numpy.array([l], dtype='int32'),
925 numpy.array([0], dtype='int32'),
926 numpy.array([self.sampling*1e3*(l-f+1)], dtype='float32'),
927 numpy.array([self.get_stimulus_at(name, 0)], dtype='float64'))
928 elif stim.Type == STIM_TYPE_SIGNAL:
929 ideal = self.ideal[stim.Signal_index]
930 elif stim.Type == STIM_TYPE_EXPR:
931 ideal = self.stim_expr_ideal[name]
932 firsts, lasts, classes, durations = ideal.idl.get_dwells(f, l, True, True)
933 amp_src = ideal.seg[self.segmentation.index_at(f)].amp
934 if (amp_src is None) or (len(amp_src) == 0):
935 amp_src = []
936 amps = numpy.array(amp_src, dtype='float64')
937 return firsts, lasts, classes, durations, amps
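    # For example (illustrative): a STIM_TYPE_CONST variable yields a single
    # dwell spanning [f, l] with class 0, its duration in ms, and amps holding
    # the constant's value; signal- and expression-based variables take their
    # dwells from the corresponding idealization instead.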
    def read_session(self, qsf, progressf=lambda pct: True):  # assumed name/signature; the def line is elided
        try:
            self.qsf = qsf
            if qsf.find('Signals'):
                tree_to_table(qsf['Signals'], self.signals)
            else:
                filters, freqs = [qsf.find('Display').find(x).data for x in ['FilterOn', 'FilterFreq']]
                for c, chan in enumerate(qubx.tree.children(qsf.find('DataChannels'), 'Channel')):
                    if c >= self.signals.size:
                        break
                    self.signals.set(c, 'Name', str(chan['Name'].data))
                    self.signals.set(c, 'Units', str(chan['Units'].data))
                    self.signals.set(c, 'Scale', 1.0 / (chan['Scaling'].data and chan['Scaling'].data[0] or 1.0))
                    self.signals.set(c, 'Offset', chan['Offset'].data and chan['Offset'].data[0] or 0.0)
                    if c < len(filters):
                        self.signals.set(c, 'Filter', bool(filters[c]))
                    if c < len(freqs):
                        self.signals.set(c, 'Filter Freq', 1e-3*freqs[c])
            if qsf.find('Stimuli'):
                tree_to_table(qsf['Stimuli'], self.stimuli)
                tree_to_table(qsf['Constants'], self.constants)
            else:
                for var in qubx.tree.children(qsf.find('ExpCond')):
                    ind = var.find('ChannelIndex').data
                    if ind and (ind[0] < self.signals.size):
                        stim = self.get_stimulus(var.name)
                        self.stimuli.set(stim.Index, 'Type', STIM_TYPE_SIGNAL)
                        self.stimuli.set(stim.Index, 'Signal_index', ind[0])
                    elif var.data:
                        try:
                            self.constants.set(self.constants.index(var.name), 'Value', var.data[0])
                        except:
                            self.constants.append({'Name' : var.name, 'Value' : var.data[0]})
            if qsf.find('Lists_Express'):
                self.lists.read_from_tree(qsf.find('Lists_Express'))
            else:
                self.lists.read_from_classic_tree(qsf.find('Lists'))
            ReadSessionIdl(self, qsf, addsegs=(not self.segmentation.segments), progressf=progressf)
            ReadSessionExcl(self, qsf, progressf=progressf)
            ReadSessionBaseline(self, qsf, progressf=progressf)
            ReadSessionTrials(self, qsf, progressf=progressf)
            ReadSessionFits(self, qsf, progressf=progressf)
            ReadSessionOverlays(self, qsf, progressf=progressf)
            if qsf.find('Segments'):
                tree_to_table(qsf['Segments'], self.segments)
            if qsf.find('DataGroup').data:
                self.group = qsf['DataGroup'].data[0]
            self.notes = str(qsf.find('Comment').data)
        except:
            traceback.print_exc()
    def saveAs(self, path=None, progressf=lambda pct: True):
        """Writes the data according to file extension (default .txt)."""
        segs_of_signal = [self.get_segmentation_file(signal=s, baseline_nodes=False) for s in xrange(self.signals.size)]
        ext = os.path.splitext(path)[1].lower()
        if ext == '':
            ext = '.txt'
            path = path + ext
        tables = self.get_save_tables()
        constants, segments, signals, stimuli = tables[:4]
        signals = tables[2] = signals.clone()
        for s in xrange(signals.size):
            signals.set(s, 'Scale', 1.0)
            signals.set(s, 'Offset', 0.0)
        Save(path, self.qsf, segs_of_signal, self.lists, progressf, *tables)
    def save_session(self):  # assumed name/signature; writes the .qsf metadata next to the data file
        if os.path.split(self.path)[0]:
            segs_of_signal = [self.get_segmentation_file(signal=s) for s in xrange(self.signals.size)]
            qsf = BuildSession(self.qsf, segs_of_signal, self.lists, *self.get_save_tables())
            try:
                BuildSessionBaseline(qsf, self.baseline)
                BuildSessionTrials(qsf, self.trials)
                qsf['Comment'].data = self.notes
                if self.store_fit_curves:
                    BuildSessionFits(qsf, self, segs_of_signal)
                BuildSessionOverlays(qsf, self, segs_of_signal)
                qsf.saveAs(os.path.splitext(self.path)[0] + '.qsf', as_copy=True)
            except:
                traceback.print_exc()
    def get_save_tables(self):
        tables = [self.constants, self.segments, self.signals, self.stimuli]
        self.OnSaving(self, tables)
        return tables
    def set_analog(self, i, analog):  # the def line is elided in this excerpt
        self.analog[i] = analog
        self.analog[i].sampling = self.sampling
        self.analog_sources[SafeName(self.signals[i, 'Name'])] = analog
        self.OnChangeSamples(self, i)
    def get_overlays(self, i):  # assumed name; creates the signal's SigOverlays on demand
        if self.overlays[i] is None:
            self.overlays[i] = SigOverlays(self.sampling, self.segmentation)
            self.overlays[i].OnChangeSource += self.__ref(bind(self.OnChangeOverlaySource, self, i))
            self.overlays[i].OnChangeOverlays += self.__ref(bind(self.OnChangeOverlays, self, i))
        return self.overlays[i]
    def accept_computed(self, expr):  # the def line is elided in this excerpt
        return parse_computed_signal(expr, self.analog_sources.keys())
    def __onInsertSignal(self, i, undoing=False):  # assumed signature
        self.analog.insert(i, None)
        self.overlays.insert(i, None)
        self.baseline.insert(i, SigBaseline(self.segmentation))
        self.baseline[i].OnChange += self.__ref(self.__onChangeBaseline)
        self.fits.insert(i, SigIdl(self.sampling, self.segmentation, IdlFits))
        self.ideal.insert(i, SigIdl(self.sampling, self.segmentation))
        qubx.table.run_later(self.__connect_stim_signal_if, i)
        name = self.signals[i, 'Name']
        baseline_field = 'Baseline offset %s' % name
        if baseline_field in self.segments.fields:
            seg_baseline = [self.segments[s, baseline_field] for s in xrange(len(self.segmentation.segments))]
        else:
            seg_baseline = [None for s in xrange(len(self.segmentation.segments))]
        self.seg_baseline.insert(i, seg_baseline)
        self.seg_bl_undo.insert(i, [UndoStack() for s in xrange(len(self.segmentation.segments))])
        expr = self.signals[i, 'Computed as']
        if name and expr:
            try:
                self.set_analog(i, QubData_Analog_Computed(self.accept_computed(expr), self.analog_sources, self, i))
            except NameError, ne:
                print 'Error computing signal %s=%s:\n%s' % (name, expr, ne)
            else:
                self.signals.user_can_remove[i] = False
    def __connect_stim_signal_if(self, sig_ix):  # the def line is elided in this excerpt
        for stim_ix in xrange(self.stimuli.size):
            stim = self.stimuli.get_row(stim_ix)
            if (stim.Type == STIM_TYPE_CONST) and (stim.Name == self.signals.get(sig_ix, 'Name')):
                self.stimuli.set(stim_ix, 'Type', STIM_TYPE_SIGNAL)
                self.stimuli.set(stim_ix, 'Signal_index', sig_ix)
                break
    def __onRemovingSignal(self, i, undoing=False):  # assumed signature
        del self.analog[i]
        del self.overlays[i]
        self.baseline[i].OnChange -= self.__ref(self.__onChangeBaseline)
        del self.baseline[i]
        del self.fits[i]
        del self.ideal[i]
        del self.seg_baseline[i]
        del self.seg_bl_undo[i]
        for ix in xrange(self.stimuli.size):
            if ((self.stimuli.get(ix, 'Type') == STIM_TYPE_SIGNAL) and
                (self.stimuli.get(ix, 'Signal_index') == i)):
                self.stimuli.set(ix, 'Type', STIM_TYPE_CONST)
        self.OnChangeSamples(self, None)
        self.OnChangeIdealization(self, None)
    def __onSetSignal(self, i, field, val, prev, undoing=False):  # assumed signature
        if (field == 'Filter') or ((field == 'Filter Freq') and self.signals.get(i, 'Filter')):
            self.OnChangeSamples(self, i)
        elif field == 'Name':
            self.__connect_stim_signal_if(i)
            if self.analog[i]:
                if SafeName(prev) in self.analog_sources:
                    del self.analog_sources[SafeName(prev)]
                self.analog_sources[SafeName(val)] = self.analog[i]
        elif field == 'Scale':
            bl_name = 'Baseline offset %s' % self.signals[i, 'Name']
            if bl_name in self.segments.fields:
                offset = self.signals[i, 'Offset']
                raw = lambda v: (v - offset) / prev
                cooked = lambda v: v * val + offset
                for s in xrange(self.segments.size):
                    self.segments[s, bl_name] = cooked(raw(self.segments[s, bl_name]))
            self.OnChangeSamples(self, i)
        elif field == 'Offset':
            bl_name = 'Baseline offset %s' % self.signals[i, 'Name']
            if bl_name in self.segments.fields:
                dy = val - prev
                for s in xrange(self.segments.size):
                    self.segments[s, bl_name] = dy + self.segments[s, bl_name]
            self.OnChangeSamples(self, i)
        elif field == 'Computed as':
            if (not self.analog[i]) or hasattr(self.analog[i], 'func'):
                self.signals.user_can_remove[i] = True
                try:
                    func = self.accept_computed(val)
                    if self.analog[i]:
                        self.analog[i].func = func
                        self.OnChangeSamples(self, i)
                    else:
                        self.set_analog(i, QubData_Analog_Computed(func, self.analog_sources, self, i))
                except NameError, ne:
                    qubx.pyenvGTK.show_message(str(ne))
            elif val:
                qubx.pyenv.env.call_later(self.signals.set, i, field, '')


    def __onSetSegments(self, i, field, val, prev, undoing=False):  # assumed signature
        match = re.match(r"Baseline offset (.+)", field)
        if match:
            try:
                sig_ix = self.signals.index(match.group(1))
                self.seg_baseline[sig_ix][i] = val
                if not undoing:
                    self.seg_bl_undo[sig_ix][i].push_undo(lambda: self.segments.set(i, field, prev, undoing=True),
                                                          lambda: self.segments.set(i, field, val, undoing=True))
                    self.seg_bl_undo[sig_ix][i].seal_undo('Set segment baseline')
                self.OnChangeSamples(self, sig_ix)
            except:
                traceback.print_exc()
    def __onAddSeg(self, segmentation, f, l, start):  # assumed signature, from the OnAdd event
        self.seg.append(Anon(f=f, l=l, ix=len(self.seg)))
        self.segments.append({'SampleCount' : (l - f + 1),
                              'Duration' : (l - f + 1) * 1e3 * self.sampling,
                              'Start' : start})
        self.segments.user_can_remove[len(self.segments) - 1] = False
        for sig in self.seg_baseline:
            sig.append(0.0)
        for sig in self.seg_bl_undo:
            sig.append(UndoStack())
    def __onClearSegs(self, segmentation):  # assumed signature, from the OnClear event
        self.seg = []
        self.segments.clear()
        self.seg_baseline = [ [] for i in xrange(len(self.signals)) ]
        self.seg_bl_undo = [ [] for i in xrange(len(self.signals)) ]
    def __onChangeBaseline(self):  # assumed signature
        self.OnChangeBaselineNodes(self, -1)
        self.OnChangeSamples(self, -1)
    def __onExclusionChange(self, exclusion):  # assumed signature
        self.OnChangeSamples(self, -1)
        self.OnChangeIdealization(self, -1)
        self.OnChangeFits(self, -1)
    # ... (further methods elided) ...
    def get_segmentation(self, first_seg=None, last_seg=None, left=None, right=None, signal=0, latency=None, baseline_nodes=True):
        """Returns a list of all L{SourceSeg}s, modified by args.

        @param first_seg:
        @param last_seg:
        @param left: time offset from start-of-each-seg
        @param right: time offset from start-of-each-seg
        @param signal: index in file.signals
        @param latency: number of samples to shift the data rightwards (same f, l, and idealization)
        """
        if latency is None:
            lat = 0
            if self.stimuli:
                for stim in self.stimuli.all_rows():
                    if (stim.Type == STIM_TYPE_SIGNAL) and (signal == stim.Signal_index):
                        lat = int(round(stim.Latency*1e-3/self.sampling))
                        break
        else:
            lat = latency
        filter_Hz = 1e3*(self.signals.get(signal, 'Filter') and self.signals.get(signal, 'Filter Freq') or 0.0)
        segf, segl, l, r = first_seg, last_seg, left, right
        if segf is None:
            segf = 0
        if segl is None:
            segl = len(self.segmentation.segments) - 1
        if l is None:
            l = 0.0
        if r is None:
            r = 1e20
        segs = []
        for iseg in xrange(segf, segl+1):
            first, last, n = self.segmentation.segments[iseg]
            group = self.segments[iseg, 'Group']
            dt = self.sampling
            first_in = max(first, first + int(round(l/dt)))
            last_in = min(last, first + int(round(r/dt)))
            if first_in > last_in:
                continue
            start_seg = self.segmentation.starts[iseg]
            seg = SourceSeg(self, signal, iseg, first, first_in, last_in,
                            latency=lat, filter_Hz=filter_Hz,
                            start=start_seg + (first_in - first)*1e3*dt,
                            baseline_nodes=baseline_nodes, group=group)
            for ef, el, ec in izip(*self.exclusion.idl.get_dwells(first_in, last_in, True)):
                seg.chunks.append(SourceChunk(self, signal, ef, el, ((ec!=0) and (el-ef+1) or 0),
                                              latency=lat, filter_Hz=filter_Hz,
                                              start=start_seg + (ef - first)*1e3*dt,
                                              baseline_nodes=baseline_nodes,
                                              group=group))
            segs.append(seg)
        return segs
    def get_segmentation_indexed(self, first, last, signal=0, latency=None, baseline_nodes=True, group=None):  # assumed signature; the def line is elided
        filter_Hz = 1e3*(self.signals.get(signal, 'Filter') and self.signals.get(signal, 'Filter Freq') or 0.0)
        if latency is None:
            lat = 0
            for stim in self.stimuli.all_rows():
                if (stim.Type == STIM_TYPE_SIGNAL) and (signal == stim.Signal_index):
                    lat = int(round(stim.Latency*1e-3/self.sampling))
                    break
        else:
            lat = latency

        segs = []
        for iseg, f, l in self.segmentation.split(first, last):
            g = self.segments[iseg, 'Group'] if (group is None) else group
            seg_f = self.segmentation.segments[iseg][0]
            seg_start = self.segmentation.starts[iseg]
            seg = SourceSeg(self, signal, iseg, seg_f, seg_f + f, seg_f + l,
                            latency=lat, filter_Hz=filter_Hz,
                            start=seg_start + f*1e3*self.sampling,
                            baseline_nodes=baseline_nodes,
                            group=g)
            for ef, el, ec in izip(*self.exclusion.idl.get_dwells(seg_f+f, seg_f+l, True)):
                seg.chunks.append(SourceChunk(self, signal, ef, el, ((ec!=0) and (el-ef+1) or 0),
                                              latency=lat, filter_Hz=filter_Hz,
                                              start=seg_start + (ef - seg_f)*1e3*self.sampling,
                                              baseline_nodes=baseline_nodes,
                                              group=g))
            segs.append(seg)
        return segs
    # ... (get_segmentation_file etc. elided) ...


def get_segmentation_copy(segmentation, signal=0):  # assumed signature; the def line is elided
    if isinstance(segmentation, collections.Iterable):
        lists = [seg.file.get_segmentation_indexed(seg.f, seg.l, signal) for seg in segmentation]
        return [lst[0] for lst in lists if len(lst)]
    else:
        return get_segmentation_copy([segmentation], signal)[0]


class SourceChunk(Anon):  # the class def line is elided in this excerpt; Anon base inferred from __init__
    """Describes one contiguous slice of data from the data source.

    @ivar file: L{qubx.data.QubData}
    @ivar signal: index of source signal
    @ivar f: first data point
    @ivar l: last data point
    @ivar n: number of data points
    @ivar start: in ms
    @ivar included: False if excluded
    @ivar latency: number of samples to shift the data rightwards (ignored for idealization)
    @ivar filter_Hz: low-pass filter frequency, or None
    @ivar sampling: sampling interval (sec)
    @ivar idealized: True if segment has (starts with) idealized dwells
    @ivar baseline_nodes: True to subtract baseline nodes
    @ivar group: Group column from List or Segments
    """
    def __init__(self, file=None, signal=0, f=0, l=-1, included=True, latency=0, start=-1, sampling=None, baseline_nodes=True, group=0, **kw):
        if not ('n' in kw):
            kw['n'] = l-f+1
        if not ('idealized' in kw):
            kw['idealized'] = (0 <= signal < len(file.ideal)) and (0 <= file.ideal[signal].idl[f])
        Anon.__init__(self, file=file, signal=signal, f=f, l=l, included=included, latency=latency, start=start,
                      sampling=sampling or file.sampling, baseline_nodes=baseline_nodes, group=group, **kw)
    def get_actual_bounds(self, first_offset=None, last_offset=None):  # the def line is elided in this excerpt
        f, l, start = self.f, self.l, self.start
        if not (first_offset is None):
            f += first_offset
            start += first_offset * 1e3 * self.sampling  # start is in ms
        if not (last_offset is None):
            l = min(self.l, self.f + last_offset)
        return f, l, start
    def get_samples(self, first_offset=None, last_offset=None):
        f, l, start = self.get_actual_bounds(first_offset, last_offset)
        if self.filter_Hz:
            return get_file_samples_filtered(self.file, self.signal, f, l, self.included, self.latency,
                                             self.filter_Hz, start=start, baseline_nodes=self.baseline_nodes, group=self.group)
        else:
            return get_file_samples(self.file, self.signal, f, l, self.included, self.latency, 1, start, baseline_nodes=self.baseline_nodes, group=self.group)
    def get_idealization(self, signal=None, mark_excluded=False, get_fragments=False, get_durations=False,
                         get_amps=False, get_stds=False, first_offset=0, last_offset=None):
        """Returns the dwells in the chunk as numpy arrays.

        @param signal: signal index, or None to use the chunk.signal
        @param mark_excluded: if True, replaces excluded dwells with class -1 (or -2?)
        @param get_fragments: if False, omits first/last dwells if incomplete
        @param get_durations: if True, returns durations
        @param get_amps: if True, returns amps
        @param get_stds: if True, returns stds
        @param first_offset: (optional) to narrow the desired time range
        @param last_offset: (optional)
        @return: firsts, lasts, classes[, durations, amps, stds]
        """
        f = self.f + first_offset
        l = self.l if (last_offset is None) else min(self.l, self.f + last_offset)
        sig = signal
        if (sig is None) or not (0 <= sig < self.file.signals.size):
            sig = self.signal
        if not (0 <= sig < self.file.signals.size):
            rtn = [ [], [], [] ]
            if get_durations:
                rtn.append( [] )
            if get_amps:
                rtn.append( [] )
            if get_stds:
                rtn.append( [] )
        else:
            if mark_excluded:
                rtn = list(self.file.ideal[sig].idl.get_dwells_excluding(self.file.exclusion.idl, 0,
                                                                         f, l, get_durations))
            else:
                rtn = list(self.file.ideal[sig].idl.get_dwells(f, l, get_fragments, get_durations))
            if get_amps:
                rtn.append(self.file.ideal[sig].seg[ self.file.segmentation.index_at(f) ].amp)
            if get_stds:
                rtn.append(self.file.ideal[sig].seg[ self.file.segmentation.index_at(f) ].std)
        return rtn


class SourceSeg(SourceChunk):  # assumed base; the class def line is elided in this excerpt
    """Describes one segment from the data source.

    @ivar index: index of segment in file
    @ivar offset: sample index offset of source segment in file
    @ivar chunks: list of L{SourceChunk}
    """
    def __init__(self, file, signal=0, index=0, offset=0, f=0, l=-1, chunks=None, **kw):
        # ... (body elided) ...


def get_chunk_samples(chunk):  # assumed name; the def line is elided in this excerpt
    """Returns a L{SourceChunk} with samples and sampling."""
    return chunk.get_samples()

def get_file_samples(file, signal, first, last, included, latency=0, skip=1, start=-1, baseline_nodes=True, group=None):
    """Returns a L{SourceChunk} with samples and sampling."""
    g = 0
    if (first > last):
        samples = numpy.zeros(shape=(0,))
    elif (0 <= signal < len(file.analog)) and file.analog[signal]:
        iseg = file.segmentation.index_at(first)
        samples = file.analog[signal].read_overlaid(file, signal, first, last, latency, skip)
        bs_off = file.seg_baseline[signal][iseg]
        if bs_off:
            samples -= bs_off
        if baseline_nodes and len(file.baseline[signal].segs[iseg]):
            samples -= file.baseline[signal].get_sampled_nodes(first, last)[::skip]
        g = file.segments[iseg, 'Group'] if (group is None) else group
    else:
        samples = numpy.zeros(shape=(last-first+1,), dtype='float32')
    return SourceChunk(file, signal, first, last, included=included, latency=latency,
                       samples=samples, sampling=skip*file.sampling, start=start, baseline_nodes=baseline_nodes, group=g)

def get_file_samples_filtered(file, sig, first, last, included, latency, filter_Hz, skip=1, get_samples=get_file_samples,
                              start=-1, baseline_nodes=True, group=None):
    for f,l,n in file.segmentation.segments:
        if f <= first <= l:
            seg_first, seg_last = f, l
            break
    else:
        print "Can't find segment index?"
        return get_samples(file, sig, first, last, included, latency, skip, baseline_nodes=baseline_nodes, group=group)
    excess = int(round((2 / filter_Hz) / file.sampling))
    first_got = max(seg_first, first-excess)
    last_got = min(seg_last, last+excess)
    raw_chunk = get_samples(file, sig, first_got, last_got, included, latency, skip, baseline_nodes=baseline_nodes, group=group)
    missing_begin = int(round((excess - (first-first_got)) * 1.0 / skip))
    missing_end = int(round((excess - (last_got-last)) * 1.0 / skip))
    to_skip = int(round(excess * 1.0 / skip))
    datapad = numpy.zeros(shape=(missing_begin + len(raw_chunk.samples) + missing_end,), dtype=numpy.float32)
    datapad[missing_begin : missing_begin + len(raw_chunk.samples)] = raw_chunk.samples
    datapad[:missing_begin] = datapad[missing_begin]
    if missing_end:
        datapad[-missing_end:] = datapad[-missing_end-1]
    filterpad = qubx.fast.filter.filter(datapad, raw_chunk.sampling, filter_Hz)
    if to_skip:
        samples_out = numpy.array(filterpad[to_skip : - to_skip], copy=True)
    else:
        samples_out = filterpad
    return SourceChunk(file=raw_chunk.file, signal=raw_chunk.signal, f=first, l=last, latency=latency,
                       included=raw_chunk.included, samples=samples_out, sampling=raw_chunk.sampling,
                       start=start, baseline_nodes=baseline_nodes, group=group)
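
# Note on the padding above: get_file_samples_filtered reads about 2/filter_Hz
# seconds of extra context on each side (clamped to the segment, with edge
# values repeated where the segment ends) so that the low-pass filter's edge
# transients fall outside the requested range, then trims the pad back off.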


def generate_chunk_samples(chunks, main_hold=None, signal=None, maxlen=(1<<16), get_excluded=False,
                           downsample_w=None, downsample_Nsigseg=1, get_idl=False, get_idl_durs=False):
    """Yields (numpy.array(float32) of sampled data, sampling) for each chunk.

    @param chunks: list of L{SourceChunk}
    @param main_hold: L{qubx.task.GTK_Main_Hold} if in a worker thread, or None in the main thread
    @param signal: signal index, or None to use the chunk.signal
    @param maxlen: chunks longer than this will be yielded in pieces; can be None: unlimited
    @param get_excluded: if True, gets data for chunks with .included==False
    @param downsample_w: width of screen in pixels, if you want to maybe skip some for speed
    @param downsample_Nsigseg: total number of segments*signals being processed, to help decide how many points to skip
    @param get_idl: if True, sets chunk.firsts, chunk.lasts, chunk.classes
    @param get_idl_durs: if True and get_idl, sets chunk.durations
    """
    maxchunk = maxlen or max(chunk.n for chunk in chunks)
    if downsample_w is None:
        skip = 1
    else:
        Nsamp = sum(chunk.n for chunk in chunks if get_excluded or chunk.included)
        actual_spp = Nsamp / max(downsample_w, 1)
        skip = max(1, int(round(actual_spp / DOWNSAMPLES_PER_PIX(downsample_Nsigseg, Nsamp))))
    def get_samples_nohold(file, sig, first, last, included, latency, skip, baseline_nodes, group):
        return get_file_samples(file, sig, first, last, included, latency, skip, baseline_nodes=baseline_nodes, group=group)
    def get_samples_hold(file, sig, first, last, included, latency, skip, baseline_nodes, group):
        with main_hold:
            chunk_out = get_samples_nohold(file, sig, first, last, included, latency, skip, baseline_nodes, group)
        return chunk_out
    get_samples = main_hold and get_samples_hold or get_samples_nohold
    for chunk in chunks:
        if chunk.included or get_excluded:
            sig = signal
            if sig is None:
                sig = chunk.signal
            for i in xrange(chunk.f, chunk.l+1, maxchunk):
                if chunk.filter_Hz:
                    chunk_out = get_file_samples_filtered(chunk.file, sig, i, min(chunk.l, i+maxchunk-1), chunk.included, chunk.latency, chunk.filter_Hz, skip, get_samples,
                                                          baseline_nodes=chunk.baseline_nodes, group=chunk.group)
                else:
                    chunk_out = get_samples(chunk.file, sig, i, min(chunk.l, i+maxchunk-1), chunk.included, chunk.latency, skip, chunk.baseline_nodes, chunk.group)
                if get_idl:
                    idl = chunk_out.get_idealization(chunk.signal, False, True, get_idl_durs)
                    chunk_out.firsts, chunk_out.lasts, chunk_out.classes = idl[:3]
                    if get_idl_durs:
                        chunk_out.durations = idl[3]
                yield chunk_out
        else:
            yield chunk

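# Typical use, e.g. to paint a screenful of data (a sketch; `data` is an open
# QubData, `w` a plot width in pixels, and `draw` is hypothetical):
#
#     chunks = [chunk for seg in data.get_segmentation() for chunk in seg.chunks]
#     for piece in generate_chunk_samples(chunks, downsample_w=w,
#                                         downsample_Nsigseg=len(chunks)):
#         draw(piece.samples, piece.sampling)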

def BuildSession(base_qsf, segs_of_signal, lists, *tables):
    # ... (body elided in this excerpt) ...


def BuildSessionIdl(qsf, segs_of_signal):  # assumed signature; the def line is elided in this excerpt
    if qsf.find('Idealization'):
        qsf.remove(qsf['Idealization'])
    idlchan = qubx.tree.NullNode()
    for s, segs in enumerate(segs_of_signal):
        points = 0
        idlchan = qsf['Idealization'].insert('Channel', idlchan)
        idlchan['sampling'].data = segs[0].sampling * 1e3
        qsfseg = qubx.tree.NullNode()
        for seg in segs:
            ff, ll, cc, dd, aa, ss = seg.get_idealization(mark_excluded=False, get_fragments=True,
                                                          get_durations=True, get_amps=True, get_stds=True)
            if (len(ff) == 0) or ((len(ff) == 1) and (cc[0] < 0)):
                points += seg.n
                continue
            if seg.f != points:
                ll -= seg.f - points
                ff -= seg.f - points
            qsfseg = idlchan.insert('Segment', qsfseg)
            qsfseg.data = (points, points+seg.n-1)
            points += seg.n
            qsfseg['DwellCount'].data = len(ff)
            qsfseg['Firsts'].data = ff
            qsfseg['Lasts'].data = ll
            qsfseg['Classes'].data = cc
            qsfseg['Durations'].data = dd
            qsfseg['amp'].data = aa
            qsfseg['sd'].data = ss

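# Note: BuildSessionIdl rebases each segment's Firsts/Lasts from source-file
# sample coordinates into output-file coordinates (the running 'points'
# counter), so the saved idealization lines up with the data actually written.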

def BuildSessionFits(qsf, file, segs_of_signal):  # the def line is elided in this excerpt
    if qsf.find('FitCurves'):
        qsf.remove(qsf['FitCurves'])
    qsf['StoreFitCurves'].data = 1
    idlchan = qubx.tree.NullNode()
    for s, segs in enumerate(segs_of_signal):
        points = 0
        idlchan = qsf['FitCurves'].insert('Channel', idlchan)
        qsfseg = qubx.tree.NullNode()
        for iseg, seg in enumerate(segs):
            qsfseg = idlchan.insert('Segment', qsfseg)
            qsfseg.data = (points, points+seg.n-1)
            ff, ll, cc = file.fits[s].idl.get_dwells_and_gaps(seg.f, seg.l)
            if (len(ff) == 0) or ((len(ff) == 1) and (cc[0] < 0)):
                points += seg.n
                continue
            points += seg.n
            qsfseg['DwellCount'].data = len(ff)
            qsfseg['Firsts'].data = ff
            qsfseg['Lasts'].data = ll
            qsfseg['Classes'].data = cc
            qsfseg['Mean'].data = file.fits[s].idl.segmeans[iseg]
            qsfseg['Std'].data = file.fits[s].idl.segstds[iseg]

def BuildSessionOverlays(qsf, file, segs_of_signal):  # the def line is elided in this excerpt
    if qsf.find('Overlays'):
        qsf.remove(qsf['Overlays'])
    chan = qubx.tree.NullNode()
    for s, segs in enumerate(segs_of_signal):
        points = 0
        chan = qsf['Overlays'].insert('Channel', chan)
        if file.overlays[s] and file.overlays[s].sels:
            chan['Sels'].data = file.overlays[s].sels
            qsfseg = qubx.tree.NullNode()
            for iseg, seg in enumerate(segs):
                qsfseg = chan.insert('Segment', qsfseg)
                qsfseg.data = (points, points+seg.n-1)
                ff, ll, cc = file.overlays[s].idl.get_dwells_and_gaps(seg.f, seg.l)
                qsfseg['DwellCount'].data = len(ff)
                if len(ff):
                    qsfseg['Firsts'].data = ff
                    qsfseg['Lasts'].data = ll
                    qsfseg['Classes'].data = cc
                points += seg.n


def BuildSessionExclusion(qsf, segs_of_signal):  # assumed name; the def line is elided in this excerpt
    if qsf.find('Exclusion'):
        qsf.remove(qsf['Exclusion'])
    excl = qsf['Exclusion']
    qsfseg = qubx.tree.NullNode()
    points = 0
    for seg in segs_of_signal[0]:
        qsfseg = excl.insert('Segment', qsfseg)
        qsfseg.data = (points, points+seg.n-1)
        if any(not chunk.included for chunk in seg.chunks):
            ff = []
            ll = []
            cc = []
            segpts = points
            for chunk in seg.chunks:
                ff.append(segpts)
                segpts += chunk.n
                ll.append(segpts - 1)
                if chunk.included:
                    cc.append(1)
                else:
                    cc.append(0)
            qsfseg['DwellCount'].data = len(cc)
            qsfseg['Classes'].data = cc
            qsfseg['Firsts'].data = ff
            qsfseg['Lasts'].data = ll
        points += seg.n


# ... (BuildSessionBaseline, BuildSessionTrials, etc. elided) ...

# ... (additional helpers elided) ...

def Save_QDF(path, base_qsf, segs_of_signal, lists, progressf, *tables, **kw):
    tbl = dict((t.label, t) for t in tables)
    signals = tbl['Signals']
    base_path, ext = os.path.splitext(path)
    qsf = BuildSession(base_qsf, segs_of_signal, lists, *tables)

    qdf = qubx.tree.Node('DataFile')
    qdf['Sampling'].data = segs_of_signal[0][0].sampling
    qdf['ADDataSize'].data = qsf['ADDataSize'].data = 4
    qdf['ADChannelCount'].data = qsf['ADChannelCount'].data = signals.size
    qdf['ADDataType'].data = qsf['ADDataType'].data = qubx.tree.QTR_TYPE_INT
    qdf.saveAs(path)
    points = 0
    lo = [1e20] * signals.size
    hi = [1e-20] * signals.size
    segout = qubx.tree.NullNode()
    sigs_of_seg = [ [segs_of_signal[c][i] for c in xrange(signals.size)] for i in xrange(len(segs_of_signal[0]))]
    for s, seg_of_signal in enumerate(sigs_of_seg):
        segout = qdf['Segments'].insert('Segment', segout)
        f = points
        l = f + seg_of_signal[0].n - 1
        points += seg_of_signal[0].n
        segout['StartTime'].data = seg_of_signal[0].start * 1e-3
        segout['Channels'].data.setup(qubx.tree.QTR_TYPE_INT, l-f+1, signals.size)
        segout['Channels'].data.preload = False
        for c, seg in enumerate(seg_of_signal):
            for buf_first in xrange(0, seg.n, SAVEBUFLEN):
                buf_last = min(seg.n, buf_first + SAVEBUFLEN) - 1
                buf = seg.get_samples(buf_first, buf_last).samples
                lo[c] = min(lo[c], numpy.min(buf))
                hi[c] = max(hi[c], numpy.max(buf))
    lo = min((x - signals.get(c, 'Offset')) / signals.get(c, 'Scale') for c,x in enumerate(lo))
    hi = max((x - signals.get(c, 'Offset')) / signals.get(c, 'Scale') for c,x in enumerate(hi))
    scaling = (2**24)/max(abs(lo), abs(hi))
    qdf['Scaling'].data = qsf['Scaling'].data = scaling
    qdf.re_map_data()
    segout = qdf['Segments'].find('Segment')
    sigs_of_seg = [ [segs_of_signal[c][i] for c in xrange(signals.size)] for i in xrange(len(segs_of_signal[0]))]
    prog_seg = 1.0 / len(sigs_of_seg)
    for s, seg_of_signal in enumerate(sigs_of_seg):
        if not progressf(s * prog_seg):
            raise KeyboardInterrupt()
        channels = segout['Channels'].storage.data
        prog_sig = prog_seg / len(seg_of_signal)
        for c, seg in enumerate(seg_of_signal):
            if not progressf(s*prog_seg + c*prog_sig):
                raise KeyboardInterrupt()
            for buf_first in xrange(0, seg.n, SAVEBUFLEN):
                if not progressf(s*prog_seg + (c + buf_first*1.0/seg.n)*prog_sig):
                    raise KeyboardInterrupt()
                buf_last = min(seg.n, buf_first + SAVEBUFLEN) - 1
                buf = seg.get_samples(buf_first, buf_last).samples
                buf -= signals.get(c, 'Offset')
                buf *= scaling / signals.get(c, 'Scale')
                channels[buf_first:buf_last+1,c] = buf
        segout = segout.nextSameName()

    dchan = qsf['DataChannels']['Channel']
    for c in xrange(signals.size):
        dchan['Scaling'].data = 1 / signals.get(c, 'Scale')
        dchan['Offset'].data = signals.get(c, 'Offset')
        dchan['Name'].data = signals.get(c, 'Name')
        dchan['Units'].data = signals.get(c, 'Units')
        if dchan.sibling:
            dchan = dchan.sibling
        elif (c+1) < signals.size:
            dchan = dchan.parent.insert('Channel', dchan)
    qdf.appendClone(qsf)
    qdf.save()
    del seg, segout, channels
    del qdf

    qsf['FileExt'].data = ext.lower()
    qsf.saveAs(base_path+'.qsf', as_copy=True)
    del qsf
SetWriter('.qdf', 'QuB data files', Save_QDF)


def Save_Float32(path, base_qsf, segs_of_signal, lists, progressf, *tables, **kw):
    tbl = dict((t.label, t) for t in tables)
    signals = tbl['Signals']
    base_path, ext = os.path.splitext(path)
    qsf = BuildSession(base_qsf, segs_of_signal, lists, *tables)

    fi = open(path, 'wb')
    sigs_of_seg = [ [segs_of_signal[c][i] for c in xrange(signals.size)] for i in xrange(len(segs_of_signal[0]))]
    for s, seg_of_signal in enumerate(sigs_of_seg):
        if not progressf(s * 1.0 / len(sigs_of_seg)):
            raise KeyboardInterrupt()
        channels = numpy.zeros(shape=(seg_of_signal[0].n, len(seg_of_signal)), dtype='float32')
        for c, seg in enumerate(seg_of_signal):
            for buf_first in xrange(0, seg.n, SAVEBUFLEN):
                buf_last = min(seg.n, buf_first + SAVEBUFLEN) - 1
                buf = seg.get_samples(buf_first, buf_last).samples
                channels[buf_first:buf_last+1,c] = buf
        channels.tofile(fi)

    dchan = qsf['DataChannels']['Channel']
    for c in xrange(signals.size):
        dchan['Scaling'].data = 1 / signals.get(c, 'Scale')
        dchan['Offset'].data = signals.get(c, 'Offset')
        if dchan.sibling:
            dchan = dchan.sibling
        elif (c+1) < signals.size:
            dchan = dchan.parent.insert('Channel', dchan)

    qsf['SinglePrecision'].data = 1
    qsf['ADDataSize'].data = 4
    qsf['ADChannelCount'].data = len(segs_of_signal)
    qsf['FileExt'].data = ext.lower()
    qsf.saveAs(base_path+'.qsf', as_copy=True)
    del qsf
SetWriter('.float32', 'Raw binary (float32)', Save_Float32)


def Save_Int16(path, base_qsf, segs_of_signal, lists, progressf, *tables, **kw):
    tbl = dict((t.label, t) for t in tables)
    signals = tbl['Signals']
    base_path, ext = os.path.splitext(path)
    qsf = BuildSession(base_qsf, segs_of_signal, lists, *tables)

    fi = open(path, 'wb')
    points = 0
    lo = [1e20] * signals.size    # running per-channel minima; sentinel above any real sample
    hi = [-1e20] * signals.size   # running per-channel maxima; sentinel below any real sample
    sigs_of_seg = [[segs_of_signal[c][i] for c in xrange(signals.size)] for i in xrange(len(segs_of_signal[0]))]
    for s, seg_of_signal in enumerate(sigs_of_seg):
        f = points
        l = f + seg_of_signal[0].n - 1
        points += seg_of_signal[0].n
        for c, seg in enumerate(seg_of_signal):
            for buf_first in xrange(0, seg.n, SAVEBUFLEN):
                buf_last = min(seg.n, buf_first + SAVEBUFLEN) - 1
                buf = seg.get_samples(buf_first, buf_last).samples
                lo[c] = min(lo[c], numpy.min(buf))
                hi[c] = max(hi[c], numpy.max(buf))
    lo = min((x - signals.get(c, 'Offset')) / signals.get(c, 'Scale') for c, x in enumerate(lo))
    hi = max((x - signals.get(c, 'Offset')) / signals.get(c, 'Scale') for c, x in enumerate(hi))
    scaling = 32767 / max(abs(lo), abs(hi))
    qsf['Scaling'].data = scaling
    sigs_of_seg = [[segs_of_signal[c][i] for c in xrange(signals.size)] for i in xrange(len(segs_of_signal[0]))]
    for s, seg_of_signal in enumerate(sigs_of_seg):
        if not progressf(s * 1.0 / len(sigs_of_seg)):
            raise KeyboardInterrupt()
        channels = numpy.zeros(shape=(seg_of_signal[0].n, len(seg_of_signal)), dtype='int16')
        for c, seg in enumerate(seg_of_signal):
            for buf_first in xrange(0, seg.n, SAVEBUFLEN):
                buf_last = min(seg.n, buf_first + SAVEBUFLEN) - 1
                buf = seg.get_samples(buf_first, buf_last).samples
                buf -= signals.get(c, 'Offset')
                buf *= scaling / signals.get(c, 'Scale')
                channels[buf_first:buf_last+1, c] = buf
        channels.tofile(fi)
    fi.close()

    dchan = qsf['DataChannels']['Channel']
    for c in xrange(signals.size):
        dchan['Scaling'].data = 1 / signals.get(c, 'Scale')
        dchan['Offset'].data = signals.get(c, 'Offset')
        if dchan.sibling:
            dchan = dchan.sibling
        elif (c+1) < signals.size:
            dchan = dchan.parent.insert('Channel', dchan)

    qsf['SinglePrecision'].data = 0
    qsf['ADDataSize'].data = 2
    qsf['ADChannelCount'].data = len(segs_of_signal)
    qsf['FileExt'].data = ext.lower()
    qsf.saveAs(base_path+'.qsf', as_copy=True)
    del qsf
SetWriter('.int16', 'Raw binary (int16)', Save_Int16)
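
# A decoding sketch (illustrative only): Save_Int16 stores each sample as
# int_val = (float_val - Offset) * Scaling / Scale, with Scaling global and
# Scale/Offset per channel (all recorded in the .qsf); this inverts it.
def _example_decode_int16(raw_counts, scaling, scale, offset):
    samples = raw_counts.astype('float32')
    samples *= scale / scaling  # undo the global Scaling and per-channel Scale
    samples += offset           # restore the per-channel Offset
    return samples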


def Save_TXT_Base(separator):
    # (factory def reconstructed -- the original line is elided in this listing;
    #  `separator` and the closing `return Save` imply this wrapper)
    def Save(path, base_qsf, segs_of_signal, lists, progressf, *tables, **kw):
        tbl = dict((t.label, t) for t in tables)
        signals = tbl['Signals']
        base_path, ext = os.path.splitext(path)
        qsf = BuildSession(base_qsf, segs_of_signal, lists, *tables)

        total_lines = written_lines = 0
        for seg in segs_of_signal[0]:
            total_lines += seg.n

        file = open(path, 'w')
        file.write('Time%s' % separator)
        file.write(separator.join(signals.get(c, 'Name') for c in xrange(signals.size)))
        file.write('\n')
        sampling = segs_of_signal[0][0].sampling
        sigs_of_seg = [[segs_of_signal[c][i] for c in xrange(signals.size)] for i in xrange(len(segs_of_signal[0]))]
        for s, seg_of_signal in enumerate(sigs_of_seg):
            if s:
                file.write('\n')
            for buf_first in xrange(0, seg_of_signal[0].n, SAVEBUFLEN):
                buf_last = min(seg_of_signal[0].n, buf_first + SAVEBUFLEN) - 1
                time = numpy.arange((buf_last - buf_first + 1), dtype='float32')
                time += buf_first
                time *= sampling
                time += 1e-3*(seg_of_signal[0].start - seg_of_signal[0].file.segments[0, 'Start'])
                columns = [time]
                columns.extend([seg.get_samples(buf_first, buf_last).samples for seg in seg_of_signal])
                for tup in itertools.izip(*columns):
                    file.write(separator.join("%.9g" % x for x in tup))
                    file.write('\n')
                    written_lines += 1
                if not progressf(written_lines * 1.0 / total_lines):
                    raise KeyboardInterrupt()
        file.close()
        qsf['TimeColumn'].data = int(True)
        qsf['Sampling'].data = sampling
        qsf['FileExt'].data = ext.lower()
        qsf.saveAs(base_path+'.qsf', as_copy=True)
        del qsf
    return Save
Save_TXT = Save_TXT_Base('\t')
Save_CSV = Save_TXT_Base(',')
SetWriter('.txt', 'Text files', Save_TXT)
SetWriter('.csv', 'Comma-separated text files', Save_CSV)
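
# Example (illustrative) of the layout the text writers produce: a header row,
# then one line per sample with the time column first, and a blank line
# between segments.  With the tab separator ('.txt'):
#
#   Time<TAB>Current<TAB>Voltage
#   0<TAB>-0.042<TAB>80
#   0.0001<TAB>-0.038<TAB>80
#
# (Signal names and values here are made up.)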


def ReadSessionIdl(data, sess, addsegs=False, progressf=lambda pct: True):
    """
    Fills in data.ideal (and optionally data.segmentation) from sess, a
    U{QSF<http://www.qub.buffalo.edu/qubdoc/files/qsf.html>} tree.
    """
    for i, chan in enumerate(qubx.tree.children(sess['Idealization'], 'Channel')):
        for seg in qubx.tree.children(chan, 'Segment'):
            if not progressf(0.999): return
            if addsegs and (i == 0):
                data.segmentation.add_seg(seg.data[0], seg.data[1], seg.data[0]*data.sampling*1e3)
            dwell_count = 0
            if seg.find('DwellCount') and seg['DwellCount'].data:
                dwell_count = seg['DwellCount'].data[0]
            elif seg.find('Firsts') and seg['Firsts'].data:
                dwell_count = len(seg['Firsts'].data)
            if dwell_count:
                data.ideal[i].idl.set_dwells(dwell_count,
                                             seg['Firsts'].storage.data,
                                             seg['Lasts'].storage.data,
                                             seg['Classes'].storage.data)
                j = data.segmentation.index_at(seg.data[0])
                data.ideal[i].seg[j].amp = seg['amp'].data[:]
                data.ideal[i].seg[j].std = seg['sd'].data[:]
    sess.remove(sess['Idealization'])

    data.store_fit_curves = sess.find('StoreFitCurves').data and sess['StoreFitCurves'].data[0] or 0
    for i, chan in enumerate(qubx.tree.children(sess.find('FitCurves'), 'Channel')):
        for j, seg in enumerate(qubx.tree.children(chan, 'Segment')):
            if not progressf(0.999): return
            dwell_count = 0
            if seg.find('DwellCount') and seg['DwellCount'].data:
                dwell_count = seg['DwellCount'].data[0]
            elif seg.find('Firsts') and seg['Firsts'].data:
                dwell_count = len(seg['Firsts'].data)
            if dwell_count:
                data.fits[i].idl.set_dwells(dwell_count,
                                            seg['Firsts'].storage.data,
                                            seg['Lasts'].storage.data,
                                            seg['Classes'].storage.data)
                data.fits[i].idl.segmeans[j] = seg['Mean'].storage.data.flatten()
                data.fits[i].idl.segstds[j] = seg['Std'].storage.data.flatten()
    sess.remove(sess['FitCurves'])

    for i, chan in enumerate(qubx.tree.children(sess.find('Overlays'), 'Channel')):
        data_sel = chan.find('Sels').data
        if data_sel:
            overlays = data.get_overlays(i)
            for r in xrange(data_sel.rows):
                overlays.set_source(*data_sel.row(r)[:2])
            for j, seg in enumerate(qubx.tree.children(chan, 'Segment')):
                if not progressf(0.999): return
                dwell_count = 0
                if seg.find('DwellCount') and seg['DwellCount'].data:
                    dwell_count = seg['DwellCount'].data[0]
                elif seg.find('Firsts') and seg['Firsts'].data:
                    dwell_count = len(seg['Firsts'].data)
                if dwell_count:
                    overlays.idl.set_dwells(dwell_count,
                                            seg['Firsts'].storage.data,
                                            seg['Lasts'].storage.data,
                                            seg['Classes'].storage.data)
    sess.remove(sess['Overlays'])

    for j, seg in enumerate(qubx.tree.children(sess.find('Exclusion'), 'Segment')):
        if not progressf(0.999): return
        dwell_count = 0
        if seg.find('DwellCount') and seg['DwellCount'].data:
            dwell_count = seg['DwellCount'].data[0]
        elif seg.find('Firsts') and seg['Firsts'].data:
            dwell_count = len(seg['Firsts'].data)
        if dwell_count:
            data.exclusion.idl.set_dwells(dwell_count,
                                          seg['Firsts'].storage.data,
                                          seg['Lasts'].storage.data,
                                          seg['Classes'].storage.data)
    data.exclusion.OnChange(data.exclusion)

    if sess.find('BaselineNodes'):
        for i, chan in enumerate(qubx.tree.children(sess['BaselineNodes'], 'Channel')):
            for j, seg in enumerate(qubx.tree.children(chan, 'Segment')):
                if not progressf(0.999): return
                points, values = seg.find('Points'), seg.find('Values')
                if points.data and values.data:
                    data.baseline[i].add_nodes(points.storage.data.T[0,:], values.storage.data.T[0,:])
    for i, basechan in enumerate(data.baseline):
        if any(len(bseg) for bseg in basechan.segs):
            break
    else:
        for i, chan in enumerate(qubx.tree.children(sess.find('Baseline'), 'BaselineSegments')):
            for j, seg in enumerate(qubx.tree.children(chan, 'BaselineSegment')):
                if not progressf(0.999): return
                points, values = seg.find('Points'), seg.find('Values')
                if points.data and values.data:
                    pp, vv = points.storage.data.T[0,:], values.storage.data.T[0,:].astype('float32')
                    vv *= -data.signals[i, 'Scale'] / sess['Scaling'].data[0]
                    vv += data.signals[i, 'Offset']
                    data.baseline[i].add_nodes(pp, vv)
    sess.remove(sess['BaselineNodes'])


def read_signal_overlay(samples, datafile, overlay, analog, sel_index):
    # (def line reconstructed -- the signature is inferred from the call in
    #  QubData_Analog.read_overlaid below)
    cached = overlay.data[sel_index]
    if cached is None:
        cached = overlay.data[sel_index] = analog.read(*overlay.sels[sel_index])
    Nover = len(cached)
    Nsample = len(samples)
    for at in xrange(0, Nsample, Nover):
        batch = min(Nsample - at, Nover)
        samples[at:at+batch] = cached[:batch]

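# Note the tiling above: when the overlay source is shorter than the region it
# replaces, the cached samples repeat from the beginning, e.g. a 2-sample
# source [a, b] filling a 5-sample region yields [a, b, a, b, a].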

class QubData_Analog(object):
    # (class statement reconstructed -- the original line is elided; the name
    #  follows from the L{QubData_Analog.read} reference below)
    """Base class for one analog signal."""
    def read(self, first, last, latency=0, skip=1):
        """Override this method to return a numpy.array(float32) of data samples from within one segment.

        @param latency: the data is shifted to the right by this many samples; repeats seg. first/last sample as needed
        @param skip: e.g. 1: every point, 2: every other point, 3: every third point, ...
        """
        pass
    def read_overlaid(self, datafile, signal, first, last, latency=0, skip=1):
        """Returns samples, like L{QubData_Analog.read}, but replaces regions marked in datafile.overlays[signal] with data from elsewhere in the file."""
        overlays = datafile.overlays[signal]
        if overlays:
            ff, ll, cc = overlays.idl.get_dwells_and_gaps(first, last)
            Nd = len(cc)
            if Nd:
                samples = numpy.zeros(shape=(last-first+1,), dtype='float32')
                for f, l, c in izip(ff, ll, cc):
                    if c < 0:
                        samples[f-first:l+1-first] = self.read(f, l, latency, skip)
                    else:
                        read_signal_overlay(samples[f-first:l+1-first], datafile, overlays, self, c)
                return samples
        return self.read(first, last, latency, skip)

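# A minimal subclass sketch (illustrative only): a conforming read() honors
# first/last and skip and returns float32; the real subclasses below add
# latency handling by delegating to read_with_latency().
class _ExampleConstantAnalog(QubData_Analog):
    """Yields a constant level; stands in for a real sample source."""
    def __init__(self, level):
        self.level = level
    def read(self, first, last, latency=0, skip=1):
        count = len(xrange(0, last - first + 1, skip))
        return numpy.zeros(shape=(count,), dtype='float32') + self.level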

def make_computed_func(expr, source_names):
    # (reconstructed -- the original def line is elided in this listing; the name
    #  here is hypothetical and the parameters are inferred from the body)
    try:
        obj = qubx.pyenv.env.eval_str(expr, False)
        if hasattr(obj, 'shape') or hasattr(obj, 'get_samples'):
            return acceptF(static=[], custom=False, typ=None)(expr)
    except:
        pass
    func = acceptF(static=[], custom=True)(expr)
    allowed = set(['x', 't'] + list(source_names))
    for name in func.args:
        if not (name in allowed):
            raise NameError('No signal named "%s" in computed signal: %s' % (name, func.expr))
    return func

class QubData_Computed_Analog(QubData_Analog):
    # (class statement reconstructed -- the original line is elided; the class
    #  name is hypothetical)
    def __init__(self, func, sources, data, index):
        # (body reconstructed from the attributes read() uses; self.sampling is
        #  assumed to mirror the data file's sampling)
        self.func = func
        self.sources = sources
        self.data = data
        self.index = index
        self.sampling = data.sampling
    def read(self, first, last, latency=0, skip=1):
        if not self.func.args:
            arr = numpy.zeros(shape=(last-first+1,), dtype='float32')
            src = self.func()
            if hasattr(src, 'get_samples'):
                src.get_samples(self.sampling, first, last, arr)
            else:
                if len(src) > first:
                    src = src[first:min(len(src), last+1)]
                    arr[:len(src)] = src
            if skip > 1:
                arr = arr[::skip]
            return arr
        source_samples = []
        for arg in self.func.args:
            if arg in ('x', 't'):
                arr = numpy.arange(0, (last-first+1), skip, dtype='float32')
                arr += first
                arr *= self.sampling
                source_samples.append(arr)
            elif arg in self.sources:
                source_samples.append(self.sources[arg].read_overlaid(self.data, self.index, first, last, latency, skip))
            else:
                raise NameError('"%s" is unknown in computed signal expression: %s' % (arg, self.func.expr))
        try:
            return self.func(*source_samples)
        except:
            # vectorized evaluation failed; retry point by point, leaving zeros
            # where individual samples still fail
            arr = numpy.zeros(shape=source_samples[0].shape, dtype=numpy.float32)
            for i, args in enumerate(izip(*source_samples)):
                try:
                    arr[i] = self.func(*args)
                except:
                    pass
            return arr


def read_with_latency(read, first, last, latency, skip, iseg, segfirst, seglast):
    # (def line reconstructed -- the signature is inferred from the read() methods
    #  below, which all pass (self.read_in_seg, first, last, latency, skip, iseg,
    #  segfirst, seglast))
    lat_f, lat_l = first - latency, last - latency
    lat_fill_pre = max(0, segfirst - lat_f)
    lat_fill_post = max(0, lat_l - seglast)
    lat_f += lat_fill_pre
    lat_l -= lat_fill_post
    samples = read(iseg, lat_f-segfirst, lat_l-segfirst, skip)
    if lat_fill_pre or lat_fill_post:
        samples, sub = numpy.zeros(shape=(last-first+1,), dtype=numpy.float32), samples
        if lat_fill_pre:
            samples[:lat_fill_pre] = sub[0]
        if lat_fill_post:
            samples[-lat_fill_post:] = sub[-1]
        samples[lat_fill_pre:-lat_fill_post or len(samples)] = sub[:]
    return samples

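# A worked sketch of the latency fill: shifting right by `latency` pulls the
# request window back by that many samples, and whatever falls outside
# [segfirst, seglast] is padded by repeating the segment's edge sample.
def _example_latency():
    seg = numpy.arange(10, dtype='float32')  # one 10-sample segment: 0..9
    read = lambda iseg, f, l, skip: seg[f:l+1:skip]
    # samples 0..4 with latency 2: virtual samples -2..-1 don't exist, so the
    # first real sample is repeated, giving [0, 0, 0, 1, 2]
    return read_with_latency(read, 0, 4, 2, 1, 0, 0, 9)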

class QubData_QDF_Analog(QubData_Analog):
    # (class statement reconstructed -- the name follows from set_analog() in
    #  QubData_QDF below; the base class is assumed)
    def __init__(self, data, channel):
        """
        @param data: L{QubData_QDF}
        @param channel: integer 0-based signal index
        """
        self.data = data
        self.channel = channel
    def read(self, first, last, latency=0, skip=1):
        ifirst, ilast = [self.data.segmentation.index_at(x) for x in (first, last)]
        if ifirst != ilast:
            raise Exception("QDF: can't read() from multiple segments at once")
        segfirst = self.data.segmentation.segments[ifirst][0]
        seglast = self.data.segmentation.segments[ifirst][1]
        return read_with_latency(self.read_in_seg, first, last, latency, skip, ifirst, segfirst, seglast)
    # (read_in_seg is elided in this listing)


class QubData_QDF(QubData):
    # (class statement reconstructed -- the original line is elided; the name
    #  follows from its uses below)
    """One open U{QDF<http://www.qub.buffalo.edu/qubdoc/files/qdf.html>}
    (or U{QSF<http://www.qub.buffalo.edu/qubdoc/files/qsf.html>}) data file.

    @ivar qdf: L{qubx.tree.Node_numpy} the actual file
    @ivar scaling: float_val = int_val / scaling
    """
    def __init__(self, path, progressf, sess):
        QubData.__init__(self)
        self.path = path
        base, ext = os.path.splitext(path)
        if ext.lower() == '.qsf':
            self.qdf = qubx.tree.NullNode()
        else:
            self.qdf = qubx.tree.Open(path, True)

        if sess.child.isNull:
            sess = qubx.tree.AS_FLAVOR(self.qdf.find('SessionFile'), 'numpy', True)
        self.qsf = sess

        self.sampling = ((sess.find('Sampling') and sess['Sampling'].data[0])
                         or (self.qdf.find('Sampling') and self.qdf['Sampling'].data[0])
                         or 1.0)
        self.scaling = ((sess.find('Scaling') and sess['Scaling'].data[0])
                        or (self.qdf.find('Scaling') and self.qdf['Scaling'].data[0])
                        or 1.0)
        self.nchan = ((sess.find('ADChannelCount') and sess['ADChannelCount'].data[0])
                      or (self.qdf.find('ADChannelCount') and self.qdf['ADChannelCount'].data[0])
                      or 0)
        c = 0
        for chan in qubx.tree.children(sess.find('DataChannels'), 'Channel'):
            self.signals.append({'Name' : str(chan['Name'].data)})
            c += 1
        while c < self.nchan:
            self.signals.append({'Name' : 'Channel %i' % (c+1)})
            c += 1

        if self.qdf:
            last = -1
            i = 0
            for seg in qubx.tree.children(self.qdf.find('Segments'), 'Segment'):
                channels = seg['Channels']
                if not channels.data:
                    continue
                first = last + 1
                start = seg.find('StartTime').data and (1e3 * seg['StartTime'].data[0]) or (first*1e3*self.sampling)
                last = last + (channels.data and channels.data.rows or (first - 1))
                if first <= last:
                    self.segmentation.add_seg(first, last, start)
                    self.seg[i].channels = channels
                    i += 1

        for i in xrange(self.nchan):
            self.set_analog(i, QubData_QDF_Analog(self, i))

        self.read_session(sess, progressf)


def OpenQDF(path, progressf):
    # (sketch: the original body is elided in this listing; a NullNode session
    #  makes QubData_QDF read session info from the QDF's own SessionFile)
    return QubData_QDF(path, progressf, qubx.tree.NullNode())
SetReader('.qdf', 'QuB data files', OpenQDF)

def OpenQSF(path, progressf):
    # (sketch: the original body is elided; a .qsf opens with itself as the session)
    return QubData_QDF(path, progressf, qubx.tree.Open(path, True))
SetReader('.qsf', 'QuB session files', OpenQSF)


class QubData_Bin_Analog(QubData_Analog):
    # (class statement reconstructed -- the name follows from set_analog() in
    #  QubData_Bin below)
    def __init__(self, data, channel):
        """
        @param data: L{QubData_Bin}
        @param channel: integer 0-based signal index
        """
        self.data = data
        self.channel = channel
    def read(self, first, last, latency=0, skip=1):
        ifirst, ilast = [self.data.segmentation.index_at(x) for x in (first, last)]
        if ifirst != ilast:
            raise Exception("can't read() from multiple segments at once")
        segfirst = self.data.segmentation.segments[ifirst][0]
        seglast = self.data.segmentation.segments[ifirst][1]
        return read_with_latency(self.read_in_seg, first, last, latency, skip, ifirst, segfirst, seglast)
    # (read_in_seg is elided in this listing)


class QubData_Bin(QubData):
    # (class statement reconstructed -- the name follows from the
    #  QubData_Bin(path, progressf) call in OpenTXT below)
    """One open raw binary data file.

    @ivar scaling: float_val = int_val / scaling
    """
    def __init__(self, path, progressf):
        QubData.__init__(self)
        self.path = path
        self.fileno = self.mmap = None
        exists = os.path.exists(path)
        size = 0
        if exists:
            size = os.stat(path)[6]
        if size:
            self.fileno = os.open(path, os.O_RDONLY)
            self.mmap = mmap.mmap(self.fileno, size, access=mmap.ACCESS_READ)
            if not self.mmap:
                raise Exception('failed to open %s' % path)
        base, ext = os.path.splitext(path)
        sess = qubx.tree.Open(base+'.qsf', True)
        sess.close()
        self.qsf = sess
        self.sampling = ((sess.find('Sampling') and sess['Sampling'].data[0])
                         or 1.0e-4)
        self.scaling = ((sess.find('Scaling') and sess['Scaling'].data[0])
                        or 1.0)
        self.floating = ((sess.find('SinglePrecision').data and bool(sess['SinglePrecision'].data[0]))
                         or False)
        self.bytes = ((sess.find('ADDataSize') and sess['ADDataSize'].data[0])
                      or (self.floating and 4 or 2))
        self.nchan = ((sess.find('ADChannelCount') and sess['ADChannelCount'].data[0])
                      or 1)
        self.sampling, self.scaling, self.floating, self.bytes, self.nchan = \
            RequestBinaryParams(self.sampling, self.scaling, self.floating, self.bytes, self.nchan)
        if self.floating:
            self.scaling = 1.0
        sess['Sampling'].data = self.sampling
        sess['Scaling'].data = self.scaling
        sess['SinglePrecision'].data = int(self.floating)
        sess['ADDataSize'].data = self.bytes
        sess['ADChannelCount'].data = self.nchan
        c = 0
        for chan in qubx.tree.children(sess.find('DataChannels'), 'Channel'):
            self.signals.append({'Name' : str(chan['Name'].data)})
            c += 1
        while c < self.nchan:
            self.signals.append({'Name' : 'Channel %i' % (c+1)})
            c += 1

        nsample = int(size / self.bytes)
        nframe = nsample / self.nchan
        self.segmentation.add_seg(0, nframe-1, 0.0)

        if self.floating:
            if self.bytes == 8:
                dtype = 'float64'
            elif self.bytes == 4:
                dtype = 'float32'
            else:
                raise Exception('There are no 2-byte floats')
        else:
            dtype = 'int%d' % (8*self.bytes)

        self.buf = numpy.frombuffer(self.mmap, dtype, nsample, 0)
        self.buf = self.buf.reshape((nframe, self.nchan))

        for i in xrange(self.nchan):
            self.set_analog(i, QubData_Bin_Analog(self, i))

        self.read_session(sess, progressf)


class QubData_LDT_Analog(QubData_Analog):
    # (class statement reconstructed -- the name follows from set_analog() in
    #  QubData_LDT below)
    def __init__(self, data):
        """
        @param data: L{QubData_LDT}
        """
        self.data = data
    def read(self, first, last, latency=0, skip=1):
        ifirst, ilast = [self.data.segmentation.index_at(x) for x in (first, last)]
        if ifirst != ilast:
            raise Exception("can't read() from multiple segments at once")
        segfirst = self.data.segmentation.segments[ifirst][0]
        seglast = self.data.segmentation.segments[ifirst][1]
        return read_with_latency(self.read_in_seg, first, last, latency, skip, ifirst, segfirst, seglast)
    # (read_in_seg is elided in this listing)


class QubData_LDT(QubData):
    # (class statement reconstructed -- the name follows from SetReader below,
    #  which registers the class itself as the '.ldt' reader)
    """One open LDT file (classic QuB data)

    @ivar scaling: float_val = int_val / scaling
    """
    def __init__(self, path, progressf):
        QubData.__init__(self)
        self.path = path
        self.fileno = self.mmap = None
        exists = os.path.exists(path)
        size = 0
        if exists:
            size = os.stat(path)[6]
        if size:
            self.fileno = os.open(path, os.O_RDONLY)
            self.mmap = mmap.mmap(self.fileno, size, access=mmap.ACCESS_READ)
            if not self.mmap:
                raise Exception('failed to open %s' % path)

        self.signals.append({'Name' : 'Current', 'Units' : 'pA'})

        read_val = lambda offset, typ: numpy.frombuffer(self.mmap, typ, 1, offset)[0]
        segpad = read_val(0, '<i4')
        self.sampling = 1e-6 * read_val(4, '<u2')
        self.scaling = 1e-2 * read_val(6, '<u2')
        self.segbuf = []
        offset = 8
        points = 0
        while offset < size:
            start = 1e-3 * read_val(offset, '<i4')
            offset += 4
            nsample = read_val(offset, '<i4') - 2*segpad
            offset += 4
            self.segmentation.add_seg(points, points+nsample-1, start)
            self.segbuf.append(numpy.frombuffer(self.mmap, '<i2', nsample, offset+2*segpad))
            points += nsample
            offset += 2*(nsample+2*segpad)

        self.set_analog(0, QubData_LDT_Analog(self))

        base, ext = os.path.splitext(path)
        sess = qubx.tree.Open(base+'.qsf', True)
        sess.close()
        self.qsf = sess

        self.read_session(sess, progressf)

    def close(self):
        # (method name assumed -- its def line is elided in this listing)
        if self.mmap:
            self.mmap.close()
            self.mmap = None
        if self.fileno:
            os.close(self.fileno)

SetReader('.ldt', 'QuB classic data files', QubData_LDT)
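
# LDT layout as decoded above (little-endian throughout):
#   int32   segpad   -- padding samples at each end of every segment
#   uint16  sampling -- microseconds per sample
#   uint16  scaling  -- 100 * (counts per pA)
# then, per segment:
#   int32   start    -- apparently microseconds (converted to ms above)
#   int32   count    -- stored sample count, including 2*segpad padding samples
#   int16[] samples  -- `count` values; the segpad samples at each end are skipped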


class QubData_Scratch_Analog(QubData_Analog):
    # (class statement reconstructed -- the name follows from set_analog() in
    #  QubData_Scratch.set_tree below)
    """Reads data from a qub express scratch file (simulation)."""
    def __init__(self, data, channel):
        # (reconstructed -- the original __init__ is elided; fields are inferred
        #  from read() and from QubData_Scratch.set_tree, which appends to .segments)
        self.data = data
        self.channel = channel
        self.segments = []  # one numpy array of samples per segment
    def read(self, first, last, latency=0, skip=1):
        ifirst, ilast = [self.data.segmentation.index_at(x) for x in (first, last)]
        if ifirst != ilast:
            raise Exception("scratch: can't read() from multiple segments at once")
        try:
            segfirst = self.data.segmentation.segments[ifirst][0]
            seglast = self.data.segmentation.segments[ifirst][1]
            samples = read_with_latency(self.read_in_seg, first, last, latency, skip, ifirst, segfirst, seglast)
        except:
            # no samples available; substitute low-amplitude gaussian noise
            samples = numpy.array(numpy.random.randn((last-first+1)/skip), copy=True, dtype='float32')
            samples *= 0.15
        return samples
    # (read_in_seg is elided in this listing)

2451 """One open qub express scratch file (simulation); .tree can be repeatedly re-assigned.
2452
2453 @ivar tree: a L{qubx.tree.Node_numpy} in scratch format, or L{qubx.tree.NullNode_numpy}()"""
2470 if x:
2471 self.undoIdl.clear()
2472 nchan = x['SignalCount'].data[0]
2473 while nchan < self.signals.size:
2474 self.signals.remove(self.signals.size-1)
2475
2476 sig = x['Signals'].find('Signal')
2477 for c in xrange(nchan):
2478 if sig:
2479 name, units = str(sig['Name'].data), str(sig['Units'].data)
2480 else:
2481 name, units = ('Signal %i'%(c+1), '')
2482 sig = sig.next('Signal')
2483 if c == self.signals.size:
2484 self.signals.append({'Name' : name, 'Units' : units})
2485 else:
2486 self.signals.set(c, 'Name', name)
2487 self.signals.set(c, 'Units', units)
2488 self.ideal[c].idl.clear()
2489 self.fits[c].idl.clear()
2490 self.set_analog(c, QubData_Scratch_Analog(self, c))
2491 self.sampling = x['Sampling'].data[0]
2492
2493 segm = []
2494 first = 0
2495 for i, seg in enumerate(qubx.tree.children(x['Segments'], 'Segment')):
2496 sample_count = seg['SampleCount'].data[0]
2497 last = first + sample_count - 1
2498 segm.append((first, last, sample_count))
2499 first = last + 1
2500 for c, sig in enumerate(qubx.tree.children(seg['Signals'], 'Signal')):
2501 self.analog[c].segments.append( sig['Samples'].storage.data )
2502 if tuple(segm) != tuple(self.segmentation.segments):
2503 self.segmentation.clear()
2504 for i, fln in enumerate(segm):
2505 self.segmentation.add_seg(fln[0], fln[1], fln[0]*self.sampling*1e3)
2506
2507 for s, sig in enumerate(qubx.tree.children(x['IdealSignals'], 'IdealSignal')):
2508 if sig['SignalIndex'].data:
2509 c = sig['SignalIndex'].data[0]
2510 else:
2511 c = s
2512 for j, seg in enumerate(qubx.tree.children(sig['Segments'], 'Segment')):
2513 self.ideal[c].idl.set_dwells(seg['DwellCount'].data[0],
2514 seg['Firsts'].storage.data,
2515 seg['Lasts'].storage.data,
2516 seg['Classes'].storage.data)
2517 self.ideal[c].seg[j].amp = seg['amp'].data[:]
2518 self.ideal[c].seg[j].std = seg['sd'].data[:]
2519
2520 for con in qubx.tree.children(x['Constants']):
2521 try:
2522 self.constants.set( self.constants.index(con.name), 'Value', con.data[0] )
2523 except:
2524 self.constants.append({'Name' : con.name,
2525 'Value': con.data[0]})
2526 else:
2527 self.segmentation.clear()
2528 self.__tree = x
2529 if self.temppath:
2530 try:
2531 print 'del',self.temppath,'...'
2532 os.remove(self.temppath)
2533 print ' ok'
2534 except:
2535 pass
2536 self.temppath = None
2537 self.OnChangeSamples(self, -1)
2538 self.OnChangeIdealization(self, -1)
2539 self.OnChangeFits(self, -1)
2540 setup_segments_list(self)
2541 tree = property(lambda self: self.__tree, lambda self, x: self.set_tree(x))
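
# Usage sketch: assigning to .tree routes through set_tree(), which re-reads
# signals/segments/idealization and fires the OnChange* events -- this is how
# OpenTXT below hands over its freshly parsed tree:
#
#   datafile = QubData_Scratch(path)
#   datafile.tree = f_out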


class TextStats(object):
    def __init__(self, tokenize, convert, debug=False):
        self.tokenize = tokenize
        self.convert = convert
        self.debug = debug
        self.min_fields = None
        self.max_fields = 0
        self.numeric_lines = 0
        self.headers = []
        self.sampling = None
        self.last_time = None
        self.seg_lens = []
        self.seg_lines = 0
    def score(self):
        # (def line reconstructed -- the name follows from format.score() in OpenTXT below)
        self.col_count = self.max_fields
        self.signal_count = self.col_count - int(bool(self.sampling))
        return (self.max_fields - (self.min_fields or 0)) + self.numeric_lines + int(bool(self.headers)) + int(bool(self.sampling))
    def finish(self):
        # (def line reconstructed -- the name follows from format.finish() in OpenTXT below)
        if self.seg_lines:
            self.seg_lens.append(self.seg_lines)
            self.seg_lines = 0
        self.col_count = self.max_fields
        self.signal_count = self.col_count - int(bool(self.sampling))
    def add(self, line):
        fields = self.tokenize(line)
        if len(fields) and fields[0]:
            if (self.min_fields is None) or (self.min_fields > len(fields)):
                self.min_fields = len(fields)
            self.max_fields = max(self.max_fields, len(fields))
            try:
                if self.debug:
                    print fields
                nums = [self.convert(x) for x in [xx.strip() for xx in fields] if x]
                if self.debug:
                    print 'nums:', nums
                self.seg_lines += 1
                self.numeric_lines += 1
                if self.sampling is None:
                    if not (self.last_time is None):
                        self.sampling = nums[0] - self.last_time
                elif self.sampling:
                    if (not (self.last_time is None)) and (abs(self.sampling - (nums[0] - self.last_time)) > (self.sampling*0.1)):
                        self.sampling = 0.0
                self.last_time = nums[0]
            except ValueError:
                if fields and not self.numeric_lines:
                    if self.debug:
                        print 'headers:', fields
                    self.headers = fields
        else:
            self.last_time = None
            if self.seg_lines:
                self.seg_lens.append(self.seg_lines)
                self.seg_lines = 0

def float_decimal_comma(s):
    # (def line reconstructed -- the name follows from its use in OpenTXT's format list)
    return float(s.replace(',', '.'))
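
# Usage sketch (illustrative): feed the same lines to several candidate
# (tokenizer, converter) pairs and keep the best scorer, as OpenTXT does below.
def _example_sniff(lines):
    candidates = [TextStats(lambda s: s.split('\t'), float),
                  TextStats(lambda s: s.split('\t'), float_decimal_comma)]
    for line in lines:
        for stats in candidates:
            stats.add(line)
    best = max(candidates, key=lambda stats: stats.score())
    best.finish()  # flush the final segment's line count
    return best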

def OpenTXT(path, progressf):
    # (def line reconstructed -- the original is elided; the name and signature
    #  follow from SetReader('.txt', ..., OpenTXT) below)
    tok_tab = lambda s: s.split('\t')
    re_comma = re.compile(r", *")
    tok_comma = lambda s: re_comma.split(s)
    re_spaces = re.compile(r" +")
    tok_spaces = lambda s: re_spaces.split(s)
    formats = [TextStats(tok_tab, float), TextStats(tok_tab, float_decimal_comma),
               TextStats(tok_comma, float), TextStats(tok_comma, float_decimal_comma),
               TextStats(tok_spaces, float), TextStats(tok_spaces, float_decimal_comma)]
    lines = [line.strip() for line in open(path, 'r')]
    for line in lines[:min(len(lines), 10)]:
        for format in formats:
            format.add(line)
    if not [format for format in formats if format.numeric_lines]:
        return QubData_Bin(path, progressf)
    scores = numpy.array([format.score() for format in formats])
    format = formats[numpy.argmax(scores)]
    if len(lines) > 10:
        for iline, line in enumerate(lines[10:]):
            if (not (iline % 1000)) and (not progressf(float(iline)/(2*len(lines)))):
                return None
            format.add(line)
    format.finish()
    iline = 0
    tokenize = format.tokenize
    convert = format.convert
    time_col = int(bool(format.sampling))
    f_out = qubx.tree.Node('QubData')

    base, ext = os.path.splitext(path)
    sess = qubx.tree.Open(base+'.qsf', True)
    sess.close()
    sampling = sess['Sampling'].data and sess['Sampling'].data[0] or 1e-4
    sampling = format.sampling or RequestSampling(sampling)
    f_out['Sampling'].data = sess['Sampling'].data = sampling

    f_out['SignalCount'].data = format.signal_count
    signals = f_out['Signals']
    for s in xrange(format.signal_count):
        sig = signals.append('Signal')
        sig['Name'].data = ((s + time_col) < len(format.headers)) and format.headers[s+time_col] or ('ch%d' % (s+1))
    segments = f_out['Segments']
    seg = qubx.tree.NullNode()
    prog_n = numpy.sum

    for seg_len in format.seg_lens:
        seg = segments.insert('Segment', seg)
        seg['SampleCount'].data = seg_len
        signals = seg['Signals']
        sig_storage = []
        for s in xrange(format.signal_count):
            sig = signals.append('Signal')['Samples']
            sig.data.setup(qubx.tree.QTR_TYPE_FLOAT, seg_len, 1)
            sig_storage.append(sig)
        for s in xrange(format.signal_count):
            sig_storage[s] = sig_storage[s].storage.data
        isample = 0
        while isample < seg_len:
            try:
                if (not (iline % 1000)) and (not progressf(0.5 + float(iline)/(len(lines)))):
                    return None
                line = lines[iline]
                iline += 1
                tokens = [x for x in [t.strip() for t in tokenize(line)] if x]
                if len(tokens) > time_col:
                    for s, x in enumerate(convert(field) for field in tokens[time_col:]):
                        sig_storage[s][isample] = x
                    isample += 1
            except ValueError:
                pass

    datafile = QubData_Scratch(path)
    datafile.tree = f_out

    datafile.read_session(sess, progressf)

    return datafile

SetReader('.txt', 'Text files', OpenTXT)
SetReader('.csv', 'Comma-separated files', OpenTXT)

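# Usage sketch: the module-level Open() dispatches on extension, so text data
# opens through this reader with the same call as any other format:
#
#   data = Open('trace.csv', progressf=lambda frac: True)   # path illustrative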

def OpenDWT(path, progressf):
    # (def line reconstructed -- the original is elided; the name follows from
    #  SetReader('.dwt', ..., OpenDWT) below)
    data = QubData()
    data.path = path
    data.signals.append({"Name" : "Current", "Units" : "pA"})

    sampling = [0.0]
    first = [0]
    segi = [0]
    class_count = 10
    amps = [1.0*i for i in xrange(class_count)]
    stds = [0.1] * class_count
    classes = []
    durations = []
    iDwell = 0

    def set_seg_dwells():
        if not classes:
            return
        cc = numpy.array(classes, dtype='int32')
        dd = numpy.array(durations, dtype='float32')
        ff = numpy.zeros(shape=cc.shape, dtype='int32')
        ll = numpy.zeros(shape=cc.shape, dtype='int32')
        aa = [x for x in amps]
        ss = [x for x in stds]
        sampling[0] = sampling[0] or numpy.min(dd)
        data.sampling = sampling[0] * 1e-3
        dw_samp_ct = numpy.array(dd, copy=True)
        dw_samp_ct /= sampling[0]
        dw_samp_ct += 0.5
        dw_samp_ct = dw_samp_ct.astype('int32')
        f = first[0]
        for i in xrange(len(cc)):
            nextf = f + dw_samp_ct[i]
            ff[i] = f
            ll[i] = nextf - 1
            f = nextf
        data.segmentation.add_seg(first[0], f-1, first[0]*data.sampling*1e3)
        data.ideal[0].idl.set_dwells(len(cc), ff, ll, cc)
        data.ideal[0].seg[segi[0]].amp = aa
        data.ideal[0].seg[segi[0]].std = ss
        first[0] = f
        segi[0] = segi[0] + 1

    hdr_pat = re.compile(r"\S+\s+(\d+)\s+\S+\s+(\d+)(.*)")
    hdr_ext_pat = re.compile(r"\s+\S+\s+([0-9.+-eE]+)\s+\S+\s+\S+\s+\S+\s+(\d+)(.*)")
    amp_std_pat = re.compile(r"\s+([0-9.+-eE]+)\s+([0-9.+-eE]+)(.*)")
    dwell_pat = re.compile(r"^\s*(\d+)\s([0-9.+-eE]+)")
    for line in open(path, "r"):
        match = dwell_pat.match(line)
        if match:
            if len(classes) <= iDwell:
                classes.append(int(match.group(1)))
                durations.append(float(match.group(2)))
            else:
                classes[iDwell] = int(match.group(1))
                durations[iDwell] = float(match.group(2))
            iDwell += 1
            continue
        match = hdr_pat.match(line)
        if match:
            set_seg_dwells()
            dwell_count = int(match.group(2))
            classes = [-1] * dwell_count
            durations = [1.0] * dwell_count
            iDwell = 0
            match = hdr_ext_pat.match(match.group(3))
            if match:
                if not sampling[0]:
                    sampling[0] = float(match.group(1))
                class_count = int(match.group(2))
                tail = match.group(3)
                amps = [1.0*i for i in xrange(class_count)]  # per-class levels read by set_seg_dwells above
                stds = [0.1] * class_count
                for i in xrange(class_count):
                    match = amp_std_pat.match(tail)
                    if not match:
                        break
                    amps[i] = float(match.group(1))
                    stds[i] = float(match.group(2))
                    tail = match.group(3)
    set_seg_dwells()

    return data

SetReader('.dwt', 'QuB idealized data', OpenDWT)
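
# Example (illustrative) of the .dwt layout matched by the patterns above --
# one header per segment, then tab-separated class/duration(ms) pairs:
#
#   Segment: 1 Dwells: 3 Sampling(ms): 0.1 Start(ms): 0 ClassCount: 2 0.0 0.1 1.0 0.1
#   <TAB>0<TAB>12.5
#   <TAB>1<TAB>3.2
#   <TAB>0<TAB>7.8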


class DataTable(ObjectTable):
    # (class statement reconstructed -- the original line is elided and this
    #  name is hypothetical)
    """A table-like collection of open files."""
    def __init__(self):
        ObjectTable.__init__(self, ' Data', global_name='QubX.Data.table')
        self.add_field('Name', "", acceptNothing, '%s', '', get=self.get_name, set=None)
        self.add_field('In Folder', '', acceptNothing, '%s', '', get=self.get_folder, set=None)
    # (get_name and get_folder are elided in this listing)


if __name__ == "__main__":
    import sys
    qubx.tree.CHOOSE_FLAVOR('numpy')
    # progressf and sess arguments supplied to match QubData_QDF.__init__ above
    data = QubData_QDF(sys.argv[1], lambda frac: True, qubx.tree.NullNode())
    for s in xrange(data.signals.size):
        print '------------------ Signal %i --------------------------------------------' % (s+1)
        i = 0
        for f, l, n in data.segmentation.segments:
            firsts, lasts, classes, durations = data.ideal[s].idl.get_dwells(f, l, False, True)
            amp, std = data.ideal[s].seg[i].amp, data.ideal[s].seg[i].std
            print ("Segment: %i Dwells: %i Sampling(ms): %f Start(ms): %f ClassCount: %i"
                   % (i+1, len(firsts), data.sampling*1e3, f*data.sampling*1e3, len(amp))),
            for c in xrange(len(amp)):
                print amp[c], std[c],
            print
            for d in xrange(len(firsts)):
                print "\t%i\t%f" % (classes[d], durations[d])
            i += 1