Package qubx :: Module extract

Source Code for Module qubx.extract

   1  """ 
   2  Panel to chop, process and save extracts of data. 
   3   
   4  Copyright 2008-2013 Research Foundation State University of New York  
   5  This file is part of QUB Express.                                           
   6   
   7  QUB Express is free software; you can redistribute it and/or modify           
   8  it under the terms of the GNU General Public License as published by  
   9  the Free Software Foundation, either version 3 of the License, or     
  10  (at your option) any later version.                                   
  11   
  12  QUB Express is distributed in the hope that it will be useful,                
  13  but WITHOUT ANY WARRANTY; without even the implied warranty of        
  14  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the         
  15  GNU General Public License for more details.                          
  16   
  17  You should have received a copy of the GNU General Public License,    
  18  named LICENSE.txt, in the QUB Express program directory.  If not, see         
  19  <http://www.gnu.org/licenses/>.                                       
  20   
  21  """ 
  22   
  23  import gobject 
  24  import gtk 
  25  from gtk import gdk 
  26  from itertools import izip 
  27  import numpy 
  28  import os 
  29  import re 
  30   
  31  import qubx.data_types 
  32  import qubx.faces 
  33  import qubx.GTK 
  34  import qubx.pyenv 
  35  import qubx.pyenvGTK 
  36  import qubx.settings 
  37  import qubx.settingsGTK 
  38  import qubx.table 
  39  import qubx.tree 
  40  from qubx.accept import * 
  41  from qubx.GTK import pack_item, pack_space, pack_hsep, pack_vsep, pack_label, pack_button, pack_check, pack_radio, pack_scrolled, build_menuitem 
  42  from qubx.settings import Property, Propertied 
  43  from qubx.util_types import * 
  44   
  45  SOURCE_DATASOURCE, SOURCE_LIST, SOURCE_OTHER, SOURCE_FILEGROUP = (0, 1, 2, 3) 
  46  OTHER_SEG_ALL, OTHER_SEG_ONSCREEN, OTHER_SEG_GROUP = (0, 1, 2) 
  47  OTHER_SEL_ALL, OTHER_SEL_ONSCREEN = (0, 1) 
  48  SIGNALS_ALL, SIGNALS_SOURCE, SIGNALS_CUSTOM = (0, 1, 2) 
  49  PROC_NONE, PROC_AVG, PROC_ADD, PROC_SUB = (0, 1, 2, 3) 
  50  SUBTRACT_FIT_NONE, SUBTRACT_FIT_CURVE, SUBTRACT_FIT_NODES = (0, 1, 2) 
  51   
  52  RADIO_SPACE = 16 
  53  CHKS_SPACE = 24 
  54   
  55  @Propertied(Property('source', SOURCE_DATASOURCE, "where to initiate extraction from", 
  56                       value_names={SOURCE_DATASOURCE : 'SOURCE_DATASOURCE', SOURCE_LIST : 'SOURCE_LIST', SOURCE_OTHER : 'SOURCE_OTHER', 
  57                                    SOURCE_FILEGROUP : 'SOURCE_FILEGROUP'}), 
  58              Property('other_seg', OTHER_SEG_ALL, "source == SOURCE_OTHER: which set of segments", 
  59                       value_names={OTHER_SEG_ALL : 'OTHER_SEG_ALL', OTHER_SEG_ONSCREEN : 'OTHER_SEG_ONSCREEN', OTHER_SEG_GROUP : 'OTHER_SEG_GROUP'}), 
  60              Property('other_seg_group', 0, "other_seg == OTHER_SEG_GROUP: which group of segments"), 
   61              Property('other_sel', OTHER_SEL_ONSCREEN, "source == SOURCE_OTHER: which portion of each segment", 
  62                       value_names={OTHER_SEL_ALL : 'OTHER_SEL_ALL', OTHER_SEL_ONSCREEN : 'OTHER_SEL_ONSCREEN'}), 
  63              Property('file_group', 0, "source == SOURCE_FILEGROUP: which group of files"), 
  64              Property('file_group_by_name', True, "False to match signals by index"), 
  65              Property('limit_to_group', False, 'True to filter the source list by Group column'), 
  66              Property('limit_group', 1, 'Selected Group index, if limit_to_group is True'), 
  67              Property('join_segments', False, 'True to concatenate segments into one'), 
  68              Property('signals', SIGNALS_ALL, "which signals to extract", 
  69                       value_names={SIGNALS_ALL : 'SIGNALS_ALL', SIGNALS_SOURCE : 'SIGNALS_SOURCE', SIGNALS_CUSTOM : 'SIGNALS_CUSTOM'}), 
  70              Property('signals_custom', "", "signals == SIGNALS_CUSTOM: semicolon-separated list of signal names or expressions"), 
  71              Property('no_filter', True, 'False to use "Data Source" filter settings'), 
  72              Property('subtract_fit', SUBTRACT_FIT_NONE, 'which type of baseline subtraction', 
  73                       value_names={SUBTRACT_FIT_NONE : 'SUBTRACT_FIT_NONE', SUBTRACT_FIT_CURVE : 'SUBTRACT_FIT_CURVE', SUBTRACT_FIT_NODES : 'SUBTRACT_FIT_NODES'}), 
  74              Property('resample', False, 'whether to resample output'), 
  75              Property('resample_kHz', 1.0, 'output sampling rate, if resample'), 
  76              Property('process', PROC_NONE, 'whether to average, add or subtract traces', 
  77                       value_names={PROC_NONE : 'PROC_NONE', PROC_AVG : 'PROC_AVG', PROC_ADD : 'PROC_ADD', PROC_SUB : 'PROC_SUB'}), 
  78              Property('process_sub_trace', 1, 'process == PROC_SUB: 1-based index of trace to subtract from the rest'), 
  79              Property('process_sub_from_file', False, 'process == PROC_SUB: whether the subtracted trace is from an external file', in_tree=False), 
  80              Property('process_sub_file_ix', 0, 'index of external subtracted trace, in Data table', in_tree=False), 
  81              Property('process_all', False, 'False to process only the active (Data Source) signal'), 
  82              Property('path_expr', "%(path)s/%(name)s_ext.qdf", "output filename in terms of %(path)s and %(name)s"), 
  83              Property('open_after', True, "whether to open the newly saved file") 
  84              ) 
  85  class ExtractFace(qubx.faces.Face): 
  86      __explore_featured = ['click_save', 'save'] 
  87      def __init__(self, name='Extract', global_name='QubX.Tools.Extract'): 
88 qubx.faces.Face.__init__(self, name, global_name) 89 self.__ref = Reffer() 90 self.propertied_connect_settings('Extract') 91 self.__base_path = self.__base_name = "" 92 self.__data = None 93 self.__changing_custom_signals = False 94 95 self.scrolled = gtk.VBox() 96 self.scroll = pack_scrolled(self.scrolled, self, with_vp=True, expand=True) 97 panel = pack_item(gtk.HBox(), self.scrolled) 98 v = pack_item(gtk.VBox(), panel) 99 pack_label('Source:', v) 100 pack_space(panel, x=6) 101 v = pack_item(gtk.VBox(), panel, expand=True) 102 vh = pack_item(gtk.HBox(), v) 103 pack_space(vh, x=RADIO_SPACE) 104 self.chkSourceDS = pack_radio('Data Source', vh) 105 pack_space(vh, x=RADIO_SPACE) 106 self.chkSourceList = pack_radio('List', vh, group=self.chkSourceDS) 107 self.chkSourceList.set_tooltip_text('Selections from the "List" table') 108 pack_space(vh, x=RADIO_SPACE) 109 self.chkSourceFileGroup = pack_radio('Files in Group:', vh, group=self.chkSourceDS) 110 self.chkSourceFileGroup.set_tooltip_text('All segments of multiple files, chosen by "Group" in the Data table') 111 self.txtFileGroup = pack_item(qubx.GTK.NumEntry(self.file_group, acceptIntGreaterThanOrEqualTo(0), width_chars=3), vh) 112 self.propertied_connect_NumEntry('file_group', self.txtFileGroup) 113 self.panSourceFileGroup = pack_item(gtk.HBox(), v, show=(self.source == SOURCE_FILEGROUP)) 114 pack_label('Match signals:', self.panSourceFileGroup) 115 pack_space(self.panSourceFileGroup, x=RADIO_SPACE) 116 self.chkMatchByName = pack_radio('by Name', self.panSourceFileGroup) 117 pack_space(self.panSourceFileGroup, x=RADIO_SPACE) 118 self.chkMatchByIndex = pack_radio('by Index', self.panSourceFileGroup, group=self.chkMatchByName) 119 self.propertied_connect_radios('file_group_by_name', [(True, self.chkMatchByName), (False, self.chkMatchByIndex)]) 120 pack_space(vh, x=RADIO_SPACE) 121 self.chkSourceOther = pack_radio('Other...', vh, group=self.chkSourceDS) 122 self.propertied_connect_radios('source', [(SOURCE_DATASOURCE, self.chkSourceDS), 123 (SOURCE_LIST, self.chkSourceList), 124 (SOURCE_OTHER, self.chkSourceOther), 125 (SOURCE_FILEGROUP, self.chkSourceFileGroup)]) 126 self.mnuPresets = pack_item(qubx.settingsGTK.PresetsMenu('Extract', 'QubX'), vh, at_end=True) 127 self.panSourceOther = pack_item(gtk.VBox(), v, show=(self.source == SOURCE_OTHER)) 128 h = pack_item(gtk.HBox(), self.panSourceOther) 129 pack_label('Segments:', h) 130 pack_space(h, x=RADIO_SPACE) 131 self.chkOtherSegAll = pack_radio('All', h) 132 pack_space(h, x=RADIO_SPACE) 133 self.chkOtherSegOnscreen = pack_radio('On screen', h, group=self.chkOtherSegAll) 134 pack_space(h, x=RADIO_SPACE) 135 self.chkOtherSegGroup = pack_radio('Group', h, group=self.chkOtherSegAll) 136 self.propertied_connect_radios('other_seg', [(OTHER_SEG_ALL, self.chkOtherSegAll), 137 (OTHER_SEG_ONSCREEN, self.chkOtherSegOnscreen), 138 (OTHER_SEG_GROUP, self.chkOtherSegGroup)]) 139 self.txtOtherSegGroup = pack_item(qubx.GTK.NumEntry(self.other_seg_group, acceptIntGreaterThanOrEqualTo(0), width_chars=3), h) 140 self.propertied_connect_NumEntry('other_seg_group', self.txtOtherSegGroup) 141 self.colorsOtherSegGroup = pack_item(qubx.toolspace.Palette(vertical=False), h, expand=True) 142 self.colorsOtherSegGroup.set_tooltip_text("To assign groups, use the Charts panel or edit the Segments table") 143 self.colorsOtherSegGroup.color = self.other_seg_group 144 self.colorsOtherSegGroup.OnClickColor += self.__ref(self.__onClickOtherSegGroupColor) 145 pack_space(h, x=RADIO_SPACE) 146 h = pack_item(gtk.HBox(), 
self.panSourceOther) 147 pack_label('Selection:', h) 148 pack_space(h, x=RADIO_SPACE) 149 self.chkOtherSelAll = pack_radio('Whole segment', h) 150 pack_space(h, x=RADIO_SPACE) 151 self.chkOtherSelOnscreen = pack_radio('On screen', h, group=self.chkOtherSelAll) 152 pack_space(h, x=RADIO_SPACE) 153 self.propertied_connect_radios('other_sel', [(OTHER_SEL_ALL, self.chkOtherSelAll), 154 (OTHER_SEL_ONSCREEN, self.chkOtherSelOnscreen)]) 155 156 h = pack_item(gtk.HBox(), self.scrolled) 157 pack_space(h, x=RADIO_SPACE) 158 self.chkLimitToGroup = pack_check('Limit to segments (selections) in Group:', h) 159 self.propertied_connect_check('limit_to_group', self.chkLimitToGroup) 160 self.colorsLimitGroup = pack_item(qubx.toolspace.Palette(vertical=False), h, expand=True) 161 self.colorsLimitGroup.set_tooltip_text("To assign groups, edit the Group column in the Segments (or List) table, or Plot and assign colors") 162 self.colorsLimitGroup.color = self.limit_group 163 self.colorsLimitGroup.OnClickColor += self.__ref(self.__onClickLimitGroupColor) 164 165 pack_hsep(7, self.scrolled) 166 167 panel = pack_item(gtk.HBox(), self.scrolled) 168 pack_label('Signals:', panel) 169 pack_space(panel, x=RADIO_SPACE) 170 self.chkSignalsAll = pack_radio('All', panel) 171 pack_space(panel, x=RADIO_SPACE) 172 self.chkSignalsSource = pack_radio('Source', panel, group=self.chkSignalsAll) 173 pack_space(panel, x=RADIO_SPACE) 174 self.chkSignalsCustom = pack_radio('Custom:', panel, group=self.chkSignalsAll) 175 self.propertied_connect_radios('signals', [(SIGNALS_ALL, self.chkSignalsAll), 176 (SIGNALS_SOURCE, self.chkSignalsSource), 177 (SIGNALS_CUSTOM, self.chkSignalsCustom)]) 178 self.txtSignalsCustom = pack_item(SignalMenuEntry(), panel, expand=True) 179 self.txtSignalsCustom.OnChange += self.__ref(self.__onChangeCustomSignal) 180 self.chkNoFilter = pack_check('No filter', panel) 181 self.propertied_connect_check('no_filter', self.chkNoFilter) 182 183 panel = pack_item(gtk.HBox(), self.scrolled) 184 pack_label('Process:', panel) 185 pack_space(panel, x=RADIO_SPACE) 186 self.chkProcNone = pack_radio('None', panel) 187 pack_space(panel, x=RADIO_SPACE) 188 self.chkProcAvg = pack_radio('Average', panel, group=self.chkProcNone) 189 pack_space(panel, x=RADIO_SPACE) 190 self.chkProcAdd = pack_radio('Add', panel, group=self.chkProcNone) 191 pack_space(panel, x=RADIO_SPACE) 192 self.chkProcSub = pack_radio('Subtract trace', panel, group=self.chkProcNone) 193 self.propertied_connect_radios('process', [(PROC_NONE, self.chkProcNone), 194 (PROC_AVG, self.chkProcAvg), 195 (PROC_ADD, self.chkProcAdd), 196 (PROC_SUB, self.chkProcSub)]) 197 self.txtProcSubTrace = pack_item(qubx.GTK.NumEntry(self.process_sub_trace, acceptIntGreaterThan(0), width_chars=4), panel) 198 self.txtProcSubTrace.set_sensitive(self.process == PROC_SUB) 199 self.propertied_connect_NumEntry('process_sub_trace', self.txtProcSubTrace) 200 self.chkProcSubFromFile = pack_check('of file:', panel) 201 self.propertied_connect_check('process_sub_from_file', self.chkProcSubFromFile) 202 self.mnuProcSubFile = pack_item(qubx.GTK.DynamicComboBox(), panel, expand=True) 203 self.mnuProcSubFile.OnPopulate += self.__ref(self.__onPopulateSubFile) 204 self.mnuProcSubFile.OnChanged += self.__ref(self.__onSubFileChanged) 205 self.chkProcessAll = pack_check('all signals', panel, at_end=True) 206 self.chkProcessAll.set_sensitive(self.process) 207 self.propertied_connect_check('process_all', self.chkProcessAll) 208 209 panel = pack_item(gtk.HBox(), self.scrolled) 210 pack_label('Subtract 
baseline:', panel) 211 pack_space(panel, x=RADIO_SPACE) 212 self.chkSubtractFitNone = pack_radio('None', panel) 213 self.chkSubtractFitNone.set_tooltip_text('"segment baseline" is always subtracted') 214 pack_space(panel, x=RADIO_SPACE) 215 self.chkSubtractFitCurve = pack_radio('Fit curve', panel, group=self.chkSubtractFitNone) 216 pack_space(panel, x=RADIO_SPACE) 217 self.chkSubtractFitNodes = pack_radio('Baseline nodes', panel, group=self.chkSubtractFitNone) 218 pack_space(panel, x=CHKS_SPACE) 219 self.propertied_connect_radios('subtract_fit', [(SUBTRACT_FIT_NONE, self.chkSubtractFitNone), 220 (SUBTRACT_FIT_CURVE, self.chkSubtractFitCurve), 221 (SUBTRACT_FIT_NODES, self.chkSubtractFitNodes)]) 222 pack_vsep(10, panel) 223 pack_space(panel, x=CHKS_SPACE) 224 self.chkResample = pack_check('Resample:', panel) 225 self.propertied_connect_check('resample', self.chkResample) 226 self.txtResample = pack_item(qubx.GTK.NumEntry(self.resample_kHz, acceptFloatGreaterThan(0.0), '%.3g', width_chars=6), panel) 227 self.propertied_connect_NumEntry('resample_kHz', self.txtResample) 228 pack_label(' kHz', panel) 229 230 h = pack_item(gtk.HBox(), self.scrolled) 231 self.chkJoin = pack_check('Join segments', h) 232 self.propertied_connect_check('join_segments', self.chkJoin) 233 234 panel = pack_item(gtk.HBox(), self.scrolled) 235 pack_label('Save as:', panel) 236 pack_space(panel, x=RADIO_SPACE) 237 self.txtPath = pack_item(qubx.GTK.NumEntry(''), panel, expand=True) 238 self.txtPath.OnChange += self.__ref(self.__onChangePath) 239 self.btnBrowse = pack_button('...', panel, on_click=self.__onClickBrowse) 240 self.btnSave = pack_button('Save', panel, on_click=self.__onClickSave) 241 242 panel = pack_item(gtk.HBox(), self.scrolled) 243 self.chkOpenAfter = pack_check('open after saving', panel, at_end=True) 244 self.propertied_connect_check('open_after', self.chkOpenAfter) 245 246 QubX = qubx.pyenv.env.globals['QubX'] 247 QubX.Data.OnSwitch += self.__ref(self.__onSwitchData) 248 self.__onSwitchData(QubX.Data, QubX.Data.file)
 249   
 250      def __onSwitchData(self, Data, file): 
 251          if self.__data and self.__data.signals: 
 252              self.__view.signals.OnInsert -= self.__ref(self.__onInsertSignal) 
 253              self.__view.signals.OnRemoving -= self.__ref(self.__onRemovingSignal) 
 254              self.__view.signals.OnSet -= self.__ref(self.__onSetSignal) 
 255          path, name = os.path.split(os.path.splitext(file.path)[0]) 
 256          self.__base_path = path or self.__base_path or qubx.pyenv.env.globals['documents_path'] 
 257          self.__base_name = name.replace('<', '').replace('>', '') 
 258          self.path_expr = self.path_expr 
 259          if self.__data and self.__data.signals and file.signals and self.showing: 
 260              self.__preserve_signals_custom(file, Data.view) 
 261          elif file.signals: 
 262              self.__replace_signals_custom(file, Data.view) 
 263          self.__data = file 
 264          self.__view = Data.view 
 265          if self.__data and self.__data.signals: 
 266              self.__view.signals.OnInsert += self.__ref(self.__onInsertSignal) 
 267              self.__view.signals.OnRemoving += self.__ref(self.__onRemovingSignal) 
 268              self.__view.signals.OnSet += self.__ref(self.__onSetSignal) 
 269      def __onInsertSignal(self, i, undoing): 
 270          if i < 1: return 
 271          self.__changing_custom_signals = True 
 272          self.txtSignalsCustom.insert(i-1, self.__view.signals[i, 'Name'], True) 
 273          self.__changing_custom_signals = False 
 274      def __onRemovingSignal(self, i, undoing): 
 275          if i < 1: return 
 276          self.__changing_custom_signals = True 
 277          self.txtSignalsCustom.remove(i-1) 
 278          self.__changing_custom_signals = False 
 279      def __onSetSignal(self, i, field, val, prev, undoing): 
 280          if i < 1: return 
 281          if field == 'Name': 
 282              self.__changing_custom_signals = True 
 283              self.txtSignalsCustom.items[i-1].label = val 
 284              self.__changing_custom_signals = False 
 285              self.__read_signals_custom() 
 286      def __read_signals_custom(self, by_user=False): 
 287          if by_user: 
 288              self.propertied_on_user_set('signals_custom', self.txtSignalsCustom.get_text()) 
 289          else: 
 290              new_signals = self.txtSignalsCustom.get_text() 
 291              if new_signals != self.signals_custom: 
 292                  self.signals_custom = new_signals 
 293      def __preserve_signals_custom(self, file, view, activate_new_signals=True): 
 294          self.__changing_custom_signals = True 
 295          old = dict((item.label, item.active) for item in self.txtSignalsCustom.items) 
 296          self.txtSignalsCustom.clear() 
 297          for s in xrange(1, view.signals.size): 
 298              name = view.signals.get(s, 'Name') 
 299              if name in old: 
 300                  active = old[name] 
 301              else: 
 302                  active = activate_new_signals 
 303              self.txtSignalsCustom.append(name, active) 
 304          self.__changing_custom_signals = False 
 305          self.__read_signals_custom() 
 306      def __replace_signals_custom(self, file, view): 
 307          self.__changing_custom_signals = True 
 308          self.txtSignalsCustom.clear() 
 309          for s in xrange(1, view.signals.size): 
 310              self.txtSignalsCustom.append(view.signals.get(s, 'Name'), True) 
 311          self.__changing_custom_signals = False 
 312          self.__read_signals_custom() 
 313      def propertied_set(self, value, name): 
 314          if name == 'signals_custom':  # rewrite old comma-separated prefs 
 315              if (', ' in value) and not (('(' in value) or ('[' in value) or ('{' in value)):  # they'd only need a comma in combo with parens/brackets? 
 316                  value = value.replace(',', ';') 
 317          super(ExtractFace, self).propertied_set(value, name) 
 318          if name == 'source': 
 319              if value == SOURCE_OTHER: 
 320                  self.panSourceOther.show() 
 321              else: 
 322                  self.panSourceOther.hide() 
 323              if value == SOURCE_FILEGROUP: 
 324                  self.panSourceFileGroup.show() 
 325                  self.txtFileGroup.set_sensitive(True) 
 326              else: 
 327                  self.panSourceFileGroup.hide() 
 328                  self.txtFileGroup.set_sensitive(False) 
 329          elif name == 'other_seg_group': 
 330              self.txtOtherSegGroup = value 
 331              self.colorsOtherSegGroup.color = value 
 332              self.other_seg = OTHER_SEG_GROUP 
 333          elif name == 'signals': 
 334              self.txtSignalsCustom.set_sensitive(value == SIGNALS_CUSTOM) 
 335          elif name == 'signals_custom': 
 336              labels = re.split(r"; *", value)  # only these active 
 337              is_custom = False 
 338              for lbl in labels: 
 339                  if not re.match(r'^[a-zA-Z_0-9]+$', lbl): 
 340                      is_custom = True 
 341                      break 
 342              self.__changing_custom_signals = True 
 343              if is_custom: 
 344                  self.txtSignalsCustom.custom_val = value 
 345              else: 
 346                  for index, item in enumerate(self.txtSignalsCustom.items): 
 347                      if (item.label in labels) != item.active: 
 348                          self.txtSignalsCustom.set_active(index, not item.active) 
 349              self.__changing_custom_signals = False 
 350              self.__read_signals_custom() 
 351          elif name == 'process': 
 352              self.chkProcessAll.set_sensitive(value) 
 353              self.txtProcSubTrace.set_sensitive(value == PROC_SUB) 
 354              self.chkProcSubFromFile.set_sensitive(value == PROC_SUB) 
 355          elif name == 'process_sub_use_file': 
 356              self.mnuProcSubFile.set_sensitive((self.process == PROC_SUB) and value) 
 357          elif name == 'process_sub_file_ix': 
 358              self.mnuProcSubFile.active_text = os.path.split(qubx.global_namespace.QubX.Data.table[value, 'Name'])[1] 
 359          elif name == 'resample': 
 360              self.txtResample.set_sensitive(value) 
 361              self.chkNoFilter.set_sensitive(not value) 
 362          elif name == 'path_expr': 
 363              self.txtPath.value = value % {'path' : self.__base_path, 
 364                                            'name' : self.__base_name} 
 365   
 366      def __onClickOtherSegGroupColor(self, palette, color): 
 367          self.propertied_on_user_set('other_seg_group', color) 
 368      def __onClickLimitGroupColor(self, palette, color): 
 369          self.propertied_on_user_set('limit_group', color) 
 370      def __onChangeCustomSignal(self, chks, val): 
 371          self.__read_signals_custom(by_user=not self.__changing_custom_signals) 
 372      def __onPopulateSubFile(self, add): 
 373          for entry in qubx.global_namespace.QubX.Data.table: 
 374              add(os.path.split(entry.Name)[1]) 
 375      def __onSubFileChanged(self, mnu, txt): 
 376          mnu = self.mnuProcSubFile 
 377          try: 
 378              if os.path.split(qubx.global_namespace.QubX.Data.table[mnu.active_i, 'Name'])[1] == mnu.active_text: 
 379                  self.propertied_on_user_set('process_sub_file_ix', mnu.active_i) 
 380                  return 
 381          except: 
 382              pass 
 383          for entry in qubx.global_namespace.QubX.Data.table: 
 384              if os.path.split(entry.Name)[1] == mnu.active_text: 
 385                  self.propertied_on_user_set('process_sub_file_ix', entry.Index) 
 386                  break 
 387      def __onChangePath(self, txt, val): 
 388          if not val: 
 389              self.propertied_on_user_set('path_expr', os.path.join('%(path)s', '%(name)s_ext.qdf')) 
 390          else: 
 391              new_path, new_name = os.path.split(val) 
 392              new_path = new_path.replace(self.__base_path, '%(path)s') 
 393              new_name = new_name.replace(self.__base_name, '%(name)s') 
 394              if new_name and not os.path.splitext(new_name)[1]: 
 395                  new_name = new_name + '.qdf' 
 396              self.propertied_on_user_set('path_expr', os.path.join(new_path or '%(path)s', new_name or '%(name)s_ext.qdf')) 
 397      def __onClickBrowse(self, btn): 
 398          def do_save(fname): 
 399              self.__onChangePath(self.txtPath, fname) 
 400          path, name = os.path.split(self.txtPath.value) 
 401          qubx.GTK.SaveAs('Save extracted data as...', path, name, do_save, parent=self.parent_window, 
 402                          filters=qubx.pyenv.env.globals['QubX'].Data.get_save_filters()) 
 403      def click_save(self, fname=None, silent=False): 
 404          self.__onClickSave(self.btnSave, silent=silent, fname=fname) 
 405      def __onClickSave(self, btn, fname=None, silent=False): 
 406          path = fname or self.txtPath.value 
 407          QubX = qubx.pyenv.env.globals['QubX'] 
 408          for other_view in QubX.Data.views: 
 409              if other_view.file != QubX.Data.file: 
 410                  if other_view.file.path == path: 
 411                      if silent: 
 412                          print "Didn't extract to %s:\n - the file is already open." 
 413                      else: 
 414                          mdlg = gtk.MessageDialog(parent=self.parent_window, 
 415                                                   buttons=gtk.BUTTONS_OK, 
 416                                                   flags=gtk.DIALOG_MODAL, 
 417                                                   message_format="The file at that location is in use. Please close it first.") 
 418                          mdlg.run() 
 419                          mdlg.destroy() 
 420                      return 
 421          if os.path.exists(path) and not silent: 
 422              mdlg = gtk.MessageDialog(parent=self.parent_window, 
 423                                       buttons=gtk.BUTTONS_YES_NO, 
 424                                       flags=gtk.DIALOG_MODAL, 
 425                                       message_format="There is already a file named %s. Overwrite it?" % path) 
 426              response = mdlg.run() 
 427              mdlg.destroy() 
 428              if response == gtk.RESPONSE_NO: 
 429                  return 
 430          busy = qubx.GTK.BusyDialog('Extracting...', lambda progressf: self.save(progressf, path), parent=self.parent_window) 
 431          busy.set_size_request(300, 75) 
 432          try: 
 433              busy.run() 
 434          except: 
 435              mdlg = gtk.MessageDialog(None, buttons=gtk.BUTTONS_OK, flags=gtk.DIALOG_MODAL, 
 436                                       message_format=traceback.format_exc()) 
 437              mdlg.run() 
 438              mdlg.destroy() 
 439          busy.destroy() 
440 - def save(self, progressf, fname):
441 QubX = qubx.pyenv.env.globals['QubX'] 442 view = QubX.Data.view 443 file = view.file 444 445 script_keys = ['source', 'limit_to_group', 'join_segments', 'signals', 'no_filter', 'subtract_fit', 'resample', 'process'] 446 447 # get the tables: constants, segments, signals, stimuli, scope 448 constants = file.constants.clone() 449 segments = file.segments.clone() 450 signals = file.signals.clone() 451 stimuli = file.stimuli.clone() 452 scope = view.signals.clone() 453 lists = None # unless whole-file 454 sampling_out_kHz = 1.0 / (file.sampling * 1e3) 455 456 # get the segments template 457 if self.source == SOURCE_DATASOURCE: 458 segs = QubX.DataSource.get_segmentation(baseline_nodes=False) 459 segs_of_file = [ segs ] 460 if QubX.DataSource.source == qubx.dataGTK.DATASOURCE_FILE: 461 lists = file.lists 462 elif self.source == SOURCE_LIST: 463 segs = file.get_segmentation_list(signal=QubX.DataSource.signal, baseline_nodes=False) 464 segs_of_file = [ segs ] 465 lists = file.lists.clone() 466 segments = qubx.table.SimpleTable('Segments', auto_add_fields=True) 467 segments.add_field('Start', 0.0, acceptNothing, '%.5g', 'ms', independent=True) 468 segments.add_field('Duration', 0.0, acceptNothing, '%.5g', 'ms') 469 segments.add_field('Label', '', acceptString, str, '') # required for extract:list to copy List table to Segments table 470 as_tree = qubx.table_tree.table_to_tree(lists.item) 471 qubx.table_tree.tree_to_table(as_tree, segments) 472 for i in reversed(xrange(len(lists.lists))): 473 if i == lists.index: 474 at = 0 475 for j in xrange(lists.lists[i].size): 476 sel = lists.lists[i].get_row(j) 477 n = (sel.To - sel.From + 1) 478 lists.lists[i].set(j, 'From', at) 479 lists.lists[i].set(j, 'To', at+n-1) 480 at += n 481 else: 482 lists.del_list(i) 483 elif self.source == SOURCE_OTHER: 484 script_keys.extend(['other_seg', 'other_sel']) 485 if self.other_seg == OTHER_SEG_ONSCREEN: 486 first_seg = view.time.Iseg 487 last_seg = view.time.Iseg + view.time.Greal - 1 488 else: 489 first_seg = 0 490 last_seg = len(file.segmentation.segments) - 1 491 if self.other_sel == OTHER_SEL_ONSCREEN: 492 left = view.time.left 493 right = view.time.right 494 else: 495 left, right = view.time.timeRange.bounds 496 segs = QubX.DataSource.get_segmentation(first_seg=first_seg, last_seg=last_seg, left=left, right=right, baseline_nodes=False) 497 if self.other_seg == OTHER_SEG_GROUP: 498 script_keys.append('other_seg_group') 499 segs = [seg for seg in segs if self.other_seg_group == file.segments.get(seg.index, 'Group')] 500 segs_of_file = [ segs ] 501 502 # cull segments table 503 si = len(segs)-1 504 for i in reversed(xrange(segments.size)): 505 if (si < 0) or (segs[si].index != i): 506 segments.remove(i) 507 else: 508 si -= 1 509 elif self.source == SOURCE_FILEGROUP: 510 script_keys.extend(['file_group', 'file_group_by_name']) 511 views = [vw for i, vw in enumerate(QubX.Data.views) if (self.file_group == QubX.Data.table[i,'Group'])] 512 # replace file, view, constants, stimuli from first in group 513 # build new segments, signals, scope from union 514 # sampling_out_kHz = max in group 515 # build segs, also as segs_of_file 516 if not (view in views): 517 view = views[0] 518 file = view.file 519 constants = file.constants.clone() 520 stimuli = file.stimuli.clone() 521 segments = file.segments.clone() 522 signals = file.signals.clone() 523 scope = view.signals.clone() 524 sampling_out_kHz = max([1.0 / (vw.file.sampling * 1e3) for vw in views]) 525 segs_of_file = [] 526 for vw in views: 527 for entry in 
vw.file.segments.entries: 528 segments.append(entry.copy()) 529 if self.file_group_by_name: 530 for i in xrange(len(vw.signals)-1): 531 try: 532 scope.index(vw.signals[i+1, 'Name']) 533 except: 534 signals.append(vw.file.signals.entries[i]) 535 scope.append(vw.signals.entries[i+1]) 536 try: 537 i = vw.signals.index(scope[QubX.DataSource.signal+1, 'Name']) - 1 538 except: 539 i = QubX.DataSource.signal 540 segs_of_file.append( vw.file.get_segmentation_file(signal=i, baseline_nodes=False) ) 541 else: 542 for i in xrange(signals.size, vw.file.signals.size): 543 signals.append(vw.file.signals.entries[i].copy()) 544 scope.append(vw.signals.entries[i+1].copy()) 545 segs_of_file.append( vw.file.get_segmentation_file(signal=QubX.DataSource.signal, baseline_nodes=False) ) 546 segs = [] 547 map(segs.extend, segs_of_file) 548 549 if self.limit_to_group: 550 script_keys.append('limit_group') 551 limit_group = self.limit_group 552 segs_of_file = [ [s for s in segs_one_file if (s.group == limit_group)] for segs_one_file in segs_of_file ] 553 554 # drop excluded; break out included chunks as own segs 555 old_sof = segs_of_file 556 segs_of_file = [] 557 for ivw, old_segs in enumerate(old_sof): 558 fsegs = [] 559 for old_seg in old_segs: 560 if any(not chunk.included for chunk in old_seg.chunks): 561 segments.clear() # segmentation has changed 562 for old_chunk in old_seg.chunks: 563 if old_chunk.included and old_chunk.n: 564 seg = qubx.data_types.SourceSeg(old_seg.file, old_seg.signal, old_seg.index, old_seg.offset, 565 old_chunk.f, old_chunk.l, [old_chunk], included=True, 566 latency=old_seg.latency, start=old_chunk.start, 567 sampling=old_seg.sampling, baseline_nodes=False) 568 fsegs.append(seg) 569 elif old_seg.n: 570 fsegs.append(old_seg) 571 else: 572 segments.clear() # dropping empty segment invalidates table 573 segs_of_file.append(fsegs) 574 segs = [] 575 map(segs.extend, segs_of_file) 576 577 if not segs: 578 mdlg = gtk.MessageDialog(parent=self.parent_window, 579 buttons=gtk.BUTTONS_OK, 580 flags=gtk.DIALOG_MODAL, 581 message_format="Extract: No matching data.") 582 mdlg.run() 583 mdlg.destroy() 584 return 585 586 ## correct segments:Start,Duration 587 for i, seg in enumerate(segs): 588 if i < segments.size: 589 segments.set(i, 'Start', segs[i].start) 590 segments.set(i, 'Duration', segs[i].n * segs[i].sampling * 1e3) 591 else: 592 segments.append({'Start' : segs[i].start, 593 'Duration' : segs[i].n * segs[i].sampling * 1e3}) 594 while len(segments) > len(segs): 595 segments.remove(len(segments)-1) 596 597 # no_filter: 598 if self.no_filter: 599 for seg in segs: 600 seg.filter_Hz = 0.0 601 for chunk in seg.chunks: 602 chunk.filter_Hz = 0.0 603 604 # get corresponding list of SourceSeg for each requested signal 605 # remap signals and scope tables 606 segs_of_signal = [] 607 sig_ixs = [] 608 custom_signals = (self.signals == SIGNALS_CUSTOM) and self.txtSignalsCustom.custom 609 if (self.signals == SIGNALS_ALL) or custom_signals: 610 if (self.source == SOURCE_FILEGROUP) and self.file_group_by_name: 611 segs_of_signal = [ [] for i in xrange(signals.size) ] 612 for ivw, fsegs in enumerate(segs_of_file): 613 vw_sig = [views[ivw].signals.index(scope[i+1,'Name'], raise_error=False)-1 for i in xrange(len(signals))] 614 for seg in fsegs: 615 for i in xrange(signals.size): 616 if seg.signal == vw_sig[i]: 617 segs_of_signal[i].append(seg) 618 else: 619 segs_of_signal[i].append( copy_segmentation([seg], vw_sig[i], self.no_filter)[0] ) 620 else: 621 sig_ixs.extend(range(signals.size)) 622 for i in 
xrange(signals.size): 623 if i == segs[0].signal: 624 segs_of_signal.append(segs) 625 else: 626 segs_of_signal.append( copy_segmentation(segs, i, self.no_filter) ) 627 if custom_signals: 628 customs = parse_signals_expr(self.signals_custom) 629 segs_of_signal = build_custom_signal_segs(customs, segs_of_signal, signals, scope) 630 elif self.signals == SIGNALS_SOURCE: 631 sig_ixs.append(segs[0].signal) 632 segs_of_signal.append(segs) 633 for i in reversed(xrange(segs[0].signal+1, signals.size)): 634 signals.remove(i) 635 scope.remove(i+1) 636 for i in reversed(xrange(segs[0].signal)): 637 signals.remove(i) 638 scope.remove(i+1) 639 elif self.signals == SIGNALS_CUSTOM: # indexed; not including custom_signals (parsed expression) 640 script_keys.append('signals_custom') 641 for i in reversed(xrange(signals.size)): 642 if self.txtSignalsCustom.get_active(i): 643 sig_ixs.insert(0, i) 644 if i == segs[0].signal: 645 segs_of_signal.insert(0, segs) 646 else: 647 segs_of_signal.insert(0, copy_segmentation(segs, i, self.no_filter)) 648 else: 649 signals.remove(i) 650 scope.remove(i+1) 651 652 # remap or remove stimuli signal indices 653 for i in xrange(stimuli.size): 654 stim = stimuli.get_row(i) 655 if stim.Type == qubx.data_types.STIM_TYPE_SIGNAL: 656 try: 657 stimuli.set(i, 'Signal_index', sig_ixs.index(stim.Signal_index)) 658 except ValueError: 659 stimuli.set(i, 'Type', qubx.data_types.STIM_TYPE_CONST) 660 661 # existing scale, offset is applied to points before saving; 662 # remove it from the copy, so as not to apply it twice; 663 # also, copy name and units from scope 664 for s in xrange(signals.size): 665 signals.set(s, 'Scale', 1.0) 666 signals.set(s, 'Offset', 0.0) 667 signals.set(s, 'Name', scope.get(s+1, 'Name')) 668 signals.set(s, 'Units', scope.get(s+1, 'Units')) 669 670 # assumption: qubx.data_types.Save ignores chunks except to determine exclusion. 671 # since we removed excluded chunks, only the SourceSegs have to be wrapped. 
672 673 # maybe wrap primary signal's segs with a fit-subtracting SourceSeg-alike 674 if self.subtract_fit == SUBTRACT_FIT_CURVE: 675 segs[:] = [SourceSegMinusFit(seg) for seg in segs] 676 ## needs to push its filter_Hz inward 677 elif self.subtract_fit == SUBTRACT_FIT_NODES: 678 segs[:] = [SourceSegMinusNodes(seg) for seg in segs] 679 680 # maybe wrap segs with resamplers 681 if self.resample: 682 script_keys.append('resample_kHz') 683 sampling_out_kHz = self.resample_kHz 684 for i, seg in enumerate(segs_of_signal[0]): 685 segments.set(i, 'SampleCount', seg.n) 686 sampling_out_sec = 1.0 / (sampling_out_kHz * 1e3) 687 segs_of_signal = [ [(seg.sampling == sampling_out_sec) and seg or SourceSegResampled(seg, sampling_out_kHz) for seg in sigsegs] 688 for sigsegs in segs_of_signal ] 689 690 # maybe average, add, or subtract 691 proc_signal = QubX.DataSource.signal 692 congruent = False # identical segmentation in multi files 693 if (self.source == SOURCE_FILEGROUP) and (len(views) > 1): 694 congruent = True 695 for vw in views: 696 if len(vw.file.segmentation.segments) != len(views[0].file.segmentation.segments): 697 congruent = False 698 if congruent: 699 with nested_break() as NON_CONGRUENT: 700 for dims in izip(*[vw.file.segmentation.segments for vw in views]): 701 for f,l,n in dims: 702 if n != dims[0][2]: 703 congruent = False 704 raise NON_CONGRUENT 705 if congruent: 706 segs_of_signal_of_file = [] 707 at = 0 708 for fsegs in segs_of_file: 709 segs_of_signal_of_file.append( segs_of_signal[at:at+len(fsegs)] ) 710 at += len(fsegs) 711 if self.process == PROC_AVG: 712 if congruent: 713 segs_of_signal = [ [] for s in xrange(len(signals)) ] 714 for s, sigsegses in enumerate(izip(*segs_of_signal_of_file)): 715 if self.process_all: 716 segs_of_signal[s].extend([SourceSegAvg(sigsegs) for sigsegs in izip(*sigsegses)]) 717 else: 718 segs_of_signal[s].extend( (sig_ixs[s] == proc_signal) and [SourceSegAvg(sigsegs) for sigsegs in izip(*sigsegses)] or sigsegses[0] ) 719 else: 720 if self.process_all: 721 segs_of_signal = [ [SourceSegAvg(sigsegs)] for sigsegs in segs_of_signal ] 722 else: 723 segs_of_signal = [ (sig_ixs[s] == proc_signal) and [SourceSegAvg(sigsegs)] or [sigsegs[0]] 724 for s, sigsegs in enumerate(segs_of_signal) ] 725 elif self.process == PROC_ADD: 726 if congruent: 727 segs_of_signal = [ [] for s in xrange(len(signals)) ] 728 for s, sigsegses in enumerate(izip(*segs_of_signal_of_file)): 729 if self.process_all: 730 segs_of_signal[s].extend([SourceSegAdd(sigsegs) for sigsegs in izip(*sigsegses)]) 731 else: 732 segs_of_signal[s].extend( (sig_ixs[s] == proc_signal) and [SourceSegAdd(sigsegs) for sigsegs in izip(*sigsegses)] or sigsegses[0] ) 733 else: 734 if self.process_all: 735 segs_of_signal = [ [SourceSegAdd(sigsegs)] for sigsegs in segs_of_signal ] 736 else: 737 segs_of_signal = [ (sig_ixs[s] == proc_signal) and [SourceSegAdd(sigsegs)] or [sigsegs[0]] 738 for s, sigsegs in enumerate(segs_of_signal) ] 739 elif self.process == PROC_SUB: 740 script_keys.append('process_sub_trace') 741 script_keys.append('process_sub_from_file') 742 if self.process_sub_from_file: 743 script_keys.append('process_sub_file_ix') 744 sub_view = QubX.Data.views[self.process_sub_file_ix] 745 if self.process_sub_trace > len(sub_view.file.segmentation.segments): 746 raise IndexError('Trace index %d out of range in file %s' % (self.process_sub_trace, os.path.split(sub_view.file.path)[1])) 747 sub_first, sub_last, sub_n = sub_view.file.segmentation.segments[self.process_sub_trace-1] 748 active_signal = 
segs[0].signal 749 active_signal_name = view.signals[active_signal+1,'Name'] 750 # if proc all, same num signals, same name of active signal index: subtract all signals 751 if self.process_all and (len(sub_view.file.signals) == len(file.signals)) and (sub_view.signals[active_signal+1,'Name'] == active_signal_name): 752 segs_of_signal = [ [SourceSegSub(seg, sub_view.file.get_segmentation_indexed(sub_first, sub_last, s, baseline_nodes=False)[0]) for seg in sigsegs] 753 for s, sigsegs in enumerate(segs_of_signal) ] 754 else: 755 # pick sub signal by name, then index, then 0 756 sub_signal = active_signal if (active_signal < len(sub_view.file.signals)) else 0 757 for s in xrange(1, len(sub_view.signals)): 758 if sub_view.signals[s,'Name'] == active_signal_name: 759 sub_signal = s-1 760 break 761 segs_of_signal = [ (sig_ixs[s] == proc_signal) and [SourceSegSub(seg, sub_view.file.get_segmentation_indexed(sub_first, sub_last, sub_signal, baseline_nodes=False)[0]) for seg in sigsegs] 762 or sigsegs 763 for s, sigsegs in enumerate(segs_of_signal) ] 764 else: 765 if self.process_sub_trace > len(segs_of_signal[0]): 766 raise IndexError('Trace index %d out of range' % self.process_sub_trace) 767 segs_to_subtract = [sigsegs[self.process_sub_trace-1] for sigsegs in segs_of_signal] 768 if self.process_all: 769 segs_of_signal = [ [SourceSegSub(seg, seg_to_subtract) for i,seg in enumerate(sigsegs) if (i+1) != self.process_sub_trace] 770 for sigsegs, seg_to_subtract in izip(segs_of_signal, segs_to_subtract) ] 771 else: 772 segs_of_signal = [ (sig_ixs[s] == proc_signal) and [SourceSegSub(seg, segs_to_subtract[s]) 773 for i,seg in enumerate(sigsegs) if (i+1) != self.process_sub_trace] 774 or [seg for i,seg in enumerate(sigsegs) if (i+1) != self.process_sub_trace] 775 for s, sigsegs in enumerate(segs_of_signal) ] 776 777 if self.process: 778 script_keys.append('process_all') 779 if self.process or self.resample: 780 ## correct segments:Start,Duration 781 segments.clear() 782 for i, seg in enumerate(segs_of_signal[0]): 783 segments.append({'Start' : seg.start, 784 'Duration' : seg.n * seg.sampling * 1e3}) 785 lists = None 786 else: 787 # segment baselines will be subtracted permanently in extraction; don't double-apply 788 for field in reversed(segments.fields): 789 if 'Baseline offset ' in field: 790 segments.remove_field(field) 791 792 if self.join_segments and segs_of_signal and segs_of_signal[0]: 793 segs_of_signal = [ [SourceSegJoin(sigsegs)] for sigsegs in segs_of_signal ] 794 seg = segs_of_signal[0][0] 795 segments.clear() 796 segments.append({'Start' : seg.start, 'Duration' : seg.n * seg.sampling * 1e3}) 797 798 outpath = fname or self.txtPath.value 799 settings = qubx.settings.SettingsMgr['Extract'].as_pairs(script_keys, {'source' : ['SOURCE_DATASOURCE', 'SOURCE_LIST', 'SOURCE_OTHER', 'SOURCE_FILEGROUP'], 800 'other_seg' : ['OTHER_SEG_ALL', 'OTHER_SEG_ONSCREEN', 'OTHER_SEG_GROUP'], 801 'other_sel' : ['OTHER_SEL_ALL', 'OTHER_SEL_ONSCREEN'], 802 'signals' : ['SIGNALS_ALL', 'SIGNALS_SOURCE', 'SIGNALS_CUSTOM'], 803 'process' : ['PROC_NONE', 'PROC_AVG', 'PROC_ADD', 'PROC_SUB'], 804 'subtract_fit' : ['SUBTRACT_FIT_NONE', 'SUBTRACT_FIT_CURVE', 'SUBTRACT_FIT_NODES']}) 805 # if open_after is actually true, the script will record its opening in a separate line 806 qubx.pyenv.env.OnScriptable('Extract(%s, fname=%s, silent=True, open_after=False)' % 807 (', '.join("%s=%s"%(k,v) for k,v in settings), 808 repr(outpath))) 809 810 # save via qubx.data_types.Writers, maybe open 811 qubx.data_types.Save(outpath, 
qubx.tree.Node("SessionFile"), segs_of_signal, lists, progressf, 812 constants, segments, signals, stimuli, scope) 813 814 qubx.global_namespace.extsegs = segs_of_signal 815 816 if self.open_after: 817 QubX.Data.open(self.txtPath.value)
 818   
 819   
 820  def copy_segmentation(template, signal, no_filter=True): 
 821      segs = [] 
 822      for tseg in template: 
 823          filter_Hz = 1e3*(tseg.file.signals.get(signal, 'Filter') and 
 824                           tseg.file.signals.get(signal, 'Filter Freq') or 0.0) if ((not no_filter) and (0 <= signal < len(tseg.file.signals))) else 0.0 
 825          chunks = [qubx.data_types.SourceChunk(file=tchunk.file, signal=signal, f=tchunk.f, l=tchunk.l, 
 826                                                included=tchunk.included, latency=tchunk.latency, start=tchunk.start, 
 827                                                sampling=tchunk.sampling, filter_Hz=filter_Hz, baseline_nodes=tchunk.baseline_nodes, group=tchunk.group) 
 828                    for tchunk in tseg.chunks] 
 829          seg = qubx.data_types.SourceSeg(file=tseg.file, signal=signal, index=tseg.index, offset=tseg.offset, 
 830                                          f=tseg.f, l=tseg.l, chunks=chunks, included=tseg.included, 
 831                                          latency=tseg.latency,  # prob. wrong 
 832                                          start=tseg.start, sampling=tseg.sampling, filter_Hz=filter_Hz, baseline_nodes=tseg.baseline_nodes, group=tseg.group) 
 833          segs.append(seg) 
 834      return segs 
 835   
 836  class SourceSegTime(Anon): 
 837      def __init__(self, seg): 
 838          Anon.__init__(self, seg=seg, index=seg.index, offset=seg.offset, chunks=seg.chunks, file=seg.file, 
 839                        signal=seg.signal, f=seg.f, l=seg.l, n=seg.n, start=seg.start, included=seg.included, 
 840                        latency=seg.latency, filter_Hz=seg.filter_Hz, sampling=seg.sampling, get_actual_bounds=seg.get_actual_bounds) 
 841      def get_samples(self, first_offset=None, last_offset=None): 
 842          f, l, start = self.get_actual_bounds(first_offset, last_offset) 
 843          samples = numpy.arange(l-f+1, dtype='float32') 
 844          samples += (f - self.offset) 
 845          samples *= self.sampling 
 846          return qubx.data_types.SourceSeg(chunks=[], samples=samples, file=self.file, signal=self.signal, 
 847                                           f=f, l=l, n=l-f+1, start=start, included=self.included, 
 848                                           latency=self.latency, filter_Hz=self.filter_Hz, sampling=self.sampling, group=self.group) 
849
850 -class SourceSegFunc(Anon):
851 - def __init__(self, seg, get_samples_of_signal, func):
852 """@param seg: prototype 853 @param seg_of_signal: list of SourceSegs corresponding to func args 854 @param func: f(signal_i_array_or_value, ...) -> array_or_value e.g. lambda t, Current: 2*Current + sin(t) 855 """ 856 Anon.__init__(self, seg=seg, get_samples_of_signal=get_samples_of_signal, func=func, index=seg.index, offset=seg.offset, chunks=seg.chunks, file=seg.file, 857 signal=seg.signal, f=seg.f, l=seg.l, n=seg.n, start=seg.start, included=seg.included, 858 latency=seg.latency, filter_Hz=seg.filter_Hz, sampling=seg.sampling, get_actual_bounds=seg.get_actual_bounds)
859 - def get_samples(self, first_offset=None, last_offset=None):
860 signals = [get_samples(first_offset, last_offset).samples for get_samples in self.get_samples_of_signal] 861 f, l, start = self.get_actual_bounds(first_offset, last_offset) 862 if not signals: 863 samples = numpy.zeros(shape=(l-f+1,), dtype='float32') 864 samples += self.func() 865 else: 866 try: 867 samples = self.func(*signals) 868 except: 869 # the function doesn't broadcast to numpy arrays? 870 samples = numpy.array([self.func(*args) for args in izip(*signals)], dtype=numpy.float32) 871 return qubx.data_types.SourceSeg(chunks=[], samples=samples, file=self.file, signal=self.signal, 872 f=f, l=l, n=l-f+1, start=start, included=self.included, 873 latency=self.latency, filter_Hz=self.filter_Hz, sampling=self.sampling, group=self.group)
874 - def get_idealization(self, signal=None, mark_excluded=False, get_fragments=False, get_durations=False, 875 get_amps=False, get_stds=False):
876 result = [ [], [], [] ] # firsts, lasts, classes 877 if get_durations: 878 result.append([]) 879 if get_amps: 880 result.append([]) 881 if get_stds: 882 result.append([]) 883 return result
884
885 886 -class SourceSegMinusFit(Anon):
887 - def __init__(self, seg):
888 Anon.__init__(self, seg=seg, index=seg.index, offset=seg.offset, chunks=seg.chunks, file=seg.file, 889 signal=seg.signal, f=seg.f, l=seg.l, n=seg.n, start=seg.start, included=seg.included, group=seg.group, 890 latency=seg.latency, filter_Hz=seg.filter_Hz, sampling=seg.sampling, get_actual_bounds=seg.get_actual_bounds)
891 - def get_samples(self, first_offset=None, last_offset=None):
892 f, l = self.seg.f, self.seg.l 893 if not (first_offset is None): 894 f += first_offset 895 if not (last_offset is None): 896 l = self.f + last_offset 897 self.seg.filter_Hz = self.filter_Hz 898 chunk = self.seg.get_samples(first_offset, last_offset) 899 data = chunk.samples 900 fit = numpy.zeros(shape=data.shape, dtype='float32') 901 self.seg.file.fits[self.seg.signal].idl.get_samples_into(f, l, fit) 902 data -= fit 903 return chunk
904 - def get_idealization(self, *args, **kw):
905 return self.seg.get_idealization(*args, **kw)
906
907 908 -class SourceSegMinusNodes(Anon):
909 - def __init__(self, seg):
910 Anon.__init__(self, seg=seg, index=seg.index, offset=seg.offset, chunks=seg.chunks, file=seg.file, 911 signal=seg.signal, f=seg.f, l=seg.l, n=seg.n, start=seg.start, included=seg.included, group=seg.group, 912 latency=seg.latency, filter_Hz=seg.filter_Hz, sampling=seg.sampling, get_actual_bounds=seg.get_actual_bounds)
913 - def get_samples(self, first_offset=None, last_offset=None):
914 f, l = self.seg.f, self.seg.l 915 if not (first_offset is None): 916 f += first_offset 917 if not (last_offset is None): 918 l = self.f + last_offset 919 self.seg.filter_Hz = self.filter_Hz 920 chunk = self.seg.get_samples(first_offset, last_offset) 921 chunk.samples -= self.seg.file.baseline[self.seg.signal].get_sampled_nodes(f, l) 922 return chunk
923 - def get_idealization(self, *args, **kw):
924 return self.seg.get_idealization(*args, **kw)
925
926 927 -class SourceSegResampled(Anon):
928 - def __init__(self, seg, resample_kHz):
929 Anon.__init__(self, seg=seg, index=seg.index, offset=seg.offset, chunks=seg.chunks, file=seg.file, 930 signal=seg.signal, f=seg.f, l=seg.l, n=seg.n, start=seg.start, included=seg.included, group=seg.group, 931 latency=seg.latency, sampling=1e-3/resample_kHz) 932 self.filter_Hz = seg.filter_Hz = min(1e3*resample_kHz, seg.filter_Hz) or (1e3*resample_kHz) 933 self.n = int(seg.n * seg.sampling / self.sampling)
934 - def get_samples(self, first_offset=None, last_offset=None):
935 f_off, l_off = first_offset, last_offset 936 if f_off is None: 937 f_off = 0 938 if l_off is None: 939 l_off = self.n - 1 940 n = l_off - f_off + 1 941 inc = self.sampling / self.seg.sampling 942 raw_f_off = int(round(f_off * inc)) 943 raw_l_off = int(round(l_off * inc)) 944 self.seg.filter_Hz = self.filter_Hz 945 chunk = self.seg.get_samples(raw_f_off, raw_l_off) 946 raw = chunk.samples 947 resampled = numpy.zeros(shape=(n,), dtype='float32') 948 ixfrac = 0.0 949 if inc >= 1.0: 950 for i in xrange(n): 951 resampled[i] = raw[int(ixfrac)] 952 ixfrac += inc 953 else: 954 inc = 1.0 / inc 955 for i in xrange(chunk.n): 956 resampled[int(ixfrac):int(round(ixfrac+inc))] = raw[i] 957 ixfrac += inc 958 chunk.samples = resampled 959 chunk.sampling = self.sampling 960 chunk.n = n 961 return chunk
962 - def get_idealization(self, signal=None, mark_excluded=False, get_fragments=False, get_durations=False, 963 get_amps=False, get_stds=False):
964 result = list(self.seg.get_idealization(signal, mark_excluded, get_fragments, get_durations, get_amps, get_stds)) 965 ff, ll, cc = result[:3] 966 if len(ff): 967 ll -= ff[0] 968 ff -= ff[0] 969 ll *= int(self.seg.sampling / self.sampling) 970 ff *= int(self.seg.sampling / self.sampling) 971 ff[ff >= self.n] = self.n-1 972 ll[ll >= self.n] = self.n-1 973 ridl = qubx.ideal.Idealization(self.sampling * 1e3) 974 ridl.add_seg(0, self.n - 1) 975 ridl.set_dwells(len(ff), ff, ll, cc) 976 ff, ll, cc, dd = ridl.get_dwells(0, self.n-1, get_fragments, True) 977 result[:3] = ff, ll, cc 978 if len(result) > 3: 979 result[3] = dd 980 return result
981
982 983 -class SourceSegAvg(Anon):
984 - def __init__(self, segs):
985 n = segs[0].n if segs else 0 # max(seg.n for seg in segs) 986 Anon.__init__(self, segs=segs, index=segs[0].index, offset=segs[0].offset, chunks=[], file=segs[0].file, 987 signal=segs[0].signal, f=segs[0].f, l=segs[0].f+n-1, n=n, start=segs[0].start, included=segs[0].included, 988 latency=segs[0].latency, filter_Hz=segs[0].filter_Hz, sampling=segs[0].sampling, group=segs[0].group)
989 - def get_samples(self, first_offset=None, last_offset=None):
990 first = self.f if (first_offset is None) else first_offset 991 last = self.l if (last_offset is None) else last_offset 992 n = last - first + 1 993 samples = numpy.zeros(dtype='float32', shape=(n,)) 994 counts = numpy.zeros(dtype='int32', shape=(n,)) 995 for seg in self.segs: 996 if first < seg.n: 997 l = min(last, seg.n-1) 998 seg_samples = seg.get_samples(first, l).samples 999 n_common = min(len(samples), len(seg_samples)) 1000 samples[:n_common] += seg_samples 1001 counts[:n_common] += 1 1002 counts[counts < 1] = 1 1003 samples /= counts 1004 return qubx.data_types.SourceSeg(chunks=[], samples=samples, file=self.file, signal=self.signal, 1005 f=first, l=last, n=n, start=self.start + 1e3*(first-self.f)*self.sampling, included=self.included, 1006 latency=self.latency, filter_Hz=self.filter_Hz, sampling=self.sampling, group=self.group)
1007 - def get_idealization(self, signal=None, mark_excluded=False, get_fragments=False, get_durations=False, 1008 get_amps=False, get_stds=False):
1009 result = [ [], [], [] ] # firsts, lasts, classes 1010 if get_durations: 1011 result.append([]) 1012 if get_amps: 1013 result.append([]) 1014 if get_stds: 1015 result.append([]) 1016 return result
1017
1018 -class SourceSegAdd(Anon):
1019 - def __init__(self, segs):
1020 n = segs[0].n if segs else 0 # max(seg.n for seg in segs) 1021 Anon.__init__(self, segs=segs, index=segs[0].index, offset=segs[0].offset, chunks=[], file=segs[0].file, 1022 signal=segs[0].signal, f=segs[0].f, l=segs[0].f+n-1, n=n, start=segs[0].start, included=segs[0].included, 1023 latency=segs[0].latency, filter_Hz=segs[0].filter_Hz, sampling=segs[0].sampling, group=segs[0].group)
1024 - def get_samples(self, first_offset=None, last_offset=None):
1025 first = self.f if (first_offset is None) else first_offset 1026 last = self.l if (last_offset is None) else last_offset 1027 n = last - first + 1 1028 samples = numpy.zeros(dtype='float32', shape=(n,)) 1029 for seg in self.segs: 1030 if first < seg.n: 1031 l = min(last, seg.n-1) 1032 seg_samples = seg.get_samples(first, l).samples 1033 n_common = min(n, len(seg_samples)) 1034 samples[:n_common] += seg_samples 1035 return qubx.data_types.SourceSeg(chunks=[], samples=samples, file=self.file, signal=self.signal, 1036 f=first, l=last, n=n, start=self.start + 1e3*(first-self.f)*self.sampling, included=self.included, 1037 latency=self.latency, filter_Hz=self.filter_Hz, sampling=self.sampling, group=self.group)
1038 - def get_idealization(self, signal=None, mark_excluded=False, get_fragments=False, get_durations=False, 1039 get_amps=False, get_stds=False):
1040 result = [ [], [], [] ] # firsts, lasts, classes 1041 if get_durations: 1042 result.append([]) 1043 if get_amps: 1044 result.append([]) 1045 if get_stds: 1046 result.append([]) 1047 return result
1048
1049 -class SourceSegSub(Anon):
1050 - def __init__(self, seg_plus, seg_minus):
1051 Anon.__init__(self, seg_plus=seg_plus, seg_minus=seg_minus, index=seg_plus.index, offset=seg_plus.offset, chunks=[], 1052 file=seg_plus.file, signal=seg_plus.signal, f=seg_plus.f, l=seg_plus.l, n=seg_plus.n, 1053 start=seg_plus.start, included=seg_plus.included, group=seg_plus.group, 1054 latency=seg_plus.latency, filter_Hz=seg_plus.filter_Hz, sampling=seg_plus.sampling)
1055 - def get_samples(self, first_offset=None, last_offset=None):
1056 first = self.f if (first_offset is None) else first_offset 1057 samples = self.seg_plus.get_samples(first_offset, last_offset).samples 1058 samples_minus = self.seg_minus.get_samples(first_offset, last_offset).samples 1059 n = len(samples) 1060 last = first + n - 1 1061 n_common = min(len(samples), len(samples_minus)) 1062 samples[:n_common] -= samples_minus[:n_common] 1063 return qubx.data_types.SourceSeg(chunks=[], samples=samples, file=self.file, signal=self.signal, 1064 f=first, l=last, n=n, start=self.start + 1e3*(first-self.f)*self.sampling, included=self.included, 1065 latency=self.latency, filter_Hz=self.filter_Hz, sampling=self.sampling, group=self.group)
1066 - def get_idealization(self, signal=None, mark_excluded=False, get_fragments=False, get_durations=False, 1067 get_amps=False, get_stds=False):
1068 return self.seg_plus.get_idealization(signal, mark_excluded, get_fragments, get_durations, get_amps, get_stds)
1069
1070 1071 -class SourceSegJoin(Anon):
1072 - def __init__(self, segs):
1073 n = sum(seg.n for seg in segs) 1074 Anon.__init__(self, segs=segs, index=0, offset=0, chunks=[], 1075 file=segs[0].file, signal=segs[0].signal, f=0, l=n-1, n=n, 1076 start=segs[0].start, included=segs[0].included, group=segs[0].group, 1077 latency=segs[0].latency, filter_Hz=segs[0].filter_Hz, sampling=segs[0].sampling) 1078 jf = 0 1079 for seg in segs: 1080 seg.jf = jf 1081 seg.jl = jf + seg.n - 1 1082 jf += seg.n
1083 - def get_samples(self, first_offset=None, last_offset=None):
1084 f_off, l_off = first_offset, last_offset 1085 if f_off is None: 1086 f_off = 0 1087 if l_off is None: 1088 l_off = self.n - 1 1089 samples = numpy.zeros(shape=(l_off-f_off+1,), dtype='float32') 1090 f_rem = f_off 1091 for seg in self.segs: 1092 if seg.jl < f_rem: 1093 continue 1094 if seg.jf > l_off: 1095 break 1096 Nisect = min(l_off, seg.jl) - f_rem + 1 1097 samples[f_rem-f_off:f_rem+Nisect-f_off] = seg.get_samples(f_rem-seg.jf, f_rem+Nisect-seg.jf-1).samples 1098 f_rem += Nisect 1099 return qubx.data_types.SourceSeg(chunks=[], samples=samples, file=self.file, signal=self.signal, 1100 f=f_off, l=l_off, n=l_off-f_off+1, start=self.start + 1e3*(f_off)*self.sampling, included=self.included, 1101 latency=self.latency, filter_Hz=self.filter_Hz, sampling=self.sampling, group=self.group)
1102 - def get_idealization(self, signal=None, mark_excluded=False, get_fragments=False, get_durations=False, 1103 get_amps=False, get_stds=False):
1104 ff, ll, cc, dd, aa, ss = [], [], [], [], [], [] 1105 n_done = 0 1106 for seg in self.segs: 1107 idl = seg.get_idealization(signal, mark_excluded, get_fragments, get_durations, get_amps, get_stds) 1108 # ff starts at 0 1109 if len(idl[0]): 1110 offset = n_done - idl[0][0] 1111 idl[0] += offset 1112 idl[1] += offset 1113 ff.append(idl[0]) 1114 ll.append(idl[1]) 1115 cc.append(idl[2]) 1116 if get_durations: 1117 dd.append(idl[3]) 1118 if get_amps and len(idl[3+int(get_durations)]) > len(aa): 1119 aa = idl[3+int(get_durations)] 1120 if get_stds and len(idl[3+int(get_durations)+int(get_amps)]) > len(ss): 1121 ss = idl[3+int(get_durations)+int(get_amps)] 1122 n_done += seg.n 1123 result = [numpy.hstack(ff), numpy.hstack(ll), numpy.hstack(cc)] if len(ff) else [[], [], []] 1124 if get_durations: 1125 if len(dd): 1126 result.append(numpy.hstack(dd)) 1127 else: 1128 result.append([]) 1129 if get_amps: 1130 result.append(aa) 1131 if get_stds: 1132 result.append(ss) 1133 return tuple(result)
1134
1135 1136 -class SignalMenuEntry(qubx.GTK.CustomCheckListEntry):
1137 - def __init__(self):
1138 qubx.GTK.CustomCheckListEntry.__init__(self, self.__edit_custom, 'Custom/function...')
1139 - def set_expr(self, expr):
1140 labels = set(item.label for item in self.items) 1141 subs = re.split("; *", expr) 1142 custom = True # until proven False 1143 if all(sub in labels for sub in subs): 1144 custom = False 1145 else: 1146 try: 1147 self.help_accept(expr) 1148 except: 1149 custom = False 1150 if custom: 1151 self.custom_val = expr 1152 else: # un/check the boxes 1153 for i, item in enumerate(self.items): 1154 if item.label in subs: 1155 if self.custom or not item.active: 1156 self.set_active(i, True) 1157 self.OnToggle(self, i, True) 1158 else: 1159 if self.custom or item.active: 1160 self.set_active(i, False) 1161 self.OnToggle(self, i, False)
1162 expr = property(lambda self: self.get_text(), lambda self, x: self.set_expr(x))
1163 - def __edit_custom(self, expr):
1164 name_hint = "Signal names: %s\n" % ", ".join(SafeName(item.label) for item in self.items) 1165 dlg = qubx.pyenvGTK.HelpTestDialog(self.help_accept, "Enter a semicolon-separated list of signal names or expressions", "Signals:", 1166 SIGNAL_MENU_HELP+name_hint+qubx.pyenv.PYTHON_HELP, self.help_bind, self.help_write_test) 1167 response = dlg.run(expr) 1168 expr = dlg.expr 1169 dlg.destroy() 1170 if response == gtk.RESPONSE_ACCEPT: 1171 return expr 1172 else: 1173 return None
1174 - def help_accept(self, expr):
1175 signal_vals = dict([('x', 10.0), ('t', 10.0)] + [(SafeName(item.label), i+1) for i, item in enumerate(self.items)]) 1176 make_test_signals(expr)(**signal_vals) 1177 return expr
1178 - def help_bind(self, val):
1180 - def help_write_test(self, val):
1181 args = [('x', 10.0), ('t', 10.0)] + [(SafeName(item.label), i+1) for i, item in enumerate(self.items)] 1182 return "test_signals(%s)" % (", ".join("%s=%s" % (arg, repr(val)) for arg, val in args))
1183   
1184  def make_test_signals(expr): 
1185      def test_signals(**signal_vals): 
1186          return "; ".join(tuple(str(float(eval(parsed.expr, qubx.pyenv.env.globals, signal_vals))) for parsed in parse_signals_expr(expr))) 
1187      return test_signals 
1188   
1189  SIGNAL_MENU_HELP = """Enter a list of signals, separated by semicolons ";" 
1190  For example: 
1191      Current; Voltage 
1192  Instead of a signal name, you can define a function: 
1193      I=Current; G=Current/((Voltage - (-60)) * 1e-3); V=Voltage 
1194  Or mix and match: 
1195      Current; G=Current/((Voltage - (-60)) * 1e-3); Voltage 
1196   
1197  """ 
1198   
1199  MAX_SIGNAL_NAME_LEN = 16 
1200   
1201  def SafeName(nm): 
1202      """Returns nm, modified to be a legal python identifier.""" 
1203      alnum = re.sub(r"[^a-zA-Z0-9_]", '_', nm) 
1204      safe = re.sub(r"(^[0-9])", r"_\1", alnum) 
1205      if safe == 'x': safe = '_x_' 
1206      if safe == 't': safe = '_t_' 
1207      return safe[:MAX_SIGNAL_NAME_LEN] 
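
For orientation (illustrative values, not part of the module source), the substitutions above map arbitrary signal labels onto identifiers usable in custom signal expressions; the results below follow directly from the regular expressions in SafeName:

    >>> SafeName("Patch #1 Current")
    'Patch__1_Current'
    >>> SafeName("2nd Voltage")    # a leading digit gets a '_' prefix
    '_2nd_Voltage'
    >>> SafeName("t")              # 'x' and 't' are reserved for the time argument in custom expressions
    '_t_'
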
1208   
1209  parse_signal_expr_ = acceptF(static=[], custom=True) 
1210  parse_signal_re = re.compile(r'([^ =]+) *= *(.*)') 
1211  def parse_signal_expr(expr): 
1212      named = parse_signal_re.match(expr) 
1213      if named: 
1214          return Anon(name=named.group(1), expr=named.group(2), func=parse_signal_expr_(named.group(2))) 
1215      else: 
1216          return Anon(name=expr, expr=expr, func=parse_signal_expr_(expr)) 
1217  def parse_signals_expr(expr): 
1218      return [parse_signal_expr(sub) for sub in re.split(r'; *', expr)]  # "customs" 
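
As an aside (not part of the module source), parse_signals_expr() splits the semicolon-separated list described in SIGNAL_MENU_HELP and keeps both a display name and an expression for each entry. A sketch of the result, assuming the qubx.accept expression compiler accepts the expression and ignoring the compiled func field:

    >>> customs = parse_signals_expr("Current; G=Current/((Voltage - (-60)) * 1e-3)")
    >>> [(c.name, c.expr) for c in customs]
    [('Current', 'Current'), ('G', 'Current/((Voltage - (-60)) * 1e-3)')]
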
1219   
1220  def build_custom_signal_segs(customs, segs_of_signal, signals, scope): 
1221      signal_of_seg = transpose_arrays(segs_of_signal) 
1222      names = [scope[i, 'Name'] for i in xrange(1, len(scope))] 
1223      signal_ixs = dict([(SafeName(name), i) for i, name in enumerate(names)]) 
1224      new_segs_of_signal = [ [] for custom in customs ] 
1225      for iseg, segsigs in enumerate(signal_of_seg): 
1226          for c, custom in enumerate(customs): 
1227              arg_segs = [] 
1228              for arg in custom.func.args: 
1229                  if arg in ('x', 't'): 
1230                      arg_segs.append(SourceSegTime(segsigs[0])) 
1231                  elif arg in signal_ixs: 
1232                      arg_segs.append(segsigs[signal_ixs[arg]]) 
1233                  else: 
1234                      raise NameError('"%s" is unknown in Extract custom signal expression' % arg) 
1235              arg_funcs = [seg.get_samples for seg in arg_segs]  # memoize_last_n(seg.get_samples) 
1236              new_segs_of_signal[c].append(SourceSegFunc(segsigs[0], arg_funcs, custom.func)) 
1237      preserve_units = collections.defaultdict(lambda: "", [(signal.Name, signal.Units) for signal in signals]) 
1238      for i in reversed(xrange(len(signals))): 
1239          signals.remove(i) 
1240          scope.remove(i+1) 
1241      for c, custom in enumerate(customs): 
1242          entry = {'Name' : custom.name, 
1243                   'Units' : preserve_units[custom.expr]} 
1244          signals.append(entry) 
1245          scope.append(entry) 
1246      return new_segs_of_signal 
1247   
1248   
1249  def Extract(fname=None, silent=True, **kw): 
1250      """Creates a new data file from (parts of) the currently showing data. 
1251      To extract some other selection of data (non-graphically defined), build a list of L{qubx.data_types.SourceSeg} 
1252      and use L{qubx.data_types.Save}. 
1253   
1254      Keyword options: 
1255   
1256        * fname: overrides path_expr with a specific path/to/file.ext 
1257        * silent (=True): doesn't graphically prompt the user for options 
1258        * source - SOURCE_DATASOURCE, SOURCE_LIST, SOURCE_OTHER, or SOURCE_FILEGROUP 
1259        * other_seg - OTHER_SEG_ALL, OTHER_SEG_ONSCREEN, or OTHER_SEG_GROUP 
1260        * other_seg_group 
1261        * other_sel - OTHER_SEL_ALL or OTHER_SEL_ONSCREEN 
1262        * limit_to_group - True to filter by Group column 
1263        * limit_group - Group index for filtering (comes from List if DATASOURCE_LIST, else from Segments table) 
1264        * signals - SIGNALS_ALL, SIGNALS_SOURCE, or SIGNALS_CUSTOM 
1265        * signals_custom 
1266        * no_filter: False to use "Data Source" filter settings 
1267        * subtract_fit - SUBTRACT_FIT_NONE, SUBTRACT_FIT_CURVE, or SUBTRACT_FIT_NODES 
1268        * resample 
1269        * resample_kHz 
1270        * process - PROC_NONE, PROC_AVG, PROC_ADD, or PROC_SUB 
1271        * process_sub_trace 
1272        * process_all 
1273        * path_expr 
1274        * open_after 
1275      """ 
1276      if kw: 
1277          qubx.settings.SettingsMgr['Extract'].setPropertiesDict(**kw) 
1278      qubx.pyenv.env.globals['QubX'].Tools.Extract.click_save(fname=fname, silent=silent) 
1279   
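
Scripting note (illustrative, not part of the module source): Extract() is the name recorded via qubx.pyenv.env.OnScriptable, so it can be called from the QUB Express scripting environment with the keyword options listed in the docstring above. A minimal sketch, assuming a data file is already open and using a hypothetical output path; constant and keyword names are the ones defined in this module:

    import qubx.extract
    qubx.extract.Extract(fname="/tmp/patch01_ext.qdf",   # hypothetical output path
                         silent=True,                     # no confirmation dialogs
                         source=qubx.extract.SOURCE_OTHER,
                         other_seg=qubx.extract.OTHER_SEG_ONSCREEN,
                         other_sel=qubx.extract.OTHER_SEL_ONSCREEN,
                         resample=True, resample_kHz=5.0,
                         open_after=False)

The keyword values are stored through qubx.settings.SettingsMgr['Extract'] before click_save() runs, so they become the Extract panel's current settings for subsequent extractions.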