Package fabio :: Module edfimage
[hide private]
[frames] | [no frames]

Source Code for Module fabio.edfimage

  1  #!/usr/bin/env python 
  2  # -*- coding: utf8 -*- 
  3  """ 
  4   
  5  License: GPLv2+ 
  6   
  7  Authors: Henning O. Sorensen & Erik Knudsen 
  8           Center for Fundamental Research: Metal Structures in Four Dimensions 
  9           Risoe National Laboratory 
 10           Frederiksborgvej 399 
 11           DK-4000 Roskilde 
 12           email:erik.knudsen@risoe.dk 
 13   
 14          + Jon Wright, ESRF 
 15           
 16  2011-02-11: Mostly rewritten by Jérôme Kieffer (Jerome.Kieffer@esrf.eu)  
 17              European Synchrotron Radiation Facility 
 18              Grenoble (France) 
 19   
 20  """ 
 21   
 22  import numpy as np, logging 
 23  from fabioimage import fabioimage 
 24  import gzip, bz2, zlib, os, StringIO 
 25   
 26   
# EDF headers are padded with spaces to a multiple of this many bytes
BLOCKSIZE = 512

# Mapping from the EDF "DataType" header value to the matching numpy scalar type.
# Several spellings map to the same type because different writers (edf spec,
# fit2d, ...) use different names.
DATA_TYPES = {  "SignedByte"    :  np.int8,
                "Signed8"       :  np.int8,
                "UnsignedByte"  :  np.uint8,
                "Unsigned8"     :  np.uint8,
                "SignedShort"   :  np.int16,
                "Signed16"      :  np.int16,
                "UnsignedShort" :  np.uint16,
                "Unsigned16"    :  np.uint16,
                "UnsignedShortInteger" : np.uint16,
                "SignedInteger" :  np.int32,
                "Signed32"      :  np.int32,
                "UnsignedInteger":  np.uint32,
                "Unsigned32"    :  np.uint32,
                "SignedLong"    :  np.int32,
                "UnsignedLong"  :  np.uint32,
                "Signed64"      :  np.int64,
                "Unsigned64"    :  np.uint64,
                "FloatValue"    :  np.float32,
                "FLOATVALUE"    :  np.float32,
                "FLOAT"         :  np.float32, # fit2d
                "Float"         :  np.float32, # fit2d
                "FloatIEEE32"   :  np.float32,
                "Float32"       :  np.float32,
                "Double"        :  np.float64,
                "DoubleValue"   :  np.float64,
                "FloatIEEE64"   :  np.float64,
                "DoubleIEEE64"  :  np.float64
                }

# Reverse mapping used when writing: numpy dtype name -> canonical EDF DataType
NUMPY_EDF_DTYPE = {"int8"       :"SignedByte",
                   "int16"      :"SignedShort",
                   "int32"      :"SignedInteger",
                   "int64"      :"Signed64",
                   "uint8"      :"UnsignedByte",
                   "uint16"     :"UnsignedShort",
                   "uint32"     :"UnsignedInteger",
                   "uint64"     :"Unsigned64",
                   "float32"    :"FloatValue",
                   "float64"    :"DoubleValue"
             }

# Header keys (upper-cased) that a well-formed EDF frame is expected to carry;
# a frame missing one of these is only reported, not rejected.
MINIMUM_KEYS = ['HEADERID',
                'IMAGE',
                'BYTEORDER',
                'DATATYPE',
                'DIM_1',
                'DIM_2',
                'SIZE'] # Size is thought to be essential for writing at least

DEFAULT_VALUES = {
                  # I do not define default values as they will be calculated at write time
                  # JK20110415
                  }
class Frame(object):
    """
    A class representing a single frame in an EDF file.

    A frame bundles one binary data block with its ascii header.  The data
    block is kept compressed/raw in C{self.rawData} until first access through
    the C{data} property, which decompresses, byteswaps and reshapes it.
    """
    def __init__(self, data=None, header=None, header_keys=None, number=None):
        """
        @param data: dataset of the frame (or None, to be read later)
        @param header: dict of header key -> value
        @param header_keys: ordered list of the keys in header
        @param number: frame index inside the EDF file (defaults to 0)
        """
        if header is None:
            self.header = {}
        else:
            self.header = dict(header)

        if header_keys is None:
            # list() keeps py2 behaviour (dict.keys() returned a list)
            self.header_keys = list(self.header.keys())
        else:
            self.header_keys = header_keys[:]
            # drop keys advertised in header_keys but absent from the header
            for key in header_keys:
                if key not in self.header:
                    logging.warning("Header key %s, in header_keys is not in header dictionary, popping it" % key)
                    self.header_keys.remove(key)

        # capsHeader maps UPPER-case key -> actual key, for case-insensitive lookup
        self.capsHeader = {}
        for key in self.header_keys:
            try:
                self.capsHeader[key.upper()] = key
            except AttributeError:
                logging.warning("Header key %s is not a string" % key)

        self.rawData = None      # raw (possibly compressed) binary blob
        self._data = data        # decoded numpy array, lazily built
        self.dims = []           # [dim1, dim2, ...] as declared in the header
        self.dim1 = 0
        self.dim2 = 0
        self.size = None         # size in bytes of the binary blob
        self.bpp = None          # bytes per pixel
        self.bytecode = None     # numpy scalar type of the data
        if (number is not None) and isinstance(number, int):
            self.iFrame = number
        else:
            self.iFrame = 0

    def parseheader(self, block):
        """
        Parse the header in some EDF format from an already open file

        @param block: string representing the header block
        @type block: string, should be full ascii
        @return: size of the binary blob
        """
        # reset values ...
        self.header = {}
        self.capsHeader = {}
        self.header_keys = []
        self.size = None
        calcsize = 1
        self.dims = []

        # header lines look like "key = value ;" and are separated by ";"
        for line in block.split(';'):
            if '=' in line:
                key, val = line.split('=', 1)
                key = key.strip()
                self.header[key] = val.strip()
                self.capsHeader[key.upper()] = key
                self.header_keys.append(key)

        # Compute image size
        if "SIZE" in self.capsHeader:
            try:
                self.size = int(self.header[self.capsHeader["SIZE"]])
            except ValueError:
                logging.warning("Unable to convert to integer : %s %s " % (self.capsHeader["SIZE"], self.header[self.capsHeader["SIZE"]]))
        if "DIM_1" in self.capsHeader:
            try:
                dim1 = int(self.header[self.capsHeader['DIM_1']])
            except ValueError:
                logging.error("Unable to convert to integer Dim_1: %s %s" % (self.capsHeader["DIM_1"], self.header[self.capsHeader["DIM_1"]]))
            else:
                calcsize *= dim1
                self.dims.append(dim1)
        else:
            logging.error("No Dim_1 in headers !!!")
        if "DIM_2" in self.capsHeader:
            try:
                dim2 = int(self.header[self.capsHeader['DIM_2']])
            except ValueError:
                # was mislabelled "Dim_3" in the original message
                logging.error("Unable to convert to integer Dim_2: %s %s" % (self.capsHeader["DIM_2"], self.header[self.capsHeader["DIM_2"]]))
            else:
                calcsize *= dim2
                self.dims.append(dim2)
        else:
            logging.error("No Dim_2 in headers !!!")
        # collect any further dimensions Dim_3, Dim_4, ... until one is missing
        iDim = 3
        while iDim is not None:
            strDim = "DIM_%i" % iDim
            if strDim in self.capsHeader:
                try:
                    dim3 = int(self.header[self.capsHeader[strDim]])
                except ValueError:
                    logging.error("Unable to convert to integer %s: %s %s"
                                  % (strDim, self.capsHeader[strDim], self.header[self.capsHeader[strDim]]))
                    dim3 = None
                    iDim = None
                else:
                    calcsize *= dim3
                    self.dims.append(dim3)
                    iDim += 1
            else:
                logging.debug("No Dim_3 -> it is a 2D image")
                iDim = None

        if self.bytecode is None:
            if "DATATYPE" in self.capsHeader:
                self.bytecode = DATA_TYPES[self.header[self.capsHeader['DATATYPE']]]
            else:
                self.bytecode = np.uint16
                logging.warning("Defaulting type to uint16")
        # bytes per pixel straight from the dtype (was len(np.array(0, t).tostring()))
        self.bpp = np.dtype(self.bytecode).itemsize
        calcsize *= self.bpp
        if (self.size is None):
            self.size = calcsize
        elif (self.size != calcsize):
            if ("COMPRESSION" in self.capsHeader) and (self.header[self.capsHeader['COMPRESSION']].upper().startswith("NO")):
                logging.info("Mismatch between the expected size %s and the calculated one %s" % (self.size, calcsize))
                self.size = calcsize

        # maintain the dim1, dim2, ... attributes
        # (was: exec "self.dim%i=%i" -- setattr is safer and py3-compatible)
        for i, n in enumerate(self.dims):
            setattr(self, "dim%i" % (i + 1), n)

        return self.size

    def swap_needed(self):
        """
        Decide if we need to byteswap

        @return: True when the stored byte order differs from the native one
                 and the item size is multi-byte; False when they match.
                 NOTE(review): returns None (falsy) for an unrecognised
                 ByteOrder value -- confirm this is intended.
        """
        if ('Low' in self.header[self.capsHeader['BYTEORDER']] and np.little_endian) or \
           ('High' in self.header[self.capsHeader['BYTEORDER']] and not np.little_endian):
            return False
        if ('High' in self.header[self.capsHeader['BYTEORDER']] and np.little_endian) or \
           ('Low' in self.header[self.capsHeader['BYTEORDER']] and not np.little_endian):
            if self.bpp in [2, 4, 8]:
                return True
            else:
                return False

    def getData(self):
        """
        Unpack a binary blob according to the specification given in the header

        @return: dataset as numpy.ndarray
        """
        data = None
        if self._data is not None:
            # already decoded
            data = self._data
        elif self.rawData is None:
            # nothing to decode
            data = self._data
        else:
            if self.bytecode is None:
                if "DATATYPE" in self.capsHeader:
                    self.bytecode = DATA_TYPES[self.header[self.capsHeader["DATATYPE"]]]
                else:
                    self.bytecode = np.uint16
            # numpy stores the fastest dimension last -> reverse the EDF order
            dims = self.dims[:]
            dims.reverse()

            if ("COMPRESSION" in self.capsHeader):
                compression = self.header[self.capsHeader["COMPRESSION"]].upper()
                uncompressed_size = self.bpp
                for i in dims:
                    uncompressed_size *= i
                if "OFFSET" in compression:
                    try:
                        import byte_offset
                    except ImportError:
                        logging.error("Unimplemented compression scheme: %s" % compression)
                    else:
                        myData = byte_offset.analyseCython(self.rawData, size=uncompressed_size)
                        rawData = myData.astype(self.bytecode).tobytes()
                        self.size = uncompressed_size
                elif compression == "NONE":
                    rawData = self.rawData
                elif "GZIP" in compression:
                    fileobj = StringIO.StringIO(self.rawData)
                    try:
                        rawData = gzip.GzipFile(fileobj=fileobj).read()
                    except IOError:
                        logging.warning("Encounter the python-gzip bug with trailing garbage, trying subprocess gzip")
                        try:
                            # This is as an ugly hack against a bug in Python gzip
                            import subprocess
                            sub = subprocess.Popen(["gzip", "-d", "-f"], stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
                            rawData, err = sub.communicate(input=self.rawData)
                            logging.debug("Gzip subprocess ended with %s err= %s; I got %s bytes back" % (sub.wait(), err, len(rawData)))
                        except Exception:  # was a bare except
                            logging.warning("Unable to use the subprocess gzip. is gzip available? ")
                            # last resort: strip trailing garbage byte by byte
                            for i in range(1, 513):
                                try:
                                    fileobj = StringIO.StringIO(self.rawData[:-i])
                                    rawData = gzip.GzipFile(fileobj=fileobj).read()
                                except IOError:
                                    logging.debug("trying with %s bytes less, doesn't work" % i)
                                else:
                                    break
                            else:
                                logging.error("I am totally unable to read this gzipped compressed data block, giving up")
                    self.size = uncompressed_size
                elif "BZ" in compression:
                    rawData = bz2.decompress(self.rawData)
                    self.size = uncompressed_size
                elif "Z" in compression:
                    rawData = zlib.decompress(self.rawData)
                    self.size = uncompressed_size
                else:
                    logging.warning("Unknown compression scheme %s" % compression)
                    rawData = self.rawData
            else:
                rawData = self.rawData

            expected = self.size
            obtained = len(rawData)
            if expected > obtained:
                logging.error("Data stream is incomplete: %s < expected %s bytes" % (obtained, expected))
                rawData += b"\x00" * (expected - obtained)
            elif expected < len(rawData):
                logging.info("Data stream contains trailing junk : %s > expected %s bytes" % (obtained, expected))
                rawData = rawData[:expected]
            # frombuffer replaces the deprecated/removed np.fromstring;
            # byteswap()/copy() both yield a fresh writable array, matching
            # the old fromstring behaviour
            data = np.frombuffer(rawData, self.bytecode)
            if self.swap_needed():
                data = data.byteswap()
            else:
                data = data.copy()
            data = data.reshape(tuple(dims))
            self._data = data
            self.rawData = None  # no need to keep garbage in memory
            self.bytecode = data.dtype.type
        return data

    def setData(self, npa=None):
        """Setter for data in edf frame"""
        self._data = npa

    data = property(getData, setData, "property: (edf)frame.data, uncompress the datablock when needed")

    def getEdfBlock(self, force_type=None):
        """
        @param force_type: type of the dataset to be enforced like "float64" or "uint16"
        @type force_type: string or numpy.dtype
        @return: ascii header block
        @rtype: python string with the concatenation of the ascii header and the binary data block
        """
        if force_type is not None:
            data = self.data.astype(force_type)
        else:
            data = self.data

        # make sure every header key is registered in capsHeader / header_keys
        for key in self.header:
            KEY = key.upper()
            if KEY not in self.capsHeader:
                self.capsHeader[KEY] = key
            if key not in self.header_keys:
                self.header_keys.append(key)

        # work on copies so the frame itself is not modified
        header = self.header.copy()
        header_keys = self.header_keys[:]
        capsHeader = self.capsHeader.copy()

        listHeader = ["{\n"]
        # First of all clean up the headers: computed keys are re-generated below
        for i in capsHeader:
            if "DIM_" in i:
                header.pop(capsHeader[i])
                header_keys.remove(capsHeader[i])
        for KEY in ["SIZE", "EDF_BINARYSIZE", "EDF_HEADERSIZE", "BYTEORDER", "DATATYPE", "HEADERID", "IMAGE"]:
            if KEY in capsHeader:
                header.pop(capsHeader[KEY])
                header_keys.remove(capsHeader[KEY])
        if "EDF_DATABLOCKID" in capsHeader:
            header_keys.remove(capsHeader["EDF_DATABLOCKID"])
            # but do not remove the value from dict, instead reset the key ...
            if capsHeader["EDF_DATABLOCKID"] != "EDF_DataBlockID":
                header["EDF_DataBlockID"] = header.pop(capsHeader["EDF_DATABLOCKID"])
                capsHeader["EDF_DATABLOCKID"] = "EDF_DataBlockID"

        # Then update static headers freshly deleted
        header_keys.insert(0, "Size")
        header["Size"] = len(data.tobytes())
        header_keys.insert(0, "HeaderID")
        header["HeaderID"] = "EH:%06d:000000:000000" % self.iFrame
        header_keys.insert(0, "Image")
        header["Image"] = str(self.iFrame)

        # EDF counts dimensions fastest-first, numpy shape is slowest-first
        dims = list(data.shape)
        nbdim = len(dims)
        for i in dims:
            key = "Dim_%i" % nbdim
            header[key] = i
            header_keys.insert(0, key)
            nbdim -= 1
        header_keys.insert(0, "DataType")
        header["DataType"] = NUMPY_EDF_DTYPE[str(np.dtype(data.dtype))]
        header_keys.insert(0, "ByteOrder")
        if np.little_endian:
            header["ByteOrder"] = "LowByteFirst"
        else:
            header["ByteOrder"] = "HighByteFirst"
        # 100 bytes of slack for the keys inserted below, then round up to a block
        approxHeaderSize = 100
        for key in header:
            approxHeaderSize += 7 + len(key) + len(str(header[key]))
        approxHeaderSize = BLOCKSIZE * (approxHeaderSize // BLOCKSIZE + 1)
        header_keys.insert(0, "EDF_HeaderSize")
        # FIX: approxHeaderSize is already rounded up to a BLOCKSIZE multiple;
        # the old code rounded a second time and advertised one block too many
        header["EDF_HeaderSize"] = str(approxHeaderSize)
        header_keys.insert(0, "EDF_BinarySize")
        header["EDF_BinarySize"] = len(data.tobytes())
        header_keys.insert(0, "EDF_DataBlockID")
        if not "EDF_DataBlockID" in header:
            header["EDF_DataBlockID"] = "%i.Image.Psd" % self.iFrame
        preciseSize = 4  # 2 before {\n 2 after }\n
        for key in header_keys:
            line = str("%s = %s ;\n" % (key, header[key]))
            preciseSize += len(line)
            listHeader.append(line)
        if preciseSize > approxHeaderSize:
            # the estimate was too small: patch the EDF_HeaderSize line in place
            logging.error("I expected the header block only at %s in fact it is %s" % (approxHeaderSize, preciseSize))
            for idx, line in enumerate(listHeader[:]):
                if line.startswith("EDF_HeaderSize"):
                    headerSize = BLOCKSIZE * (preciseSize // BLOCKSIZE + 1)
                    newline = "EDF_HeaderSize = %s ;\n" % headerSize
                    delta = len(newline) - len(line)
                    # the longer value may itself push us over a block boundary
                    if (preciseSize // BLOCKSIZE) != ((preciseSize + delta) // BLOCKSIZE):
                        headerSize = BLOCKSIZE * ((preciseSize + delta) // BLOCKSIZE + 1)
                        newline = "EDF_HeaderSize = %s ;\n" % headerSize
                    preciseSize = preciseSize + delta
                    listHeader[idx] = newline
                    break
        else:
            headerSize = approxHeaderSize
        # pad with spaces so header + "}\n" exactly fills headerSize bytes
        listHeader.append(" " * (headerSize - preciseSize) + "}\n")
        return "".join(listHeader) + data.tobytes()
419
class edfimage(fabioimage):
    """ Read and try to write the ESRF edf data format """

    def __init__(self, data=None, header=None, header_keys=None, frames=None):
        """
        @param data: numpy array holding the (first) frame, or None
        @param header: dict of header key -> value for the first frame
        @param header_keys: ordered list of the keys in header
        @param frames: pre-built list of Frame instances (overrides data/header)
        """
        self.currentframe = 0
        try:
            dim = len(data.shape)
        except (AttributeError, TypeError):  # was a bare except
            # data is None or not array-like (no usable .shape)
            data = None
            dim = 0
        if dim == 2:
            fabioimage.__init__(self, data, header)
        elif dim == 1:
            # NOTE(review): (0, len(data)) looks like it was meant to be
            # (1, len(data)) -- confirm before changing
            data.shape = (0, len(data))
            fabioimage.__init__(self, data, header)
        elif dim == 3:
            fabioimage.__init__(self, data[0, :, :], header)
        elif dim == 4:
            fabioimage.__init__(self, data[0, 0, :, :], header)
        elif dim == 5:
            fabioimage.__init__(self, data[0, 0, 0, :, :], header)
        # NOTE(review): when data is None (dim == 0) the base-class __init__
        # is never called, leaving fabioimage attributes unset -- confirm intended

        if frames is None:
            frame = Frame(data=data, header=header,
                          header_keys=header_keys,
                          number=self.currentframe)
            self.__frames = [frame]
        else:
            self.__frames = frames

    @staticmethod
    def _readHeaderBlock(infile):
        """
        Read in a header in some EDF format from an already open file

        @param infile: file object open in read mode
        @return: string between "{" and "}" (or None if no header was found)
        """
        block = infile.read(BLOCKSIZE)
        if len(block) < BLOCKSIZE:
            logging.debug("Under-short header: only %i bytes in %s" % (len(block), infile.name))
            return
        if (block.find("{") < 0):
            # This does not look like an edf file
            logging.warning("no opening {. Corrupt header of EDF file %s" % infile.name)
            return
        # headers are whole BLOCKSIZE multiples; keep reading until "}" shows up
        while '}' not in block:
            block = block + infile.read(BLOCKSIZE)
            if len(block) > BLOCKSIZE * 20:
                logging.warning("Runaway header in EDF file")
                return
        start = block.find("{") + 1
        end = block.find("}")

        # Now it is essential to go to the start of the binary part
        if block[end: end + 3] == "}\r\n":
            offset = end + 3 - len(block)
        elif block[end: end + 2] == "}\n":
            offset = end + 2 - len(block)
        else:
            logging.error("Unable to locate start of the binary section")
            offset = None
        if offset is not None:
            try:
                infile.seek(offset, os.SEEK_CUR)
            except TypeError:  # JK20110407 bugfix specific to MacOSX
                pos = infile.tell()
                infile.seek(pos + offset)
        return block[start:end]

    def _readheader(self, infile):
        """
        Read all headers in a file and populate self.header
        data is not yet populated
        @type infile: file object open in read mode
        """
        self.__frames = []

        bContinue = True
        while bContinue:
            block = self._readHeaderBlock(infile)
            if block is None:
                bContinue = False
                break
            frame = Frame(number=self.nframes)
            self.__frames.append(frame)
            size = frame.parseheader(block)
            # keep the raw blob; it is decoded lazily by Frame.getData
            frame.rawData = infile.read(size)
            if len(frame.rawData) != size:
                logging.warning("Non complete datablock: got %s, expected %s" % (len(frame.rawData), size))
                bContinue = False
                break

        # report (but tolerate) frames missing mandatory keys
        for i, frame in enumerate(self.__frames):
            missing = []
            for item in MINIMUM_KEYS:
                if item not in frame.capsHeader:
                    missing.append(item)
            if len(missing) > 0:
                logging.info("EDF file %s frame %i misses mandatory keys: %s " % (self.filename, i, " ".join(missing)))

        self.currentframe = 0

    def read(self, fname):
        """
        Read in header into self.header and
        the data into self.data

        @param fname: file name or path to read
        @return: self
        """
        self.resetvals()
        self.filename = fname

        infile = self._open(fname, "rb")
        self._readheader(infile)
        if self.data is None:
            self.data = self.unpack()
        self.resetvals()
        # ensure the PIL image is reset
        self.pilimage = None
        return self

    def swap_needed(self):
        """
        Decide if we need to byteswap

        @return: True/False for the current frame's ByteOrder vs the native
                 endianness; delegates to the same fields as Frame.swap_needed
        """
        if ('Low' in self.header[self.capsHeader['BYTEORDER']] and np.little_endian) or \
           ('High' in self.header[self.capsHeader['BYTEORDER']] and not np.little_endian):
            return False
        if ('High' in self.header[self.capsHeader['BYTEORDER']] and np.little_endian) or \
           ('Low' in self.header[self.capsHeader['BYTEORDER']] and not np.little_endian):
            if self.bpp in [2, 4, 8]:
                return True
            else:
                return False

    def unpack(self):
        """
        Unpack a binary blob according to the specification given in the header and return the dataset

        @return: dataset as numpy.ndarray
        """
        return self.__frames[self.currentframe].getData()

    def getframe(self, num):
        """ returns the file numbered 'num' in the series as a fabioimage """
        newImage = None
        if self.nframes == 1:
            logging.debug("Single frame EDF; having fabioimage default behavour: %s" % num)
            newImage = fabioimage.getframe(self, num)
        elif 0 <= num < self.nframes:  # was "num in xrange(...)"
            logging.debug("Multi frame EDF; having edfimage specific behavour: %s/%s" % (num, self.nframes))
            newImage = edfimage(frames=self.__frames)
            newImage.currentframe = num
            newImage.filename = self.filename
        else:
            txt = "Cannot access frame: %s/%s" % (num, self.nframes)
            logging.error(txt)
            raise ValueError("edfimage.getframe:" + txt)
        return newImage

    def previous(self):
        """ returns the previous file in the series as a fabioimage """
        newImage = None
        if self.nframes == 1:
            newImage = fabioimage.previous(self)
        else:
            newFrameId = self.currentframe - 1
            newImage = self.getframe(newFrameId)
        return newImage

    def next(self):
        """ returns the next file in the series as a fabioimage """
        newImage = None
        if self.nframes == 1:
            newImage = fabioimage.next(self)
        else:
            newFrameId = self.currentframe + 1
            newImage = self.getframe(newFrameId)
        return newImage

    def write(self, fname, force_type=None):
        """
        Try to write a file
        check we can write zipped also
        mimics that fabian was writing uint16 (we sometimes want floats)

        @param force_type: can be numpy.uint16 or simply "float"
        @return: None
        """
        outfile = self._open(fname, mode="wb")
        for i, frame in enumerate(self.__frames):
            frame.iFrame = i
            outfile.write(frame.getEdfBlock(force_type=force_type))
        outfile.close()

    def appendFrame(self, frame=None, data=None, header=None):
        """
        Method used add a frame to an EDF file
        @param frame: frame to append to edf image
        @type frame: instance of Frame
        @return: None
        """
        if isinstance(frame, Frame):
            self.__frames.append(frame)
        else:
            self.__frames.append(Frame(data, header))

    def deleteFrame(self, frameNb=None):
        """
        Method used to remove a frame from an EDF image. by default the last one is removed.
        @param frameNb: frame number to remove, by default the last.
        @type frameNb: integer
        @return: None
        """
        if frameNb is None:
            self.__frames.pop()
        else:
            self.__frames.pop(frameNb)

    ############################################################################
    # Properties definition for header, data, header_keys and capsHeader
    ############################################################################
    def getNbFrames(self):
        """
        Getter for number of frames
        """
        return len(self.__frames)

    def setNbFrames(self, val):
        """
        Setter for number of frames ... should do nothing. Here just to avoid bugs
        """
        if val != len(self.__frames):
            logging.warning("trying to set the number of frames ")

    nframes = property(getNbFrames, setNbFrames, "property: number of frames in EDF file")

    def getHeader(self):
        """
        Getter for the headers. used by the property header,
        """
        return self.__frames[self.currentframe].header

    def setHeader(self, _dictHeader):
        """
        Enforces the propagation of the header to the list of frames
        """
        try:
            self.__frames[self.currentframe].header = _dictHeader
        except AttributeError:
            self.__frames = [Frame(header=_dictHeader)]
        except IndexError:
            # NOTE(review): IndexError implies currentframe >= len(frames), so
            # this condition is always False and nothing is appended -- confirm
            if self.currentframe < len(self.__frames):
                self.__frames.append(Frame(header=_dictHeader))

    def delHeader(self):
        """
        Deleter for edf header
        """
        self.__frames[self.currentframe].header = {}

    header = property(getHeader, setHeader, delHeader, "property: header of EDF file")

    def getHeaderKeys(self):
        """
        Getter for edf header_keys
        """
        return self.__frames[self.currentframe].header_keys

    def setHeaderKeys(self, _listtHeader):
        """
        Enforces the propagation of the header_keys to the list of frames
        @param _listtHeader: list of the (ordered) keys in the header
        @type _listtHeader: python list
        """
        try:
            self.__frames[self.currentframe].header_keys = _listtHeader
        except AttributeError:
            self.__frames = [Frame(header_keys=_listtHeader)]
        except IndexError:
            # NOTE(review): same dead condition as in setHeader -- confirm
            if self.currentframe < len(self.__frames):
                self.__frames.append(Frame(header_keys=_listtHeader))

    def delHeaderKeys(self):
        """
        Deleter for edf header_keys
        """
        self.__frames[self.currentframe].header_keys = []

    header_keys = property(getHeaderKeys, setHeaderKeys, delHeaderKeys, "property: header_keys of EDF file")

    def getData(self):
        """
        getter for edf Data
        @return: data for current frame
        @rtype: numpy.ndarray
        """
        npaData = None
        try:
            npaData = self.__frames[self.currentframe].data
        except AttributeError:
            self.__frames = [Frame()]
            npaData = self.__frames[self.currentframe].data
        except IndexError:
            if self.currentframe < len(self.__frames):
                self.__frames.append(Frame())
                npaData = self.__frames[self.currentframe].data
        return npaData

    def setData(self, _data):
        """
        Enforces the propagation of the data to the list of frames
        @param _data: numpy array representing data
        """
        try:
            self.__frames[self.currentframe].data = _data
        except AttributeError:
            self.__frames = [Frame(data=_data)]
        except IndexError:
            if self.currentframe < len(self.__frames):
                self.__frames.append(Frame(data=_data))

    def delData(self):
        """
        deleter for edf Data
        """
        self.__frames[self.currentframe].data = None

    data = property(getData, setData, delData, "property: data of EDF file")

    def getCapsHeader(self):
        """
        getter for edf headers keys in upper case
        @return: data for current frame
        @rtype: dict
        """
        return self.__frames[self.currentframe].capsHeader

    def setCapsHeader(self, _data):
        """
        Enforces the propagation of the header_keys to the list of frames
        @param _data: numpy array representing data
        """
        self.__frames[self.currentframe].capsHeader = _data

    def delCapsHeader(self):
        """
        deleter for edf capsHeader
        """
        self.__frames[self.currentframe].capsHeader = {}

    capsHeader = property(getCapsHeader, setCapsHeader, delCapsHeader, "property: capsHeader of EDF file, i.e. the keys of the header in UPPER case.")

    def getDim1(self):
        # dim1 of the current frame
        return self.__frames[self.currentframe].dim1

    def setDim1(self, _iVal):
        try:
            self.__frames[self.currentframe].dim1 = _iVal
        except AttributeError:
            self.__frames = [Frame()]
        except IndexError:
            if self.currentframe < len(self.__frames):
                self.__frames.append(Frame())
                self.__frames[self.currentframe].dim1 = _iVal

    dim1 = property(getDim1, setDim1)

    def getDim2(self):
        # dim2 of the current frame
        return self.__frames[self.currentframe].dim2

    def setDim2(self, _iVal):
        try:
            self.__frames[self.currentframe].dim2 = _iVal
        except AttributeError:
            self.__frames = [Frame()]
        except IndexError:
            if self.currentframe < len(self.__frames):
                self.__frames.append(Frame())
                self.__frames[self.currentframe].dim2 = _iVal

    dim2 = property(getDim2, setDim2)

    def getDims(self):
        # full dimension list of the current frame
        return self.__frames[self.currentframe].dims

    dims = property(getDims)

    def getByteCode(self):
        # numpy scalar type of the current frame
        return self.__frames[self.currentframe].bytecode

    def setByteCode(self, _iVal):
        try:
            self.__frames[self.currentframe].bytecode = _iVal
        except AttributeError:
            self.__frames = [Frame()]
        except IndexError:
            if self.currentframe < len(self.__frames):
                self.__frames.append(Frame())
                self.__frames[self.currentframe].bytecode = _iVal

    bytecode = property(getByteCode, setByteCode)

    def getBpp(self):
        # bytes per pixel of the current frame
        return self.__frames[self.currentframe].bpp

    def setBpp(self, _iVal):
        try:
            self.__frames[self.currentframe].bpp = _iVal
        except AttributeError:
            self.__frames = [Frame()]
        except IndexError:
            if self.currentframe < len(self.__frames):
                self.__frames.append(Frame())
                self.__frames[self.currentframe].bpp = _iVal

    bpp = property(getBpp, setBpp)
830