
Source Code for Module fabio.edfimage

#!/usr/bin/env python
# Automatically adapted for numpy.oldnumeric Oct 05, 2007 by alter_code1.py

"""
Authors: Henning O. Sorensen & Erik Knudsen
         Center for Fundamental Research: Metal Structures in Four Dimensions
         Risoe National Laboratory
         Frederiksborgvej 399
         DK-4000 Roskilde
         email: erik.knudsen@risoe.dk

         + Jon Wright, ESRF
"""

import numpy as np, logging

from fabio.fabioimage import fabioimage


DATA_TYPES = {  "SignedByte"     :  np.int8,
                "UnsignedByte"   :  np.uint8,
                "SignedShort"    :  np.int16,
                "UnsignedShort"  :  np.uint16,
                "UnsignedShortInteger" : np.uint16,
                "SignedInteger"  :  np.int32,
                "UnsignedInteger":  np.uint32,
                "SignedLong"     :  np.int32,
                "UnsignedLong"   :  np.uint32,
                "FloatValue"     :  np.float32,
                "FLOATVALUE"     :  np.float32,
                "FLOAT"          :  np.float32, # fit2d
                "Float"          :  np.float32, # fit2d
                "DoubleValue"    :  np.float64
                }

MINIMUM_KEYS = ['HeaderID',
                'Image',
                'ByteOrder',
                'DataType',
                'Dim_1',
                'Dim_2',
                'Size'] # Size is thought to be essential for writing at least

DEFAULT_VALUES = {"HeaderID":  "EH:000001:000000:000000",
                  "Image":     "1",
                  "ByteOrder": "LowByteFirst", # FIXME?
                  "DataType":  "FLOAT"
                  }



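# Illustrative only (not part of the original module): a minimal EDF header
# block of the kind _readheader() parses and write() emits -- 'key = value ;'
# entries enclosed in braces, e.g.
#
#   {
#   HeaderID       = EH:000001:000000:000000 ;
#   Image          = 1 ;
#   ByteOrder      = LowByteFirst ;
#   DataType       = UnsignedShort ;
#   Dim_1          = 2048 ;
#   Dim_2          = 2048 ;
#   Size           = 8388608 ;
#   }
#
# The dimensions and Size shown here are made-up example values.
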
class edfimage(fabioimage):
    """ Read and try to write the ESRF edf data format """

    def _readheader(self, infile):
        """
        Read in a header in some EDF format from an already open file

        TODO : test for minimal attributes?
        """
        BLOCKSIZE = 512
        block = infile.read(BLOCKSIZE)
        if block[:4].find("{") < 0:
            # This does not look like an edf file
            logging.warning("no opening {. Corrupt header of EDF file " +
                            str(infile.name))

        while '}' not in block:
            block = block + infile.read(BLOCKSIZE)
            if len(block) > BLOCKSIZE * 20:
                raise Exception("Runaway header in EDF file")
        start, end = block.find("{") + 1, block.find("}")
        for line in block[start:end].split(';'):
            if '=' in line:
                key, val = line.split('=', 1)
                # Users cannot type in significant whitespace
                key = key.rstrip().lstrip()
                self.header_keys.append(key)
                self.header[key] = val.lstrip().rstrip()
        missing = []
        for item in MINIMUM_KEYS:
            if item not in self.header_keys:
                missing.append(item)
        if len(missing) > 0:
            logging.debug("EDF file misses the keys " + " ".join(missing))

    def read(self, fname):
        """
        Read in the header into self.header and
        the data into self.data
        """
        self.header = {}
        self.resetvals()
        infile = self._open(fname, "rb")
        self._readheader(infile)
        # Compute image size
        try:
            self.dim1 = int(self.header['Dim_1'])
            self.dim2 = int(self.header['Dim_2'])
        except:
            raise Exception("EDF file " + str(fname) +
                            " is corrupt, cannot read it")
        try:
            bytecode = DATA_TYPES[self.header['DataType']]
        except KeyError:
            bytecode = np.uint16
            logging.warning("Defaulting type to uint16")
        self.bpp = len(np.array(0, bytecode).tostring())

        # Sorry - this was a safe way to read old ID11 imagepro edfs
        # assumes corrupted headers are shorter, they could be longer
        if self.header.has_key('Image') and self.header['Image'] not in ["0", "1"]:
            logging.warning("Could be a multi-image file")

        block = infile.read()
        expected_size = self.dim1 * self.dim2 * self.bpp

        if len(block) != expected_size:
            # The binary which has been read in does not match the size
            # expected. Two cases are known:
            ####    1 extra byte (\0) at the end of the header (ImagePro)
            ####    Padding to 512 bytes, image is at the beginning
            # These overlap in the case of an image of, eg:
            #       1024x1024-1 == 825x1271
            # To distinguish, we look for a header key:
            padded = False
            nbytesread = len(block)
            if self.header.has_key("EDF_BinarySize"):
                if int(self.header["EDF_BinarySize"]) == nbytesread:
                    padded = True
            if self.header.has_key("Size"):
                if int(self.header["Size"]) == nbytesread:
                    padded = True
            if padded:
                block = block[:expected_size]
                if self.header.has_key("EDF_BlockBoundary"):
                    chunksize = int(self.header["EDF_BlockBoundary"])
                else:
                    chunksize = 512
                if nbytesread % chunksize != 0:
                    # Unexpected padding
                    logging.warning("EDF file is strangely padded, size " +
                                    str(nbytesread) + " is not multiple of " +
                                    str(chunksize) + ", please verify your image")
            else:  # perhaps not padded, probably header overspill (\0)
                logging.warning("Read too many bytes, got " + str(len(block)) +
                                " want " + str(expected_size))
                block = block[-expected_size:]
        if len(block) < expected_size:
            # FIXME
            logging.warning("Padded")
        infile.close()

        # now read the data into the array
        try:
            self.data = np.reshape(
                np.fromstring(block, bytecode),
                [self.dim2, self.dim1])
        except:
            print len(block), bytecode, self.bpp, self.dim2, self.dim1
            raise IOError, \
                'Size spec in edf-header does not match size of image data field'
        self.bytecode = self.data.dtype.type
        swap = self.swap_needed()
        if swap:
            self.data = self.data.byteswap()
            # Removed verbose arg - use logging and levels
            logging.info('Byteswapped from ' + self.header['ByteOrder'])
        else:
            logging.info('using ' + self.header['ByteOrder'])
        self.resetvals()
        # ensure the PIL image is reset
        self.pilimage = None
        return self

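    # Worked example (illustrative, not from a real file): for Dim_1 = 2048,
    # Dim_2 = 2048 and DataType = UnsignedShort (bpp = 2), read() above expects
    # 2048 * 2048 * 2 = 8388608 bytes of binary data after the header; any
    # other length triggers the padding / header-overspill handling.
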
    def swap_needed(self):
        """
        Decide if we need to byteswap
        """
        if ('Low' in self.header['ByteOrder'] and np.little_endian) or \
           ('High' in self.header['ByteOrder'] and not np.little_endian):
            return False
        if ('High' in self.header['ByteOrder'] and np.little_endian) or \
           ('Low' in self.header['ByteOrder'] and not np.little_endian):
            if self.bpp in [2, 4, 8]:
                return True
            else:
                return False

    def _fixheader(self):
        """ put some rubbish in to allow writing """
        self.header['Dim_2'], self.header['Dim_1'] = self.data.shape
        self.bpp = len(self.data[0, 0].tostring())
        self.header['Size'] = len(self.data.tostring())
        for k in MINIMUM_KEYS:
            if k not in self.header:
                self.header[k] = DEFAULT_VALUES[k]

    def write(self, fname, force_type=None):
        """
        Try to write a file
        check we can write zipped also
        mimics that fabian was writing uint16 (we sometimes want floats)

        @param force_type: can be numpy.uint16 or simply "float"
        @return: None
        """
        self._fixheader()
        # Fabian was forcing uint16 - make this a default
        if force_type is not None:
            data = self.data.astype(force_type)
        else:
            data = self.data
        # Update header values to match the function-local data object
        bpp = len(data[0, 0].tostring())
        if bpp not in [1, 2, 4]:
            logging.info("edfimage.write: do you really want " + str(bpp) +
                         " bytes per pixel??")
        bytecode = data.dtype.type
        for name, code in DATA_TYPES.items():
            if code == bytecode:
                self.header['DataType'] = name
                break
        dim2, dim1 = data.shape
        self.header['Dim_1'] = dim1
        self.header['Dim_2'] = dim2
        self.header['Size'] = dim1 * dim2 * bpp
        # checks for consistency:
        if bpp != self.bpp:
            logging.debug("Array upcasted? now " + str(bpp) + " was " + str(self.bpp))
        if dim1 != self.dim1 or dim2 != self.dim2:
            logging.debug("corrupted image dimensions")
        outfile = self._open(fname, mode="wb")
        outfile.write('{\n')  # Header start
        i = 4                 # 2 bytes so far, 2 to come at the end
        for k in self.header_keys:
            # We remove the extra whitespace on the key names to avoid making
            # headers greater than 4 kb unless they already were too big
            out = (("%-14s = %s ;\n") % (k, self.header[k]))
            i = i + len(out)
            outfile.write(out)
        # if additional items are in the header just write them out in the
        # order they happen to be in
        for key, val in self.header.iteritems():
            if key in self.header_keys:
                continue
            out = (("%s = %s;\n") % (key, val))
            i = i + len(out)
            outfile.write(out)
        if i < 4096:
            out = (4096 - i) * ' '
        else:
            out = (1024 - i % 1024) * ' '  # Should make a total
            logging.warning("EDF Header is greater than 4096 bytes")
        outfile.write(out)
        i = i + len(out)
        assert i % 1024 == 0
        outfile.write('}\n')
        # print "Byteswapping?",
        if self.swap_needed():
            # print "did a swap"
            # data has "astype" from start of this function
            outfile.write(data.byteswap().tostring())
        else:
            # print "did not"
            outfile.write(data.tostring())
        outfile.close()
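
A minimal usage sketch (not part of the module source): the file name
"example.edf" is a hypothetical path, and the no-argument constructor is
assumed to be provided by the fabioimage base class. read() returns the
instance itself with .header and .data populated; write() re-emits the image,
optionally casting it as described in its docstring.

    import numpy as np
    from fabio.edfimage import edfimage

    im = edfimage().read("example.edf")         # read() returns the instance
    print im.dim1, im.dim2, im.data.dtype       # dimensions and dtype from the header
    im.write("copy.edf", force_type=np.uint16)  # optional cast, per the write() docstring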