
Source Code for Module fabio.fabioimage

#!/usr/bin/env python

"""

Authors: Henning O. Sorensen & Erik Knudsen
         Center for Fundamental Research: Metal Structures in Four Dimensions
         Risoe National Laboratory
         Frederiksborgvej 399
         DK-4000 Roskilde
         email:erik.knudsen@risoe.dk

         and Jon Wright, Jerome Kieffer: ESRF

"""

import numpy, math, os, gzip, bz2, StringIO
import Image
import fabioutils

class fabioStream(StringIO.StringIO):
    """
    just an interface providing the name and mode property to a StringIO

    BugFix for MacOSX
    """

    def __init__(self, data, fname=None, mode="r"):
        StringIO.StringIO.__init__(self, data)
        if fname is None:
            self.name = "fabioStream"
        else:
            self.name = fname
        self.mode = mode

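The wrapper above only exists to give an in-memory buffer the name and mode attributes that some readers expect from a real file. A minimal sketch, not part of the module source (the buffer content and file name are made up):

# --- usage sketch, not part of fabioimage.py ---
from fabio.fabioimage import fabioStream

buf = fabioStream("raw header bytes go here", fname="frame.edf", mode="rb")
print buf.name, buf.mode    # "frame.edf" "rb"
payload = buf.read()        # otherwise behaves like a read-only file object
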
class fabioimage(object):
    """
    A common object for images in fable
    Contains a numpy array (.data) and dict of meta data (.header)
    """

    _need_a_seek_to_read = False
    _need_a_real_file = False

    def __init__(self, data=None, header=None):
        """
        Set up initial values
        """
        if type(data) == type("string"):
            raise Exception("fabioimage.__init__ bad argument - " + \
                            "data should be numpy array")
        self.data = data
        self.pilimage = None
        if header is None:
            self.header = {}
        else:
            self.header = header
        self.header_keys = self.header.keys()  # holds key ordering
        if data is not None:
            self.dim1, self.dim2 = data.shape
        else:
            self.dim1 = self.dim2 = 0
        self.bytecode = None  # numpy typecode
        self.bpp = 2          # bytes per pixel
        # cache for image statistics
        self.mean = self.maxval = self.stddev = self.minval = None
        # Cache roi
        self.area_sum = None
        self.slice = None
        # New for multiframe files
        self.nframes = 1
        self.currentframe = 0
        self.filename = None

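A minimal usage sketch for the constructor above, not part of the module source (the array contents and header are arbitrary). The dimensions are taken from data.shape and the image statistics are computed lazily and cached:

# --- usage sketch, not part of fabioimage.py ---
import numpy
from fabio.fabioimage import fabioimage

data = numpy.ones((512, 512), numpy.uint16) * 1000   # arbitrary flat frame
header = {"Title": "flat field"}
img = fabioimage(data, header)
print img.dim1, img.dim2            # 512 512, taken from data.shape
print img.getmax(), img.getmean()   # 1000 1000.0, cached after the first call
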
    def getframe(self, num):
        """ returns the file numbered 'num' in the series as a fabioimage """
        if self.nframes == 1:
            # single image per file
            import openimage
            return openimage.openimage(
                fabioutils.jump_filename(self.filename, num))
        raise Exception("getframe out of range")

    def previous(self):
        """ returns the previous file in the series as a fabioimage """
        import openimage
        return openimage.openimage(
            fabioutils.previous_filename(self.filename))

    def next(self):
        """ returns the next file in the series as a fabioimage """
        import openimage
        return openimage.openimage(
            fabioutils.next_filename(self.filename))

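A sketch of how the three navigation methods above fit together (the file names are hypothetical and must exist on disk); they derive the neighbouring file name via fabioutils and reopen it through fabio.openimage:

# --- usage sketch, not part of fabioimage.py ---
from fabio import openimage

first = openimage.openimage("data_0001.edf")   # hypothetical series data_0001.edf, data_0002.edf, ...
second = first.next()                          # opens data_0002.edf
back = second.previous()                       # back to data_0001.edf
tenth = first.getframe(10)                     # jumps straight to file number 10 in the series
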
    def toPIL16(self, filename=None):
        """
        Convert to Python Imaging Library 16 bit greyscale image

        FIXME - this should be handled by the libraries now
        """
        if filename:
            self.read(filename)
        if self.pilimage is not None:
            return self.pilimage
        # mode map
        size = self.data.shape[:2][::-1]
        typmap = {
            'float32': "F",
            'int32':   "F;32S",
            'uint32':  "F;32",
            'int16':   "F;16S",
            'uint16':  "F;16",
            'int8':    "F;8S",
            'uint8':   "F;8"}
        if self.data.dtype.name in typmap:
            mode2 = typmap[self.data.dtype.name]
            mode1 = mode2[0]
        else:
            raise Exception("Unknown numpy type " + str(self.data.dtype.type))
        #
        # hack for byteswapping for PIL in MacOS
        testval = numpy.array((1, 0), numpy.uint8).view(numpy.uint16)[0]
        if testval == 1:
            dats = self.data.tostring()
        elif testval == 256:
            dats = self.data.byteswap().tostring()
        else:
            raise Exception("Endian unknown in fabioimage.toPIL16")

        self.pilimage = Image.frombuffer(mode1,
                                         size,
                                         dats,
                                         "raw",
                                         mode2,
                                         0,
                                         1)

        return self.pilimage

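A short sketch of the conversion above; the result is a 16-bit greyscale PIL image and is cached on the object, so a second call returns the same instance:

# --- usage sketch, not part of fabioimage.py ---
import numpy
from fabio.fabioimage import fabioimage

img = fabioimage(numpy.zeros((256, 256), numpy.uint16), {})
pil = img.toPIL16()            # PIL "F" image built from the "F;16" raw mode
assert pil is img.toPIL16()    # cached in self.pilimage
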
    def getheader(self):
        """ returns self.header """
        return self.header

    def getmax(self):
        """ Find max value in self.data, caching for the future """
        if self.maxval is None:
            self.maxval = numpy.max(self.data)
        return self.maxval

    def getmin(self):
        """ Find min value in self.data, caching for the future """
        if self.minval is None:
            self.minval = numpy.min(self.data)
        return self.minval

    def make_slice(self, coords):
        """
        Convert a len(4) set of coords into a len(2)
        tuple (pair) of slice objects
        the latter are immutable, meaning the roi can be cached
        """
        assert len(coords) == 4
        if len(coords) == 4:
            # fabian edfimage preference
            if coords[0] > coords[2]:
                coords[0:3:2] = [coords[2], coords[0]]
            if coords[1] > coords[3]:
                coords[1:4:2] = [coords[3], coords[1]]
            # in fabian: normally coordinates are given as (x, y) whereas
            # a matrix is given as row, col
            # also, for whatever reason, the image is flipped upside
            # down wrt the matrix, hence these transformations
            fixme = (self.dim2 - coords[3] - 1,
                     coords[0],
                     self.dim2 - coords[1] - 1,
                     coords[2])
        return (slice(int(fixme[0]), int(fixme[2]) + 1),
                slice(int(fixme[1]), int(fixme[3]) + 1))

    def integrate_area(self, coords):
        """
        Sums up a region of interest
        if len(coords) == 4 -> convert coords to slices
        if len(coords) == 2 -> use as slices
        floor -> ? removed as unused in the function.
        """
        if self.data is None:
            # This should return NAN, not zero ?
            return 0
        if len(coords) == 4:
            sli = self.make_slice(coords)
        elif len(coords) == 2 and isinstance(coords[0], slice) and \
                isinstance(coords[1], slice):
            sli = coords
        if sli == self.slice and self.area_sum is not None:
            return self.area_sum
        self.slice = sli
        self.area_sum = numpy.sum(
            numpy.ravel(
                self.data[self.slice].astype(numpy.float)))
        return self.area_sum

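An illustrative sketch of the two call styles accepted by integrate_area (the coordinates are arbitrary); the result is cached until the roi or the data changes:

# --- usage sketch, not part of fabioimage.py ---
import numpy
from fabio.fabioimage import fabioimage

img = fabioimage(numpy.ones((1024, 1024), numpy.uint16), {})

total = img.integrate_area([100, 100, 199, 199])   # 4 coords (x1, y1, x2, y2), converted via make_slice()
sli = img.make_slice([100, 100, 199, 199])         # the equivalent pair of slice objects
assert img.integrate_area(sli) == total            # second call hits the cached area_sum
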
    def getmean(self):
        """ return the mean """
        if self.mean is None:
            self.mean = numpy.mean(self.data)
        return float(self.mean)

    def getstddev(self):
        """ return the standard deviation """
        if self.stddev is None:
            self.stddev = numpy.std(self.data)
        return float(self.stddev)

    def add(self, other):
        """
        Add another image - warning, does not clip to 16 bit images by default
        """
        if not hasattr(other, 'data'):
            print 'fabioimage.add() called with something that ' + \
                  'does not have a data field'
        assert self.data.shape == other.data.shape, \
            'incompatible images - Do they have the same size?'
        self.data = self.data + other.data
        self.resetvals()

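A quick sketch of add(); as the docstring warns, no clipping is applied, so unsigned 16-bit data simply wraps around:

# --- usage sketch, not part of fabioimage.py ---
import numpy
from fabio.fabioimage import fabioimage

a = fabioimage(numpy.ones((4, 4), numpy.uint16) * 40000, {})
b = fabioimage(numpy.ones((4, 4), numpy.uint16) * 30000, {})
a.add(b)
print a.data[0, 0]   # 4464: 40000 + 30000 = 70000, which wraps modulo 65536
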
    def resetvals(self):
        """ Reset cache - call on changing data """
        self.mean = self.stddev = self.maxval = self.minval = None
        self.area_sum = None

    def rebin(self, x_rebin_fact, y_rebin_fact, keep_I=True):
        """
        Rebin the data and adjust dims
        @param x_rebin_fact: x binning factor
        @param y_rebin_fact: y binning factor
        @param keep_I: shall the signal increase ?
        @type x_rebin_fact: int
        @type y_rebin_fact: int
        @type keep_I: boolean
        """
        if self.data is None:
            raise Exception('Please read in the file you wish to rebin first')

        if (self.dim1 % x_rebin_fact != 0) or (self.dim2 % y_rebin_fact != 0):
            raise Exception('image size is not divisible by rebin factor - ' + \
                            'skipping rebin')
        else:
            dataIn = self.data.astype("float64")
            shapeIn = self.data.shape
            shapeOut = (shapeIn[0] / y_rebin_fact, shapeIn[1] / x_rebin_fact)
            binsize = y_rebin_fact * x_rebin_fact
            if binsize < 50:  # method faster for small binning (4x4)
                out = numpy.zeros(shapeOut, dtype="float64")
                for j in range(x_rebin_fact):
                    for i in range(y_rebin_fact):
                        out += dataIn[i::y_rebin_fact, j::x_rebin_fact]
            else:  # method faster for large binning (8x8)
                temp = self.data.astype("float64")
                temp.shape = (shapeOut[0], y_rebin_fact, shapeOut[1], x_rebin_fact)
                out = temp.sum(axis=3).sum(axis=1)
            self.resetvals()
            if keep_I:
                self.data = (out / (y_rebin_fact * x_rebin_fact)).astype(self.data.dtype)
            else:
                self.data = out.astype(self.data.dtype)

            self.dim1 = self.dim1 / x_rebin_fact
            self.dim2 = self.dim2 / y_rebin_fact

            # update header
            self.update_header()

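A sketch of the rebinning above on an arbitrary constant frame; with keep_I=True each output pixel holds the mean of its bin, otherwise the bin is summed:

# --- usage sketch, not part of fabioimage.py ---
import numpy
from fabio.fabioimage import fabioimage

img = fabioimage(numpy.ones((1024, 1024), numpy.uint32) * 4, {})
img.rebin(2, 2, keep_I=True)
print img.data.shape, img.data[0, 0]   # (512, 512) 4  - mean of each 2 x 2 block
img.rebin(2, 2, keep_I=False)
print img.data.shape, img.data[0, 0]   # (256, 256) 16 - the 2 x 2 block is summed
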
    def write(self, fname):
        """
        To be overwritten - write the file
        """
        raise Exception("Class has not implemented write method yet")

    def readheader(self, filename):
        """
        Call the _readheader function...
        """
        # Override the flags, asserting that all headers can be read via python modules
        save_state = self._need_a_real_file, self._need_a_seek_to_read
        self._need_a_real_file, self._need_a_seek_to_read = False, False
        fin = self._open(filename)
        self._readheader(fin)
        fin.close()
        self._need_a_real_file, self._need_a_seek_to_read = save_state

    def _readheader(self, fik_obj):
        """
        Must be overridden in classes
        """
        raise Exception("Class has not implemented _readheader method yet")

    def update_header(self, **kwds):
        """
        update the header entries
        by default pass in a dict of key, values.
        """
        self.header.update(kwds)

    def read(self, filename):
        """
        To be overridden - fill in self.header and self.data
        """
        raise Exception("Class has not implemented read method yet")

    def _open(self, fname, mode="rb"):
        """
        Try to handle compressed files, streams, shared memory etc
        Return an object which can be used for "read" and "write"
        ... FIXME - what about seek ?
        """
        fileObject = None
        self.filename = fname
        if hasattr(fname, "read") and hasattr(fname, "write"):
            # It is already something we can use
            return fname
        if isinstance(fname, (str, unicode)):
            self.header["filename"] = fname
            if os.path.splitext(fname)[1] == ".gz":
                fileObject = self._compressed_stream(fname,
                                                     fabioutils.COMPRESSORS['.gz'],
                                                     gzip.GzipFile,
                                                     mode)
            elif os.path.splitext(fname)[1] == '.bz2':
                fileObject = self._compressed_stream(fname,
                                                     fabioutils.COMPRESSORS['.bz2'],
                                                     bz2.BZ2File,
                                                     mode)
            #
            # Here we return the file even though it may be bzipped or gzipped
            # but named incorrectly...
            #
            # FIXME - should we fix that or complain about the daft naming?
            else:
                fileObject = open(fname, mode)
            if "name" not in dir(fileObject):
                fileObject.name = fname

        return fileObject

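A small sketch of the transparent decompression above; the file names are hypothetical, and the same call returns a readable file-like object whether the name ends in a plain extension, .gz or .bz2:

# --- usage sketch, not part of fabioimage.py ---
import numpy
from fabio.fabioimage import fabioimage

img = fabioimage(numpy.zeros((10, 10), numpy.uint16), {})
for name in ("frame.edf", "frame.edf.gz", "frame.edf.bz2"):   # hypothetical files on disk
    fobj = img._open(name)    # dispatches on the extension, see _compressed_stream()
    raw = fobj.read()
    fobj.close()
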
    def _compressed_stream(self,
                           fname,
                           system_uncompress,
                           python_uncompress,
                           mode='rb'):
        """
        Try to transparently handle gzip / bzip without always getting python
        performance
        """
        # assert that python modules are always OK based on performance benchmark
        # Try to fix the way we are using them?
        fobj = None
        if self._need_a_real_file and mode[0] == "r":
            fo = python_uncompress(fname, mode)
            fobj = os.tmpfile()
            fobj.write(fo.read())
            fo.close()
            fobj.seek(0)
        elif self._need_a_seek_to_read and mode[0] == "r":
            fo = python_uncompress(fname, mode)
            fobj = fabioStream(fo.read(), fname, mode)
        else:
            fobj = python_uncompress(fname, mode)
        return fobj

def test():
    """
    check some basic fabioimage functionality
    """
    import time
    start = time.time()

    dat = numpy.ones((1024, 1024), numpy.uint16)
    dat = (dat * 50000).astype(numpy.uint16)
    assert dat.dtype.char == numpy.ones((1), numpy.uint16).dtype.char
    hed = {"Title": "50000 everywhere"}
    obj = fabioimage(dat, hed)

    assert obj.getmax() == 50000
    assert obj.getmin() == 50000
    assert obj.getmean() == 50000, obj.getmean()
    assert obj.getstddev() == 0.

    dat2 = numpy.zeros((1024, 1024), numpy.uint16)
    cord = [256, 256, 790, 768]
    slic = obj.make_slice(cord)
    dat2[slic] = dat2[slic] + 100

    obj = fabioimage(dat2, hed)

    # New object, so...
    assert obj.maxval is None
    assert obj.minval is None

    assert obj.getmax() == 100, obj.getmax()
    assert obj.getmin() == 0, obj.getmin()
    npix = (slic[0].stop - slic[0].start) * (slic[1].stop - slic[1].start)
    obj.resetvals()
    area1 = obj.integrate_area(cord)
    obj.resetvals()
    area2 = obj.integrate_area(slic)
    assert area1 == area2
    assert obj.integrate_area(cord) == obj.integrate_area(slic)
    assert obj.integrate_area(cord) == npix * 100, obj.integrate_area(cord)

    def clean():
        """ clean up the created testfiles"""
        for name in ["testfile", "testfile.gz", "testfile.bz2"]:
            try:
                os.remove(name)
            except:
                continue

    clean()

    gzip.open("testfile.gz", "wb").write("{ hello }")
    fout = obj._open("testfile.gz")
    readin = fout.read()
    assert readin == "{ hello }", readin + " gzipped file"

    bz2.BZ2File("testfile.bz2", "wb").write("{ hello }")
    fout = obj._open("testfile.bz2")
    readin = fout.read()
    assert readin == "{ hello }", readin + " bzipped file"

    ftest = open("testfile", "wb")
    ftest.write("{ hello }")
    assert ftest == obj._open(ftest)
    ftest.close()
    fout = obj._open("testfile")
    readin = fout.read()
    assert readin == "{ hello }", readin + " plain file"
    fout.close()
    clean()

    print "Passed in", time.time() - start, "s"


if __name__ == '__main__':
    test()