
#11 - partial refactor

feature/load_slices
jim 9 years ago
commit 7fcf2a5740
3 changed files with 94 additions and 346 deletions
  1. nd2reader/__init__.py (+80, -4)
  2. nd2reader/model/__init__.py (+1, -173)
  3. nd2reader/reader.py (+13, -169)

nd2reader/__init__.py (+80, -4)

@@ -1,18 +1,25 @@
 # -*- coding: utf-8 -*-
 from collections import namedtuple
+from nd2reader.model import Channel
 import logging
-from nd2reader.service import BaseNd2
 from nd2reader.model import Image, ImageSet
+from nd2reader.reader import Nd2FileReader
 chunk = namedtuple('Chunk', ['location', 'length'])
 field_of_view = namedtuple('FOV', ['number', 'x', 'y', 'z', 'pfs_offset'])
 log = logging.getLogger(__name__)
 log.setLevel(logging.DEBUG)
-class Nd2(BaseNd2):
+class Nd2(Nd2FileReader):
     def __init__(self, filename):
         super(Nd2, self).__init__(filename)
     def get_image(self, time_index, fov, channel_name, z_level):
         image_set_number = self._calculate_image_set_number(time_index, fov, z_level)
-        timestamp, raw_image_data = self._reader.get_raw_image_data(image_set_number, self.channel_offset[channel_name])
+        timestamp, raw_image_data = self.get_raw_image_data(image_set_number, self.channel_offset[channel_name])
         return Image(timestamp, raw_image_data, fov, channel_name, z_level, self.height, self.width)
     def __iter__(self):
@@ -44,4 +51,73 @@ class Nd2(BaseNd2):
                     image = self.get_image(timepoint, field_of_view, channel_name, z_level)
                     if image.is_valid:
                         image_set.add(image)
-            yield image_set
+            yield image_set
+        self._channel_offset = None
+    @property
+    def height(self):
+        """
+        :return: height of each image, in pixels
+        """
+        return self._metadata['ImageAttributes']['SLxImageAttributes']['uiHeight']
+    @property
+    def width(self):
+        """
+        :return: width of each image, in pixels
+        """
+        return self._metadata['ImageAttributes']['SLxImageAttributes']['uiWidth']
+    @property
+    def channels(self):
+        metadata = self._metadata['ImageMetadataSeq']['SLxPictureMetadata']['sPicturePlanes']
+        try:
+            validity = self._metadata['ImageMetadata']['SLxExperiment']['ppNextLevelEx'][''][0]['ppNextLevelEx'][''][0]['pItemValid']
+        except KeyError:
+            # If none of the channels have been deleted, there is no validity list, so we just make one
+            validity = [True for i in metadata]
+        # Channel information is contained in dictionaries with the keys a0, a1...an where the number
+        # indicates the order in which the channel is stored. So by sorting the dicts alphabetically
+        # we get the correct order.
+        for (label, chan), valid in zip(sorted(metadata['sPlaneNew'].items()), validity):
+            if not valid:
+                continue
+            name = chan['sDescription']
+            exposure_time = metadata['sSampleSetting'][label]['dExposureTime']
+            camera = metadata['sSampleSetting'][label]['pCameraSetting']['CameraUserName']
+            yield Channel(name, camera, exposure_time)
+    @property
+    def channel_names(self):
+        """
+        A convenience method for getting an alphabetized list of channel names.
+        :return: list[str]
+        """
+        for channel in sorted(self.channels, key=lambda x: x.name):
+            yield channel.name
+    @property
+    def _image_count(self):
+        return self._metadata['ImageAttributes']['SLxImageAttributes']['uiSequenceCount']
+    @property
+    def _sequence_count(self):
+        return self._metadata['ImageEvents']['RLxExperimentRecord']['uiCount']
+    @property
+    def channel_offset(self):
+        if self._channel_offset is None:
+            self._channel_offset = {}
+            for n, channel in enumerate(self.channels):
+                self._channel_offset[channel.name] = n
+        return self._channel_offset
+    def _calculate_image_set_number(self, time_index, fov, z_level):
+        return time_index * self.field_of_view_count * self.z_level_count + (fov * self.z_level_count + z_level)
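
A note on the added helper: _calculate_image_set_number flattens (time index, field of view, z-level) into a single image-set index, and the channel is then selected within that set through the cached channel_offset mapping. The sketch below walks through that arithmetic with hypothetical counts (3 fields of view, 2 z-levels); the standalone image_set_number helper and its default counts are illustrative only, not part of the commit.

    # Minimal sketch of the image-set index arithmetic; counts are made up for the example.
    def image_set_number(time_index, fov, z_level, field_of_view_count=3, z_level_count=2):
        # Image sets are grouped by time point, then field of view, then z-level;
        # the channel is picked inside a set via Nd2.channel_offset, not via this index.
        return time_index * field_of_view_count * z_level_count + (fov * z_level_count + z_level)

    # time_index=1, fov=2, z_level=1  ->  1*3*2 + (2*2 + 1) = 11
    assert image_set_number(1, 2, 1) == 11

Reading a frame then looks like nd2.get_image(time_index, fov, channel_name, z_level), which computes this index and passes the channel's offset on to get_raw_image_data.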

nd2reader/model/__init__.py (+1, -173)

@@ -1,10 +1,6 @@
 import numpy as np
 import skimage.io
 import logging
-from io import BytesIO
-import array
-import struct
 log = logging.getLogger(__name__)
@@ -96,172 +92,4 @@ class Image(object):
     def show(self):
         skimage.io.imshow(self.data)
         skimage.io.show()
-class MetadataItem(object):
-    def __init__(self, start, data):
-        self._datatype = ord(data[start])
-        self._label_length = 2 * ord(data[start + 1])
-        self._data = data
-    @property
-    def is_valid(self):
-        return self._datatype > 0
-    @property
-    def key(self):
-        return self._data[2:self._label_length].decode("utf16").encode("utf8")
-    @property
-    def length(self):
-        return self._length
-    @property
-    def data_start(self):
-        return self._label_length + 2
-    @property
-    def _body(self):
-        """
-        All data after the header.
-        """
-        return self._data[self.data_start:]
-    def _get_bytes(self, count):
-        return self._data[self.data_start: self.data_start + count]
-    @property
-    def value(self):
-        parser = {1: self._parse_unsigned_char,
-                  2: self._parse_unsigned_int,
-                  3: self._parse_unsigned_int,
-                  5: self._parse_unsigned_long,
-                  6: self._parse_double,
-                  8: self._parse_string,
-                  9: self._parse_char_array,
-                  11: self._parse_metadata_item
-                  }
-        return parser[self._datatype]()
-    def _parse_unsigned_char(self):
-        self._length = 1
-        return self._unpack("B", self._get_bytes(self._length))
-    def _parse_unsigned_int(self):
-        self._length = 4
-        return self._unpack("I", self._get_bytes(self._length))
-    def _parse_unsigned_long(self):
-        self._length = 8
-        return self._unpack("Q", self._get_bytes(self._length))
-    def _parse_double(self):
-        self._length = 8
-        return self._unpack("d", self._get_bytes(self._length))
-    def _parse_string(self):
-        # the string is of unknown length but ends at the first instance of \x00\x00
-        stop = self._body.index("\x00\x00")
-        self._length = stop
-        return self._body[:stop - 1].decode("utf16").encode("utf8")
-    def _parse_char_array(self):
-        array_length = self._unpack("Q", self._get_bytes(8))
-        self._length = array_length + 8
-        return array.array("B", self._body[8:array_length])
-    def _parse_metadata_item(self):
-        count, length = struct.unpack("<IQ", self._get_bytes(12))
-        metadata_set = MetadataSet(self._body, 0, count)
-    def _unpack(self, kind, data):
-        """
-        :param kind: the datatype to interpret the bytes as (see: https://docs.python.org/2/library/struct.html#struct-format-strings)
-        :type kind: str
-        :param data: the bytes to be converted
-        :type data: bytes
-        Parses a sequence of bytes and converts them to a Python data type.
-        struct.unpack() returns a tuple but we only want the first element.
-        """
-        return struct.unpack(kind, data)[0]
-class MetadataSet(object):
-    """
-    A container of metadata items. Can contain other MetadataSet objects.
-    """
-    def __init__(self, data, start, item_count):
-        self._items = []
-        self._parse(data, start, item_count)
-    def _parse(self, data, start, item_count):
-        for item in range(item_count):
-            metadata_item = MetadataItem(start, data)
-            if not metadata_item.is_valid:
-                break
-            start += metadata_item.length
-class Chunkmap(object):
-    def __init__(self):
-        pass
-    def read(self, filename):
-        with open(filename, "rb") as f:
-            data = f.read(-1)
-        self.parse(data, 1)
-    def parse(self, data, count):
-        data = BytesIO(data)
-        res = {}
-        total_count = 0
-        for c in range(count):
-            lastpos = data.tell()
-            total_count += 1
-            hdr = data.read(2)
-            if not hdr:
-                break
-            typ = ord(hdr[0])
-            bname = data.read(2*ord(hdr[1]))
-            name = bname.decode("utf16")[:-1].encode("utf8")
-            if typ == 1:
-                value, = struct.unpack("B", data.read(1))
-            elif typ in [2, 3]:
-                value, = struct.unpack("I", data.read(4))
-            elif typ == 5:
-                value, = struct.unpack("Q", data.read(8))
-            elif typ == 6:
-                value, = struct.unpack("d", data.read(8))
-            elif typ == 8:
-                value = data.read(2)
-                while value[-2:] != "\x00\x00":
-                    value += data.read(2)
-                value = value.decode("utf16")[:-1].encode("utf8")
-            elif typ == 9:
-                cnt, = struct.unpack("Q", data.read(8))
-                value = array.array("B", data.read(cnt))
-            elif typ == 11:
-                curpos = data.tell()
-                newcount, length = struct.unpack("<IQ", data.read(12))
-                curpos = data.tell()
-                length -= data.tell()-lastpos
-                nextdata = data.read(length)
-                value = self.parse(nextdata, newcount)
-                # Skip some offsets
-                data.read(newcount * 8)
-            else:
-                assert 0, "%s hdr %x:%x unknown" % (name, ord(hdr[0]), ord(hdr[1]))
-            if not name in res:
-                res[name] = value
-            else:
-                if not isinstance(res[name], list):
-                    res[name] = [res[name]]
-                res[name].append(value)
-        x = data.read()
-        assert not x, "skip %d %s" % (len(x), repr(x[:30]))
-        return res
+        skimage.io.show()
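
For context on what was deleted: each metadata record in the chunk starts with a one-byte type code and a one-byte label length, followed by a UTF-16 label and a value whose encoding depends on the type (unsigned char, unsigned int, unsigned long, double, UTF-16 string, byte array, or a nested record set). That is the dispatch the removed ord()/struct.unpack() code implements. Below is a minimal Python 3 sketch that parses one such record; the hand-built buffer, the "uiWidth" label, and the explicit little-endian formats are illustrative assumptions, not code from this repository.

    # Minimal sketch of the label/type/value record layout walked by the removed classes.
    # The record is hand-built for illustration; it is not read from a real ND2 file.
    import struct

    def read_record(buf):
        type_code = buf[0]                      # one-byte datatype tag
        name_len = buf[1]                       # label length in UTF-16 code units
        name_end = 2 + 2 * name_len
        name = buf[2:name_end].decode("utf-16le").rstrip("\x00")
        body = buf[name_end:]
        if type_code in (2, 3):                 # unsigned int
            value = struct.unpack("<I", body[:4])[0]
        elif type_code == 6:                    # double
            value = struct.unpack("<d", body[:8])[0]
        else:
            raise ValueError("type %d not handled in this sketch" % type_code)
        return name, value

    # Build a fake "uiWidth" record: type 2 (unsigned int), UTF-16 label, value 512.
    label = "uiWidth\x00".encode("utf-16le")
    record = bytes([2, len(label) // 2]) + label + struct.pack("<I", 512)
    print(read_record(record))                  # ('uiWidth', 512)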

nd2reader/service/__init__.py → nd2reader/reader.py (+13, -169)

