Add support for 2GB+ files with virtual stacks #17

Open
wants to merge 6 commits into master
1 change: 0 additions & 1 deletion README.md
@@ -71,6 +71,5 @@ The HDF5 plugin saves and loads the pixel/voxel size in micrometer of the image

## Wish list for next version

* Support for single data sets with more than 2GB size (will require slice-wise or block-wise loading/saving)
* disable the Log Window
* load a sub cube of the data set (e.g. for large 5D arrays stored in a single dataset)
3 changes: 3 additions & 0 deletions pom.xml
@@ -68,6 +68,9 @@
<contributor>
<name>Kun Liu</name>
</contributor>
<contributor>
<name>Ezra Newman</name>
</contributor>
</contributors>

<mailingLists>
114 changes: 114 additions & 0 deletions src/main/java/sc/fiji/hdf5/CustomLayoutHDF5VirtualStack.java
@@ -0,0 +1,114 @@
package sc.fiji.hdf5;

import ch.systemsx.cisd.hdf5.IHDF5Reader;
import ij.process.ByteProcessor;
import ij.process.FloatProcessor;
import ij.process.ImageProcessor;
import ij.process.ShortProcessor;
import org.apache.commons.lang.NotImplementedException;

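/**
 * Virtual stack backed by an HDF5 dataset whose axes may be stored in an
 * arbitrary order. The three offsets give the distance, in elements of the
 * flat array, between consecutive levels, rows and columns respectively.
 */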
public class CustomLayoutHDF5VirtualStack extends HDF5VirtualStack
{
private int levelToLevelOffset;
private int rowToRowOffset;
private int colToColOffset;

public CustomLayoutHDF5VirtualStack(IHDF5Reader reader, String dsetName, int nRows, int nCols, int nLevels, boolean isRGB, String typeText, int levelToLevelOffset, int rowToRowOffset, int colToColOffset)
{
super(reader, dsetName, nRows, nCols, nLevels, isRGB, typeText);
this.levelToLevelOffset = levelToLevelOffset;
this.rowToRowOffset = rowToRowOffset;
this.colToColOffset = colToColOffset;
}


@Override
public ImageProcessor getProcessor(int n)
{
ImageProcessor ip = null;
n = n - 1; // zero index n
if (typeText.equals("uint8"))
{
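// NOTE: readMDArray() loads the entire dataset into memory on every call;
// the stride arithmetic below then copies out only the pixels of slice n.
// The same pattern repeats for each supported type below.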
byte[] rawdata = reader.uint8().readMDArray(dsetName).getAsFlatArray();

ip = new ByteProcessor(nCols, nRows);
for (int row = 0; row < nRows; ++row)
{
byte[] trgData = (byte[]) ip.getPixels();
int trgOffset = row * nCols;
int srcOffset =
n * levelToLevelOffset + row * rowToRowOffset;
for (int col = 0; col < nCols; ++col)
{
trgData[trgOffset] = rawdata[srcOffset];
++trgOffset;
srcOffset += colToColOffset;
}
}

} else if (typeText.equals("uint16"))
{
short[] rawdata = reader.uint16().readMDArray(dsetName).getAsFlatArray();

ip = new ShortProcessor(nCols, nRows);
for (int row = 0; row < nRows; ++row)
{
short[] trgData = (short[]) ip.getPixels();
int trgOffset = row * nCols;
int srcOffset =
n * levelToLevelOffset + row * rowToRowOffset;
for (int col = 0; col < nCols; ++col)
{
trgData[trgOffset] = rawdata[srcOffset];
++trgOffset;
srcOffset += colToColOffset;
}
}


} else if (typeText.equals("int16"))
{
short[] rawdata = reader.int16().readMDArray(dsetName).getAsFlatArray();

ip = new ShortProcessor(nCols, nRows);
for (int row = 0; row < nRows; ++row)
{
short[] trgData = (short[]) ip.getPixels();
int trgOffset = row * nCols;
int srcOffset =
n * levelToLevelOffset + row * rowToRowOffset;
for (int col = 0; col < nCols; ++col)
{
trgData[trgOffset] = rawdata[srcOffset];
++trgOffset;
srcOffset += colToColOffset;
}
}


} else if (typeText.equals("float32") || typeText.equals("float64"))
{
float[] rawdata = reader.float32().readMDArray(dsetName).getAsFlatArray();

ip = new FloatProcessor(nCols, nRows);
for (int row = 0; row < nRows; ++row)
{
float[] trgData = (float[]) ip.getPixels();
int trgOffset = row * nCols;
int srcOffset =
n * levelToLevelOffset + row * rowToRowOffset;
for (int col = 0; col < nCols; ++col)
{
trgData[trgOffset] = rawdata[srcOffset];
++trgOffset;
srcOffset += colToColOffset;
}
}

} else
{
throw new NotImplementedException("Type " + typeText + " not supported yet");
}
return ip;
}
}
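For orientation, a minimal usage sketch (not part of the PR; the file name, dataset name and extents are hypothetical) showing how the three offsets would be derived for a 3D dataset stored in "xzy" order:

```java
import ch.systemsx.cisd.hdf5.HDF5Factory;
import ch.systemsx.cisd.hdf5.IHDF5Reader;
import ij.ImagePlus;
import ij.ImageStack;
import sc.fiji.hdf5.CustomLayoutHDF5VirtualStack;

// Hypothetical dataset "/data" with extent {nX, nZ, nY} = {512, 100, 256},
// i.e. layout "xzy". Row-major flat strides are {nZ * nY, nY, 1}.
IHDF5Reader reader = HDF5Factory.openForReading("example.h5");
int nX = 512, nZ = 100, nY = 256;
int colToColOffset     = nZ * nY; // 'x' is the slowest axis
int levelToLevelOffset = nY;      // 'z' is the middle axis
int rowToRowOffset     = 1;       // 'y' is the fastest axis
ImageStack stack = new CustomLayoutHDF5VirtualStack(
        reader, "/data", nY, nX, nZ, false, "uint8",
        levelToLevelOffset, rowToRowOffset, colToColOffset);
new ImagePlus("example.h5: /data", stack).show();
```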
212 changes: 210 additions & 2 deletions src/main/java/sc/fiji/hdf5/HDF5ImageJ.java
@@ -39,6 +39,7 @@
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
import java.awt.HeadlessException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class HDF5ImageJ
@@ -193,6 +194,109 @@ static ImagePlus loadDataSetsToHyperStack( String filename, String[] dsetNames,
return loadDataSetsToHyperStack( filename, dsetNames, nFrames, nChannels, true);
}

static ImagePlus loadDataSetsToVirtualStack(String filename, String[] dsetNames)
{
String dsetName = "";
try
{
IHDF5ReaderConfigurator conf = HDF5Factory.configureForReading(filename);
conf.performNumericConversions();
IHDF5Reader reader = conf.reader();
ImagePlus imp = null;
int rank = 0;
int nLevels = 0;
int nRows = 0;
int nCols = 0;
boolean isRGB = false;
String typeText = "";
// load data set
//
dsetName = dsetNames[0];
IJ.showStatus("Loading " + dsetName);
HDF5DataSetInformation dsInfo = reader.object().getDataSetInformation(dsetName);
float[] element_size_um = {1, 1, 1};
try
{
element_size_um = reader.float32().getArrayAttr(dsetName, "element_size_um");
} catch (HDF5Exception err)
{
IJ.log("Warning: Can't read attribute 'element_size_um' from file '" + filename
+ "', dataset '" + dsetName + "':\n"
+ err + "\n"
+ "Assuming element size of 1 x 1 x 1 um^3");
}

rank = dsInfo.getRank();
typeText = dsInfoToTypeString(dsInfo);
if (rank == 2)
{
nLevels = 1;
nRows = (int) dsInfo.getDimensions()[0];
nCols = (int) dsInfo.getDimensions()[1];
} else if (rank == 3)
{
nLevels = (int) dsInfo.getDimensions()[0];
nRows = (int) dsInfo.getDimensions()[1];
nCols = (int) dsInfo.getDimensions()[2];
if (typeText.equals("uint8") && nCols == 3)
{
nLevels = 1;
nRows = (int) dsInfo.getDimensions()[0];
nCols = (int) dsInfo.getDimensions()[1];
isRGB = true;
}
} else if (rank == 4 && typeText.equals("uint8"))
{
nLevels = (int) dsInfo.getDimensions()[0];
nRows = (int) dsInfo.getDimensions()[1];
nCols = (int) dsInfo.getDimensions()[2];
isRGB = true;
} else
{
IJ.error(dsetName + ": rank " + rank + " of type " + typeText + " not supported (yet)");
return null;
}

ImageStack stack = new HDF5VirtualStack(reader, dsetName, nRows, nCols, nLevels, isRGB, typeText);

imp = new ImagePlus(filename + ": " + dsetName, stack);
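// element_size_um is stored as (z, y, x), i.e. (depth, height, width)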
imp.getCalibration().pixelDepth = element_size_um[0];
imp.getCalibration().pixelHeight = element_size_um[1];
imp.getCalibration().pixelWidth = element_size_um[2];
imp.getCalibration().setUnit("micrometer");
imp.setDisplayRange(0, 255);

imp.setC(1);

try
{
imp.show();
} catch (HeadlessException ignored)
{
}
return imp;
} catch (Exception err)
{

IJ.log(Arrays.toString(err.getStackTrace()));

IJ.error("Error while opening '" + filename
+ "', dataset '" + dsetName + "':\n"
+ err);
} catch (OutOfMemoryError o)
{
IJ.log(Arrays.toString(o.getStackTrace()));
IJ.outOfMemory("Load HDF5");
}
return null;

}
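A minimal calling sketch (hypothetical file and dataset names; the method is package-private, so the caller is assumed to live in sc.fiji.hdf5):

```java
// Open "/volume" from big.h5 as a virtual stack and display it.
ImagePlus imp = HDF5ImageJ.loadDataSetsToVirtualStack(
        "big.h5", new String[]{"/volume"});
```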


//-----------------------------------------------------------------------------
static ImagePlus loadDataSetsToHyperStack( String filename, String[] dsetNames,
int nFrames, int nChannels, boolean show)
@@ -390,6 +494,112 @@ static ImagePlus loadDataSetsToHyperStack( String filename, String[] dsetNames,

}


static ImagePlus loadCustomLayoutDataSetToVirtualStack(String filename, String dsetName, String layout)
{
try
{
IHDF5ReaderConfigurator conf = HDF5Factory.configureForReading(filename);
conf.performNumericConversions();
IHDF5Reader reader = conf.reader();
ImagePlus imp = null;

// get data set info and check layout string
//
IJ.showStatus( "Loading " + dsetName);
// IJ.showProgress( frame*nChannels+channel+1, nFrames*nChannels);
HDF5DataSetInformation dsInfo = reader.object().getDataSetInformation(dsetName);
float[] element_size_um = {1,1,1};
try {
element_size_um = reader.float32().getArrayAttr(dsetName, "element_size_um");
}
catch (HDF5Exception err) {
IJ.log("Warning: Can't read attribute 'element_size_um' from file '" + filename
+ "', dataset '" + dsetName + "':\n"
+ err + "\n"
+ "Assuming element size of 1 x 1 x 1 um^3");
}

int rank = dsInfo.getRank();
String typeText = dsInfoToTypeString(dsInfo);

if( rank != layout.length()) {
IJ.error( dsetName + ": rank " + rank + " is incompatible with your given layout string '" + layout +"' (rank " + layout.length() + ")");
return null;
}


// compute dset stride (element-to-element offset in the linear array)
//
long[] dsetExtent = dsInfo.getDimensions();
int[] stride = new int[rank];
stride[rank-1] = 1;
for (int d = rank-2; d >= 0; --d) {
stride[d] = (int)dsetExtent[d+1] * stride[d+1];
}
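// Example: extent {100, 512, 512} ("zyx") gives stride = {512*512, 512, 1}
// = {262144, 512, 1}: stepping one level skips 262144 elements, one row 512,
// one column 1.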

// interpret layout string and get assigned data set extents
//
int nLevels = 1;
int nRows = 1;
int nCols = 1;
int levelToLevelOffset = 0;
int rowToRowOffset = 0;
int colToColOffset = 0;

int nBits = 0;

//
for (int d = 0; d < rank; ++d) {
switch( layout.charAt(d)) {
case 'x': nCols = (int)dsetExtent[d]; colToColOffset = stride[d]; break;
case 'y': nRows = (int)dsetExtent[d]; rowToRowOffset = stride[d]; break;
case 'z': nLevels = (int)dsetExtent[d]; levelToLevelOffset = stride[d]; break;
default:
IJ.error( "your given layout string '" + layout +"' contains the illegal character '" + layout.charAt(d) + "'. Allowed characters are 'xyz'. 'ct' are not allowed in virtual stacks yet.");
return null;
}
}

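// RGB is not handled by the custom-layout virtual stack yet, so isRGB stays false.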
boolean isRGB = false;
nBits = assignHDF5TypeToImagePlusBitdepth( typeText, isRGB);
ImageStack stack = new CustomLayoutHDF5VirtualStack(reader, dsetName, nRows, nCols, nLevels, isRGB, typeText, levelToLevelOffset, rowToRowOffset, colToColOffset);

imp = new ImagePlus(filename + ": " + dsetName, stack);

imp.getCalibration().pixelDepth = element_size_um[0];
imp.getCalibration().pixelHeight = element_size_um[1];
imp.getCalibration().pixelWidth = element_size_um[2];
imp.getCalibration().setUnit("micrometer");
imp.setDisplayRange(0,255);

try {
imp.show();
}
catch (HeadlessException herr) {}
return imp;
}
catch (Exception err)
{

IJ.log(Arrays.toString(err.getStackTrace()));

IJ.error("Error while opening '" + filename
+ "', dataset '" + dsetName + "':\n"
+ err);
} catch (OutOfMemoryError o)
{
IJ.log(Arrays.toString(o.getStackTrace()));
IJ.outOfMemory("Load HDF5");
}
return null;

}
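A corresponding sketch for the custom-layout variant (hypothetical names again):

```java
// Open "/stack", whose axes are stored as (x, z, y), as a virtual stack.
ImagePlus imp = HDF5ImageJ.loadCustomLayoutDataSetToVirtualStack(
        "big.h5", "/stack", "xzy");
```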


static ImagePlus loadCustomLayoutDataSetToHyperStack( String filename, String dsetName, String layout) {
return loadCustomLayoutDataSetToHyperStack(filename, dsetName, layout, true);
}
@@ -603,8 +813,6 @@ else if (typeText.equals( "float32") || typeText.equals( "float64") ) {
}




// int sliceSize = nCols * nRows;
//
// if (typeText.equals( "uint8") && rank < 4) {