diff --git a/README.md b/README.md
index f0233ff..64ff329 100644
--- a/README.md
+++ b/README.md
@@ -71,6 +71,5 @@ The HDF5 plugin saves and loads the pixel/voxel size in micrometer of the image
## Wish list for next version
-* Support for single data sets with more than 2GB size (will require a slice-wise or block-wise loadin/saving)
* disable the Log Window
* load a sub cube of the data set (e.g. for large 5D arrays stored in a single dataset)
diff --git a/pom.xml b/pom.xml
index fa54f36..e838742 100644
--- a/pom.xml
+++ b/pom.xml
@@ -68,6 +68,9 @@
Kun Liu
+	<contributor>
+		<name>Ezra Newman</name>
+	</contributor>
diff --git a/src/main/java/sc/fiji/hdf5/CustomLayoutHDF5VirtualStack.java b/src/main/java/sc/fiji/hdf5/CustomLayoutHDF5VirtualStack.java
new file mode 100644
index 0000000..86b613e
--- /dev/null
+++ b/src/main/java/sc/fiji/hdf5/CustomLayoutHDF5VirtualStack.java
@@ -0,0 +1,114 @@
+package sc.fiji.hdf5;
+
+import ch.systemsx.cisd.hdf5.IHDF5Reader;
+import ij.process.ByteProcessor;
+import ij.process.FloatProcessor;
+import ij.process.ImageProcessor;
+import ij.process.ShortProcessor;
+import org.apache.commons.lang.NotImplementedException;
+
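+/**
+ * Virtual stack for data sets with a user-defined axis layout.  Pixels are
+ * gathered element by element using the level-to-level, row-to-row and
+ * column-to-column offsets derived from the layout string, so the whole
+ * data set is read from the file on every getProcessor() call (unlike
+ * HDF5VirtualStack, which reads a single hyperslab block per slice).
+ */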
+public class CustomLayoutHDF5VirtualStack extends HDF5VirtualStack
+{
+ private int levelToLevelOffset;
+ private int rowToRowOffset;
+ private int colToColOffset;
+
+ public CustomLayoutHDF5VirtualStack(IHDF5Reader reader, String dsetName, int nRows, int nCols, int nLevels, boolean isRGB, String typeText, int levelToLevelOffset, int rowToRowOffset, int colToColOffset)
+ {
+ super(reader, dsetName, nRows, nCols, nLevels, isRGB, typeText);
+ this.levelToLevelOffset = levelToLevelOffset;
+ this.rowToRowOffset = rowToRowOffset;
+ this.colToColOffset = colToColOffset;
+ }
+
+
+ @Override
+ public ImageProcessor getProcessor(int n)
+ {
+ ImageProcessor ip = null;
+ n = n - 1; // zero index n
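+        // slice n starts at n * levelToLevelOffset in the flat array; within a
+        // slice, rows advance by rowToRowOffset and columns by colToColOffset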
+ if (typeText.equals("uint8"))
+ {
+ byte[] rawdata = reader.uint8().readMDArray(dsetName).getAsFlatArray();
+
+ ip = new ByteProcessor(nCols, nRows);
+ for (int row = 0; row < nRows; ++row)
+ {
+ byte[] trgData = (byte[]) ip.getPixels();
+ int trgOffset = row * nCols;
+ int srcOffset =
+ n * levelToLevelOffset + row * rowToRowOffset;
+ for (int col = 0; col < nCols; ++col)
+ {
+ trgData[trgOffset] = rawdata[srcOffset];
+ ++trgOffset;
+ srcOffset += colToColOffset;
+ }
+ }
+
+ } else if (typeText.equals("uint16"))
+ {
+ short[] rawdata = reader.uint16().readMDArray(dsetName).getAsFlatArray();
+
+ ip = new ShortProcessor(nCols, nRows);
+ for (int row = 0; row < nRows; ++row)
+ {
+ short[] trgData = (short[]) ip.getPixels();
+ int trgOffset = row * nCols;
+ int srcOffset =
+ n * levelToLevelOffset + row * rowToRowOffset;
+ for (int col = 0; col < nCols; ++col)
+ {
+ trgData[trgOffset] = rawdata[srcOffset];
+ ++trgOffset;
+ srcOffset += colToColOffset;
+ }
+ }
+
+
+ } else if (typeText.equals("int16"))
+ {
+ short[] rawdata = reader.int16().readMDArray(dsetName).getAsFlatArray();
+
+ ip = new ShortProcessor(nCols, nRows);
+ for (int row = 0; row < nRows; ++row)
+ {
+ short[] trgData = (short[]) ip.getPixels();
+ int trgOffset = row * nCols;
+ int srcOffset =
+ n * levelToLevelOffset + row * rowToRowOffset;
+ for (int col = 0; col < nCols; ++col)
+ {
+ trgData[trgOffset] = rawdata[srcOffset];
+ ++trgOffset;
+ srcOffset += colToColOffset;
+ }
+ }
+
+
+ } else if (typeText.equals("float32") || typeText.equals("float64"))
+ {
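+            // float64 data is read through the float32 reader and narrowed to
+            // 32-bit floats (the callers enable performNumericConversions())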
+ float[] rawdata = reader.float32().readMDArray(dsetName).getAsFlatArray();
+
+ ip = new FloatProcessor(nCols, nRows);
+ for (int row = 0; row < nRows; ++row)
+ {
+ float[] trgData = (float[]) ip.getPixels();
+ int trgOffset = row * nCols;
+ int srcOffset =
+ n * levelToLevelOffset + row * rowToRowOffset;
+ for (int col = 0; col < nCols; ++col)
+ {
+ trgData[trgOffset] = rawdata[srcOffset];
+ ++trgOffset;
+ srcOffset += colToColOffset;
+ }
+ }
+
+ } else
+ {
+ throw new NotImplementedException("Type " + typeText + " not supported yet");
+ }
+ return ip;
+ }
+}
diff --git a/src/main/java/sc/fiji/hdf5/HDF5ImageJ.java b/src/main/java/sc/fiji/hdf5/HDF5ImageJ.java
index e497ce9..b3d8f20 100644
--- a/src/main/java/sc/fiji/hdf5/HDF5ImageJ.java
+++ b/src/main/java/sc/fiji/hdf5/HDF5ImageJ.java
@@ -39,6 +39,7 @@
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
import java.awt.HeadlessException;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.List;
public class HDF5ImageJ
@@ -193,6 +194,109 @@ static ImagePlus loadDataSetsToHyperStack( String filename, String[] dsetNames,
return loadDataSetsToHyperStack( filename, dsetNames, nFrames, nChannels, true);
}
+ static ImagePlus loadDataSetsToVirtualStack(String filename, String[] dsetNames)
+ {
+ boolean show = true;
+ String dsetName = "";
+ try
+ {
+ IHDF5ReaderConfigurator conf = HDF5Factory.configureForReading(filename);
+ conf.performNumericConversions();
+ IHDF5Reader reader = conf.reader();
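+            // the reader is left open on purpose: the virtual stack reads its
+            // slices from it on demand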
+ ImagePlus imp = null;
+ int rank = 0;
+ int nLevels = 0;
+ int nRows = 0;
+ int nCols = 0;
+ boolean isRGB = false;
+ int nBits = 0;
+ double maxGray = 1;
+ String typeText = "";
+ // load data set
+ //
+ dsetName = dsetNames[0];
+ IJ.showStatus("Loading " + dsetName);
+ HDF5DataSetInformation dsInfo = reader.object().getDataSetInformation(dsetName);
+ float[] element_size_um = {1, 1, 1};
+ try
+ {
+ element_size_um = reader.float32().getArrayAttr(dsetName, "element_size_um");
+ } catch (HDF5Exception err)
+ {
+ IJ.log("Warning: Can't read attribute 'element_size_um' from file '" + filename
+ + "', dataset '" + dsetName + "':\n"
+ + err + "\n"
+ + "Assuming element size of 1 x 1 x 1 um^3");
+ }
+
+ rank = dsInfo.getRank();
+ typeText = dsInfoToTypeString(dsInfo);
+ if (rank == 2)
+ {
+ nLevels = 1;
+ nRows = (int) dsInfo.getDimensions()[0];
+ nCols = (int) dsInfo.getDimensions()[1];
+ } else if (rank == 3)
+ {
+ nLevels = (int) dsInfo.getDimensions()[0];
+ nRows = (int) dsInfo.getDimensions()[1];
+ nCols = (int) dsInfo.getDimensions()[2];
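+                // heuristic: a rank-3 uint8 dataset whose last extent is 3 is
+                // treated as a single 2D RGB image rather than a 3-level stack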
+ if (typeText.equals("uint8") && nCols == 3)
+ {
+ nLevels = 1;
+ nRows = (int) dsInfo.getDimensions()[0];
+ nCols = (int) dsInfo.getDimensions()[1];
+ isRGB = true;
+ }
+ } else if (rank == 4 && typeText.equals("uint8"))
+ {
+ nLevels = (int) dsInfo.getDimensions()[0];
+ nRows = (int) dsInfo.getDimensions()[1];
+ nCols = (int) dsInfo.getDimensions()[2];
+ isRGB = true;
+ } else
+ {
+ IJ.error(dsetName + ": rank " + rank + " of type " + typeText + " not supported (yet)");
+ return null;
+ }
+
+ ImageStack stack = new HDF5VirtualStack(reader, dsetName, nRows, nCols, nLevels, isRGB, typeText);
+
+ imp = new ImagePlus(filename + ": " + dsetName, stack);
+ imp.getCalibration().pixelDepth = element_size_um[0];
+ imp.getCalibration().pixelHeight = element_size_um[1];
+ imp.getCalibration().pixelWidth = element_size_um[2];
+ imp.getCalibration().setUnit("micrometer");
+ imp.setDisplayRange(0, 255);
+
+ imp.setC(1);
+
+ try
+ {
+ imp.show();
+ } catch (HeadlessException ignored)
+ {
+ }
+ return imp;
+ } catch (Exception err)
+ {
+
+ IJ.log(Arrays.toString(err.getStackTrace()));
+
+ IJ.error("Error while opening '" + filename
+ + "', dataset '" + dsetName + "':\n"
+ + err);
+ } catch (OutOfMemoryError o)
+ {
+ IJ.log(Arrays.toString(o.getStackTrace()));
+ IJ.outOfMemory("Load HDF5");
+ }
+ return null;
+
+ }
+
+
//-----------------------------------------------------------------------------
static ImagePlus loadDataSetsToHyperStack( String filename, String[] dsetNames,
int nFrames, int nChannels, boolean show)
@@ -390,6 +494,112 @@ static ImagePlus loadDataSetsToHyperStack( String filename, String[] dsetNames,
}
+
+ static ImagePlus loadCustomLayoutDataSetToVirtualStack(String filename, String dsetName, String layout)
+ {
+ try
+ {
+ IHDF5ReaderConfigurator conf = HDF5Factory.configureForReading(filename);
+ conf.performNumericConversions();
+ IHDF5Reader reader = conf.reader();
+ ImagePlus imp = null;
+
+            // get data set info and check layout string
+ //
+ IJ.showStatus( "Loading " + dsetName);
+ // IJ.showProgress( frame*nChannels+channel+1, nFrames*nChannels);
+ HDF5DataSetInformation dsInfo = reader.object().getDataSetInformation(dsetName);
+ float[] element_size_um = {1,1,1};
+ try {
+ element_size_um = reader.float32().getArrayAttr(dsetName, "element_size_um");
+ }
+ catch (HDF5Exception err) {
+ IJ.log("Warning: Can't read attribute 'element_size_um' from file '" + filename
+ + "', dataset '" + dsetName + "':\n"
+ + err + "\n"
+ + "Assuming element size of 1 x 1 x 1 um^3");
+ }
+
+ int rank = dsInfo.getRank();
+ String typeText = dsInfoToTypeString(dsInfo);
+
+ if( rank != layout.length()) {
+ IJ.error( dsetName + ": rank " + rank + " is incompatible with your given layout string '" + layout +"' (rank " + layout.length() + ")");
+ return null;
+ }
+
+
+ // compute dset stride (element-to-element offset in the linear array)
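+            // (HDF5 stores data in C order, so the last dimension is contiguous
+            //  and has stride 1)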
+ //
+ long[] dsetExtent = dsInfo.getDimensions();
+ int[] stride = new int[rank];
+ stride[rank-1] = 1;
+ for (int d = rank-2; d >= 0; --d) {
+ stride[d] = (int)dsetExtent[d+1] * stride[d+1];
+ }
+
+ // interpret layout string and get assigned data set extents
+ //
+ int nLevels = 1;
+ int nRows = 1;
+ int nCols = 1;
+ int levelToLevelOffset = 0;
+ int rowToRowOffset = 0;
+ int colToColOffset = 0;
+
+ int nBits = 0;
+
+ //
+ for (int d = 0; d < rank; ++d) {
+ switch( layout.charAt(d)) {
+ case 'x': nCols = (int)dsetExtent[d]; colToColOffset = stride[d]; break;
+ case 'y': nRows = (int)dsetExtent[d]; rowToRowOffset = stride[d]; break;
+ case 'z': nLevels = (int)dsetExtent[d]; levelToLevelOffset = stride[d]; break;
+ default:
+                    IJ.error( "your given layout string '" + layout +"' contains the illegal character '" + layout.charAt(d) + "'. Allowed characters are 'x', 'y' and 'z'; 'c' and 't' are not supported in virtual stacks yet.");
+ return null;
+ }
+ }
+
+ boolean isRGB = false;
+ nBits = assignHDF5TypeToImagePlusBitdepth( typeText, isRGB);
+ ImageStack stack = new CustomLayoutHDF5VirtualStack(reader, dsetName, nRows, nCols, nLevels, isRGB, typeText, levelToLevelOffset, rowToRowOffset, colToColOffset);
+
+ imp = new ImagePlus(filename + ": " + dsetName, stack);
+
+ imp.getCalibration().pixelDepth = element_size_um[0];
+ imp.getCalibration().pixelHeight = element_size_um[1];
+ imp.getCalibration().pixelWidth = element_size_um[2];
+ imp.getCalibration().setUnit("micrometer");
+ imp.setDisplayRange(0,255);
+
+ try {
+ imp.show();
+ }
+ catch (HeadlessException herr) {}
+ return imp;
+ }
+ catch (Exception err)
+ {
+
+ IJ.log(Arrays.toString(err.getStackTrace()));
+
+ IJ.error("Error while opening '" + filename
+ + "', dataset '" + dsetName + "':\n"
+ + err);
+ } catch (OutOfMemoryError o)
+ {
+ IJ.log(Arrays.toString(o.getStackTrace()));
+ IJ.outOfMemory("Load HDF5");
+ }
+ return null;
+
+ }
+
+
static ImagePlus loadCustomLayoutDataSetToHyperStack( String filename, String dsetName, String layout) {
return loadCustomLayoutDataSetToHyperStack(filename, dsetName, layout, true);
}
@@ -603,8 +813,6 @@ else if (typeText.equals( "float32") || typeText.equals( "float64") ) {
}
-
-
// int sliceSize = nCols * nRows;
//
// if (typeText.equals( "uint8") && rank < 4) {
diff --git a/src/main/java/sc/fiji/hdf5/HDF5VirtualStack.java b/src/main/java/sc/fiji/hdf5/HDF5VirtualStack.java
new file mode 100644
index 0000000..32507a4
--- /dev/null
+++ b/src/main/java/sc/fiji/hdf5/HDF5VirtualStack.java
@@ -0,0 +1,100 @@
+package sc.fiji.hdf5;
+
+import ch.systemsx.cisd.base.mdarray.MDByteArray;
+import ch.systemsx.cisd.base.mdarray.MDFloatArray;
+import ch.systemsx.cisd.base.mdarray.MDIntArray;
+import ch.systemsx.cisd.base.mdarray.MDShortArray;
+import ch.systemsx.cisd.hdf5.IHDF5Reader;
+import ij.IJ;
+import ij.VirtualStack;
+import ij.process.ByteProcessor;
+import ij.process.FloatProcessor;
+import ij.process.ImageProcessor;
+import ij.process.ShortProcessor;
+import org.apache.commons.lang.NotImplementedException;
+
+import java.util.Arrays;
+
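+/**
+ * Read-only stack backed by an open HDF5 reader.  getProcessor() reads a
+ * single z-slice of the data set as a hyperslab block, so only one slice is
+ * held in memory at a time.  RGB data is not supported yet.
+ */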
+public class HDF5VirtualStack extends VirtualStack
+{
+ protected IHDF5Reader reader;
+ protected String dsetName;
+ protected int nRows, nCols, nLevels;
+ protected String typeText;
+ protected boolean isRGB;
+
+ public HDF5VirtualStack(IHDF5Reader reader, String dsetName, int nRows, int nCols, int nLevels, boolean isRGB, String typeText)
+ {
+ this.reader = reader;
+ this.dsetName = dsetName;
+ this.nRows = nRows;
+ this.nCols = nCols;
+ this.nLevels = nLevels;
+ this.typeText = typeText;
+ this.isRGB = isRGB;
+ }
+
+ @Override
+ public int getSize()
+ {
+ return this.nLevels;
+ }
+
+
+ @Override
+ public ImageProcessor getProcessor(int n)
+ {
+ n = n - 1; // zero index
+ ImageProcessor ip;
+ if (typeText.equals("uint8") && !isRGB)
+ {
+ MDByteArray rawdata = reader.uint8().readMDArrayBlock(dsetName, new int[]{1, nRows, nCols}, new long[]{n, 0, 0});
+ ip = new ByteProcessor(nCols, nRows);
+ System.arraycopy(rawdata.getAsFlatArray(), 0,
+ (byte[]) ip.getPixels(), 0,
+ nRows * nCols);
+ } else if (typeText.equals("uint8"))
+ { // RGB data
+ throw new NotImplementedException("RGB 8 bit pixel images not supported in virtual stacks (yet)"); // I don't have a test file to check behavior on
+
+ } else if (typeText.equals("uint16"))
+ {
+ MDShortArray rawdata = reader.uint16().readMDArrayBlock(dsetName, new int[]{1, nRows, nCols}, new long[]{n, 0, 0});
+ ip = new ShortProcessor(nCols, nRows);
+ System.arraycopy(rawdata.getAsFlatArray(), 0,
+ (short[]) ip.getPixels(), 0,
+ nRows * nCols);
+ } else if (typeText.equals("int16"))
+ {
+ MDShortArray rawdata = reader.int16().readMDArrayBlock(dsetName, new int[]{1, nRows, nCols}, new long[]{n, 0, 0});
+ ip = new ShortProcessor(nCols, nRows);
+ System.arraycopy(rawdata.getAsFlatArray(), 0,
+ (short[]) ip.getPixels(), 0,
+ nRows * nCols);
+ } else if (typeText.equals("float32") || typeText.equals("float64"))
+ {
+ MDFloatArray rawdata = reader.float32().readMDArrayBlock(dsetName, new int[]{1, nRows, nCols}, new long[]{n, 0, 0});
+ ip = new FloatProcessor(nCols, nRows);
+ System.arraycopy(rawdata.getAsFlatArray(), 0,
+ (float[]) ip.getPixels(), 0,
+ nRows * nCols);
+ } else if (typeText.equals("int32"))
+ {
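+            // ImageJ has no 32-bit integer image type, so int32 data is
+            // converted to a FloatProcessor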
+ int[] rawdata_ints = reader.int32().readMDArrayBlock(dsetName, new int[]{1, nRows, nCols}, new long[]{n, 0, 0}).getAsFlatArray();
+ float[] rawdata = new float[rawdata_ints.length];
+ for (int i = 0; i < rawdata_ints.length; i++)
+ {
+ rawdata[i] = (float) rawdata_ints[i];
+ }
+ ip = new FloatProcessor(nCols, nRows);
+ System.arraycopy(rawdata, 0,
+ (float[]) ip.getPixels(), 0,
+ nRows * nCols);
+ } else
+ {
+ throw new NotImplementedException("Type " + typeText + " not supported by virtual stacks yet");
+ }
+ return ip;
+ }
+}
+
diff --git a/src/main/java/sc/fiji/hdf5/HDF5_Reader_Vibez.java b/src/main/java/sc/fiji/hdf5/HDF5_Reader_Vibez.java
index 1fcd28c..98ed74f 100644
--- a/src/main/java/sc/fiji/hdf5/HDF5_Reader_Vibez.java
+++ b/src/main/java/sc/fiji/hdf5/HDF5_Reader_Vibez.java
@@ -8,12 +8,12 @@
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 2 of the
* License, or (at your option) any later version.
- *
+ *
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
- *
+ *
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
 * <http://www.gnu.org/licenses/>.
@@ -48,22 +48,23 @@
import ch.systemsx.cisd.hdf5.IHDF5Reader;
-public class HDF5_Reader_Vibez extends JFrame implements PlugIn, ActionListener
+public class HDF5_Reader_Vibez extends JFrame implements PlugIn, ActionListener
{
// Private Members
private ArrayList dataSets_;
private JTable pathTable_;
private String fullFileName_;
// private JCheckBox loadAsHyperstackCheckBox_;
+ private JCheckBox useVirtualStack_;
private JRadioButton[] loadAsRadioButtons_;
private int loadAsMode_;
-
+
private SpinnerNumberModel nChannelsSpinner_;
private JTextField dsetLayoutTextField_;
- public void run(String arg)
+ public void run(String arg)
{
-
+
// Let User select the filename
//
String directory = "";
@@ -77,24 +78,24 @@ public void run(String arg)
od = new OpenDialog(openMSG, "");
else
od = new OpenDialog(openMSG, directory, "");
-
+
directory = od.getDirectory();
name = od.getFileName();
if (name == null)
return;
if (name == "")
return;
-
+
File testFile = new File(directory + name);
if (!testFile.exists() || !testFile.canRead())
return;
-
+
if (testFile.isDirectory()) {
directory = directory + name;
tryAgain = true;
}
} while (tryAgain);
-
+
// Get All Dataset names
//
@@ -133,14 +134,14 @@ public void run(String arg)
line.addElement(""+dataSets_.get(row).element_size_um_text.replace("x", "×")+"");
tableData.addElement( line);
}
-
+
String[] columnTitles = {"path", "size", "type"};
Vector columnNames = new Vector();
columnNames.addElement("data set path");
columnNames.addElement("size");
columnNames.addElement("type");
columnNames.addElement("element size [um]");
-
+
// make table non-editable
DefaultTableModel tableModel = new DefaultTableModel(tableData, columnNames) {
@@ -150,12 +151,12 @@ public boolean isCellEditable(int row, int column) {
return false;
}
};
- pathTable_ = new JTable( tableModel);
-
+ pathTable_ = new JTable( tableModel);
+
setLayout(new GridBagLayout());
GridBagConstraints cs = new GridBagConstraints();
int currentRow = 0;
-
+
cs.anchor = GridBagConstraints.FIRST_LINE_START;
//natural height, maximum width
@@ -164,7 +165,7 @@ public boolean isCellEditable(int row, int column) {
cs.weightx = 0;
cs.weighty = 0;
cs.gridx = 0;
- cs.gridy = currentRow;
+ cs.gridy = currentRow;
cs.insets = new Insets(3,3,0,0);
cs.gridwidth = 2;
        JLabel titleText = new JLabel("Select data sets");
@@ -176,7 +177,7 @@ public boolean isCellEditable(int row, int column) {
cs.weightx = 1;
cs.weighty = 1;
cs.gridx = 0;
- cs.gridy = ++currentRow;
+ cs.gridy = ++currentRow;
cs.insets = new Insets(3,3,0,0);
cs.gridwidth = 2;
JScrollPane scrollPaneT = new JScrollPane(pathTable_);
@@ -196,26 +197,25 @@ public boolean isCellEditable(int row, int column) {
int loadAsMode = (int)Prefs.get("hdf5readervibez.loadasmode", 0);
loadAsRadioButtons_[loadAsMode].setSelected(true);
ButtonGroup group = new ButtonGroup();
- for( int i = 0; i < 5; ++i)
+ for( int i = 0; i < 5; ++i)
{
group.add(loadAsRadioButtons_[i]);
}
-
// loadAsHyperstackCheckBox_ = new JCheckBox( "Combine Stacks to Hyperstack", true);
cs.fill = GridBagConstraints.HORIZONTAL;
- cs.ipady = 0;
+ cs.ipady = 0;
cs.weightx = 0;
cs.weighty = 0;
cs.gridx = 0;
- cs.gridy = ++currentRow;
+ cs.gridy = ++currentRow;
cs.insets = new Insets(3,3,0,0);
cs.gridwidth = 2;
JLabel subtitleText = new JLabel("Load as ...");
add(subtitleText, cs);
for( int i = 0; i < 2; ++i)
{
- cs.gridy = ++currentRow;
+ cs.gridy = ++currentRow;
add(loadAsRadioButtons_[i], cs);
}
@@ -223,7 +223,7 @@ public boolean isCellEditable(int row, int column) {
cs.weightx = 1;
cs.weighty = 0;
cs.gridx = 0;
- cs.gridy = ++currentRow;
+ cs.gridy = ++currentRow;
cs.insets = new Insets(3,3,0,0);
cs.gridwidth = 1;
//natural height, natural width
@@ -232,32 +232,37 @@ public boolean isCellEditable(int row, int column) {
cs.fill = GridBagConstraints.HORIZONTAL;
String dsetLayout = Prefs.get("hdf5readervibez.dsetLayout", "zyx");
- dsetLayoutTextField_ = new JTextField(dsetLayout, 6);
+ dsetLayoutTextField_ = new JTextField(dsetLayout, 6);
cs.gridx = 1;
add(dsetLayoutTextField_, cs);
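+
+        // option to open the selected data sets as HDF5-backed virtual stacks:
+        // slices are read from the file on demand instead of being loaded into memory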
+ cs.gridy = ++currentRow;
+ cs.gridx = 0;
+ useVirtualStack_ = new JCheckBox("Use HDF5-backed virtual stacks", false);
+ add(useVirtualStack_, cs);
+
cs.fill = GridBagConstraints.HORIZONTAL;
- cs.ipady = 0;
+ cs.ipady = 0;
cs.weightx = 0;
cs.weighty = 0;
cs.gridx = 0;
- cs.gridy = ++currentRow;
+ cs.gridy = ++currentRow;
cs.insets = new Insets(3,3,0,0);
cs.gridwidth = 2;
JLabel subtitleText2 = new JLabel("Combine to ...");
- cs.gridy = ++currentRow;
+ cs.gridy = ++currentRow;
add(subtitleText2, cs);
for( int i = 2; i < 5; ++i)
{
- cs.gridy = ++currentRow;
+ cs.gridy = ++currentRow;
add(loadAsRadioButtons_[i], cs);
}
-
+
JLabel spinnerText = new JLabel(" - Number of channels:");
cs.weightx = 1;
cs.weighty = 0;
cs.gridx = 0;
- cs.gridy = ++currentRow;
+ cs.gridy = ++currentRow;
cs.insets = new Insets(3,3,0,0);
cs.gridwidth = 1;
//natural height, natural width
@@ -271,16 +276,16 @@ public boolean isCellEditable(int row, int column) {
add(spinner, cs);
int nChannels = (int)Prefs.get("hdf5readervibez.nchannels", 1);
spinner.setValue(nChannels);
-
+
JButton b1 = new JButton("Load");
b1.setActionCommand("load");
b1.addActionListener(this);
- cs.ipady = 0;
+ cs.ipady = 0;
cs.weightx = 1;
cs.weighty = 0;
cs.gridx = 0;
- cs.gridy = ++currentRow;
+ cs.gridy = ++currentRow;
cs.gridwidth = 1;
//natural height, natural width
cs.fill = GridBagConstraints.NONE;
@@ -290,21 +295,21 @@ public boolean isCellEditable(int row, int column) {
b2.setActionCommand("cancel");
b2.addActionListener(this);
cs.gridx = 1;
- cs.gridy = currentRow;
+ cs.gridy = currentRow;
cs.gridwidth = 1;
add(b2,cs);
pack();
Dimension si = getSize();
- si.height = 400;
+ si.height = 420;
si.width = pathTable_.getWidth()+40;
setSize( si);
setVisible(true);
-
-
+
+
}
-
+
// private String dsInfoToTypeString( HDF5DataSetInformation dsInfo) {
@@ -334,19 +339,19 @@ private void recursiveGetInfo(IHDF5Reader reader, HDF5LinkInformation link)
HDF5ImageJ.recursiveGetInfo(reader, link, dataSets_);
}
- public void actionPerformed(ActionEvent event)
+ public void actionPerformed(ActionEvent event)
{
- if (event.getActionCommand().equals("load"))
+ if (event.getActionCommand().equals("load"))
{
loadHDF5();
}
- else if (event.getActionCommand().equals("cancel"))
+ else if (event.getActionCommand().equals("cancel"))
{
dispose();
}
}
-
+
public void loadHDF5() {
int[] selection = pathTable_.getSelectedRows();
if (selection.length == 0) {
@@ -361,31 +366,42 @@ public void loadHDF5() {
}
Prefs.set("hdf5readervibez.loadasmode", loadAsMode);
- if (loadAsMode == 0)
+ if (loadAsMode == 0)
{
// load as multiple standard stacks
-
+
for (int i : selection) {
IJ.log( "i = " + i + dataSets_.get(i).path);
String[] dsetNames = new String[1];
dsetNames[0] = dataSets_.get(i).path;
String type = dataSets_.get(i).typeText;
- HDF5ImageJ.loadDataSetsToHyperStack( fullFileName_, dsetNames, 1, 1);
+ if (useVirtualStack_.isSelected()){
+ HDF5ImageJ.loadDataSetsToVirtualStack( fullFileName_, dsetNames);
+ } else
+ {
+ HDF5ImageJ.loadDataSetsToHyperStack(fullFileName_, dsetNames, 1, 1);
+ }
}
}
- else if (loadAsMode == 1)
+ else if (loadAsMode == 1)
{
// load as multiple hyper stacks with custom layout
-
+
for (int i : selection) {
IJ.log( "i = " + i + dataSets_.get(i).path);
String dsetLayout = dsetLayoutTextField_.getText();
Prefs.set("hdf5readervibez.dsetLayout", dsetLayout);
-
- HDF5ImageJ.loadCustomLayoutDataSetToHyperStack( fullFileName_, dataSets_.get(i).path,
- dsetLayout);
+
+ if (useVirtualStack_.isSelected())
+ {
+ HDF5ImageJ.loadCustomLayoutDataSetToVirtualStack(fullFileName_, dataSets_.get(i).path, dsetLayout);
+ } else
+ {
+ HDF5ImageJ.loadCustomLayoutDataSetToHyperStack( fullFileName_, dataSets_.get(i).path,
+ dsetLayout);
+ }
}
-
+
}
else
{
@@ -397,10 +413,10 @@ else if (loadAsMode == 1)
int nChannels = 1;
if( loadAsMode == 2) nChannels = selection.length;
if( loadAsMode == 3) nChannels = 1;
- if( loadAsMode == 4)
+ if( loadAsMode == 4)
{
nChannels = nChannelsSpinner_.getNumber().intValue();
- }
+ }
if (nChannels > dsetNames.length) {
nChannels = dsetNames.length;
}
@@ -414,17 +430,23 @@ else if (loadAsMode == 1)
commaSeparatedDsetNames += dsetNames[i];
}
Prefs.set("hdf5readervibez.dsetnames",commaSeparatedDsetNames);
-
+
String type = dataSets_.get(selection[0]).typeText;
- HDF5ImageJ.loadDataSetsToHyperStack( fullFileName_, dsetNames,
- nFrames, nChannels);
-
- }
+ if (useVirtualStack_.isSelected())
+ {
+ HDF5ImageJ.loadDataSetsToVirtualStack( fullFileName_, dsetNames);
+ } else
+ {
+ HDF5ImageJ.loadDataSetsToHyperStack( fullFileName_, dsetNames,
+ nFrames, nChannels);
+ }
+
+ }
dispose();
}
-
-
+
+
//
// int assignHDF5TypeToImagePlusBitdepth( String type, int rank) {
// int nBits = 0;