/*
 * File: WriteSlicesToHDF_5.java
 *
 * Copyright (C) 2011, Dennis Mikkelson
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
 *
 * Contact : Dennis Mikkelson <mikkelsond@uwstout.edu>
 *           Department of Mathematics, Statistics and Computer Science
 *           University of Wisconsin-Stout
 *           Menomonie, WI 54751, USA
 *
 * This work was supported by the Spallation Neutron Source Division
 * of Oak Ridge National Laboratory, Oak Ridge, TN, USA.
 *
 *  Last Modified:
 *
 *  $Author: eu7 $
 *  $Date: 2011-10-13 22:06:21 -0500 (Thu, 13 Oct 2011) $
 *  $Revision: 21314 $
 */


package EventTools.ShowEventsApp.DataHandlers;


import ncsa.hdf.object.*;       // the common object package
import ncsa.hdf.object.h5.*;    // the HDF5 implementation

import ncsa.hdf.hdf5lib.HDF5Constants;

import gov.anl.ipns.MathTools.Geometry.*;

/**
 * This class handles writing slices of reciprocal space to an HDF 5 file
 * in the form required by the ZODS program.
 */
public class WriteSlicesToHDF_5
{

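  /**
   * Create a new, empty HDF 5 file with the specified name.  Any existing
   * file with that name will be deleted.
   *
   * @param filename  The name of the HDF 5 file to create.
   *
   * @return An H5File object for the newly created file.
   *
   * @throws IllegalArgumentException if the HDF5 FileFormat is not
   *         available, or if the file could not be created.
   */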
  public static H5File OpenH5_File( String filename ) throws Exception
  {
    FileFormat file_format =
                     FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);

    if (file_format == null)
      throw new IllegalArgumentException("Couldn't get HDF5 FileFormat");

    H5File file = (H5File)file_format.createFile(filename,
                                           FileFormat.FILE_CREATE_DELETE);

    if ( file == null )
      throw new IllegalArgumentException("Could not create file " + filename );

    return file;
  }


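  /**
   * Add a 3-element vector valued attribute with the specified name to the
   * specified group, using the x, y and z components of the given vector.
   */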
  private static void AddVectorAttribute( Group group,
                                          Datatype type,
                                          String name,
                                          Vector3D vec ) throws Exception
  {
    long[] dims = {3};
    Attribute attr = new Attribute( name, type, dims );
    double[] coords = new double[3];
    coords[0] = vec.getX();
    coords[1] = vec.getY();
    coords[2] = vec.getZ();
    attr.setValue( coords );
    group.writeMetadata( attr );
  }


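  /**
   * Add a 3x3 matrix valued attribute with the specified name to the
   * specified group.  The float matrix entries are copied to doubles
   * before being written.
   */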
  private static void AddMatrixAttribute( Group group,
                                          Datatype type,
                                          String name,
                                          float[][] matrix ) throws Exception
  {
    long[] dims = {3,3};
    Attribute attr = new Attribute( name, type, dims );
    double[][] d_mat = new double[3][3];

    for ( int row = 0; row < 3; row++ )
      for ( int col = 0; col < 3; col++ )
        d_mat[row][col] = matrix[row][col];

    attr.setValue( d_mat );
    group.writeMetadata( attr );
  }


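  /**
   * Add a 3-element integer valued attribute with the specified name to the
   * specified group, holding the given array of sizes.
   */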
  private static void AddSizeAttribute( Group group,
                                        Datatype type,
                                        String name,
                                        int[] sizes ) throws Exception
  {
    long[] dims = { 3 };
    Attribute attr = new Attribute( name, type, dims );
    attr.setValue( sizes );
    group.writeMetadata( attr );
  }


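  /**
   * Add a single unsigned integer valued attribute with the specified name
   * and value to the specified group.
   */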
  private static void AddUintAttribute( Group group,
                                        Datatype type,
                                        String name,
                                        int value ) throws Exception
  {
    long[] attrDims = { 1 };
    int[] attrValue = { value };

    Attribute attr = new Attribute( name, type, attrDims );
    attr.setValue( attrValue );

    group.writeMetadata( attr );
  }


  /**
   * Write all of the "pages" of the specified 3D array to the specified
   * file in HDF 5 format.  The file will be overwritten if it already
   * exists.  The in_HKL flag is recorded in the "isLocal" attribute and
   * the 3x3 orientation_matrix is written as an attribute if it is not
   * null.  Each page of data[page][row][col] is written as one slice;
   * the first slice starts at the specified origin and the origin is
   * advanced by dir_1_scaled for each successive slice.
   */
  public static void WriteFile( String filename,
                                boolean in_HKL,
                                float[][] orientation_matrix,
                                Vector3D origin,
                                Vector3D dir_1_scaled,
                                Vector3D dir_2_scaled,
                                Vector3D dir_3_scaled,
                                float[][][] data
                              ) throws Exception
  {
    H5File out_file = OpenH5_File( filename );
    out_file.open();

    Datatype le_double_type = out_file.createDatatype( Datatype.CLASS_FLOAT,
                                                       8,
                                                       Datatype.ORDER_LE,
                                                       -1 );

    Datatype le_uint_type = out_file.createDatatype( Datatype.CLASS_INTEGER,
                                                     4,
                                                     Datatype.ORDER_LE,
                                                     Datatype.SIGN_NONE );

    Group root = (Group)
          ((javax.swing.tree.DefaultMutableTreeNode)out_file.getRootNode())
                                                            .getUserObject();

                                           // create groups at the root
    Group coord_group = out_file.createGroup("CoordinateSystem", root);

    if ( in_HKL )
      AddUintAttribute( coord_group, le_uint_type, "isLocal", 1 );
    else
      AddUintAttribute( coord_group, le_uint_type, "isLocal", 0 );

    if ( orientation_matrix != null )
      AddMatrixAttribute( coord_group,
                          le_double_type,
                          "orientation_matrix",
                          orientation_matrix );

    int gzip_level = 0;                    // 0 => no compression

    Group data_group = out_file.createGroup("Data", root);

    int n_pages = data.length;
    int n_rows  = data[0].length;
    int n_cols  = data[0][0].length;

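    // Each page of the 3D array is written to its own group, Data_<page>,
    // under the Data group.  The group is tagged with the slice origin,
    // size and direction vectors as attributes, and the slice values are
    // stored in a 2D data set named "Data_2D".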
    Vector3D slice_origin = new Vector3D( origin );
    for ( int page = 0; page < n_pages; page++ )
    {
      Group p_group = out_file.createGroup("Data_"+page, data_group);
      AddVectorAttribute( p_group, le_double_type, "origin", slice_origin );

      int[] sizes = { 1, n_rows, n_cols };
      AddSizeAttribute( p_group, le_uint_type, "size", sizes );

      AddVectorAttribute( p_group, le_double_type, "direction_1", dir_1_scaled );
      AddVectorAttribute( p_group, le_double_type, "direction_2", dir_2_scaled );
      AddVectorAttribute( p_group, le_double_type, "direction_3", dir_3_scaled );

      /* Code to write as 1D array

      int data_size = n_rows * n_cols;
      long[] data_dims = { data_size };
      double[] data_1D = new double[n_rows * n_cols];

      int index = 0;
      for ( int row = 0; row < n_rows; row++ )
        for ( int col = 0; col < n_cols; col++ )
        {
          data_1D[index] = data[page][row][col];
          index++;
        }

      Dataset dataset = out_file.createScalarDS ("Data",
                                                 p_group,
                                                 le_double_type,
                                                 data_dims, null, null,
                                                 gzip_level,
                                                 data_1D );
      */
      /* End 1D array code */

      /* Code to write as 2D array, so it can be viewed as an image in hdfview
      */
      long[] two_D_dims = { n_rows, n_cols };
      double[][] data_2D = new double[n_rows][n_cols];
      for ( int row = 0; row < n_rows; row++ )
        for ( int col = 0; col < n_cols; col++ )
          data_2D[row][col] = data[page][row][col];

      out_file.createScalarDS ("Data_2D",
                               p_group,
                               le_double_type,
                               two_D_dims, null, null,
                               gzip_level,
                               data_2D );
      /* End 2D array code */

      slice_origin.add( dir_1_scaled );    // update origin for next slice
    }

    out_file.close();
  }


  /**
   * This main program provides a simple unit test for writing a sequence
   * of slices in the ZODS hdf5 file format.
   */
  public static void main( String args[] ) throws Exception
  {
    if ( args.length < 1 )
    {
      System.out.println("Enter the name of the file to write on the command line");
      System.exit(1);
    }

    String filename = args[0];
    boolean in_HKL = true;
    Vector3D origin       = new Vector3D( -1.6f, -1.6f, 0 );
    Vector3D dir_1_scaled = new Vector3D( 0.04f, 0, 0 );
    Vector3D dir_2_scaled = new Vector3D( 0, 0.04f, 0 );
    Vector3D dir_3_scaled = new Vector3D( 0, 0, 0.004f );

    float[][] orientation_mat = { {1, 0, 3}, {0, 1, 2}, {0, 0, 1} };

    int n_pages = 10;
    int n_rows  = 9;
    int n_cols  = 9;
    float[][][] data = new float[n_pages][n_rows][n_cols];
    int index = 0;
    for ( int page = 0; page < n_pages; page++ )
      for ( int row = 0; row < n_rows; row++ )
        for ( int col = 0; col < n_cols; col++ )
        {
          data[page][row][col] = index;
          index++;
        }

    WriteFile( filename,
               in_HKL,
               orientation_mat,
               origin,
               dir_1_scaled,
               dir_2_scaled,
               dir_3_scaled,
               data
             );
  }


}