Ticket #8554: ReduceSCD_OneRun_v1.py

File ReduceSCD_OneRun_v1.py, 13.0 KB (added by Peter Peterson, 7 years ago)
Line 
1# File: ReduceOneSCD_Run.py
2#
3# Version 2.0, modified to work with Mantid's new python interface.
4#
5# This script will reduce one SCD run.  The configuration file name and
6# the run to be processed must be specified as the first two command line
7# parameters.  This script is intended to be run in parallel using the
# ReduceSCD_Parallel.py script, after this script and configuration file have
# been tested to work properly for one run. This script will load, find peaks,
10# index and integrate either found or predicted peaks for the specified run. 
11# Either sphere integration or the Mantid PeakIntegration algorithms are
12# currently supported, but it may be updated to support other integration
13# methods.  Users should make a directory to hold the output of this script,
14# and must specify that output directory in the configuration file that
15# provides the parameters to this script.
16#
17# NOTE: All of the parameters that the user must specify are listed with
18# instructive comments in the sample configuration file: ReduceSCD.config.
19#
20
21#
22# _v1: December 3rd 2013. Mads Joergensen
# This version now includes the possibility to use the 1D cylindrical integration method
# and the possibility to load a UB matrix which will be used for integration of the individual
# runs and to index the combined file (Code from Xiaoping).
26#
27
28
import os
import sys
import shutil
import time
import ReduceDictionary
# Make the Mantid python bindings importable when this script is run from a
# plain python interpreter (outside MantidPlot).
#sys.path.append("/opt/mantidnightly/bin")
sys.path.append("/opt/Mantid/bin")

from mantid.simpleapi import *
from mantid.api import *
39
40print "API Version"
41print apiVersion()
42
43start_time = time.time()
44
45#
46# Get the config file name and the run number to process from the command line
47#
48if (len(sys.argv) < 3):
49  print "You MUST give the config file name and run number on the command line"
50  exit(0)
51
52config_file_name = sys.argv[1]
53run              = sys.argv[2]
54
#
# Load the parameter names and values from the specified configuration file
# into a dictionary and set all the required parameters from the dictionary.
#
params_dictionary = ReduceDictionary.LoadDictionary( config_file_name )
cfg = params_dictionary  # short alias for the lookups below

# Instrument, calibration and file locations
instrument_name = cfg["instrument_name"]
calibration_file_1 = cfg["calibration_file_1"]
calibration_file_2 = cfg["calibration_file_2"]
data_directory = cfg["data_directory"]
output_directory = cfg["output_directory"]

# Time-of-flight windows for the event data and for the beam monitor
min_tof = cfg["min_tof"]
max_tof = cfg["max_tof"]
min_monitor_tof = cfg["min_monitor_tof"]
max_monitor_tof = cfg["max_monitor_tof"]
monitor_index = cfg["monitor_index"]

# Peak finding, indexing and cell selection
cell_type = cfg["cell_type"]
centering = cfg["centering"]
num_peaks_to_find = cfg["num_peaks_to_find"]
min_d = cfg["min_d"]
max_d = cfg["max_d"]
tolerance = cfg["tolerance"]

# Optional peak prediction limits
integrate_predicted_peaks = cfg["integrate_predicted_peaks"]
min_pred_wl = cfg["min_pred_wl"]
max_pred_wl = cfg["max_pred_wl"]
min_pred_dspacing = cfg["min_pred_dspacing"]
max_pred_dspacing = cfg["max_pred_dspacing"]

# Which of the supported integration methods to use
use_sphere_integration = cfg["use_sphere_integration"]
use_ellipse_integration = cfg["use_ellipse_integration"]
use_fit_peaks_integration = cfg["use_fit_peaks_integration"]
use_cylindrical_integration = cfg["use_cylindrical_integration"]

# Sphere integration radii (in Q)
peak_radius = cfg["peak_radius"]
bkg_inner_radius = cfg["bkg_inner_radius"]
bkg_outer_radius = cfg["bkg_outer_radius"]
integrate_if_edge_peak = cfg["integrate_if_edge_peak"]

# Fit-peaks integration parameters
rebin_step = cfg["rebin_step"]
preserve_events = cfg["preserve_events"]
use_ikeda_carpenter = cfg["use_ikeda_carpenter"]
n_bad_edge_pixels = cfg["n_bad_edge_pixels"]
rebin_params = ",".join([min_tof, rebin_step, max_tof])

# Ellipsoid integration parameters
ellipse_region_radius = cfg["ellipse_region_radius"]
ellipse_size_specified = cfg["ellipse_size_specified"]

# Cylindrical integration parameters
cylinder_radius = cfg["cylinder_radius"]
cylinder_length = cfg["cylinder_length"]

# Optionally read a UB matrix from file instead of finding one per run
read_UB = cfg["read_UB"]
UB_filename = cfg["UB_filename"]
108
109#
110# Get the fully qualified input run file name, either from a specified data
111# directory or from findnexus
112#
113if data_directory is not None:
114  full_name = data_directory + "/" + instrument_name + "_" + run + "_event.nxs"
115else:
116  temp_buffer = os.popen("findnexus --event -i "+instrument_name+" "+str(run) )
117  full_name = temp_buffer.readline()
118  full_name=full_name.strip()
119  if not full_name.endswith('nxs'):
120    print "Exiting since the data_directory was not specified and"
121    print "findnexus failed for event NeXus file: " + instrument_name + " " + str(run)
122    exit(0)
123
124print "\nProcessing File: " + full_name + " ......\n"
125
126#
127# Name the files to write for this run
128#
129run_niggli_matrix_file = output_directory + "/" + run + "_Niggli.mat"
130run_niggli_integrate_file = output_directory + "/" + run + "_Niggli.integrate"
131
#
# Load the run data and find the total monitor counts
#
# Only events inside the [min_tof, max_tof] window are loaded.
event_ws = LoadEventNexus( Filename=full_name, 
                           FilterByTofMin=min_tof, FilterByTofMax=max_tof )

#
# Load calibration file(s) if specified.  NOTE: The file name passed in to LoadIsawDetCal
# can not be None.  TOPAZ has one calibration file, but SNAP may have two.
#
if (calibration_file_1 is not None ) or (calibration_file_2 is not None):
  # Replace a missing file name with "" so LoadIsawDetCal never receives None.
  if (calibration_file_1 is None ):
    calibration_file_1 = ""
  if (calibration_file_2 is None ):
    calibration_file_2 = ""
  LoadIsawDetCal( event_ws, 
                  Filename=calibration_file_1, Filename2=calibration_file_2 ) 

# Monitor spectra are stored separately from the event data.
monitor_ws = LoadNexusMonitors( Filename=full_name )

# Integrate the single selected monitor spectrum over the requested TOF
# window; the result is used later to normalize the integrated peaks.
integrated_monitor_ws = Integration( InputWorkspace=monitor_ws, 
                                     RangeLower=min_monitor_tof, RangeUpper=max_monitor_tof, 
                                     StartWorkspaceIndex=monitor_index, EndWorkspaceIndex=monitor_index )

# The integration leaves one bin in one spectrum: the total monitor count.
monitor_count = integrated_monitor_ws.dataY(0)[0]
print "\n", run, " has calculated monitor count", monitor_count, "\n"
158
#
# Make MD workspace using Lorentz correction, to find peaks
#
MDEW = ConvertToMD( InputWorkspace=event_ws, QDimensions="Q3D",
                    dEAnalysisMode="Elastic", QConversionScales="Q in A^-1",
                    LorentzCorrection='1', MinValues="-50,-50,-50", MaxValues="50,50,50",
                    SplitInto='2', SplitThreshold='50',MaxRecursionDepth='11' )
#
# Find the requested number of peaks.  Once the peaks are found, we no longer
# need the weighted MD event workspace, so delete it.
#
# Minimum allowed separation between found peaks: ~0.9 * 2*pi / max_d in Q.
distance_threshold = 0.9 * 6.28 / float(max_d)
peaks_ws = FindPeaksMD( MDEW, MaxPeaks=num_peaks_to_find, 
                        PeakDistanceThreshold=distance_threshold )
AnalysisDataService.remove( MDEW.getName() )

# Read or find UB for the run
if read_UB:
  # Read orientation matrix from file
  LoadIsawUB(InputWorkspace=peaks_ws, Filename=UB_filename)
else:
  # Find a Niggli UB matrix that indexes the peaks in this run
  FindUBUsingFFT( PeaksWorkspace=peaks_ws, MinD=min_d, MaxD=max_d, Tolerance=tolerance )
 
# Assign HKL indices to the peaks using the UB obtained above.
IndexPeaks( PeaksWorkspace=peaks_ws, Tolerance=tolerance)
184
185
#
# Save UB and peaks file, so if something goes wrong later, we can at least
# see these partial results
#
SaveIsawUB( InputWorkspace=peaks_ws,Filename=run_niggli_matrix_file )
SaveIsawPeaks( InputWorkspace=peaks_ws, AppendFile=False,
               Filename=run_niggli_integrate_file )

#
# Get complete list of peaks to be integrated and load the UB matrix into
# the predicted peaks workspace, so that information can be used by the
# PeakIntegration algorithm.
#
if integrate_predicted_peaks:
  print "PREDICTING peaks to integrate...."
  # Replace the found-peaks workspace with peaks predicted from the UB within
  # the requested wavelength and d-spacing ranges.
  peaks_ws = PredictPeaks( InputWorkspace=peaks_ws,
                WavelengthMin=min_pred_wl, WavelengthMax=max_pred_wl,
                MinDSpacing=min_pred_dspacing, MaxDSpacing=max_pred_dspacing, 
                ReflectionCondition='Primitive' )
else:
  print "Only integrating FOUND peaks ...."
#
# Set the monitor counts for all the peaks that will be integrated
#
num_peaks = peaks_ws.getNumberPeaks()
for i in range(num_peaks):
  peak = peaks_ws.getPeak(i)
  peak.setMonitorCount( monitor_count )
214   
if use_sphere_integration:
#
# Integrate found or predicted peaks in Q space using spheres, and save
# integrated intensities, with Niggli indexing.  First get an un-weighted
# workspace to do raw integration (we don't need high resolution or
# LorentzCorrection to do the raw sphere integration )
#
  MDEW = ConvertToMD( InputWorkspace=event_ws, QDimensions="Q3D",
                    dEAnalysisMode="Elastic", QConversionScales="Q in A^-1",
                    LorentzCorrection='0', MinValues="-50,-50,-50", MaxValues="50,50,50",
                    SplitInto='2', SplitThreshold='500',MaxRecursionDepth='10' )

  peaks_ws = IntegratePeaksMD( InputWorkspace=MDEW, PeakRadius=peak_radius,
                  CoordinatesToUse="Q (sample frame)",
                  BackgroundOuterRadius=bkg_outer_radius, 
                  BackgroundInnerRadius=bkg_inner_radius,
                  PeaksWorkspace=peaks_ws, 
                  IntegrateIfOnEdge=integrate_if_edge_peak )

elif use_fit_peaks_integration:
  # Fit-based integration needs a histogrammed workspace, so rebin in TOF
  # first using the configured rebin parameters.
  event_ws = Rebin( InputWorkspace=event_ws,
                    Params=rebin_params, PreserveEvents=preserve_events )
  peaks_ws = PeakIntegration( InPeaksWorkspace=peaks_ws, InputWorkspace=event_ws, 
                              IkedaCarpenterTOF=use_ikeda_carpenter,
                              MatchingRunNo=True,
                              NBadEdgePixels=n_bad_edge_pixels )

elif use_ellipse_integration:
  # Ellipsoid integration works directly on the event workspace.
  peaks_ws= IntegrateEllipsoids( InputWorkspace=event_ws, PeaksWorkspace = peaks_ws,
                                 RegionRadius = ellipse_region_radius,
                                 SpecifySize = ellipse_size_specified,
                                 PeakSize = peak_radius,
                                 BackgroundOuterSize = bkg_outer_radius,
                                 BackgroundInnerSize = bkg_inner_radius )

elif use_cylindrical_integration:
  # 1D cylindrical integration: per-peak profiles are written to a
  # .profiles file in the output directory.
  profiles_filename = output_directory + "/" + instrument_name + '_' + run + '.profiles'
  MDEW = ConvertToMD( InputWorkspace=event_ws, QDimensions="Q3D",
                    dEAnalysisMode="Elastic", QConversionScales="Q in A^-1",
                    LorentzCorrection='0', MinValues="-50,-50,-50", MaxValues="50,50,50",
                    SplitInto='2', SplitThreshold='500',MaxRecursionDepth='10' )

  peaks_ws = IntegratePeaksMD( InputWorkspace=MDEW, PeakRadius=cylinder_radius,
                  CoordinatesToUse="Q (sample frame)", 
                  Cylinder='1', CylinderLength = cylinder_length, 
                  PercentBackground = '20', ProfileFunction = 'NoFit',
                  ProfilesFile = profiles_filename,
                  PeaksWorkspace=peaks_ws, 
                  )
  # Warn that the cylindrical profiles are left in the Niggli cell; the
  # conventional-cell transform below does not apply to the profiles file.
  # NOTE(review): a similar warning is printed again after integration, but
  # with 'or' instead of 'and' -- confirm which condition is intended.
  if (not cell_type is None) and (not centering is None):
    print "WARNING: Cylindrical profiles are NOT transformed!!!"
266
267#
268# Save the final integrated peaks, using the Niggli reduced cell. 
269# This is the only file needed, for the driving script to get a combined
270# result.
271#
272SaveIsawPeaks( InputWorkspace=peaks_ws, AppendFile=False, 
273               Filename=run_niggli_integrate_file )
274
275# Print warning if user is trying to integrate using the cylindrical method and transorm the cell
276if use_cylindrical_integration: 
277  if (not cell_type is None) or (not centering is None):
278    print "WARNING: Cylindrical profiles are NOT transformed!!!"
279#
280# If requested, also switch to the specified conventional cell and save the
281# corresponding matrix and integrate file
282#
283if not use_ellipse_integration:
284  if (not cell_type is None) and (not centering is None) :
285    run_conventional_matrix_file = output_directory + "/" + run + "_" +    \
286                                   cell_type + "_" + centering + ".mat"
287    run_conventional_integrate_file = output_directory + "/" + run + "_" + \
288                                      cell_type + "_" + centering + ".integrate"
289    SelectCellOfType( PeaksWorkspace=peaks_ws, 
290                      CellType=cell_type, Centering=centering, 
291                      Apply=True, Tolerance=tolerance )
292    SaveIsawPeaks( InputWorkspace=peaks_ws, AppendFile=False, 
293                   Filename=run_conventional_integrate_file )
294    SaveIsawUB( InputWorkspace=peaks_ws, Filename=run_conventional_matrix_file )
295
296end_time = time.time()
297print '\nReduced run ' + str(run) + ' in ' + str(end_time - start_time) + ' sec'
298print 'using config file ' + config_file_name
299
300#
301# Try to get this to terminate when run by ReduceSCD_Parallel.py, from NX session
302#
303sys.exit(0)
304