Ticket #10569: isis_reduction_steps_RKH.py

File isis_reduction_steps_RKH.py, 120.1 KB (added by Anders Markvardsen, 6 years ago)
1"""
2    This file defines what happens in each step in the data reduction, it's
3    the guts of the reduction. See ISISReducer for the order the steps are run
4    in and the names they are given to identify them.
5
6    Most of this code is a copy-paste from SANSReduction.py, organized to be used with
7    ReductionStep objects. The guts need refactoring.
8"""
9import os
10import math
11import copy
12import re
13import traceback
14
15from mantid.kernel import Logger
16sanslog = Logger("SANS")
17
18from mantid.simpleapi import *
19from mantid.api import WorkspaceGroup, Workspace, IEventWorkspace
20from SANSUtility import (GetInstrumentDetails, MaskByBinRange,
21                         isEventWorkspace, getFilePathFromWorkspace,
22                         getWorkspaceReference, slice2histogram, getFileAndName,
23                         mask_detectors_with_masking_ws)
24import isis_instrument
25import isis_reducer
26from reducer_singleton import ReductionStep
27
28def _issueWarning(msg):
29    """
30        Prints a message to the log marked as warning
31        @param msg: message to be issued
32    """
33    print msg
34    sanslog.warning(msg)
35
36def _issueInfo(msg):
37    """
38        Prints a message to the log
39        @param msg: message to be issued
40    """
41    print msg
42    sanslog.notice(msg)
43
44
45class LoadRun(object):
46    UNSET_PERIOD = -1
47    def __init__(self, run_spec=None, trans=False, reload=True, entry=UNSET_PERIOD):
48        """
49            Load a data file, move its detector to the right position according
50            to the beam center and normalize the data.
51            @param run_spec: the run number followed by dot and the extension
52            @param trans: set to True if the file is from a transmission run (default: False)
53            @param reload: whether to reload the workspace if it is already present
54            @param entry: the entry number of the run, useful for multi-period files (default: load the entire file)
55        """
56        super(LoadRun, self).__init__()
57        self._data_file = run_spec
58        self._is_trans = trans
59        self._reload = reload
60        #entry number of the run inside the run file that will be analysed, as requested by the caller
61        self._period = int(entry)
62        self._index_of_group = 0
63
64        #set to the total number of periods in the file
65        self.periods_in_file = None
66        self.ext = ''
67        self.shortrun_no = -1
68        #the name of the loaded workspace in Mantid
69        self._wksp_name = ''
70
71    def curr_period(self):
72        if self._period != self.UNSET_PERIOD:
73            return self._period
74        return self._index_of_group + 1
75
76    def move2ws(self, index):
77        if self.periods_in_file > 1:
78            if index < self.periods_in_file:
79                self._index_of_group = index
80                return True
81        # either a single-period file or an out-of-range index: nothing to move to
82        return False
83
84    def get_wksp_name(self):
85        ref_ws = mtd[str(self._wksp_name)]
86        if isinstance(ref_ws, WorkspaceGroup):
87            return ref_ws[self._index_of_group].name()
88        else:
89            return self._wksp_name
90
91    wksp_name = property(get_wksp_name, None, None, None)
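    # A hedged usage sketch (not part of the original file) of the period book-keeping
    # above, assuming a hypothetical three-period run that has already been loaded:
    #
    #   loader = LoadRun('12345.nxs', entry=LoadRun.UNSET_PERIOD)
    #   ...                        # after loading, loader.periods_in_file == 3
    #   loader.move2ws(1)          # select the second period (zero-based index)
    #   loader.curr_period()       # -> 2, because no explicit entry was requested
    #   loader.wksp_name           # -> name of the second member of the workspace group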
92
93    def _load_transmission(self, inst=None, is_can=False, extra_options=dict()):
94        if '.raw' in self._data_file or '.RAW' in self._data_file:
95            self._load(inst, is_can, extra_options)
96            return
97
98        # The intention of the commented-out code below is sound, hence the reason why
99        # it has been left in place but commented out. As of this writing
100        # LoadNexusMonitors throws an error if the input file contains histogram data,
101        # i.e. this algorithm only works for event files at present. The error
102        # gets presented in red to the user and causes confusion. When either
103        # LoadNexusMonitors can load histogram data as well, or another equivalent
104        # change is made, the code below which is not commented out can be deleted and
105        # the commented-out code can be uncommented and modified as necessary
106
107        self._load(inst, is_can, extra_options)
108
109        workspace = self._get_workspace_name()
110        if workspace in mtd:
111            outWs = mtd[workspace]
112            if isinstance(outWs, IEventWorkspace):
113                if workspace + "_monitors" in mtd:
114                    RenameWorkspace(InputWorkspace=workspace + "_monitors", OutputWorkspace=workspace)
115                    self.periods_in_file = 1
116                    self._wksp_name = workspace
117
118        # For SANS, in transmission, we care only about the monitors. Hence,
119        # by trying to load only the monitors we speed up the reduction process
120        # and, besides, we avoid loading events, which are useless for transmission.
121        # It may fail if the input file was not a Nexus file, in which case
122        # it passes the job to the default _load method.
123        #try:
124        # outWs = LoadNexusMonitors(self._data_file, OutputWorkspace=workspace)
125        # self.periods_in_file = 1
126        # self._wksp_name = workspace
127        #except:
128        # self._load(inst, is_can, extra_options)
129
130    def _load(self, inst = None, is_can=False, extra_options=dict()):
131        """
132            Load a workspace and read the logs into the passed instrument reference
133            @param inst: a reference to the current instrument
134            @param is_can: set this to True for can runs
135            @param extra_options: arguments to pass on to the Load Algorithm.
136            @return: number of periods in the workspace
137        """
138        if self._period != self.UNSET_PERIOD:
139            workspace = self._get_workspace_name(self._period)
140            extra_options['EntryNumber'] = self._period
141        else:
142            workspace = self._get_workspace_name()
143
144        extra_options['OutputWorkspace'] = workspace
145
146        outWs = Load(self._data_file, **extra_options)
147
148        monitor_ws_name = workspace + "_monitors"
149
150        if isinstance(outWs, IEventWorkspace):
151            LoadNexusMonitors(self._data_file, OutputWorkspace=monitor_ws_name)
152        else:
153            if monitor_ws_name in mtd:
154                DeleteWorkspace(monitor_ws_name)
155
156        loader_name = outWs.getHistory().lastAlgorithm().getProperty('LoaderName').value
157
158        if loader_name == 'LoadRaw':
159            self._loadSampleDetails(workspace)
160
161        if self._period != self.UNSET_PERIOD and isinstance(outWs, WorkspaceGroup):
162            outWs = mtd[self._leaveSinglePeriod(outWs.name(), self._period)]
163
164        self.periods_in_file = self._find_workspace_num_periods(workspace)
165        self._wksp_name = workspace
166
167    def _get_workspace_name(self, entry_num=None):
168        """
169            Creates a name for the workspace that will contain the raw
170            data. If no entry number is given the period suffix is omitted
171            (UNSET_PERIOD is treated as period 1)
172            @param entry_num: if this argument is set to an integer it will be added to the workspace name after a 'p'
173        """
174        run = str(self.shortrun_no)
175        if entry_num:
176            if entry_num == self.UNSET_PERIOD:
177                entry_num = 1
178            run += 'p'+str(int(entry_num))
179
180        if self._is_trans:
181            return run + '_trans_' + self.ext.lower()
182        else:
183            return run + '_sans_' + self.ext.lower()
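    # Illustrative note (an assumption, not from the original code): with shortrun_no
    # 12345, ext 'nxs' and trans=True, _get_workspace_name(2) builds '12345p2_trans_nxs',
    # while calling it with no entry number gives '12345_trans_nxs'.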
184
185
186    def _loadSampleDetails(self, ws_name):
187        ws_pointer = mtd[str(ws_name)]
188        if isinstance(ws_pointer, WorkspaceGroup):
189            workspaces = [ws for ws in ws_pointer]
190        else:
191            workspaces = [ws_pointer]
192        for ws in workspaces:
193            LoadSampleDetailsFromRaw(ws, self._data_file)
194
195
196    def _loadFromWorkspace(self, reducer):
197        """ It substitutes for the work of _assignHelper when the input is a workspace,
198        or, at least, prepares the internal attributes to be processed by _assignHelper.
199
200        It is executed when the constructor input (run_spec) is a workspace.
201
202        If reload is False, it will try to get all information necessary to use the given
203        workspace as the one for the post-processing.
204        If reload is True, it will try to get all the information necessary to reload this
205        workspace from the data file.
206        """
207        assert(isinstance(self._data_file, Workspace))
208        ws_pointer = self._data_file
209
210        try:
211            _file_path = getFilePathFromWorkspace(ws_pointer)
212        except:
213            raise RuntimeError("Failed to retrieve information to reload this workspace " + str(self._data_file))
214        self._data_file = _file_path
215        self.ext = _file_path[-3:]
216        if isinstance(ws_pointer, WorkspaceGroup):
217            self.shortrun_no = ws_pointer[0].getRunNumber()
218        else:
219            self.shortrun_no = ws_pointer.getRunNumber()
220
221        if self._reload:
222            # give to _assignHelper the responsibility of loading this data.
223            return False
224
225        #test if the sample details are already loaded, necessary only for raw files:
226        if '.nxs' not in self._data_file[-4:]:
227            self._loadSampleDetails(ws_pointer)
228
229        # so it will try not to reload the workspace.
230        self._wksp_name = ws_pointer.name()
231        self.periods_in_file = self._find_workspace_num_periods(self._wksp_name)
232
233        #check that the current workspace has never been moved
234        hist_str = self._getHistory(ws_pointer)
235        if 'Algorithm: Move' in hist_str or 'Algorithm: Rotate' in hist_str:
236            raise RuntimeError('Moving components needs to be made compatible with not reloading the sample')
237
238        return True
239
240
241    # Helper function
242    def _assignHelper(self, reducer):
243        if isinstance(self._data_file, Workspace):
244            loaded_flag= self._loadFromWorkspace(reducer)
245            if loaded_flag:
246                return
247
248        if self._data_file == '' or self._data_file.startswith('.'):
249            raise RuntimeError('Sample needs to be assigned as run_number.file_type')
250
251        try:
252            if reducer.instrument.name() == "":
253                raise AttributeError
254        except AttributeError:
255            raise AttributeError('No instrument has been assigned, run SANS2D or LOQ first')
256
257        self._data_file = self._extract_run_details(self._data_file)
258
259        if not self._reload:
260            raise NotImplementedError('Raw workspaces must be reloaded, run with reload=True')
261
262        spectrum_limits = dict()
263        if self._is_trans:
264            if reducer.instrument.name() == 'SANS2D' and int(self.shortrun_no) < 568:
265                dimension = GetInstrumentDetails(reducer.instrument)[0]
266                spec_min = dimension*dimension*2
267                spectrum_limits = {'SpectrumMin':spec_min, 'SpectrumMax':spec_min + 4}
268
269        try:
270            if self._is_trans and reducer.instrument.name() != 'LOQ':
271                # Unfortunately, LOQ transmission runs acquire 3 monitors and the 3rd monitor
272                # is usually the first spectrum of the detector. This causes the following method
273                # to fail when it tries to load only the monitors. Hence, we are forced to skip
274                # this method for LOQ. See ticket #8559
275                self._load_transmission(reducer.instrument, extra_options=spectrum_limits)
276            else:
277                # the spectrum_limits differ from the default only for transmission data
278                self._load(reducer.instrument, extra_options=spectrum_limits)
279        except RuntimeError, details:
280            sanslog.warning(str(details))
281            self._wksp_name = ''
282            return
283
284        return
285
286    def _leaveSinglePeriod(self, workspace, period):
287        groupW = mtd[workspace]
288        if not isinstance(groupW, WorkspaceGroup):
289            sanslog.warning("Invalid request for getting single period in a non group workspace")
290            return workspace
291        if len(groupW) < period:
292            raise ValueError('Period number ' + str(period) + ' doesn\'t exist in workspace ' + groupW.getName())
293        ws_name = groupW[period].name()
294
295        # remove this workspace from the group
296        groupW.remove(ws_name)
297        # remove the entire group
298        DeleteWorkspace(groupW)
299
300        new_name = self._get_workspace_name(period)
301        if new_name != ws_name:
302            RenameWorkspace(ws_name, OutputWorkspace=new_name)
303        return new_name
304
305    def _extract_run_details(self, run_string):
306        """
307            Takes a run number and file type and generates the filename, workspace name and log name
308            @param run_string: either the name of a run file or a run number followed by a dot and then the file type, i.e. file extension
309        """
310        listOfFiles = FileFinder.findRuns(run_string)
311        firstFile = listOfFiles[0]
312        self.ext = firstFile[-3:]
313        self.shortrun_no = int(re.findall(r'\d+',run_string)[-1])
314        return firstFile
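    # Hedged example of the run-string parsing above (the file path is hypothetical):
    # _extract_run_details('992.raw') asks FileFinder for the matching files, keeps the
    # first hit (e.g. '/archive/LOQ00000992.raw'), sets self.ext to 'raw' and
    # self.shortrun_no to 992 (the last group of digits in the run string).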
315
316    def _find_workspace_num_periods(self, workspace):
317        """
318            @param workspace: the name of the workspace
319        """
320        numPeriods = -1
321        pWorksp = mtd[workspace]
322        if isinstance(pWorksp, WorkspaceGroup) :
323            #get the number of periods in a group using the fact that each period has a different name
324            numPeriods = len(pWorksp)
325        else :
326            numPeriods = 1
327        return numPeriods
328
329    def _getHistory(self, wk_name):
330        ws = getWorkspaceReference(wk_name)
331
332        if isinstance(wk_name, Workspace):
333            ws_h = wk_name.getHistory()
334        else:
335            if wk_name not in mtd:
336                return ""
337            ws_h = mtd[wk_name].getHistory()
338        hist_str = str(ws_h)
339
340        return hist_str
341
342    def getCorrospondingPeriod(self, sample_period, reducer):
343        """
344            Gets the period number that corresponds to the passed sample period number, based on:
345            if the workspace has the same number of periods as the sample it returns the requested
346            period, if it contains only one period it returns 1, and everything else is an error
347            @param sample_period: the period in the sample that is of interest
348            @return: depends on the number of entries in the workspace, could be the same number as passed or 1
349            @raise RuntimeError: if there is ambiguity
350        """
351        if self.periods_in_file == 1:
352            #this is a single entry file, don't consider entries
353            return 1
354        elif self._period != self.UNSET_PERIOD:
355            #the user specified a definite period, use it
356            return self._period
357        elif self.periods_in_file == reducer.get_sample().loader.periods_in_file:
358            #use corresponding periods, the same entry as the sample in each case
359            return sample_period
360        else:
361            raise RuntimeError('There is a mismatch in the number of periods (entries) in the file between the sample and another run')
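    # A minimal sketch of the period-matching rules above (the numbers are assumed):
    #   periods_in_file == 1                          -> always returns 1
    #   self._period set explicitly (e.g. 3)          -> returns 3 whatever the sample period
    #   periods_in_file == sample periods_in_file     -> returns the requested sample_period
    #   any other combination                         -> RuntimeError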
362
363
364class LoadTransmissions():
365    """
366        Loads the file used to apply the transmission correction to the
367        sample or can
368    """
369
370    def __init__(self, is_can=False, reload=True):
371        """
372            Two settings can be set at initialization: whether this is for
373            the can and whether the workspaces should be reloaded if they
374            already exist
375            @param is_can: if this is to correct the can (default false i.e. it's for the sample)
376            @param reload: setting this to false will mean the workspaces aren't reloaded if they already exist (default True i.e. reload)
377        """
378        self.trans = None
379        self.direct = None
380        self._reload = reload
381        self._period_t = -1
382        self._period_d = -1
383        self.can = is_can
384
385    def set_trans(self, trans, period=-1):
386        self._trans_name = trans
387        self._period_t = period
388
389    def set_direc(self, direct, period=-1):
390        self._direct_name = direct
391        self._period_d = period
392
393    def execute(self, reducer, workspace):
394        if self._trans_name not in [None, '']:
395            self.trans = LoadRun(self._trans_name, trans=True, reload=self._reload, entry=self._period_t)
396            self.trans._assignHelper(reducer)
397            if isinstance(self._trans_name, Workspace):
398                self._trans_name = self._trans_name.name()
399            if not self.trans.wksp_name:
400                # do nothing if no workspace was specified
401                return '', ''
402
403        if self._direct_name not in [None, '']:
404            self.direct = LoadRun(self._direct_name, trans=True, reload=self._reload, entry=self._period_d)
405            self.direct._assignHelper(reducer)
406            if isinstance(self._direct_name, Workspace):
407                self._direct_name = self._direct_name.name()
408            if not self.direct.wksp_name:
409                raise RuntimeError('Transmission run set without direct run error')
410
411        #transmission workspaces sometimes have monitor locations, depending on the instrument, load these locations
412        reducer.instrument.load_transmission_inst(self.trans.wksp_name, self.direct.wksp_name, reducer.get_beam_center())
413
414        return self.trans.wksp_name, self.direct.wksp_name
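    # Hedged usage sketch for this step (the run numbers are hypothetical):
    #
    #   trans_loader = LoadTransmissions(is_can=False, reload=True)
    #   trans_loader.set_trans('988.raw')    # transmission run
    #   trans_loader.set_direc('987.raw')    # direct (empty beam) run
    #   trans_ws, direct_ws = trans_loader.execute(reducer, None)
    #
    # execute() returns the pair of loaded workspace names, or ('', '') when no
    # transmission run was specified.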
415
416class CanSubtraction(ReductionStep):
417    """
418        Apply the same corrections to the can that were applied to the sample and
419        then subtracts this can from the sample.
420    """
421    def __init__(self):
422        super(CanSubtraction, self).__init__()
423
424    def execute(self, reducer, workspace):
425        """
426            Apply same corrections as for sample workspace then subtract from data
427        """
428        if reducer.get_can() is None:
429            return
430
431        #rename the sample workspace, its name will be restored to the original once the subtraction has been done
432        tmp_smp = workspace+"_sam_tmp"
433        RenameWorkspace(InputWorkspace=workspace,OutputWorkspace= tmp_smp)
434
435        tmp_can = workspace+"_can_tmp"
436        #do same corrections as were done to the sample
437        reducer.reduce_can(tmp_can)
438
439        #we now have the can workspace, use it
440        Minus(LHSWorkspace=tmp_smp,RHSWorkspace= tmp_can,OutputWorkspace= workspace)
441
442        #clean up the workspaces ready for users to see them if required
443        if reducer.to_Q.output_type == '1D':
444            rem_nans = StripEndNans()
445
446        self._keep_partial_results(tmp_smp, tmp_can)
447
448
449    def get_wksp_name(self):
450        return self.workspace.wksp_name
451
452    wksp_name = property(get_wksp_name, None, None, None)
453
454    def get_periods_in_file(self):
455        return self.workspace.periods_in_file
456
457    def _keep_partial_results(self, sample_name, can_name):
458        # the user asked to keep these partial results, see ticket #8970
459        gp_name = 'sample_can_reductions'
460        if mtd.doesExist(gp_name):
461            gpr = mtd[gp_name]
462            for wsname in [sample_name, can_name]:
463                if not gpr.contains(wsname):
464                    gpr.add(wsname)
465        else:
466            GroupWorkspaces([sample_name, can_name], OutputWorkspace=gp_name)
467
468    periods_in_file = property(get_periods_in_file, None, None, None)
469
470class Mask_ISIS(ReductionStep):
471    """
472        Marks some spectra so that they are not included in the analysis
473        Provides ISIS specific mask functionality (e.g. parsing
474        MASK commands from user files), inherits from Mask
475    """
476    def __init__(self, timemask='', timemask_r='', timemask_f='',
477                 specmask='', specmask_r='', specmask_f=''):
478        self._xml = []
479
480        #these spectra will be masked by the algorithm MaskDetectors
481        self.detect_list = []
482
483        # List of pixels to mask
484        self.masked_pixels = []
485
486        self.time_mask=timemask
487        self.time_mask_r=timemask_r
488        self.time_mask_f=timemask_f
489        self.spec_mask_r=specmask_r
490        self.spec_mask_f=specmask_f
491
492        # as far as I can tell this is used to allow phi masking
493        # not to be applied even though _lim_phi_xml has been set
494        self.mask_phi = True
495        self.phi_mirror = True
496        self._lim_phi_xml = ''
497        self.phi_min = -90.0
498        self.phi_max = 90.0
499        # read only phi (only used in ...)
500        # this option seems totally bizarre to me since it allows
501        # set_phi_limit to be called without setting the _lim_phi_xml
502        # string.....
503        self._readonly_phi = False
504        # used to assess whether set_phi_limit has been called just once,
505        # in which case exactly one phi range has been masked
506        # (see also get_phi_limits_tag)
507        self._numberOfTimesSetPhiLimitBeenCalled = 0
508        self.spec_list = []
509
510        #is set when there is an arm to mask, it's the width in millimetres
511        self.arm_width = None
512        #when there is an arm to mask this is its angle in degrees
513        self.arm_angle = None
514        #RMD Mod 24/7/13
515        self.arm_x = None
516        self.arm_y = None
517
518        ########################## Masking  ################################################
519        # Mask the corners and beam stop if radius parameters are given
520
521        self.min_radius = None
522        self.max_radius = None
523
524    def add_xml_shape(self, complete_xml_element):
525        """
526            Add an arbitrary shape to the region to be masked
527            @param complete_xml_element: description of the shape to add
528        """
529        if not complete_xml_element.startswith('<') :
530            raise ValueError('Expected xml string but found: ' + str(complete_xml_element))
531        self._xml.append(complete_xml_element)
532
533    def _infinite_plane(self, id, plane_pt, normal_pt, complement = False):
534        """
535            Generates xml code for an infinite plane
536            @param id: a string to refer to the shape by
537            @param plane_pt: a point in the plane
538            @param normal_pt: the direction of a normal to the plane
539            @param complement: mask in the direction of the normal or away
540            @return the xml string
541        """
542        if complement:
543            addition = '#'
544        else:
545            addition = ''
546        return '<infinite-plane id="' + str(id) + '">' + \
547            '<point-in-plane x="' + str(plane_pt[0]) + '" y="' + str(plane_pt[1]) + '" z="' + str(plane_pt[2]) + '" />' + \
548            '<normal-to-plane x="' + str(normal_pt[0]) + '" y="' + str(normal_pt[1]) + '" z="' + str(normal_pt[2]) + '" />'+ \
549            '</infinite-plane>\n'
550
551    def _infinite_cylinder(self, centre, radius, axis, id='shape'):
552        """
553            Generates xml code for an infinitely long cylinder
554            @param centre: a tuple for a point on the axis
555            @param radius: cylinder radius
556            @param axis: cylinder orientation
557            @param id: a string to refer to the shape by
558            @return the xml string
559        """
560        return '<infinite-cylinder id="' + str(id) + '">' + \
561            '<centre x="' + str(centre[0]) + '" y="' + str(centre[1]) + '" z="' + str(centre[2]) + '" />' + \
562            '<axis x="' + str(axis[0]) + '" y="' + str(axis[1]) + '" z="' + str(axis[2]) + '" />' + \
563            '<radius val="' + str(radius) + '" /></infinite-cylinder>\n'
564
565    def _finite_cylinder(self, centre, radius, height, axis, id='shape'):
566        """
567            Generates xml code for a finite-height cylinder
568            @param centre: a tuple for the centre of the bottom base (a point on the axis)
569            @param radius: cylinder radius
570            @param height: cylinder height
571            @param axis: cylinder orientation
572            @param id: a string to refer to the shape by
573            @return the xml string
574        """
575        return '<cylinder id="' + str(id) + '">' + \
576            '<centre-of-bottom-base x="' + str(centre[0]) + '" y="' + str(centre[1]) + '" z="' + str(centre[2]) + '" />' + \
577            '<axis x="' + str(axis[0]) + '" y="' + str(axis[1]) + '" z="' + str(axis[2]) + '" />' + \
578            '<radius val="' + str(radius) + '" /><height val="' + str(height) + '" /></cylinder>\n'
579
580    def add_cylinder(self, radius, xcentre, ycentre, ID='shape'):
581        '''Mask the inside of an infinite cylinder on the input workspace.'''
582        self.add_xml_shape(
583            self._infinite_cylinder([xcentre, ycentre, 0.0], radius, [0,0,1], id=ID)+'<algebra val="' + str(ID) + '"/>')
584
585
586    def add_outside_cylinder(self, radius, xcentre = 0.0, ycentre = 0.0, ID='shape'):
587        '''Mask out the outside of a cylinder of specified radius'''
588        self.add_xml_shape(
589            self._infinite_cylinder([xcentre, ycentre, 0.0], radius, [0,0,1], id=ID)+'<algebra val="#' + str(ID) + '"/>')
590
591    def set_radi(self, min, max):
592        self.min_radius = float(min)/1000.
593        self.max_radius = float(max)/1000.
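    # Note (an assumption based on the /1000. above): set_radi expects the radii in mm
    # and stores them in metres, e.g. set_radi(38, 419) gives min_radius 0.038 and
    # max_radius 0.419.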
594
595    def _whichBank(self, instName, specNo):
596        """
597            Return either 'rear' or 'front' depending on which bank the spectrum number belongs to
598
599            @param instName Instrument name. Used for the MASK Ssp command to tell what bank it refers to
600            @param specNo Spectrum number
601        """
602        bank = 'rear'
603
604        if instName.upper() == 'LOQ':
605            if 16387 <= specNo <= 17784:
606                bank = 'front'
607        if instName.upper() == 'SANS2D':
608            if 36873 <= specNo <= 73736:
609                bank = 'front'
610
611        return bank
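    # Assumed illustration of the bank boundaries used above: on LOQ spectra 16387-17784
    # belong to the 'front' (HAB) bank, on SANS2D spectra 36873-73736 belong to 'front',
    # and everything else defaults to 'rear', e.g. _whichBank('SANS2D', 40000) -> 'front'.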
612
613    def parse_instruction(self, instName, details):
614        """
615            Parse an instruction line from an ISIS mask file
616            @param instName Instrument name. Used for the MASK Ssp command to tell what bank it refers to
617            @param details Line to parse
618        """
619        details = details.lstrip()
620        details = details.upper()
621        if not details.startswith('MASK') and not details.startswith('L/PHI'):
622            _issueWarning('Ignoring malformed mask line ' + details)
623            return
624
625        if 'L/PHI' in details:
626            phiParts = details.split()
627            if len(phiParts) == 3:
628                mirror = phiParts[0] != 'L/PHI/NOMIRROR'
629                phiMin = phiParts[1]
630                phiMax = phiParts[2]
631                self.set_phi_limit(float(phiMin), float(phiMax), mirror)
632                return
633            else:
634                _issueWarning('Unrecognized L/PHI masking line command "' + details + '"')
635                return
636
637        parts = details.split('/')
638        # A spectrum mask or mask spectra range with H and V commands
639        if len(parts) == 1:     # Command is to type MASK something
640            argToMask = details[4:].lstrip().upper()
641            bank = 'rear'
642            # special case for MASK Ssp where we try to infer the bank the spectrum number belongs to
643            if 'S' in argToMask:
644                if '>' in argToMask:
645                    pieces = argToMask.split('>')
646                    low = int(pieces[0].lstrip('S'))
647                    upp = int(pieces[1].lstrip('S'))
648                    bankLow = self._whichBank(instName, low)
649                    bankUpp = self._whichBank(instName, upp)
650                    if bankLow != bankUpp:
651                        _issueWarning('The spectra in Mask command: ' + details +
652                                      ' belong to two different banks. Default to use bank ' +
653                                      bankLow)
654                    bank = bankLow
655                else:
656                    bank = self._whichBank(instName, int(argToMask.lstrip('S')))
657
658            #Default to the rear detector if not a MASK Ssp command
659            self.add_mask_string(argToMask, detect=bank)
660        elif len(parts) == 2:   # Command is to type MASK/ something
661            type = parts[1]   # this is the part of the command following /
662            typeSplit = type.split()  # used for command such as MASK/REAR Hn and MASK/Line w a
663            if type == 'CLEAR':    # Command is specifically MASK/CLEAR
664                self.spec_mask_r = ''
665                self.spec_mask_f = ''
666            elif type.startswith('T'):
667                if type.startswith('TIME'):
668                    bin_range = type[4:].lstrip()
669                else:
670                    bin_range = type[1:].lstrip()
671                self.time_mask += ';' + bin_range
672            elif len(typeSplit) == 2:
673                # Commands such as MASK/REAR Hn, where typeSplit[0] then equals 'REAR'
674                if 'S' in typeSplit[1].upper():
675                    _issueWarning('MASK command of type ' + details +
676                                  ' deprecated. Please use instead MASK Ssp1[>Ssp2]')
677                if 'REAR' != typeSplit[0].upper() and instName == 'LOQ':
678                    _issueWarning('MASK command of type ' + details +
679                                  ' can, until otherwise requested, only be used for the REAR (default) Main detector of LOQ. ' +
680                                  'Default to the Main detector of LOQ for this mask command')
681                    self.add_mask_string(mask_string=typeSplit[1],detect='rear')
682                else:
683                    self.add_mask_string(mask_string=typeSplit[1],detect=typeSplit[0])
684            elif type.startswith('LINE'):
685                # RMD mod 24/7/13
686                if len(typeSplit) == 5:
687                    self.arm_width = float(typeSplit[1])
688                    self.arm_angle = float(typeSplit[2])
689                    self.arm_x = float(typeSplit[3])
690                    self.arm_y = float(typeSplit[4])
691                elif len(typeSplit) == 3:
692                    self.arm_width = float(typeSplit[1])
693                    self.arm_angle = float(typeSplit[2])
694                    self.arm_x=0.0
695                    self.arm_y=0.0
696                else:
697                    _issueWarning('Unrecognized line masking command "' + details + '" syntax is MASK/LINE width angle or MASK/LINE width angle x y')
698            else:
699                _issueWarning('Unrecognized masking option "' + details + '"')
700        elif len(parts) == 3:
701            type = parts[1]
702            if type == 'CLEAR':
703                self.time_mask = ''
704                self.time_mask_r = ''
705                self.time_mask_f = ''
706            elif (type == 'TIME' or type == 'T'):
707                parts = parts[2].split()
708                if len(parts) == 3:
709                    detname = parts[0].rstrip()
710                    bin_range = parts[1].rstrip() + ' ' + parts[2].lstrip()
711                    if detname.upper() == 'FRONT':
712                        self.time_mask_f += ';' + bin_range
713                    elif detname.upper() == 'REAR':
714                        self.time_mask_r += ';' + bin_range
715                    else:
716                        _issueWarning('Detector \'' + detname + '\' not found in currently selected instrument ' + self.instrument.name() + '. Skipping line.')
717                else:
718                    _issueWarning('Unrecognized masking line "' + details + '"')
719        else:
720            _issueWarning('Unrecognized masking line "' + details + '"')
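    # A hedged sketch of how a few typical user-file lines would be routed by
    # parse_instruction (the example lines are illustrative, not taken from this ticket):
    #
    #   MASK H0>H5            -> appended to self.spec_mask_r (rear bank by default)
    #   MASK/FRONT V100       -> appended to self.spec_mask_f (falls back to rear on LOQ)
    #   MASK/TIME 17500 22000 -> appended to self.time_mask
    #   MASK S40000           -> bank inferred from the spectrum number via _whichBank
    #   L/PHI -45 45          -> set_phi_limit(-45.0, 45.0, True)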
721
722    def add_mask_string(self, mask_string, detect):
723        if detect.upper() == 'FRONT' or detect.upper() == 'HAB':
724            self.spec_mask_f += ',' + mask_string
725        elif detect.upper() == 'REAR':
726            self.spec_mask_r += ',' + mask_string
727        else:
728            _issueWarning('Detector \'' + detect + '\' not found in currently selected instrument ' + self.instrument.name() + '. Skipping line.')
729
730    def _ConvertToSpecList(self, maskstring, detector):
731        '''
732            Convert a mask string to a spectra list
733            6/8/9 RKH attempt to add a box mask e.g.  h12+v34 (= one pixel at intersection), h10>h12+v101>v123 (=block 3 wide, 23 tall)
734
735            @param maskstring is a comma separated list of mask commands for masking spectra using e.g. the h, s and v commands
736        '''
737        #Compile spectra ID list
738        if maskstring == '':
739            return ''
740        masklist = maskstring.split(',')
741
742        speclist = ''
743        for x in masklist:
744            x = x.lower()
745            if '+' in x:
746                bigPieces = x.split('+')
747                if '>' in bigPieces[0]:
748                    pieces = bigPieces[0].split('>')
749                    low = int(pieces[0].lstrip('hv'))
750                    upp = int(pieces[1].lstrip('hv'))
751                else:
752                    low = int(bigPieces[0].lstrip('hv'))
753                    upp = low
754                if '>' in bigPieces[1]:
755                    pieces = bigPieces[1].split('>')
756                    low2 = int(pieces[0].lstrip('hv'))
757                    upp2 = int(pieces[1].lstrip('hv'))
758                else:
759                    low2 = int(bigPieces[1].lstrip('hv'))
760                    upp2 = low2
761                if 'h' in bigPieces[0] and 'v' in bigPieces[1]:
762                    ydim=abs(upp-low)+1
763                    xdim=abs(upp2-low2)+1
764                    speclist += detector.spectrum_block(low, low2,ydim, xdim) + ','
765                elif 'v' in bigPieces[0] and 'h' in bigPieces[1]:
766                    xdim=abs(upp-low)+1
767                    ydim=abs(upp2-low2)+1
768                    speclist += detector.spectrum_block(low2, low,ydim, xdim)+ ','
769                else:
770                    print "error in mask, ignored:  " + x
771            elif '>' in x:  # Commands: MASK Ssp1>Ssp2, MASK Hn1>Hn2 and MASK Vn1>Vn2
772                pieces = x.split('>')
773                low = int(pieces[0].lstrip('hvs'))
774                upp = int(pieces[1].lstrip('hvs'))
775                if 'h' in pieces[0]:
776                    nstrips = abs(upp - low) + 1
777                    speclist += detector.spectrum_block(low, 0,nstrips, 'all')  + ','
778                elif 'v' in pieces[0]:
779                    nstrips = abs(upp - low) + 1
780                    speclist += detector.spectrum_block(0,low, 'all', nstrips)  + ','
781                else:
782                    for i in range(low, upp + 1):
783                        speclist += str(i) + ','
784            elif 'h' in x:
785                speclist += detector.spectrum_block(int(x.lstrip('h')), 0,1, 'all') + ','
786            elif 'v' in x:
787                speclist += detector.spectrum_block(0,int(x.lstrip('v')), 'all', 1) + ','
788            elif 's' in x:   # Command MASK Ssp. Although note commands of type MASK Ssp1>Ssp2 handled above
789                speclist += x.lstrip('s') + ','
790            elif x == '':
791                #empty entries are allowed
792                pass
793            elif len(x.split()) == 4:
794                _issueWarning('Box mask entry "%s" ignored. Box masking is not supported by Mantid'%('mask '+x))
795            else:
796                raise SyntaxError('Problem reading a mask entry: "%s"' % x)
797
798        #remove any trailing comma
799        if speclist.endswith(','):
800            speclist = speclist[0:len(speclist)-1]
801
802        return speclist
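    # Hedged examples of the mask-string grammar handled above (the exact spectrum
    # numbers produced depend on detector.spectrum_block, so only the shape is sketched):
    #
    #   'h10>h12'             -> three full horizontal strips (rows 10 to 12)
    #   'v101>v123'           -> full vertical strips for columns 101 to 123
    #   'h10>h12+v101>v123'   -> the rectangular block where those strips intersect
    #   's123'                -> the single spectrum 123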
803
804    def _mask_phi(self, id, centre, phimin, phimax, use_mirror=True):
805        '''
806            Mask the detector bank such that only the region specified in the
807            phi range is left unmasked
808            Purpose of this method is to populate self._lim_phi_xml
809        '''
810        # convert all angles to be between 0 and 360
811        while phimax > 360 : phimax -= 360
812        while phimax < 0 : phimax += 360
813        while phimin > 360 : phimin -= 360
814        while phimin < 0 : phimin += 360
815        while phimax<phimin : phimax += 360
816
817        #Convert to radians
818        phimin = math.pi*phimin/180.0
819        phimax = math.pi*phimax/180.0
820
821        id = str(id)
822        self._lim_phi_xml = \
823            self._infinite_plane(id+'_plane1',centre, [math.cos(-phimin + math.pi/2.0),math.sin(-phimin + math.pi/2.0),0]) \
824            + self._infinite_plane(id+'_plane2',centre, [-math.cos(-phimax + math.pi/2.0),-math.sin(-phimax + math.pi/2.0),0])
825
826        if use_mirror:
827            self._lim_phi_xml += self._infinite_plane(id+'_plane3',centre, [math.cos(-phimax + math.pi/2.0),math.sin(-phimax + math.pi/2.0),0]) \
828            + self._infinite_plane(id+'_plane4',centre, [-math.cos(-phimin + math.pi/2.0),-math.sin(-phimin + math.pi/2.0),0]) \
829            + '<algebra val="#(('+id+'_plane1 '+id+'_plane2):('+id+'_plane3 '+id+'_plane4))" />'
830        else:
831            #the formula is different for acute versus obtuse angles
832            if phimax-phimin > math.pi :
833              # to get an obtuse angle, a wedge that's more than half the area, we need to add the semi-infinite volumes
834                self._lim_phi_xml += '<algebra val="#('+id+'_plane1:'+id+'_plane2)" />'
835            else :
836              # an acute angle, the wedge is less than half the area, we need to use the intersection of those semi-infinite volumes
837                self._lim_phi_xml += '<algebra val="#('+id+'_plane1 '+id+'_plane2)" />'
838
839    def _mask_line(self, startPoint, length, width, angle):
840        '''
841            Creates the xml to mask a line of the given width and length at the given angle.
842            The masking object which is used to mask a line of, say,
843            a detector array is a finite cylinder
844            @param startPoint: startPoint of line
845            @param length: length of line
846            @param width: width of line in mm
847            @param angle: angle of line in xy-plane in units of degrees
848            @return: return xml shape string
849        '''
850        return self._finite_cylinder(startPoint, width/2000.0, length,
851                                               [math.cos(angle*math.pi/180.0),math.sin(angle*math.pi/180.0),0.0], "arm")
852
853
854    def get_phi_limits_tag(self):
855        """
856            Get the values of the lowest and highest boundaries
857            Used to append to output workspace name
858            @return 'Phi<low>_<high>' if it has been set, otherwise an empty string
859        """
860        if self.mask_phi and self._lim_phi_xml != '' and (abs(self.phi_max - self.phi_min) != 180.0):
861          return 'Phi'+str(self.phi_min)+'_'+str(self.phi_max)
862        else:
863          return ''
864
865    def set_phi_limit(self, phimin, phimax, phimirror, override=True):
866        '''
867            ... (tx to Richard for changes to this function
868                 for ticket #)
869            @param phimin:
870            @param phimax:
871            @param phimirror:
872            @param override: This one I don't understand. It seems
873               dangerous to be allowed to set this one to False.
874               Also this option cannot be set from the command interface
875            @return: nothing; the resulting shape xml is stored in self._lim_phi_xml
876        '''
877        if phimirror :
878            if phimin > phimax:
879                phimin, phimax = phimax, phimin
880
881            if phimax - phimin == 180.0:
882                self.phi_min = -90.0
883                self.phi_max = 90.0
884            else:
885                self.phi_min = phimin
886                self.phi_max = phimax
887        else:
888            self.phi_min = phimin
889            self.phi_max = phimax
890
891        self.phi_mirror = phimirror
892
893        if override:
894            self._readonly_phi = True
895
896        if (not self._readonly_phi) or override:
897            self._mask_phi(
898                'unique phi', [0,0,0], self.phi_min,self.phi_max,self.phi_mirror)
899
900    def execute(self, reducer, workspace):
901        instrument = reducer.instrument
902        #set up the spectra lists and shape xml to mask
903        detector = instrument.cur_detector()
904        if detector.isAlias('rear'):
905            self.spec_list = self._ConvertToSpecList(self.spec_mask_r, detector)
906            #Time mask
907            MaskByBinRange(workspace,self.time_mask_r)
908            MaskByBinRange(workspace,self.time_mask)
909
910        if detector.isAlias('front'):
911            #front specific masking
912            self.spec_list = self._ConvertToSpecList(self.spec_mask_f, detector)
913            #Time mask
914            MaskByBinRange(workspace,self.time_mask_f)
915            MaskByBinRange(workspace,self.time_mask)
916
917        #reset the xml, as execute can be run more than once
918        self._xml = []
919
920        if ( not self.min_radius is None ) and ( self.min_radius > 0.0 ):
921            self.add_cylinder(self.min_radius, 0, 0, 'beam_stop')
922        if ( not self.max_radius is None ) and ( self.max_radius > 0.0 ):
923            self.add_outside_cylinder(self.max_radius, 0, 0, 'beam_area')
924        #now do the masking
925        for shape in self._xml:
926            MaskDetectorsInShape(Workspace=workspace, ShapeXML=shape)
927
928        if "MaskFiles" in reducer.settings:
929            for mask_file in reducer.settings["MaskFiles"].split(","):
930                try:
931                    mask_file_path, mask_ws_name = getFileAndName(mask_file)
932                    mask_ws_name = "__" + mask_ws_name
933                    LoadMask(Instrument=instrument.idf_path,
934                             InputFile=mask_file_path,
935                             OutputWorkspace=mask_ws_name)
936                    mask_detectors_with_masking_ws(workspace, mask_ws_name)
937                    DeleteWorkspace(Workspace=mask_ws_name)
938                except:
939                    raise RuntimeError("Invalid input for mask file. (%s)" % mask_file)
940
941        if len(self.spec_list)>0:
942            MaskDetectors(Workspace=workspace, SpectraList = self.spec_list)
943
944        if self._lim_phi_xml != '' and self.mask_phi:
945            MaskDetectorsInShape(Workspace=workspace,ShapeXML= self._lim_phi_xml)
946
947        if self.arm_width and self.arm_angle:
948            if instrument.name() == "SANS2D":
949                ws = mtd[str(workspace)]
950                det = ws.getInstrument().getComponentByName('rear-detector')
951                det_Z = det.getPos().getZ()
952                start_point = [self.arm_x, self.arm_y, det_Z]
953                MaskDetectorsInShape(Workspace=workspace,ShapeXML=
954                                 self._mask_line(start_point, 1e6, self.arm_width, self.arm_angle))
955
956        output_ws, detector_list = ExtractMask(InputWorkspace=workspace, OutputWorkspace="__mask")
957        _issueInfo("Mask check %s: %g masked pixels" % (workspace, len(detector_list)))
958
959    def view(self, instrum):
960        """
961            In MantidPlot this opens InstrumentView to display the masked
962            detectors in the bank in a different colour
963            @param instrum: a reference to an instrument object to view
964        """
965        wksp_name = 'CurrentMask'
966        instrum.load_empty(wksp_name)
967
968        #apply masking to the current detector
969        self.execute(None, wksp_name, instrum)
970
971        #now the other detector
972        other = instrum.other_detector().name()
973        original = instrum.cur_detector().name()
974        instrum.setDetector(other)
975        self.execute(None, wksp_name, instrum)
976        #reset the instrument to mask the current detector
977        instrum.setDetector(original)
978
979        # Mark up "dead" detectors with error value
980        FindDeadDetectors(InputWorkspace=wksp_name,OutputWorkspace= wksp_name, DeadValue=500)
981
982        #opens an instrument showing the contents of the workspace (i.e. the instrument with masked detectors)
983        instrum.view(wksp_name)
984
985    def display(self, wksp, reducer, counts=None):
986        """
987            Mask detectors in a workspace and display its show instrument
988            @param wksp: this named workspace will be masked and displayed
989            @param reducer: the reduction chain that contains all the settings
990            @param counts: optional workspace containing neutron counts data that the mask will be superimposed on to
991        """
992        #apply masking to the current detector
993        self.execute(reducer, wksp)
994
995        instrum = reducer.instrument
996        #now the other detector
997        other = instrum.other_detector().name()
998        original = instrum.cur_detector().name()
999        instrum.setDetector(other)
1000        self.execute(reducer, wksp)
1001        #reset the instrument to mask the current detector
1002        instrum.setDetector(original)
1003
1004        if counts:
1005            Power(InputWorkspace=counts,OutputWorkspace= 'ones',Exponent= 0)
1006            Plus(LHSWorkspace=wksp,RHSWorkspace= 'ones',OutputWorkspace= wksp)
1007
1008        # Mark up "dead" detectors with error value
1009        FindDeadDetectors(InputWorkspace=wksp,OutputWorkspace= wksp, LiveValue = 0, DeadValue=1)
1010
1011        #check if we have a workspace to superimpose the mask on to
1012        if counts:
1013            #the code below is a prototype for the ISIS SANS group, to make it permanent it should be improved
1014
1015            #create a workspace where the masked spectra have a value
1016            flags = mtd[wksp]
1017            #normalise that value to the data in the workspace
1018            vals = mtd[counts]
1019            maxval = 0
1020            Xs = []
1021            Ys = []
1022            Es = []
1023            for i in range(0, flags.getNumberHistograms()):
1024                Xs.append(flags.readX(i)[0])
1025                Xs.append(flags.readX(i)[1])
1026                Ys.append(flags.readY(i)[0])
1027                Es.append(0)
1028
1029                if (vals.readY(i)[0] > maxval):
1030                    #don't include masked spectra or monitors
1031                    if (flags.readY(i)[0] == 0) and (vals.readY(i)[0] < 5000):
1032                        maxval = vals.readY(i)[0]
1033
1034            #now normalise to the max/5
1035            maxval /= 5.0
1036            for i in range(0, len(Ys)):
1037                if Ys[i] != 0:
1038                    Ys[i] = maxval*Ys[i] + vals.readY(i)[0]
1039
1040            CreateWorkspace(OutputWorkspace=wksp,DataX= Xs,DataY= Ys,DataE= Es,NSpec= len(Ys), UnitX='TOF', VerticalAxisValues=Ys)
1041            #change the units on the workspace so it is compatible with the workspace containing counts data
1042            Multiply(LHSWorkspace='ones',RHSWorkspace= wksp,OutputWorkspace= 'units')
1043            #do the super-position and clean up
1044            Minus(LHSWorkspace=counts,RHSWorkspace= 'units',OutputWorkspace= wksp)
1045            reducer.deleteWorkspaces(['ones', 'units'])
1046
1047        #opens an instrument showing the contents of the workspace (i.e. the instrument with masked detectors)
1048        instrum.view(wksp)
1049
1050    def __str__(self):
1051        return '    radius: ' + str(self.min_radius) + ' ' + str(self.max_radius) + '\n'+\
1052            '    rear spectrum mask: ' + str(self.spec_mask_r) + '\n'+\
1053            '    front spectrum mask: ' + str(self.spec_mask_f) + '\n'+\
1054            '    global time mask: ' + str(self.time_mask) + '\n'+\
1055            '    rear time mask: ' + str(self.time_mask_r) + '\n'+\
1056            '    front time mask: ' + str(self.time_mask_f) + '\n'
1057
1058
1059class LoadSample(LoadRun):
1060    """
1061        Handles loading the sample run, this is the main experimental run with data
1062        about the sample of interest
1063    """
1064    def __init__(self, sample=None, reload=True, entry=-1):
1065        LoadRun.__init__(self, sample, reload=reload, entry=entry)
1066        self._scatter_sample = None
1067        self._SAMPLE_RUN = None
1068
1069        self.maskpt_rmin = None
1070        #is set to the entry (period) number in the sample to be run
1071        self.entries = []
1072
1073    def execute(self, reducer, isSample):
1074        self._assignHelper(reducer)
1075
1076        if self.wksp_name == '':
1077            raise RuntimeError('Unable to load SANS sample run, cannot continue.')
1078
1079        if self.periods_in_file > 1:
1080            self.entries = range(0, self.periods_in_file)
1081
1082        # applies on_load_sample for all the workspaces (single or groupworkspace)
1083        num = 0
1084        while True:
1085            reducer.instrument.on_load_sample(self.wksp_name, reducer.get_beam_center(), isSample)
1086            num += 1
1087            if num == self.periods_in_file:
1088                break
1089            self.move2ws(num)
1090        self.move2ws(0)
1091
1092
1093class CropDetBank(ReductionStep):
1094    """
1095        Takes the spectra range of the current detector from the instrument object
1096        and crops the input workspace to just those spectra. Supports optionally
1097        generating the output workspace from a different (sample) workspace
1098    """
1099    def __init__(self):
1100        """
1101            Sets up the object to either the output or sample workspace
1102        """
1103        super(CropDetBank, self).__init__()
1104
1105    def execute(self, reducer, workspace):
1106        in_wksp = workspace
1107
1108        # Get the detector bank that is to be used in this analysis, leave the complete workspace
1109        reducer.instrument.cur_detector().crop_to_detector(in_wksp, workspace)
1110
1111class NormalizeToMonitor(ReductionStep):
1112    """
1113        Before normalisation the monitor spectrum's background is removed
1114        and for LOQ runs also the prompt peak. The input workspace is copied
1115        and accessible later as prenomed
1116    """
1117    NORMALISATION_SPEC_NUMBER = 1
1118    NORMALISATION_SPEC_INDEX = 0
1119    def __init__(self, spectrum_number=None):
1120        super(NormalizeToMonitor, self).__init__()
1121        self._normalization_spectrum = spectrum_number
1122
1123        #the result of this calculation that will be used by CalculateNorm() and the ConvertToQ
1124        self.output_wksp = None
1125
1126    def execute(self, reducer, workspace):
1127        normalization_spectrum = self._normalization_spectrum
1128        if normalization_spectrum is None:
1129            #the -1 converts from spectrum number to spectrum index
1130            normalization_spectrum = reducer.instrument.get_incident_mon()
1131
1132        sanslog.notice('Normalizing to monitor ' + str(normalization_spectrum))
1133
1134        self.output_wksp = str(workspace) + '_incident_monitor'
1135        mon = reducer.get_sample().get_monitor(normalization_spectrum-1)
1136        if reducer.event2hist.scale != 1:
1137            mon *= reducer.event2hist.scale
1138
1139        if str(mon) != self.output_wksp:
1140            RenameWorkspace(mon, OutputWorkspace=self.output_wksp)
1141
1142        if reducer.instrument.name() == 'LOQ':
1143            RemoveBins(InputWorkspace=self.output_wksp,OutputWorkspace= self.output_wksp,XMin= reducer.transmission_calculator.loq_removePromptPeakMin,XMax=
1144                       reducer.transmission_calculator.loq_removePromptPeakMax, Interpolation="Linear")
1145
1146        # Remove flat background
1147        TOF_start, TOF_end = reducer.inst.get_TOFs(normalization_spectrum)
1148
1149        if TOF_start and TOF_end:
1150            CalculateFlatBackground(InputWorkspace=self.output_wksp,OutputWorkspace= self.output_wksp, StartX=TOF_start, EndX=TOF_end, Mode='Mean')
1151
1152        #perform the same conversion on the monitor spectrum as was applied to the workspace but with a possibly different rebin
1153        if reducer.instrument.is_interpolating_norm():
1154            r_alg = 'InterpolatingRebin'
1155        else :
1156            r_alg = 'Rebin'
1157        reducer.to_wavelen.execute(reducer, self.output_wksp, bin_alg=r_alg)
1158
1159class TransmissionCalc(ReductionStep):
1160    """
1161        Calculates the proportion of neutrons that are transmitted through the sample
1162        as a function of wavelength. The results are stored as a workspace
1163    """
1164
1165    # Maps the different ways of specifying the fit (including the way it is specified in the GUI) to the form that can be sent to CalculateTransmission
1166    TRANS_FIT_OPTIONS = {
1167        'YLOG' : 'Log',
1168        'STRAIGHT' : 'Linear',
1169        'CLEAR' : 'Linear',
1170        # Add Mantid ones as well
1171        'LOGARITHMIC' : 'Log',
1172        'LOG' : 'Log',
1173        'LINEAR' : 'Linear',
1174        'LIN' : 'Linear',
1175        'OFF' : 'Linear',
1176        'POLYNOMIAL':'Polynomial'}
1177
1178    #map to restrict the possible values of _trans_type
1179    CAN_SAMPLE_SUFFIXES = {
1180        False : 'sample',
1181        True : 'can'}
1182
1183    DEFAULT_FIT = 'LOGARITHMIC'
1184
1185    def __init__(self, loader=None):
1186        super(TransmissionCalc, self).__init__()
1187        #set these variables to None, which means they haven't been set and defaults will be set further down
1188        self.fit_props = ['lambda_min', 'lambda_max', 'fit_method', 'order']
1189        self.fit_settings = dict()
1190        for prop in self.fit_props:
1191            self.fit_settings['both::'+prop] = None
1192        # this contains the spectrum number of the monitor that comes after the sample from which the transmission calculation is done
1193        self._trans_spec = None
1194        # use InterpolatingRebin
1195        self.interpolate = None
1196        # a custom transmission workspace, if we have this there is much less to do
1197        self.calculated_samp = ''
1198        self.calculated_can = None
1199        #the result of this calculation that will be used by CalculateNorm() and the ConvertToQ
1200        self.output_wksp = None
1201        # Use for removing LOQ prompt peak from monitors. Units of micro-seconds
1202        self.loq_removePromptPeakMin = 19000.0
1203        self.loq_removePromptPeakMax = 20500.0
1204
1205
1206    def set_trans_fit(self, fit_method, min_=None, max_=None, override=True, selector='both'):
1207        """
1208            Set how the transmission fraction fit is calculated, the range of wavelengths
1209            to use and the fit method
1210            @param min_: minimum wavelength to use
1211            @param max_: highest wavelength to use
1212            @param fit_method: the fit type to pass to CalculateTransmission ('Logarithmic' or 'Linear') or 'Off'
1213            @param override: if set to False this call won't override the settings set by a previous call (default True)
1214            @param selector: defines whether the given settings are valid for the SAMPLE, CAN or BOTH transmissions.
1215        """
1216        FITMETHOD = 'fit_method'
1217        LAMBDAMIN = 'lambda_min'
1218        LAMBDAMAX = 'lambda_max'
1219        ORDER = 'order'
1220        # processing the selector input
1221        select = selector.lower()
1222        if select not in ['both', 'can', 'sample']:
1223            _issueWarning('Invalid selector option ('+selector+'). Fit to transmission skipped')
1224            return
1225        select += "::"
1226
1227        if not override and self.fit_settings.has_key(select + FITMETHOD) and self.fit_settings[select + FITMETHOD]:
1228            #it was already configured and this request does not want to override
1229            return
1230
1231        if not fit_method:
1232            # there is no point continuing without a fit_method argument
1233            return
1234
1235        fit_method = fit_method.upper()
1236        if 'POLYNOMIAL' in fit_method:
1237            order_str = fit_method[10:]
1238            fit_method = 'POLYNOMIAL'
1239            self.fit_settings[select+ORDER] = int(order_str)
1240        if fit_method not in self.TRANS_FIT_OPTIONS.keys():
1241            _issueWarning('ISISReductionStep.Transmission: Invalid fit mode passed to TransFit, using default method (%s)' % self.DEFAULT_FIT)
1242            fit_method = self.DEFAULT_FIT
1243
1244        # get variables for this selector
1245        sel_settings = dict()
1246        for prop in self.fit_props:
1247            sel_settings[prop] = self.fit_settings[select+prop] if self.fit_settings.has_key(select+prop) else self.fit_settings['both::'+prop]
1248
1249        # copy fit_method
1250        sel_settings[FITMETHOD] = fit_method
1251
1252        if min_:
1253            sel_settings[LAMBDAMIN] = float(min_) if fit_method not in ['OFF', 'CLEAR'] else None
1254        if max_:
1255            sel_settings[LAMBDAMAX] = float(max_) if fit_method not in ['OFF', 'CLEAR'] else None
1256
1257        # apply the properties to self.fit_settings
1258        for prop in self.fit_props:
1259            self.fit_settings[select+prop] = sel_settings[prop]
1260
1261        # When 'both' is given, clear any settings specific to the individual selectors
1262        if select == 'both::':
1263            for selector_ in ['sample::','can::']:
1264                for prop_ in self.fit_props:
1265                    prop_name = selector_+prop_
1266                    if self.fit_settings.has_key(prop_name):
1267                        del self.fit_settings[prop_name]
1268
1269    def isSeparate(self):
1270        """ Returns true if the can or sample was given and false if just both was used"""
1271        return self.fit_settings.has_key('sample::fit_method') or self.fit_settings.has_key('can::fit_method')
1272
1273    def setup_wksp(self, inputWS, inst, wavbinning, pre_monitor, post_monitor):
1274        """
1275            Creates a new workspace by removing any background from the monitor spectra,
1276            converting units and re-binning. On LOQ the prompt peak region (by default
1277            19000-20500 micro-seconds) is removed with linear interpolation.
1278            This method doesn't affect self.
1279            @param inputWS: contains the monitor spectra
1280            @param inst: the selected instrument
1281            @param wavbinning: the re-bin string to use after the unit conversion
1282            @param pre_monitor: spectrum number of the incident monitor
1283            @param post_monitor: spectrum number of the transmission monitor
1284        """
1285        #the workspace is forked, below is its new name
1286        tmpWS = inputWS + '_tmp'
1287
1288        #exclude unused spectra because spectra that are sometimes empty and sometimes not can cause errors with interpolation
1289        spectrum1 = min(pre_monitor, post_monitor)
1290        spectrum2 = max(pre_monitor, post_monitor)
1291        CropWorkspace(InputWorkspace=inputWS,OutputWorkspace= tmpWS,
1292            StartWorkspaceIndex=self._get_index(spectrum1),
1293            EndWorkspaceIndex=self._get_index(spectrum2))
1294
1295        if inst.name() == 'LOQ':
1296            RemoveBins(InputWorkspace=tmpWS,OutputWorkspace= tmpWS,XMin= self.loq_removePromptPeakMin,XMax= self.loq_removePromptPeakMax,
1297                       Interpolation='Linear')
1298
1299        for spectra_number in [pre_monitor, post_monitor]:
1300            back_start, back_end = inst.get_TOFs(spectra_number)
1301            if back_start and back_end:
1302                index = spectra_number - spectrum1
1303                CalculateFlatBackground(InputWorkspace=tmpWS,OutputWorkspace= tmpWS, StartX=back_start, EndX=back_end,
1304                               WorkspaceIndexList=index, Mode='Mean')
1305
1306        ConvertUnits(InputWorkspace=tmpWS,OutputWorkspace= tmpWS,Target="Wavelength")
1307
1308        if self.interpolate:
1309            InterpolatingRebin(InputWorkspace=tmpWS,OutputWorkspace= tmpWS,Params= wavbinning)
1310        else :
1311            Rebin(InputWorkspace=tmpWS,OutputWorkspace= tmpWS,Params= wavbinning)
1312
1313        return tmpWS
1314
1315    def _get_index(self, number):
1316        """
1317            Converts spectrum numbers to indices using the simple (minus 1) relationship
1318            that is true for raw files
1319            @param number: a spectrum number
1320            @return: its index
1321        """
1322        return number - 1
1323
1324    def execute(self, reducer, workspace):
1325        """
1326            Reads in the different settings, without affecting self. Calculates
1327            or estimates the proportion of neutrons that are transmitted
1328            through the sample
1329        """
1330        self.output_wksp = None
1331        #look for run files that contain transmission data
1332        test1, test2 = self._get_run_wksps(reducer)
1333        if test1 or test2:
1334            #we can calculate the transmission from some experimental runs
1335            if self.calculated_samp:
1336                raise RuntimeError('Cannot use TransWorkspace() and TransmissionSample() together')
1337
1338            self.output_wksp = self.calculate(reducer)
1339        else:
1340            #a pre-calculated transmission workspace was supplied, use it
1341            if reducer.is_can():
1342                self.output_wksp = self.calculated_can
1343            else:
1344                self.output_wksp = self.calculated_samp
1345
1346    def _get_run_wksps(self, reducer):
1347        """
1348            Retrieves the names runs that contain the user specified for calculation
1349            of the transmission
1350            @return: post_sample pre_sample workspace names
1351        """
1352        return reducer.get_transmissions()
1353
1354    def calculate(self, reducer):
1355        LAMBDAMIN = 'lambda_min'
1356        LAMBDAMAX = 'lambda_max'
1357        FITMETHOD = 'fit_method'
1358        ORDER = 'order'
1359        #get the settings required to do the calculation
1360        trans_raw, direct_raw = self._get_run_wksps(reducer)
1361
1362        if not trans_raw:
1363            raise RuntimeError('Attempting transmission correction with no specified transmission %s file' % self.CAN_SAMPLE_SUFFIXES[reducer.is_can()])
1364        if not direct_raw:
1365            raise RuntimeError('Attempting transmission correction with no direct file')
1366
1367        select = 'can::' if reducer.is_can() else 'sample::'
1368
1369        # get variables for this selector
1370        sel_settings = dict()
1371        for prop in self.fit_props:
1372            sel_settings[prop] = self.fit_settings[select+prop] if self.fit_settings.has_key(select+prop) else self.fit_settings['both::'+prop]
1373
1374        if self._trans_spec:
1375            post_sample = self._trans_spec
1376        else:
1377            post_sample = reducer.instrument.default_trans_spec
1378
1379        pre_sample = reducer.instrument.incid_mon_4_trans_calc
1380
1381        use_instrum_default_range = reducer.full_trans_wav
1382
1383        #there are a number of settings and defaults that determine the wavelength to use, go through each in order of increasing precedence
1384        if use_instrum_default_range:
1385            translambda_min = reducer.instrument.WAV_RANGE_MIN
1386            translambda_max = reducer.instrument.WAV_RANGE_MAX
1387        else:
1388            if sel_settings[LAMBDAMIN]:
1389                translambda_min = sel_settings[LAMBDAMIN]
1390            else:
1391                translambda_min = reducer.to_wavelen.wav_low
1392            if sel_settings[LAMBDAMAX]:
1393                translambda_max = sel_settings[LAMBDAMAX]
1394            else:
1395                translambda_max = reducer.to_wavelen.wav_high
1396
1397        wavbin = str(translambda_min)
1398        wavbin +=','+str(reducer.to_wavelen.wav_step)
1399        wavbin +=','+str(translambda_max)
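        # illustrative only: with translambda_min=2.0, a wavelength step of 0.125 and
        # translambda_max=14.0 (example values) wavbin becomes '2.0,0.125,14.0'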
1400
1401        #set up the input workspaces
1402        trans_tmp_out = self.setup_wksp(trans_raw, reducer.instrument,
1403            wavbin, pre_sample, post_sample)
1404        direct_tmp_out = self.setup_wksp(direct_raw, reducer.instrument,
1405            wavbin, pre_sample, post_sample)
1406
1407        fittedtransws, unfittedtransws = self.get_wksp_names(
1408                    trans_raw, translambda_min, translambda_max, reducer)
1409
1410        # If no fitting is required just use linear and get unfitted data from CalculateTransmission algorithm
1411        options = dict()
1412        if sel_settings[FITMETHOD]:
1413            options['FitMethod'] = self.TRANS_FIT_OPTIONS[sel_settings[FITMETHOD]]
1414            if sel_settings[FITMETHOD] == "POLYNOMIAL":
1415                options['PolynomialOrder'] = sel_settings[ORDER]
1416        else:
1417            options['FitMethod'] = self.TRANS_FIT_OPTIONS[self.DEFAULT_FIT]
1418
1419        CalculateTransmission(SampleRunWorkspace=trans_tmp_out, DirectRunWorkspace=direct_tmp_out,
1420                              OutputWorkspace=fittedtransws, IncidentBeamMonitor=pre_sample,
1421                              TransmissionMonitor=post_sample,
1422                              RebinParams=reducer.to_wavelen.get_rebin(),
1423                              OutputUnfittedData=True, **options) # options FitMethod, PolynomialOrder if present
1424
1425        # Remove temporaries
1426        files2delete = [trans_tmp_out]
1427
1428        if direct_tmp_out != trans_tmp_out:
1429            files2delete.append(direct_tmp_out)
1430
1431        if sel_settings[FITMETHOD] in ['OFF', 'CLEAR']:
1432            result = unfittedtransws
1433            files2delete.append(fittedtransws)
1434        else:
1435            result = fittedtransws
1436
1437        reducer.deleteWorkspaces(files2delete)
1438
1439        return result
1440
1441    def get_trans_spec(self):
1442        return self._trans_spec
1443
1444    def set_trans_spec(self, value):
1445        """
1446            Sets the spectrum number of the transmission monitor that is after the
1447            sample; the new value must be convertible to an integer
1448        """
1449        self._trans_spec = int(value)
1450
1451    trans_spec = property(get_trans_spec, set_trans_spec, None, None)
1452
1453    def get_wksp_names(self, raw_name, lambda_min, lambda_max, reducer):
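        # illustrative example (run number and wavelengths assumed): for
        # raw_name='22048_trans_raw' and lambda 2.0 to 14.0 on a sample run this
        # returns '22048_trans_sample_2.0_14.0' and '22048_trans_sample_2.0_14.0_unfitted'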
1454        fitted_name = raw_name.split('_')[0] + '_trans_'
1455        fitted_name += self.CAN_SAMPLE_SUFFIXES[reducer.is_can()]
1456        fitted_name += '_'+str(lambda_min)+'_'+str(lambda_max)
1457
1458        unfitted = fitted_name + "_unfitted"
1459
1460        return fitted_name, unfitted
1461
1462    def _get_fit_property(self,selector, property_name):
1463        if self.fit_settings.has_key(selector+'::' + property_name):
1464            return self.fit_settings[selector+'::' + property_name]
1465        else:
1466            return self.fit_settings['both::'+property_name]
1467
1468
1469    def lambdaMin(self, selector):
1470        return self._get_fit_property(selector.lower(), 'lambda_min')
1471    def lambdaMax(self, selector):
1472        return self._get_fit_property(selector.lower(), 'lambda_max')
1473    def fitMethod(self, selector):
1474        """It will return LINEAR, LOGARITHM, POLYNOMIALx for x in 2,3,4,5"""
1475        resp = self._get_fit_property(selector.lower(), 'fit_method')
1476        if 'POLYNOMIAL' == resp:
1477            resp += str(self._get_fit_property(selector.lower(), 'order'))
1478        if resp in ['LIN','STRAIGHT']:
1479            resp = 'LINEAR'
1480        if resp in ['YLOG','LOG']:
1481            resp = 'LOGARITHMIC'
1482        return resp
1483
1484class AbsoluteUnitsISIS(ReductionStep):
1485    DEFAULT_SCALING = 100.0
1486    def __init__(self):
1487        # Scaling values [%]
1488        self.rescale= self.DEFAULT_SCALING
1489
1490    def execute(self, reducer, workspace):
1491        scalefactor = self.rescale
1492        # Data reduced with Mantid is a factor of ~pi higher than Colette.
1493        # For LOQ only, divide by this until we understand why.
1494        if reducer.instrument.name() == 'LOQ':
1495            rescaleToColette = math.pi
1496            scalefactor /= rescaleToColette
1497
1498        ws = mtd[workspace]
1499        ws *= scalefactor
1500
1501
1502class CalculateNormISIS(object):
1503    """
1504        Note this is not a reduction step, see CalculateNorm
1505
1506        Generates the normalization workspaces required by Q1D and Qxy from corrections
1507        produced by other, sometimes optional, reduction steps or from a specified
1508        workspace
1509    """
1510    TMP_ISIS_NAME = '__CalculateNormISIS_loaded_tmp'
1511    TMP_WORKSPACE_NAME = '__CalculateNorm_loaded_temp'
1512    WAVE_CORR_NAME = '__Q_WAVE_conversion_temp'
1513    PIXEL_CORR_NAME = '__Q_pixel_conversion_temp'
1514
1515    def  __init__(self, wavelength_deps=[]):
1516        super(CalculateNormISIS, self).__init__()
1517        self._wave_steps = wavelength_deps
1518        self._high_angle_pixel_file = ""
1519        self._low_angle_pixel_file = ""
1520        self._pixel_file = ""
1521
1522
1523    def setPixelCorrFile(self, filename, detector = ""):
1524        """
1525          For compatibility reasons this still uses self._pixel_file, but a
1526          pixel (flood) file is now needed for each detector, so an extra
1527          detector parameter is allowed.
1528
1529        """
1530        detector = detector.upper()
1531
1532        if detector in ("FRONT","HAB","FRONT-DETECTOR-BANK"):
1533            self._high_angle_pixel_file = filename
1534        if detector in ("REAR","MAIN","","MAIN-DETECTOR-BANK"):
1535            self._low_angle_pixel_file = filename
1536
1537    def getPixelCorrFile(self, detector ):
1538        """
1539          For compatibility reasons this still uses self._pixel_file, but a
1540          pixel (flood) file is now needed for each detector, so an extra
1541          detector parameter is allowed.
1542
1543        """
1544        detector = detector.upper()
1545        if detector in ("FRONT","HAB","FRONT-DETECTOR-BANK", "FRONT-DETECTOR"):
1546            return self._high_angle_pixel_file
1547        elif detector in ("REAR","MAIN","MAIN-DETECTOR-BANK","", "REAR-DETECTOR"):
1548            return self._low_angle_pixel_file
1549        else :
1550            sanslog.warning("Request for pixel correction file with unknown detector ("+ str(detector)+")")
1551            return self._pixel_file
1552
1553
1554    def _multiplyAll(self, wave_wksps, wksp2match):
1555        wave_adj = None
1556        for wksp in wave_wksps:
1557            #before the workspaces can be combined they all need to match
1558            RebinToWorkspace(WorkspaceToRebin=wksp, WorkspaceToMatch=wksp2match,
1559                             OutputWorkspace=self.TMP_WORKSPACE_NAME)
1560
1561            if not wave_adj:
1562                wave_adj = self.WAVE_CORR_NAME
1563                RenameWorkspace(InputWorkspace=self.TMP_WORKSPACE_NAME, OutputWorkspace=wave_adj)
1564            else:
1565                Multiply(LHSWorkspace=self.TMP_WORKSPACE_NAME, RHSWorkspace=wave_adj,
1566                         OutputWorkspace= wave_adj)
1567        return wave_adj
1568
1569    def _loadPixelCorrection(self):
1570        '''
1571        Reads in a pixel correction file if one has been specified.
1572
1573        @return the name of the workspace, else an empty string if there was no
1574                correction file.
1575        '''
1576        if self._pixel_file:
1577            LoadRKH(Filename=self._pixel_file,
1578                    OutputWorkspace=self.PIXEL_CORR_NAME,
1579                    FirstColumnValue="SpectrumNumber")
1580            return self.PIXEL_CORR_NAME
1581        return ''
1582
1583    def _is_point_data(self, wksp):
1584        """
1585            Tests if the workspace whose name is passed contains point or histogram data
1586            The test is whether the X and Y array lengths are equal (point data) or not (histogram)
1587            @param wksp: name of the workspace to test
1588            @return True for point data, False for histogram data
1589        """
1590        handle = mtd[wksp]
1591        if len(handle.readX(0)) == len(handle.readY(0)):
1592            return True
1593        else:
1594            return False
1595
1596    def calculate(self, reducer, wave_wks=None):
1597        """
1598            Multiplies all the wavelength scalings into one workspace and all the detector
1599            dependent scalings into another workspace that can be used by ConvertToQ
1600            @param reducer: settings used for this reduction
1601            @param wave_wks: additional wavelength dependent correction workspaces to include
1602        """
        # use a fresh list on each call; a shared mutable default argument would
        # accumulate workspace names between calls
        if wave_wks is None:
            wave_wks = []
1603        #use the instrument's correction file
1604        corr_file = reducer.instrument.cur_detector().correction_file
1605        if corr_file:
1606            LoadRKH(Filename=corr_file,OutputWorkspace= self.TMP_ISIS_NAME,FirstColumnValue= "Wavelength")
1607            wave_wks.append(self.TMP_ISIS_NAME)
1608
1609            if self._is_point_data(self.TMP_ISIS_NAME):
1610                ConvertToHistogram(InputWorkspace=self.TMP_ISIS_NAME,OutputWorkspace= self.TMP_ISIS_NAME)
1611        # redefine self._pixel_file for the current detector so it is picked up by _loadPixelCorrection
1612        detect_pixel_file = self.getPixelCorrFile(reducer.instrument.cur_detector().name())
1613        if (detect_pixel_file != ""):
1614            self._pixel_file = detect_pixel_file
1615
1616        for step in self._wave_steps:
1617            if step.output_wksp:
1618                wave_wks.append(step.output_wksp)
1619
1620        wave_adj = self._multiplyAll(wave_wks, reducer.output_wksp)
1621
1622        pixel_adj = self._loadPixelCorrection()
1623
1624        if pixel_adj:
1625            #remove all the pixels that are not present in the sample data (the other detector)
1626            reducer.instrument.cur_detector().crop_to_detector(pixel_adj, pixel_adj)
1627
1628        reducer.deleteWorkspaces([self.TMP_ISIS_NAME, self.TMP_WORKSPACE_NAME])
1629
1630        return wave_adj, pixel_adj
1631
1632class ConvertToQISIS(ReductionStep):
1633    """
1634    Runs the Q1D or Qxy algorithms to convert wavelength data into momentum transfer, Q
1635
1636    Currently, this allows the wide angle transmission correction.
1637    """
1638    # the list of possible Q conversion algorithms to use
1639    _OUTPUT_TYPES = {'1D' : 'Q1D',
1640                     '2D' : 'Qxy'}
1641    # defines if Q1D should correct for gravity by default
1642    _DEFAULT_GRAV = False
1643    def __init__(self, normalizations):
1644        """
1645            @param normalizations: CalculateNormISIS object holding the workspaces, ReductionSteps or files required for the optional normalization arguments
1646        """
1647        if not issubclass(normalizations.__class__,  CalculateNormISIS):
1648            raise RuntimeError('Error initializing ConvertToQ, invalid normalization object')
1649        #contains the optional normalization workspaces to pass to the Q algorithm
1650        self._norms = normalizations
1651
1652        #this should be set to 1D or 2D
1653        self._output_type = '1D'
1654        #the algorithm that corresponds to the above choice
1655        self._Q_alg = self._OUTPUT_TYPES[self._output_type]
1656        #if true gravity is taken into account in the Q1D calculation
1657        self._use_gravity = self._DEFAULT_GRAV
1658        #used to implement a default gravity setting that can be overwritten but does not itself overwrite an explicit setting
1659        self._grav_set = False
1660        #this should contain the rebin parameters
1661        self.binning = None
1662
1663        #The minimum distance in metres from the beam center at which all wavelengths are used in the calculation
1664        self.r_cut = 0.0
1665        #The shortest wavelength, in Angstrom, at which counts should be summed from all detector pixels
1666        self.w_cut = 0.0
1667        # Whether to output parts when running either Q1D2 or Qxy
1668        self.outputParts = False
1669
1670    def set_output_type(self, descript):
1671        """
1672            Requests the given output from the Q conversion, either 1D or 2D. For
1673            the 1D calculation it asks the reducer to keep a workspace for error
1674            estimates
1675            @param descript: 1D or 2D
1676        """
1677        self._Q_alg = self._OUTPUT_TYPES[descript]
1678        self._output_type = descript
1679
1680    def get_output_type(self):
1681        return self._output_type
1682
1683    output_type = property(get_output_type, set_output_type, None, None)
1684
1685    def get_gravity(self):
1686        return self._use_gravity
1687
1688    def set_gravity(self, flag, override=True):
1689        """
1690            Enable or disable including gravity when calculating Q
1691            @param flag: set to True to enable the gravity correction
1692            @param override: overwrite the setting from a previous call to this method (default is True)
1693        """
1694        if override:
1695            self._grav_set = True
1696
1697        if (not self._grav_set) or override:
1698            self._use_gravity = bool(flag)
1699        else:
1700            msg = "User file can't override previous gravity setting, gravity correction remains " + str(self._use_gravity)
1701            print msg
1702            sanslog.warning(msg)
1703
1704    def execute(self, reducer, workspace):
1705        """
1706        Calculate the normalization workspaces and then call the chosen Q conversion algorithm.
1707        """
1708        wavepixeladj = ""
1709        if (reducer.wide_angle_correction and reducer.transmission_calculator.output_wksp):
1710            #calculate the transmission wide angle correction
1711            _issueWarning("sans wide angle correction execution")
1712            SANSWideAngleCorrection(SampleData=workspace,
1713                                     TransmissionData = reducer.transmission_calculator.output_wksp,
1714                                     OutputWorkspace='transmissionWorkspace')
1715            wavepixeladj = 'transmissionWorkspace'
1716        #create normalization workspaces
1717        if self._norms:
1718            # pass a fresh list so wavelength workspace names do not accumulate between calls (mutable default argument behaviour seen in the SANS2DWaveloops system test)
1719            wave_adj, pixel_adj = self._norms.calculate(reducer, [])
1720        else:
1721            raise RuntimeError('Normalization workspaces must be created by CalculateNorm() and passed to this step')
1722
1723        try:
1724            if self._Q_alg == 'Q1D':
1725                Q1D(DetBankWorkspace=workspace,OutputWorkspace= workspace, OutputBinning=self.binning, WavelengthAdj=wave_adj, PixelAdj=pixel_adj, AccountForGravity=self._use_gravity, RadiusCut=self.r_cut*1000.0, WaveCut=self.w_cut, OutputParts=self.outputParts, WavePixelAdj = wavepixeladj)
1726            elif self._Q_alg == 'Qxy':
1727                Qxy(InputWorkspace=workspace,OutputWorkspace= workspace,MaxQxy= reducer.QXY2,DeltaQ= reducer.DQXY, WavelengthAdj=wave_adj, PixelAdj=pixel_adj, AccountForGravity=self._use_gravity, RadiusCut=self.r_cut*1000.0, WaveCut=self.w_cut, OutputParts=self.outputParts)
1728                ReplaceSpecialValues(InputWorkspace=workspace,OutputWorkspace= workspace, NaNValue="0", InfinityValue="0")
1729            else:
1730                raise NotImplementedError('The type of Q reduction has not been set, e.g. 1D or 2D')
1731        except:
1732            #when we are all up to Python 2.5 replace the duplicated code below with one finally:
1733            reducer.deleteWorkspaces([wave_adj, pixel_adj, wavepixeladj])
1734            raise
1735
1736        reducer.deleteWorkspaces([wave_adj, pixel_adj, wavepixeladj])
1737
1738class UnitsConvert(ReductionStep):
1739    """
1740        Executes ConvertUnits and then Rebin on the same workspace. If no re-bin limits are
1741        set for the x-values of the final workspace the range of the first spectrum is used.
1742    """
1743    def __init__(self, units, rebin = 'Rebin', bin_alg=None):
1744        """
1745            @param bin_alg: the name of the Mantid re-bin algorithm to use
1746        """
1747        super(UnitsConvert, self).__init__()
1748        self._units = units
1749        self.wav_low = None
1750        self.wav_high = None
1751        self.wav_step = None
1752        # currently there are two possible re-bin algorithms, the other is InterpolatingRebin
1753        self.rebin_alg = rebin
1754        self._bin_alg = bin_alg
1755
1756    #TODO: consider how to remove the extra argument after workspace
1757    def execute(self, reducer, workspace, bin_alg=None):
1758        """
1759            Runs the ConvertUnits() and a rebin algorithm on the specified
1760            workspace
1761            @param reducer: the reducer (not used by this step)
1762            @param workspace: the name of the workspace to convert
1763            @param bin_alg: the name of the re-bin algorithm to use, overriding the default
1764        """
1765        ConvertUnits(InputWorkspace=workspace,OutputWorkspace= workspace,Target= self._units)
1766
1767        low_wav = self.wav_low
1768        high_wav = self.wav_high
1769
1770        if low_wav is None and high_wav is None:
1771            low_wav = min(mtd[workspace].readX(0))
1772            high_wav = max(mtd[workspace].readX(0))
1773
1774
1775        if not bin_alg:
1776            bin_alg = self.rebin_alg
1777
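        # builds and evaluates a call such as
        #     Rebin(workspace, "2.0, 0.125, 14.0", OutputWorkspace=workspace)
        # where bin_alg is 'Rebin' or 'InterpolatingRebin' and the numbers shown
        # are illustrative only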
1778        rebin_com = bin_alg+'(workspace, "'+\
1779            self._get_rebin(low_wav, self.wav_step, high_wav)+'", OutputWorkspace=workspace)'
1780        eval(rebin_com)
1781
1782    def _get_rebin(self, low, step, high):
1783        """
1784            Convert the range limits and step into a form passable to re-bin
1785            @param low: first number in the Rebin string, the first bin boundary
1786            @param step: bin width
1787            @param high: high bin boundary
1788        """
1789        return str(low)+', ' + str(step) + ', ' + str(high)
1790
1791    def get_rebin(self):
1792        """
1793            Get the string that is passed as the "Params" property to Rebin
1794            @return the string that is passed to Rebin
1795        """
1796        return self._get_rebin(self.wav_low, self.wav_step, self.wav_high)
1797
1798    def set_rebin(self, w_low = None, w_step = None, w_high = None, override=True):
1799        """
1800            Set the parameters that are passed to Rebin
1801            @param w_low: first number in the Rebin string, the first bin boundary
1802            @param w_step: bin width
1803            @param w_high: high bin boundary
1804        """
1805        if not w_low is None:
1806            if self.wav_low is None or override:
1807                self.wav_low = float(w_low)
1808        if not w_step is None:
1809            if self.wav_step is None or override:
1810                self.wav_step = float(w_step)
1811        if not w_high is None:
1812            if self.wav_high is None or override:
1813                self.wav_high = float(w_high)
1814
1815    def get_range(self):
1816        """
1817            Get the values of the highest and lowest boundaries
1818            @return a string of the form 'low_high'
1819        """
1820        return str(self.wav_low)+'_'+str(self.wav_high)
1821
1822    def set_range(self, w_low = None, w_high = None):
1823        """
1824            Set the highest and lowest bin boundary values
1825            @param w_low: first number in the Rebin string, the first bin boundary
1826            @param w_high: high bin boundary
1827        """
1828        self.set_rebin(w_low, None, w_high)
1829
1830    def __str__(self):
1831        return '    Wavelength range: ' + self.get_rebin()
1832
1833class SliceEvent(ReductionStep):
1834
1835    def __init__(self):
1836        super(SliceEvent, self).__init__()
1837        self.scale = 1
1838
1839    def execute(self, reducer, workspace):
1840        ws_pointer = getWorkspaceReference(workspace)
1841
1842        # this step applies only to event workspaces
1843        if not isinstance(ws_pointer, IEventWorkspace):
1844            self.scale = 1
1845            return
1846        start, stop = reducer.getCurrSliceLimit()
1847
1848        _monitor = reducer.get_sample().get_monitor()
1849
1850        if "events.binning" in reducer.settings:
1851            binning = reducer.settings["events.binning"]
1852        else:
1853            binning = ""
1854        hist, (tot_t, tot_c, part_t, part_c) = slice2histogram(ws_pointer, start, stop, _monitor, binning)
1855        self.scale = part_c / tot_c
1856
1857
1858class BaseBeamFinder(ReductionStep):
1859    """
1860        Base beam finder. Holds the position of the beam center
1861        and the algorithm for calculating it using the beam's
1862        displacement under gravity
1863    """
1864    def __init__(self, beam_center_x=None, beam_center_y=None):
1865        """
1866            Initial beam center is given in pixel coordinates
1867            @param beam_center_x: pixel position of the beam in x
1868            @param beam_center_y: pixel position of the beam in y
1869        """
1870        super(BaseBeamFinder, self).__init__()
1871        self._beam_center_x = beam_center_x
1872        self._beam_center_y = beam_center_y
1873        self._beam_radius = None
1874        self._datafile = None
1875        self._persistent = True
1876
1877    def set_persistent(self, persistent):
1878        self._persistent = persistent
1879        return self
1880
1881    def get_beam_center(self):
1882        """
1883            Returns the beam center
1884        """
1885        return [self._beam_center_x, self._beam_center_y]
1886
1887    def execute(self, reducer, workspace=None):
1888        return "Beam Center set at: %s %s" % (str(self._beam_center_x), str(self._beam_center_y))
1889
1890
1891class UserFile(ReductionStep):
1892    """
1893        Reads an ISIS SANS mask file of the format described here mantidproject.org/SANS_User_File_Commands
1894    """
1895    def __init__(self, file=None):
1896        """
1897            Optionally sets the location of the file and initialises the reader
1898        """
1899        super(UserFile, self).__init__()
1900        self.filename = file
1901        self._incid_monitor_lckd = False
1902        self.executed = False
1903
1904        # maps the keywords that the file can contain to the functions that read them
1905        self.key_functions = {
1906            'BACK/' : self._read_back_line,
1907            'TRANS/': self._read_trans_line,
1908            'MON/' : self._read_mon_line,
1909            'TUBECALIBFILE': self._read_calibfile_line,
1910            'MASKFILE': self._read_maskfile_line}
1911
1912    def __deepcopy__(self, memo):
1913        """Called when a deep copy is requested
1914        """
1915        fresh = UserFile(self.filename)
1916        fresh._incid_monitor_lckd = self._incid_monitor_lckd
1917        fresh.executed = self.executed
1918        fresh.key_functions = {
1919            'BACK/' : fresh._read_back_line,
1920            'TRANS/': fresh._read_trans_line,
1921            'MON/' : fresh._read_mon_line,
1922            'TUBECALIBFILE': self._read_calibfile_line,
1923            'MASKFILE': self._read_maskfile_line
1924            }
1925        return fresh
1926
1927    def execute(self, reducer, workspace=None):
1928        if self.filename is None:
1929            raise AttributeError('The user file must be set, use the function MaskFile')
1930        user_file = self.filename
1931
1932        #Check that the file exists.
1933        if not os.path.isfile(user_file):
1934            user_file = os.path.join(reducer.user_file_path, self.filename)
1935            if not os.path.isfile(user_file):
1936                user_file = FileFinder.getFullPath(self.filename)
1937                if not os.path.isfile(user_file):
1938                    raise RuntimeError, "Cannot read mask. File path '%s' does not exist or is not in the user path." % self.filename
1939
1940        reducer.user_file_path = os.path.dirname(user_file)
1941        # Re-initializes default values
1942        self._initialize_mask(reducer)
1943        reducer.prep_normalize.setPixelCorrFile('','REAR')
1944        reducer.prep_normalize.setPixelCorrFile('','FRONT')
1945
1946        file_handle = open(user_file, 'r')
1947        for line in file_handle:
1948            try:
1949                self.read_line(line, reducer)
1950            except:
1951                # Close the handle
1952                file_handle.close()
1953                raise RuntimeError("%s was specified in the MASK file (%s) but the file cannot be found." % (line.rsplit()[0], file_handle.name))
1954
1955        # Check if one of the efficiency files hasn't been set and assume the other is to be used
1956        reducer.instrument.copy_correction_files()
1957
1958        self.executed = True
1959        return self.executed
1960
1961    def read_line(self, line, reducer):
1962        # This is so that I can be sure all EOL characters have been removed
1963        line = line.lstrip().rstrip()
1964        upper_line = line.upper()
1965
1966        #check for a recognised command
1967        for keyword in self.key_functions.keys():
1968            if upper_line.startswith(keyword):
1969                #remove the keyword as it has already been parsed
1970                params = line[len(keyword):]
1971                #call the handling function for that keyword
1972                error = self.key_functions[keyword](params, reducer)
1973
1974                if error:
1975                    _issueWarning(error+line)
1976
1977                return
1978
1979        if upper_line.startswith('L/'):
1980            self.readLimitValues(line, reducer)
1981
1982        elif upper_line.startswith('MASK'):
1983            if len(upper_line[5:].strip().split()) == 4:
1984                _issueInfo('Box masks can only be defined using the V and H syntax, not "mask x1 y1 x2 y2"')
1985            else:
1986                reducer.mask.parse_instruction(reducer.instrument.name(), upper_line)
1987
1988        elif upper_line.startswith('SET CENTRE'):
1989            # SET CENTRE accepts the following properties:
1990            # SET CENTRE X Y
1991            # SET CENTRE/MAIN X Y
1992            # SET CENTRE/HAB X Y
1993            main_str_pos = upper_line.find('MAIN')
1994            hab_str_pos = upper_line.find('HAB')
1995            x_pos = 0.0
1996            y_pos = 0.0
1997            if (main_str_pos > 0):
1998                values = upper_line[main_str_pos+5:].split() # remove the SET CENTRE/MAIN
1999                x_pos = float(values[0])/1000.0
2000                y_pos = float(values[1])/1000.0
2001            elif (hab_str_pos > 0):
2002                values = upper_line[hab_str_pos+4:].split() # remove the SET CENTRE/HAB
2003                print ' convert values ',values
2004                x_pos = float(values[0])/1000.0
2005                y_pos = float(values[1])/1000.0
2006            else:
2007                values = upper_line.split()
2008                x_pos = float(values[2])/1000.0
2009                y_pos = float(values[3])/1000.0
2010            if (hab_str_pos > 0):
2011                print 'Front values = ',x_pos,y_pos
2012                reducer.set_beam_finder(BaseBeamFinder(x_pos, y_pos),'front')
2013            else:
2014                reducer.set_beam_finder(BaseBeamFinder(x_pos, y_pos))
2015
2016        elif upper_line.startswith('SET SCALES'):
2017            values = upper_line.split()
2018            reducer._corr_and_scale.rescale = \
2019                float(values[2])*reducer._corr_and_scale.DEFAULT_SCALING
2020
2021        elif upper_line.startswith('SAMPLE/OFFSET'):
2022            values = upper_line.split()
2023            reducer.instrument.set_sample_offset(values[1])
2024
2025        elif upper_line.startswith('DET/'):
2026            det_specif = upper_line[4:]
2027            if det_specif.startswith('CORR'):
2028                self._readDetectorCorrections(upper_line[8:], reducer)
2029            elif det_specif.startswith('RESCALE') or det_specif.startswith('SHIFT'):
2030                self._readFrontRescaleShiftSetup(det_specif, reducer)
2031            else:
2032                # for /DET/FRONT and /DET/REAR commands
2033                reducer.instrument.setDetector(det_specif)
2034
2035        elif upper_line.startswith('GRAVITY'):
2036            flag = upper_line[8:].strip()
2037            if flag == 'ON' or flag == 'TRUE':
2038                reducer.to_Q.set_gravity(True, override=False)
2039            elif flag == 'OFF' or flag == 'FALSE':
2040                reducer.to_Q.set_gravity(False, override=False)
2041            else:
2042                _issueWarning("Gravity flag incorrectly specified, disabling gravity correction")
2043                reducer.to_Q.set_gravity(False, override=False)
2044
2045        elif upper_line.startswith('FIT/TRANS/'):
2046            #check if the selector is passed:
2047            selector = 'BOTH'
2048            if 'SAMPLE' in upper_line:
2049                selector = 'SAMPLE'
2050                params = upper_line[17:].split() # remove FIT/TRANS/SAMPLE/
2051            elif 'CAN' in upper_line:
2052                selector = 'CAN'
2053                params = upper_line[14:].split() # remove FIT/TRANS/CAN/
2054            else:
2055                params = upper_line[10:].split() # remove FIT/TRANS/
2056
2057            try:
2058                nparams = len(params)
2059                if nparams == 1:
2060                    fit_type = params[0]
2061                    lambdamin = lambdamax = None
2062                elif nparams == 3:
2063                    fit_type, lambdamin, lambdamax = params
2064                else:
2065                    raise ValueError('Incorrect number of parameters')
2066                reducer.transmission_calculator.set_trans_fit(min_=lambdamin, max_=lambdamax,
2067                                                              fit_method=fit_type, override=True,
2068                                                              selector=selector)
2069            except:
2070                _issueWarning('Incorrectly formatted FIT/TRANS line, %s, line ignored' % upper_line)
2071
2072        elif upper_line.startswith('FIT/MONITOR'):
2073            params = upper_line.split()
2074            nparams = len(params)
2075            if nparams == 3 and reducer.instrument.name() == 'LOQ':
2076                reducer.transmission_calculator.loq_removePromptPeakMin = float(params[1])
2077                reducer.transmission_calculator.loq_removePromptPeakMax = float(params[2])
2078            else:
2079                if reducer.instrument.name() == 'LOQ':
2080                    _issueWarning('Incorrectly formatted FIT/MONITOR line, %s, line ignored' % upper_line)
2081                else:
2082                    _issueWarning('FIT/MONITOR line specific to LOQ instrument. Line ignored')
2083
2084        elif upper_line == 'SANS2D' or upper_line == 'LOQ':
2085            self._check_instrument(upper_line, reducer)
2086
2087        elif upper_line.startswith('PRINT '):
2088            _issueInfo(upper_line[6:])
2089
2090        elif upper_line.startswith('SAMPLE/PATH'):
2091            flag = upper_line[12:].strip()
2092            if flag == 'ON' or flag == 'TRUE':
2093                reducer.wide_angle_correction = True
2094            else:
2095                reducer.wide_angle_correction = False
2096
2097        elif line.startswith('!') or not line:
2098            # this is a comment or empty line, these are allowed
2099            pass
2100
2101        else:
2102            _issueWarning('Unrecognized line in user file: %s, ignoring' % upper_line)
2103
2104    def _initialize_mask(self, reducer):
2105        self._restore_defaults(reducer)
2106
2107        reducer.CENT_FIND_RMIN = None
2108        reducer.CENT_FIND_RMAX = None
2109
2110        reducer.QXY = None
2111        reducer.DQY = None
2112        reducer.to_Q.r_cut = 0
2113        reducer.to_Q.w_cut = 0
2114
2115        reducer._corr_and_scale.rescale = 100.0
2116
2117    # Read a limit line of a mask file
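    # Illustrative examples only (all values assumed):
    #     L/WAV 2.0 14.0 0.125/LIN   wavelength range and binning
    #     L/Q .02 1.0 .08/LOG        Q binning passed to the Q conversion step
    #     L/Q/RCUT 200               radius cut in mm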
2118    def readLimitValues(self, limit_line, reducer):
2119        limits = limit_line.split('L/')
2120        if len(limits) != 2:
2121            _issueWarning("Incorrectly formatted limit line ignored \"" + limit_line + "\"")
2122            return
2123        limits = limits[1]
2124        limit_type = ''
2125
2126        if limits.startswith('SP '):
2127            # We don't use the L/SP line
2128            _issueWarning("L/SP lines are ignored")
2129            return
2130
2131        if limits.upper().startswith('Q/RCUT'):
2132            limits = limits.upper().split('RCUT')
2133            if len(limits) != 2:
2134                _issueWarning("Badly formed L/Q/RCUT line")
2135            else:
2136                # When read from the user file the unit is mm but it is stored here in metres
2137                reducer.to_Q.r_cut = float(limits[1]) / 1000.0
2138            return
2139        if limits.upper().startswith('Q/WCUT'):
2140            limits = limits.upper().split('WCUT')
2141            if len(limits) != 2:
2142                _issueWarning("Badly formed L/Q/WCUT line")
2143            else:
2144                reducer.to_Q.w_cut = float(limits[1])
2145            return
2146
2147        rebin_str = None
2148        if not ',' in limit_line:
2149            # Split with no arguments defaults to any whitespace character and in particular
2150            # multiple spaces are included
2151            elements = limits.split()
2152            if len(elements) == 4:
2153                limit_type, minval, maxval, step = elements[0], elements[1], elements[2], elements[3]
2154                step_details = step.split('/')
2155                if len(step_details) == 2:
2156                    step_size = step_details[0]
2157                    step_type = step_details[1]
2158                    if step_type.upper() == 'LOG':
2159                        step_type = '-'
2160                    else:
2161                        step_type = ''
2162                else:
2163                    step_size = step_details[0]
2164                    step_type = ''
2165            elif len(elements) == 3:
2166                limit_type, minval, maxval = elements[0], elements[1], elements[2]
2167            else:
2168                _issueWarning("Incorrectly formatted limit line ignored \"" + limit_line + "\"")
2169                return
2170        else:
2171            blocks = limits.split()
2172            limit_type = blocks[0].lstrip().rstrip()
2173            try:
2174                rebin_str = limits.split(limit_type)[1]
2175            except:
2176                _issueWarning("Incorrectly formatted limit line ignored \"" + limit_line + "\"")
2177                return
2178
2179            minval = maxval = step_type = step_size = None
2180
2181        if limit_type.upper() == 'WAV':
2182            if rebin_str:
2183                _issueWarning("General wave re-bin lines are not implemented, line ignored \"" + limit_line + "\"")
2184                return
2185            else:
2186                reducer.to_wavelen.set_rebin(
2187                        minval, step_type + step_size, maxval, override=False)
2188        elif limit_type.upper() == 'Q':
2189            if rebin_str:
2190                reducer.to_Q.binning = rebin_str
2191            else:
2192                reducer.to_Q.binning = minval + "," + step_type + step_size + "," + maxval
2193        elif limit_type.upper() == 'QXY':
2194            reducer.QXY2 = float(maxval)
2195            reducer.DQXY = float(step_type + step_size)
2196        elif limit_type.upper() == 'R':
2197            reducer.mask.set_radi(minval, maxval)
2198            reducer.CENT_FIND_RMIN = float(minval)/1000.
2199            reducer.CENT_FIND_RMAX = float(maxval)/1000.
2200        elif (limit_type.upper() == 'PHI') or (limit_type.upper() == 'PHI/NOMIRROR'):
2201            mirror = limit_type.upper() != 'PHI/NOMIRROR'
2202            if maxval.endswith('/NOMIRROR'):
2203                maxval = maxval.split('/NOMIRROR')[0]
2204                mirror = False
2205            reducer.mask.set_phi_limit(
2206                float(minval), float(maxval), mirror, override=False)
2207        elif limit_type.upper() == 'EVENTSTIME':
2208            if rebin_str:
2209                reducer.settings["events.binning"] = rebin_str
2210            else:
2211                reducer.settings["events.binning"] = minval + "," + step_type + step_size + "," + maxval
2212        else:
2213            _issueWarning('Error in user file after L/, "%s" is not a valid limit line' % limit_type.upper())
2214
2215    def _read_mon_line(self, details, reducer):
2216
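        # Illustrative examples only (spectrum numbers and file name assumed):
        #     MON/SPECTRUM=2/INTERPOLATE   incident monitor spectrum 2, interpolated
        #     MON/TRANS/SPECTRUM=4         transmission monitor spectrum 4
        #     MON/DIRECT=flat_file.dat     detector correction file for both detectors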
2217        #MON/LENGTH, MON/SPECTRUM and MON/TRANS all accept the INTERPOLATE option
2218        interpolate = False
2219        interPlace = details.upper().find('/INTERPOLATE')
2220        if interPlace != -1:
2221            interpolate = True
2222            details = details[0:interPlace]
2223
2224        if details.upper().startswith('SPECTRUM'):
2225            reducer.set_monitor_spectrum(
2226                int(details.split('=')[1]), interpolate, override=False)
2227            self._incid_monitor_lckd = True
2228
2229        elif details.upper().startswith('LENGTH'):
2230            details = details.split('=')[1]
2231            options = details.split()
2232            spectrum = int(options[1])
2233#            reducer.instrument.monitor_zs[spectrum] = options[0]
2234
2235            #the settings here are overridden by MON/SPECTRUM
2236            if not self._incid_monitor_lckd:
2237                reducer.set_monitor_spectrum(
2238                    spectrum, interpolate, override=False)
2239
2240        elif details.upper().startswith('TRANS'):
2241            parts = details.split('=')
2242            if len(parts) < 2 or parts[0].upper() != 'TRANS/SPECTRUM' :
2243                return 'Unable to parse MON/TRANS line, needs MON/TRANS/SPECTRUM=... not: '
2244            reducer.set_trans_spectrum(int(parts[1]), interpolate, override=False)
2245
2246        elif 'DIRECT' in details.upper() or details.upper().startswith('FLAT'):
2247            parts = details.split("=")
2248            if len(parts) == 2:
2249                filepath = parts[1].rstrip()
2250                #for VMS compatibility ignore anything in "[]", those are normally VMS drive specifications
2251                if '[' in filepath:
2252                    idx = filepath.rfind(']')
2253                    filepath = filepath[idx + 1:]
2254                if not os.path.isabs(filepath):
2255                    filepath = os.path.join(reducer.user_file_path, filepath)
2256
2257                # If a filepath has been provided, then it must exist to continue.
2258                if filepath and not os.path.isfile(filepath):
2259                    raise RuntimeError("The following MON/DIRECT datafile does not exist: %s" % filepath)
2260
2261                type = parts[0]
2262                parts = type.split("/")
2263                if len(parts) == 1:
2264                    if parts[0].upper() == 'DIRECT':
2265                        reducer.instrument.cur_detector().correction_file \
2266                            = filepath
2267                        reducer.instrument.other_detector().correction_file \
2268                           = filepath
2269                    elif parts[0].upper() == 'HAB':
2270                        try:
2271                            reducer.instrument.getDetector('HAB').correction_file \
2272                                = filepath
2273                        except AttributeError:
2274                            raise AttributeError('Detector HAB does not exist for the current instrument, set the instrument to LOQ first')
2275                    elif parts[0].upper() == 'FLAT':
2276                        reducer.prep_normalize.setPixelCorrFile(filepath,'REAR')
2277                    else:
2278                        pass
2279                elif len(parts) == 2:
2280                    detname = parts[1]
2281                    if detname.upper() == 'REAR':
2282                        if parts[0].upper() == "FLAT":
2283                            reducer.prep_normalize.setPixelCorrFile(filepath,'REAR')
2284                        else:
2285                            reducer.instrument.getDetector('REAR').correction_file \
2286                                = filepath
2287                    elif detname.upper() == 'FRONT' or detname.upper() == 'HAB':
2288                        if parts[0].upper() == "FLAT":
2289                            reducer.prep_normalize.setPixelCorrFile(filepath,'FRONT')
2290                        else:
2291                            reducer.instrument.getDetector('FRONT').correction_file \
2292                                = filepath
2293                    else:
2294                        return 'Incorrect detector specified for efficiency file: '
2295                else:
2296                    return 'Unable to parse monitor line: '
2297            else:
2298                return 'Unable to parse monitor line: '
2299        else:
2300            return 'Unable to parse monitor line: '
2301
2302    def _readDetectorCorrections(self, details, reducer):
2303        """
2304            Handle user commands of the type DET/CORR/FRONT/RADIUS x
2305            @param details: the contents of the line after DET/CORR
2306            @param reducer: the object that contains all the settings
2307        """
2308        if details[0]=='/':
2309            details = details.lstrip('/')
2310        values = details.split()
2311        if '/' in values[0]:
2312            # assume notation is e.g. FRONT/RADIUS x
2313            values2 = values[0].split('/')
2314            det_name = values2[0]
2315            det_axis = values2[1]
2316            shift = float(values[1])
2317        else:
2318            # assume notation is e.g. FRONT RADIUS x
2319            det_name = values[0]
2320            det_axis = values[1]
2321            shift = float(values[2])
2322
2323        detector = reducer.instrument.getDetector(det_name)
2324        if det_axis == 'X':
2325            detector.x_corr = shift
2326        elif det_axis == 'Y':
2327            detector.y_corr = shift
2328        elif det_axis == 'Z':
2329            detector.z_corr = shift
2330        elif det_axis == 'ROT':
2331            detector.rot_corr = shift
2332        # 21/3/12 RKH added 2 variables
2333        elif det_axis == 'RADIUS':
2334            detector.radius_corr = shift
2335        elif det_axis == 'SIDE':
2336            detector.side_corr = shift
2337        # 11/11/14 RKH add 2 more variables
2338        elif det_axis == 'XTILT':
2339            detector.x_tilt = shift
2340        elif det_axis == 'YTILT':
2341            detector.y_tilt = shift
2342        else:
2343            raise NotImplementedError('Detector correction on "'+det_axis+'" is not supported')
2344
2345    def _readFrontRescaleShiftSetup(self, details, reducer):
2346        """
2347            Handle user commands of the type DET/RESCALE r and DET/RESCALE/FIT q1 q2
2348            which are used to scale+constant background shift front detector so that
2349            data from the front and rear detectors can be merged
2350
2351            @param details: the contents of the line after DET/
2352            @param reducer: the object that contains all the settings
2353        """
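        # Illustrative examples only (numbers assumed):
        #     DET/RESCALE 1.05           fixed scale factor for the front detector
        #     DET/RESCALE/FIT 0.05 0.2   fit the scale over the Q range 0.05 to 0.2
        #     DET/SHIFT/FIT              fit the shift over the default Q range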
2354        values = details.split()
2355        rAnds = reducer.instrument.getDetector('FRONT').rescaleAndShift
2356        rAnds.qRangeUserSelected = False
2357        if details.startswith('RESCALE'):
2358            if 'FIT' in details:
2359                if len(values) == 1:
2360                    rAnds.fitScale = True
2361                elif len(values) == 3:
2362                    rAnds.fitScale = True
2363                    rAnds.qMin = float(values[1])
2364                    rAnds.qMax = float(values[2])
2365                    rAnds.qRangeUserSelected = True
2366                else:
2367                    _issueWarning("Command: \"DET/" + details + "\" not valid. Expected format is /DET/RESCALE/FIT [q1 q2]")
2368            else:
2369                if len(values) == 2:
2370                    rAnds.scale = float(values[1])
2371                else:
2372                    _issueWarning("Command: \"DET/" + details + "\" not valid. Expected format is /DET/RESCALE r")
2373        elif details.startswith('SHIFT'):
2374            if 'FIT' in details:
2375                if len(values) == 1:
2376                    rAnds.fitShift = True
2377                elif len(values) == 3:
2378                    rAnds.fitShift = True
2379                    rAnds.qMin = float(values[1])
2380                    rAnds.qMax = float(values[2])
2381                    rAnds.qRangeUserSelected = True
2382                else:
2383                    _issueWarning("Command: \"DET/" + details + "\" not valid. Expected format is /DET/SHIFT/FIT [q1 q2]")
2384            else:
2385                if len(values) == 2:
2386                    rAnds.shift = float(values[1])
2387                else:
2388                    _issueWarning("Command: \"DET/" + details + "\" not valid. Expected format is /DET/SHIFT z")
2389
2390    def _read_back_line(self, arguments, reducer):
2391        """
2392            Parses a line from the settings file
2393            @param arguments: the contents of the line after the first keyword
2394            @param reducer: the object that contains all the settings
2395            @return any errors encountered or ''
2396        """
2397        #a list of the key words this function can read and the functions it calls in response
2398        keys = ['MON/TIMES', 'M']
2399        funcs = [self._read_default_back_region, self._read_back_region]
2400        return self._process(keys, funcs, arguments, reducer)
2401
2402    def _read_back_region(self, arguments, reducer):
2403        """
2404            Parses a line of the form BACK/M... to set the TOF
2405            window for the background region of a specific monitor, or
2406            to turn it off if the line is of the form BACK/M3/OFF.
2407            @param arguments: the contents of the line after the first keyword
2408            @param reducer: the object that contains all the settings
2409            @return any errors encountered or ''
2410        """
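        # Illustrative examples only (monitor numbers and times assumed):
        #     BACK/M2/TIMES 85000 98000   background TOF window for monitor 2
        #     BACK/M3/OFF                 no background subtraction for monitor 3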
2411        try:
2412            # first check whether to turn off the background for a specific
2413            # monitor, e.g. 'BACK/M2/OFF'.
2414            parts = arguments.split('/OFF')
2415            if len(parts) == 2:
2416                # set specific monitor to OFF
2417                reducer.inst.set_TOFs(None, None, int(parts[0]))
2418                return ''
2419
2420            # assume a line of the form BACK/M1/TIMES
2421            parts = arguments.split('/TIMES')
2422            if len(parts) == 2:
2423                times = parts[1].split()
2424            else:
2425                #try the other possibility, something like, BACK/M2
2426                parts =  arguments.split()
2427                times = [parts[1], parts[2]]
2428
2429            monitor = int(parts[0])
2430
2431            # parse the words after 'TIMES' as first the start time and then the end
2432            reducer.inst.set_TOFs(int(times[0]), int(times[1]), monitor)
2433            return ''
2434        except Exception, reason:
2435            # return a description of any problems and then continue to read the next line
2436            return str(reason) + ' on line: '
2437
2438    def _read_default_back_region(self, arguments, reducer):
2439        """
2440            Parses a line of the form BACK/MON/TIMES and sets the default TOF
2441            window for the background region assumed for the current instrument
2442            @param arguments: the contents of the line after the first keyword
2443            @param reducer: the object that contains all the settings
2444            @return any errors encountered or ''
2445        """
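        # e.g. a user-file line 'BACK/MON/TIMES 1000 2000' reaches this method
        # as '1000 2000' (the keyword is stripped by _process) and sets the
        # default background window to 1000-2000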
2446        times = arguments.split()
2447        if len(times) == 2:
2448            reducer.inst.set_TOFs(int(times[0]), int(times[1]))
2449        else:
2450            reducer.inst.set_TOFs(None, None)
2451            return 'Only monitor-specific backgrounds will be applied; no default was set because of an incorrectly formatted background line:'
2452
2453    def _read_trans_line(self, arguments, reducer):
2454        #a list of the key words this function can read and the functions it calls in response
2455        keys = ['TRANSPEC', 'SAMPLEWS', 'CANWS']
2456        funcs = [self._read_transpec, self._read_trans_samplews, self._read_trans_canws]
2457        return self._process(keys, funcs, arguments, reducer)
2458
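    # _process is the dispatcher shared by the _read_*_line methods above: each
    # keyword in 'keys' maps to the handler at the same index in 'funcs', and
    # the keyword itself is stripped before the handler is called.  For example,
    # with keys=['TRANSPEC', 'SAMPLEWS', 'CANWS'] a line 'TRANSPEC=4' is routed
    # to _read_transpec with params '=4'.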
2459    def _process(self, keys, funcs, params, reducer):
2460        #go through the list of recognised commands
2461        for i in range(0, len(keys)):
2462            if params.startswith(keys[i]):
2463                #remove the keyword as it has already been parsed
2464                params = params[len(keys[i]):]
2465                #call the handling function for that keyword returning any error
2466                return funcs[i](params, reducer)
2467        return 'Unrecognised line: '
2468
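    # Parses what is left of a TRANS/TRANSPEC line after _process has removed
    # the 'TRANSPEC' keyword, e.g. '=4' or '=4/SHIFT=-70' (values here are only
    # illustrative); the optional SHIFT term becomes the monitor-4 offset.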
2469    def _read_transpec(self, arguments, reducer):
2470        arguments = arguments.split('/')
2471
2472        #check if there is an optional shift specification
2473        if len(arguments) == 2:
2474            #deal with the shift specification first
2475            shift = arguments[1]
2476            terms = shift.split('=')
2477            if len(terms) < 2:
2478                return 'Bad TRANS/TRANSPEC= / line: '
2479            reducer.instrument.monitor_4_offset = float(terms[1])
2480
2481        #now remove any shift specification and parse the first argument
2482        arguments = arguments[0]
2483        arguments = arguments.split('=')
2484        if len(arguments) == 1:
2485            raise RuntimeError('An "=" is required after TRANSPEC')
2486
2487        reducer.transmission_calculator.trans_spec = int(arguments[1])
2488
2489    def _read_trans_samplews(self, arguments, reducer):
2490        if arguments.find('=') > -1:
2491            arguments = arguments.split('=')
2492        else:
2493            arguments = arguments.split()
2494
2495        if len(arguments) != 2:
2496            return 'Unrecognised line: '
2497
2498        reducer.transmission_calculator.calculated_samp = arguments[1]
2499
2500    def _read_trans_canws(self, arguments, reducer):
2501        if arguments.find('=') > -1:
2502            arguments = arguments.split('=')
2503        else:
2504            arguments = arguments.split()
2505
2506        if len(arguments) != 2:
2507            return 'Unrecognised line: '
2508
2509        reducer.transmission_calculator.calculated_can = arguments[1]
2510
2511    def _check_instrument(self, inst_name, reducer):
2512        if reducer.instrument is None:
2513            raise RuntimeError('Use SANS2D() or LOQ() to set the instrument before Maskfile()')
2514        if not inst_name == reducer.instrument.name():
2515            raise RuntimeError('User settings file not compatible with the selected instrument '+reducer.instrument.name())
2516
2517    def _restore_defaults(self, reducer):
2518        reducer.mask.parse_instruction(reducer.instrument.name(), 'MASK/CLEAR')
2519        reducer.mask.parse_instruction(reducer.instrument.name(), 'MASK/CLEAR/TIME')
2520
2521        reducer.CENT_FIND_RMIN = reducer.CENT_FIND_RMAX = None
2522        reducer.QXY = None
2523        reducer.DQY = None
2524
2525        reducer.to_Q.binning = None
2526
2527        # Scaling values
2528        reducer._corr_and_scale.rescale = 100.  # percent
2529
2530        reducer.inst.reset_TOFs()
2531
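    # Handles a user-file line such as 'TUBECALIBFILE = TubeCalibration.nxs'
    # (file name purely illustrative): the calibration workspace is loaded and
    # attached to the instrument via setCalibrationWorkspace.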
2532    def _read_calibfile_line(self, arguments, reducer):
2533        # remove the equals sign from the beginning and any whitespace around it.
2534        parts = re.split("\s?=\s?", arguments)
2535        if len(parts) != 2:
2536            return "Invalid input for TUBECALIBFILE" + str(arguments)+ ". Expected TUBECALIBFILE = file_path"
2537        path2file = parts[1]
2538
2539        try:
2540            file_path, suggested_name = getFileAndName(path2file)
2541            __calibrationWs = Load(file_path, OutputWorkspace=suggested_name)
2542            reducer.instrument.setCalibrationWorkspace(__calibrationWs)
2543        except:
2544            # If we throw a runtime here, then we cannot execute 'Load Data'.
2545            raise RuntimeError("Invalid input for tube calibration file (" + path2file + " ).\n" \
2546            "Please do not run a reduction as it will not successfully complete.\n")
2547
2548    def _read_maskfile_line(self, line, reducer):
2549        try:
2550            _, value = re.split("\s?=\s?", line)
2551        except ValueError:
2552            return "Invalid input: \"%s\".  Expected \"MASKFILE = path to file\"." % line
2553
2554        reducer.settings["MaskFiles"] = value
2555
2556class GetOutputName(ReductionStep):
2557    def __init__(self):
2558        """
2559            Generates the name of the output workspace
2560        """
2561        super(GetOutputName, self).__init__()
2562        self.name_holder = ['problem_setting_name']
2563
2564    def execute(self, reducer, workspace=None):
2565        """
2566            Generates the name of the sample workspace and changes the
2567            loaded workspace to that.
2568            @param reducer the reducer object that called this step
2569            @param workspace un-used
2570        """
2571        reducer.output_wksp = reducer.get_out_ws_name()
2572
2573class ReplaceErrors(ReductionStep):
2574    def __init__(self):
2575        super(ReplaceErrors, self).__init__()
2576        self.name = None
2577
2578    def execute(self, reducer, workspace):
2579        ReplaceSpecialValues(InputWorkspace=workspace, OutputWorkspace=workspace, NaNValue="0", InfinityValue="0")
2580
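# Pads the numeric prefix of a run identifier with zeros to 'field_width'
# characters, for example (illustrative values):
#   _padRunNumber('1234', 8)     -> ('00001234', '1234')
#   _padRunNumber('1234-add', 8) -> ('00001234-add', '1234')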
2581def _padRunNumber(run_no, field_width):
2582    nchars = len(run_no)
2583    digit_end = 0
2584    for i in range(0, nchars):
2585        if run_no[i].isdigit():
2586            digit_end += 1
2587        else:
2588            break
2589
2590    if digit_end == nchars:
2591        filebase = run_no.rjust(field_width, '0')
2592        return filebase, run_no
2593    else:
2594        filebase = run_no[:digit_end].rjust(field_width, '0')
2595        return filebase + run_no[digit_end:], run_no[:digit_end]
2596
2597
2598class StripEndNans(ReductionStep):
2599    # ISIS only
2600    def __init__(self):
2601        super(StripEndNans, self).__init__()
2602
2603    def _isNan(self, val):
2604        """
2605            Could be replaced by math.isnan from Python 2.6 onwards
2606            @param val: float to check
2607        """
2608        if val != val:
2609            return True
2610        else:
2611            return False
2612
2613    def execute(self, reducer, workspace):
2614        """
2615            Strips leading and trailing NaN values from the workspace
2616            @param reducer: unused
2617            @param workspace: the workspace to convert
2618        """
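        # e.g. a 1D workspace with y = [nan, nan, 0.5, 0.7, nan] is cropped so
        # that only the two bins containing 0.5 and 0.7 remain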
2619        result_ws = mtd[workspace]
2620        if result_ws.getNumberHistograms() != 1:
2621            #Stripping NaNs is only possible on 1D workspaces
2622            return
2623
2624        y_vals = result_ws.readY(0)
2625        length = len(y_vals)
2626        # Find the first non-NaN value
2627        start = 0
2628        for i in range(0, length):
2629            if not self._isNan(y_vals[i]):
2630                start = i
2631                break
2632        # Now find the last non-NaN value
2633        stop = 0
2634        length -= 1
2635        for j in range(length, 0,-1):
2636            if not self._isNan(y_vals[j]):
2637                stop = j
2638                break
2639        # Find the appropriate X values and call CropWorkspace
2640        x_vals = result_ws.readX(0)
2641        startX = x_vals[start]
2642        # Make sure XMax is just past the bin edge so that the last good bin is kept
2643        endX = 1.001*x_vals[stop + 1]
2644        CropWorkspace(InputWorkspace=workspace, OutputWorkspace=workspace, XMin=startX, XMax=endX)
2645
2646
2647class GetSampleGeom(ReductionStep):
2648    """
2649        Loads, stores, retrieves, etc. data about the geometry of the sample
2650        On initialisation this class will return default geometry values (compatible with the Colette software)
2651        There are functions to override these settings
2652        On execute if there is geometry information in the workspace this will override any unset attributes
2653
2654        ISIS only
2655        ORNL only divides by thickness, in the absolute scaling step
2656
2657    """
2658    # IDs for each shape as used by the Colette software
2659    _shape_ids = {1 : 'cylinder-axis-up',
2660                  2 : 'cuboid',
2661                  3 : 'cylinder-axis-along'}
2662    _default_shape = 'cylinder-axis-along'
2663
2664    def __init__(self):
2665        super(GetSampleGeom, self).__init__()
2666
2667        # string specifies the sample's shape
2668        self._shape = None
2669        # sample's width
2670        self._width = None
2671        self._thickness = None
2672        self._height = None
2673
2674        self._use_wksp_shape = True
2675        self._use_wksp_width = True
2676        self._use_wksp_thickness = True
2677        self._use_wksp_height = True
2678
2679    def _get_default(self, attrib):
2680        if attrib == 'shape':
2681            return self._default_shape
2682        elif attrib == 'width' or attrib == 'thickness' or attrib == 'height':
2683            return 1.0
2684
2685    def set_shape(self, new_shape):
2686        """
2687            Sets the sample's shape from a string or an ID. If an ID is not
2688            in the list of allowed values the shape is set to the default;
2689            shape strings that are not in the list are not checked
2690        """
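        # accepts either a Colette ID, e.g. set_shape(2) selects 'cuboid', or a
        # shape name string which is passed through unchecked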
2691        try:
2692            # deal with ID numbers as arguments
2693            new_shape = self._shape_ids[int(new_shape)]
2694        except ValueError:
2695            # means that we weren't passed an ID number, the code below treats it as a shape name
2696            pass
2697        except KeyError:
2698            _issueWarning("Warning: Invalid geometry type for sample: " + str(new_shape) + ". Setting default to " + self._default_shape)
2699            new_shape = self._default_shape
2700
2701        self._shape = new_shape
2702        self._use_wksp_shape = False
2703
2704        #check that the dimensions that we have make sense for our new shape
2705        if self._width:
2706            self.width = self._width
2707        if self._thickness:
2708            self.thickness = self._thickness
2709
2710    def get_shape(self):
2711        if self._shape is None:
2712            return self._get_default('shape')
2713        else:
2714            return self._shape
2715
2716    def set_width(self, width):
2717        self._width = float(width)
2718        self._use_wksp_width = False
2719        # For a disk the height=width
2720        if self._shape and self._shape.startswith('cylinder'):
2721            self._height = self._width
2722            self._use_wksp_height = False
2723
2724    def get_width(self):
2725        self.raise_if_zero(self._width, "width")
2726        if self._width is None:
2727            return self._get_default('width')
2728        else:
2729            return self._width
2730
2731    def set_height(self, height):
2732        self._height = float(height)
2733        self._use_wksp_height = False
2734
2735        # For a cylinder the height=width
2736        if (self._shape is not None) and (self._shape.startswith('cylinder')):
2737            self._width = self._height
2738            self._use_wksp_width = False
2739
2740    def get_height(self):
2741        self.raise_if_zero(self._height, "height")
2742        if self._height is None:
2743            return self._get_default('height')
2744        else:
2745            return self._height
2746
2747    def set_thickness(self, thickness):
2748        """
2749            Simply sets the variable _thickness to the value passed
2750        """
2751        #as only cuboids use the thickness the warning below may be informative
2752        #if (not self._shape is None) and (not self._shape == 'cuboid'):
2753        #    mantid.sendLogMessage('::SANS::Warning: Can\'t set thickness for shape "'+self._shape+'"')
2754        self._thickness = float(thickness)
2755        self._use_wksp_thickness = False
2756
2757    def get_thickness(self):
2758        self.raise_if_zero(self._thickness, "thickness")
2759        if self._thickness is None:
2760            return self._get_default('thickness')
2761        else:
2762            return self._thickness
2763
2764    def raise_if_zero(self, value, name):
2765        if value == 0.0:
2766            message = "Please set the sample geometry %s so that it is not zero."
2767            raise RuntimeError(message % name)
2768
2769    shape = property(get_shape, set_shape, None, None)
2770    width = property(get_width, set_width, None, None)
2771    height = property(get_height, set_height, None, None)
2772    thickness = property(get_thickness, set_thickness, None, None)
2773
2774    def execute(self, reducer, workspace):
2775        """
2776            Reads the geometry information stored in the workspace
2777            but doesn't replace values that have been previously set
2778        """
2779        wksp = mtd[workspace]
2780        if isinstance(wksp, WorkspaceGroup):
2781            wksp = wksp[0]
2782        sample_details = wksp.sample()
2783
2784        if self._use_wksp_shape:
2785            self.shape = sample_details.getGeometryFlag()
2786        if self._use_wksp_thickness:
2787            self.thickness = sample_details.getThickness()
2788        if self._use_wksp_width:
2789            self.width = sample_details.getWidth()
2790        if self._use_wksp_height:
2791            self.height = sample_details.getHeight()
2792
2793    def __str__(self):
2794        return '-- Sample Geometry --\n' + \
2795               '    Shape: ' + self.shape+'\n'+\
2796               '    Width: ' + str(self.width)+'\n'+\
2797               '    Height: ' + str(self.height)+'\n'+\
2798               '    Thickness: ' + str(self.thickness)+'\n'
2799
2800class SampleGeomCor(ReductionStep):
2801    """
2802        Correct the neutron count rates for the size of the sample
2803
2804        ISIS only
2805        ORNL only divides by thickness, in the absolute scaling step
2806
2807    """
2808    def __init__(self):
2809        self.volume = 1.0
2810
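    # Volume formulae used by calculate_volume below (dimensions are in
    # whatever units the geometry was supplied in):
    #   cylinder-axis-up:    V = pi * (width/2)**2 * height
    #   cuboid:              V = width * height * thickness
    #   cylinder-axis-along: V = pi * (width/2)**2 * thickness   (a disc)
    # e.g. a cuboid of width 2, height 3 and thickness 0.1 gives V = 0.6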
2811    def calculate_volume(self, reducer):
2812        geo = reducer.get_sample().geometry
2813        assert( issubclass(geo.__class__, GetSampleGeom))
2814
2815        try:
2816            if geo.shape == 'cylinder-axis-up':
2817                # Volume = circle area * height
2818                # Factor of four comes from radius = width/2
2819                volume = geo.height*math.pi
2820                volume *= math.pow(geo.width,2)/4.0
2821            elif geo.shape == 'cuboid':
2822                # Flat plate sample
2823                volume = geo.width
2824                volume *= geo.height*geo.thickness
2825            elif geo.shape == 'cylinder-axis-along':
2826                # Factor of four comes from radius = width/2
2827                # Disc - where height is not used
2828                volume = geo.thickness*math.pi
2829                volume *= math.pow(geo.width, 2)/4.0
2830            else:
2831                raise NotImplementedError('Shape "'+geo.shape+'" is not in the list of supported shapes')
2832        except TypeError:
2833            raise TypeError('Error calculating sample volume with width='+str(geo.width) + ' height='+str(geo.height) + ' and thickness='+str(geo.thickness))
2834
2835        return volume
2836
2837    def execute(self, reducer, workspace):
2838        """
2839            Divide the counts by the volume of the sample
2840        """
2841        if not reducer.is_can():
2842            # the volume is calculated from the sample geometry and may or may not be applied to the can as well.
2843            self.volume = self.calculate_volume(reducer)
2844
2845        ws = mtd[str(workspace)]
2846        ws /= self.volume