# Source code for pipeline.hsd.cli.gotasks.hsd_k2jycal

##################### generated by xml-casa (v2) from hsd_k2jycal.xml ###############
##################### add86d5640eafa47cc98be25a3d73854 ##############################
from __future__ import absolute_import
from casashell.private.stack_manip import find_local as __sf__
from casashell.private.stack_manip import find_frame as _find_frame
from casatools.typecheck import validator as _pc
from casatools.coercetype import coerce as _coerce
from pipeline.hsd.cli import hsd_k2jycal as _hsd_k2jycal_t
from collections import OrderedDict
import numpy
import sys
import os

import shutil

def static_var(varname, value):
    """Attach a "static" attribute to a decorated function.

    Used below to hang shared CASAshell state (e.g. ``casa_inp_go_state``)
    off of bound task methods such as ``set_global_defaults`` and ``tget``.

    Parameters
    ----------
    varname : str
        Name of the attribute to set on the decorated function.
    value : object
        Value stored under that attribute.

    Returns
    -------
    callable
        A decorator that sets ``func.<varname> = value`` and returns
        ``func`` unchanged.
    """
    def decorate(func):
        setattr(func, varname, value)
        return func
    return decorate
class _hsd_k2jycal:
    """
    hsd_k2jycal ---- Derive Kelvin to Jy calibration tables

    Derive the Kelvin to Jy calibration for list of MeasurementSets.

    results -- If pipeline mode is 'getinputs' then None is returned.
    Otherwise the results object for the pipeline task is returned.

    --------- parameter descriptions ---------------------------------------------

    dbservice     Whether or not accessing Jy/K DB to retrieve conversion factors.
    endpoint      Which endpoints to use for query
                  options: 'asdm', 'model-fit', 'interpolation'
    reffile       Path to a file containing Jy/K factors for science data, which
                  must be provided by associating calibrator reduction or the
                  observatory measurements. Jy/K factor must take into account all
                  efficiencies, i.e., it must be a direct conversion factor from
                  Ta* to Jy. The file must be in either MS-based or session-based
                  format. The MS-based format must be in an CSV format with five
                  fields: MS name, antenna name, spectral window id, polarization
                  string, and Jy/K conversion factor. Example for the file is as
                  follows:

                      MS,Antenna,Spwid,Polarization,Factor
                      uid___A002_X316307_X6f.ms,CM03,5,XX,10.0
                      uid___A002_X316307_X6f.ms,CM03,5,YY,12.0
                      uid___A002_X316307_X6f.ms,PM04,5,XX,2.0
                      uid___A002_X316307_X6f.ms,PM04,5,YY,5.0

                  The first line in the above example is a header which may or
                  may not exist. Example for the session-based format is as
                  follows:

                      #OUSID=XXXXXX
                      #OBJECT=Uranus
                      #FLUXJY=yy,zz,aa
                      #FLUXFREQ=YY,ZZ,AA
                      #sessionID,ObservationStartDate(UTC),ObservationEndDate(UTC),Antenna,BandCenter(MHz),BandWidth(MHz),POL,Factor
                      1,2011-11-11 01:00:00,2011-11-11 01:30:00,CM02,86243.0,500.0,I,10.0
                      1,2011-11-11 01:00:00,2011-11-11 01:30:00,CM02,86243.0,1000.0,I,30.0
                      1,2011-11-11 01:00:00,2011-11-11 01:30:00,CM03,86243.0,500.0,I,50.0
                      1,2011-11-11 01:00:00,2011-11-11 01:30:00,CM03,86243.0,1000.0,I,70.0
                      1,2011-11-11 01:00:00,2011-11-11 01:30:00,ANONYMOUS,86243.0,500.0,I,30.0
                      1,2011-11-11 01:00:00,2011-11-11 01:30:00,ANONYMOUS,86243.0,1000.0,I,50.0
                      2,2011-11-13 01:45:00,2011-11-13 02:15:00,PM04,86243.0,500.0,I,90.0
                      2,2011-11-13 01:45:00,2011-11-13 02:15:00,PM04,86243.0,1000.0,I,110.0
                      2,2011-11-13 01:45:00,2011-11-13 02:15:00,ANONYMOUS,86243.0,500.0,I,90.0
                      2,2011-11-13 01:45:00,2011-11-13 02:15:00,ANONYMOUS,86243.0,1000.0,I,110.0

                  The line starting with '#' indicates a meta data section and
                  header. The header must exist. The factor to apply is
                  identified by matching the session ID, antenna name, frequency
                  and polarization of data in each line of the file. Note the
                  observation date is supplementary information and not used for
                  the matching so far. The lines whose antenna name is
                  'ANONYMOUS' are used when there is no measurement for specific
                  antenna in the session. In the above example, if science
                  observation of session 1 contains the antenna PM04, Jy/K
                  factor for ANONYMOUS antenna will be applied since there is no
                  measurement for PM04 in session 1. If no file name is
                  specified or specified file doesn't exist, all Jy/K factors
                  are set to 1.0.
                  example: reffile='', reffile='working/jyperk.csv'
    pipelinemode  The pipeline operating mode. In 'automatic' mode the pipeline
                  determines the values of all context defined pipeline inputs
                  automatically. In interactive mode the user can set the
                  pipeline context defined parameters manually. In 'getinputs'
                  mode the user can check the settings of all pipeline
                  parameters without running the task.
    infiles       List of input MeasurementSets.
                  example: vis='ngc5921.ms'
    caltable      Name of output gain calibration tables.
                  example: caltable='ngc5921.gcal'
    dryrun        Run the commands (True) or generate the commands to be run but
                  do not execute (False).
    acceptresults Add the results of the task to the pipeline context (True) or
                  reject them (False).

    --------- examples -----------------------------------------------------------

    1. Compute the Kelvin to Jy calibration tables for a list of MeasurementSets:

       hsd_k2jycal()
    """

    _info_group_ = """pipeline"""
    _info_desc_ = """Derive Kelvin to Jy calibration tables"""

    # Parameter validation schema consumed by casatools' typecheck validator:
    # declared type, optional coercion function(s), and optional allowed values.
    __schema = {'dbservice': {'type': 'cBool'},
                'endpoint': {'type': 'cStr', 'coerce': _coerce.to_str, 'allowed': [ 'asdm', 'model-fit', 'interpolation' ]},
                'reffile': {'type': 'cStr', 'coerce': _coerce.to_str},
                'pipelinemode': {'type': 'cStr', 'coerce': _coerce.to_str, 'allowed': [ 'automatic', 'interactive', 'getinputs' ]},
                'infiles': {'type': 'cStrVec', 'coerce': [_coerce.to_list,_coerce.to_strvec]},
                'caltable': {'type': 'cStrVec', 'coerce': [_coerce.to_list,_coerce.to_strvec]},
                'dryrun': {'type': 'cBool'},
                'acceptresults': {'type': 'cBool'}}

    def __init__(self):
        # Optional replacement output streams (None means fall back to
        # sys.stdout/sys.stderr) and the lazily-resolved CASAshell frame.
        self.__stdout = None
        self.__stderr = None
        self.__root_frame_ = None

    def __globals_(self):
        # Locate and cache the CASAshell global frame, where inp/go-style
        # parameter values live as plain global variables.
        if self.__root_frame_ is None:
            self.__root_frame_ = _find_frame( )
            assert self.__root_frame_ is not None, "could not find CASAshell global frame"
        return self.__root_frame_

    def __to_string_(self,value):
        # Render a value for inp() display; strings are shown quoted.
        if type(value) is str:
            return "'%s'" % value
        else:
            return str(value)

    def __validate_(self,doc,schema):
        # Delegate single-parameter validation to the casatools validator.
        return _pc.validate(doc,schema)

    def __do_inp_output(self,param_prefix,description_str,formatting_chars):
        # Emit one inp() line: the parameter prefix followed by the description,
        # word-wrapped to the terminal width with continuation lines aligned
        # under a '#' column.  ``formatting_chars`` is the count of invisible
        # ANSI escape characters in ``param_prefix`` to exclude from width math.
        # NOTE(review): relies on self.term_width, which is only set by inp();
        # calling the *_inp helpers directly first would raise AttributeError.
        out = self.__stdout or sys.stdout
        description = description_str.split( )
        prefix_width = 23 + 16 + 4
        output = [ ]
        addon = ''
        first_addon = True
        while len(description) > 0:
            ## starting a new line.....................................................
            if len(output) == 0:
                ## for first line add parameter information............................
                if len(param_prefix)-formatting_chars > prefix_width - 1:
                    output.append(param_prefix)
                    continue
                addon = param_prefix + ' #'
                first_addon = True
                addon_formatting = formatting_chars
            else:
                ## for subsequent lines space over prefix width........................
                addon = (' ' * prefix_width) + '#'
                first_addon = False
                addon_formatting = 0
            ## if first word of description puts us over the screen width, bail........
            if len(addon + description[0]) - addon_formatting + 1 > self.term_width:
                ## if we're doing the first line make sure it's output.................
                if first_addon: output.append(addon)
                break
            while len(description) > 0:
                ## if the next description word puts us over break for the next line...
                if len(addon + description[0]) - addon_formatting + 1 > self.term_width:
                    break
                addon = addon + ' ' + description[0]
                description.pop(0)
            output.append(addon)
        out.write('\n'.join(output) + '\n')

    #--------- return nonsubparam values ----------------------------------------------
    # Each __<param> getter returns the value from the supplied namespace dict
    # (CASAshell globals or an accumulating parameter dict) when present,
    # otherwise the hard-coded task default; __<param>_dflt returns that default.
    def __dbservice_dflt( self, glb ):
        return False

    def __dbservice( self, glb ):
        if 'dbservice' in glb: return glb['dbservice']
        return False

    def __reffile_dflt( self, glb ):
        return 'jyperk.csv'

    def __reffile( self, glb ):
        if 'reffile' in glb: return glb['reffile']
        return 'jyperk.csv'

    def __pipelinemode_dflt( self, glb ):
        return 'automatic'

    def __pipelinemode( self, glb ):
        if 'pipelinemode' in glb: return glb['pipelinemode']
        return 'automatic'

    #--------- return inp/go default --------------------------------------------------
    # Sub-parameter defaults: a non-None return means the sub-parameter is
    # active (shown by inp()) given the current pipelinemode/dbservice setting.
    def __dryrun_dflt( self, glb ):
        if self.__pipelinemode( glb ) == "interactive": return bool(False)
        return None

    def __acceptresults_dflt( self, glb ):
        if self.__pipelinemode( glb ) == "interactive": return bool(True)
        return None

    def __endpoint_dflt( self, glb ):
        # endpoint only applies when the Jy/K DB service is enabled.
        if self.__dbservice( glb ) == bool(True): return "asdm"
        return None

    def __caltable_dflt( self, glb ):
        if self.__pipelinemode( glb ) == "interactive": return []
        if self.__pipelinemode( glb ) == "getinputs": return []
        return None

    def __infiles_dflt( self, glb ):
        if self.__pipelinemode( glb ) == "interactive": return []
        return None

    #--------- return subparam values -------------------------------------------------
    # Resolution order: explicit value in the namespace dict, then the
    # mode-dependent default, then a hard-coded fallback.
    def __endpoint( self, glb ):
        if 'endpoint' in glb: return glb['endpoint']
        dflt = self.__endpoint_dflt( glb )
        if dflt is not None: return dflt
        return 'asdm'

    def __infiles( self, glb ):
        if 'infiles' in glb: return glb['infiles']
        dflt = self.__infiles_dflt( glb )
        if dflt is not None: return dflt
        return [ ]

    def __caltable( self, glb ):
        if 'caltable' in glb: return glb['caltable']
        dflt = self.__caltable_dflt( glb )
        if dflt is not None: return dflt
        return [ ]

    def __dryrun( self, glb ):
        if 'dryrun' in glb: return glb['dryrun']
        dflt = self.__dryrun_dflt( glb )
        if dflt is not None: return dflt
        return False

    def __acceptresults( self, glb ):
        if 'acceptresults' in glb: return glb['acceptresults']
        dflt = self.__acceptresults_dflt( glb )
        if dflt is not None: return dflt
        return True

    #--------- subparam inp output ----------------------------------------------------
    # One display helper per parameter: fetch the current value, validate it
    # against the schema (invalid values are wrapped in red ANSI escapes), and
    # hand a formatted prefix to __do_inp_output.  Sub-parameters are printed
    # only when their *_dflt function reports them active.
    def __dbservice_inp(self):
        description = 'Access Jy/K DB or not'
        value = self.__dbservice( self.__globals_( ) )
        (pre,post) = ('','') if self.__validate_({'dbservice': value},{'dbservice': self.__schema['dbservice']}) else ('\x1B[91m','\x1B[0m')
        self.__do_inp_output('\x1B[1m\x1B[47m%-16.16s =\x1B[0m %s%-23s%s' % ('dbservice',pre,self.__to_string_(value),post),description,13+len(pre)+len(post))

    def __endpoint_inp(self):
        if self.__endpoint_dflt( self.__globals_( ) ) is not None:
            description = 'Endpoint type (asdm, model-fit, interpolation)'
            value = self.__endpoint( self.__globals_( ) )
            (pre,post) = ('','') if self.__validate_({'endpoint': value},{'endpoint': self.__schema['endpoint']}) else ('\x1B[91m','\x1B[0m')
            self.__do_inp_output(' \x1B[92m%-13.13s =\x1B[0m %s%-23s%s' % ('endpoint',pre,self.__to_string_(value),post),description,9+len(pre)+len(post))

    def __reffile_inp(self):
        description = 'File of Jy/K conversion factor'
        value = self.__reffile( self.__globals_( ) )
        (pre,post) = ('','') if self.__validate_({'reffile': value},{'reffile': self.__schema['reffile']}) else ('\x1B[91m','\x1B[0m')
        self.__do_inp_output('%-16.16s = %s%-23s%s' % ('reffile',pre,self.__to_string_(value),post),description,0+len(pre)+len(post))

    def __pipelinemode_inp(self):
        description = 'The pipeline operations mode'
        value = self.__pipelinemode( self.__globals_( ) )
        (pre,post) = ('','') if self.__validate_({'pipelinemode': value},{'pipelinemode': self.__schema['pipelinemode']}) else ('\x1B[91m','\x1B[0m')
        self.__do_inp_output('\x1B[1m\x1B[47m%-16.16s =\x1B[0m %s%-23s%s' % ('pipelinemode',pre,self.__to_string_(value),post),description,13+len(pre)+len(post))

    def __infiles_inp(self):
        if self.__infiles_dflt( self.__globals_( ) ) is not None:
            description = 'List of input MeasurementSets'
            value = self.__infiles( self.__globals_( ) )
            (pre,post) = ('','') if self.__validate_({'infiles': value},{'infiles': self.__schema['infiles']}) else ('\x1B[91m','\x1B[0m')
            self.__do_inp_output(' \x1B[92m%-13.13s =\x1B[0m %s%-23s%s' % ('infiles',pre,self.__to_string_(value),post),description,9+len(pre)+len(post))

    def __caltable_inp(self):
        if self.__caltable_dflt( self.__globals_( ) ) is not None:
            description = 'List of output caltable(s)'
            value = self.__caltable( self.__globals_( ) )
            (pre,post) = ('','') if self.__validate_({'caltable': value},{'caltable': self.__schema['caltable']}) else ('\x1B[91m','\x1B[0m')
            self.__do_inp_output(' \x1B[92m%-13.13s =\x1B[0m %s%-23s%s' % ('caltable',pre,self.__to_string_(value),post),description,9+len(pre)+len(post))

    def __dryrun_inp(self):
        if self.__dryrun_dflt( self.__globals_( ) ) is not None:
            description = 'Run the task (False) or list commands (True)'
            value = self.__dryrun( self.__globals_( ) )
            (pre,post) = ('','') if self.__validate_({'dryrun': value},{'dryrun': self.__schema['dryrun']}) else ('\x1B[91m','\x1B[0m')
            self.__do_inp_output(' \x1B[92m%-13.13s =\x1B[0m %s%-23s%s' % ('dryrun',pre,self.__to_string_(value),post),description,9+len(pre)+len(post))

    def __acceptresults_inp(self):
        if self.__acceptresults_dflt( self.__globals_( ) ) is not None:
            description = 'Automatically apply results to context'
            value = self.__acceptresults( self.__globals_( ) )
            (pre,post) = ('','') if self.__validate_({'acceptresults': value},{'acceptresults': self.__schema['acceptresults']}) else ('\x1B[91m','\x1B[0m')
            self.__do_inp_output(' \x1B[92m%-13.13s =\x1B[0m %s%-23s%s' % ('acceptresults',pre,self.__to_string_(value),post),description,9+len(pre)+len(post))

    #--------- global default implementation-------------------------------------------
    @static_var('state', __sf__('casa_inp_go_state'))
    def set_global_defaults(self):
        # Reset inp/go semantics: record this task as the last touched and
        # remove any of its parameters from the CASAshell global namespace so
        # the built-in defaults apply again.
        self.set_global_defaults.state['last'] = self
        glb = self.__globals_( )
        if 'dryrun' in glb: del glb['dryrun']
        if 'pipelinemode' in glb: del glb['pipelinemode']
        if 'acceptresults' in glb: del glb['acceptresults']
        if 'endpoint' in glb: del glb['endpoint']
        if 'caltable' in glb: del glb['caltable']
        if 'dbservice' in glb: del glb['dbservice']
        if 'reffile' in glb: del glb['reffile']
        if 'infiles' in glb: del glb['infiles']

    #--------- inp function -----------------------------------------------------------
    def inp(self):
        # Print the current settings of all parameters, wrapped to the
        # terminal width captured here (used by __do_inp_output).
        print("# hsd_k2jycal -- %s" % self._info_desc_)
        self.term_width, self.term_height = shutil.get_terminal_size(fallback=(80, 24))
        self.__dbservice_inp( )
        self.__endpoint_inp( )
        self.__reffile_inp( )
        self.__pipelinemode_inp( )
        self.__infiles_inp( )
        self.__caltable_inp( )
        self.__dryrun_inp( )
        self.__acceptresults_inp( )

    #--------- tget function ----------------------------------------------------------
    @static_var('state', __sf__('casa_inp_go_state'))
    def tget(self,file=None):
        # Restore saved parameter values: execute a previously written
        # "hsd_k2jycal.last" file (or an explicit ``file``) and copy its
        # globals into the CASAshell frame; fall back to defaults otherwise.
        from casashell.private.stack_manip import find_frame
        from runpy import run_path
        filename = None
        if file is None:
            if os.path.isfile("hsd_k2jycal.last"):
                filename = "hsd_k2jycal.last"
        elif isinstance(file, str):
            if os.path.isfile(file):
                filename = file

        if filename is not None:
            glob = find_frame( )
            newglob = run_path( filename, init_globals={ } )
            for i in newglob:
                glob[i] = newglob[i]
            self.tget.state['last'] = self
        else:
            print("could not find last file, setting defaults instead...")
            self.set_global_defaults( )

    def __call__( self, dbservice=None, endpoint=None, reffile=None, pipelinemode=None, infiles=None, caltable=None, dryrun=None, acceptresults=None ):
        # Invoke the underlying pipeline task.  Two modes: if any argument was
        # supplied explicitly, resolve the remaining parameters from the
        # supplied values plus defaults (python style); otherwise pull every
        # value from the CASAshell globals (inp/go semantics).  The resolved
        # parameters are journaled to 'hsd_k2jycal.pre', renamed to
        # 'hsd_k2jycal.last' after the attempt.
        def noobj(s):
            # repr() of unserializable objects looks like '<...>'; journal as None.
            if s.startswith('<') and s.endswith('>'):
                return "None"
            else:
                return s
        _prefile = os.path.realpath('hsd_k2jycal.pre')
        _postfile = os.path.realpath('hsd_k2jycal.last')
        _return_result_ = None
        _arguments = [dbservice,endpoint,reffile,pipelinemode,infiles,caltable,dryrun,acceptresults]
        _invocation_parameters = OrderedDict( )
        if any(map(lambda x: x is not None,_arguments)):
            # invoke python style
            # set the non sub-parameters that are not None
            local_global = { }
            if dbservice is not None: local_global['dbservice'] = dbservice
            if reffile is not None: local_global['reffile'] = reffile
            if pipelinemode is not None: local_global['pipelinemode'] = pipelinemode

            # the invocation parameters for the non-subparameters can now be set - this picks up those defaults
            _invocation_parameters['dbservice'] = self.__dbservice( local_global )
            _invocation_parameters['reffile'] = self.__reffile( local_global )
            _invocation_parameters['pipelinemode'] = self.__pipelinemode( local_global )

            # the sub-parameters can then be set. Use the supplied value if not None, else the function, which gets the appropriate default
            _invocation_parameters['endpoint'] = self.__endpoint( _invocation_parameters ) if endpoint is None else endpoint
            _invocation_parameters['infiles'] = self.__infiles( _invocation_parameters ) if infiles is None else infiles
            _invocation_parameters['caltable'] = self.__caltable( _invocation_parameters ) if caltable is None else caltable
            _invocation_parameters['dryrun'] = self.__dryrun( _invocation_parameters ) if dryrun is None else dryrun
            _invocation_parameters['acceptresults'] = self.__acceptresults( _invocation_parameters ) if acceptresults is None else acceptresults

        else:
            # invoke with inp/go semantics
            _invocation_parameters['dbservice'] = self.__dbservice( self.__globals_( ) )
            _invocation_parameters['endpoint'] = self.__endpoint( self.__globals_( ) )
            _invocation_parameters['reffile'] = self.__reffile( self.__globals_( ) )
            _invocation_parameters['pipelinemode'] = self.__pipelinemode( self.__globals_( ) )
            _invocation_parameters['infiles'] = self.__infiles( self.__globals_( ) )
            _invocation_parameters['caltable'] = self.__caltable( self.__globals_( ) )
            _invocation_parameters['dryrun'] = self.__dryrun( self.__globals_( ) )
            _invocation_parameters['acceptresults'] = self.__acceptresults( self.__globals_( ) )
        try:
            # Journal the invocation (best-effort: failures here must not
            # block the task run, hence the bare except).
            with open(_prefile,'w') as _f:
                for _i in _invocation_parameters:
                    _f.write("%-13s = %s\n" % (_i,noobj(repr(_invocation_parameters[_i]))))
                _f.write("#hsd_k2jycal( ")
                count = 0
                for _i in _invocation_parameters:
                    _f.write("%s=%s" % (_i,noobj(repr(_invocation_parameters[_i]))))
                    count += 1
                    if count < len(_invocation_parameters): _f.write(",")
                _f.write(" )\n")
        except: pass

        try:
            # Delegate to the actual pipeline task implementation.
            _return_result_ = _hsd_k2jycal_t( _invocation_parameters['dbservice'],_invocation_parameters['endpoint'],_invocation_parameters['reffile'],_invocation_parameters['pipelinemode'],_invocation_parameters['infiles'],_invocation_parameters['caltable'],_invocation_parameters['dryrun'],_invocation_parameters['acceptresults'] )
        except Exception as e:
            # Log the failure through casalog and signal it with False.
            from traceback import format_exc
            from casatasks import casalog
            casalog.origin('hsd_k2jycal')
            casalog.post("Exception Reported: Error in hsd_k2jycal: %s" % str(e),'SEVERE')
            casalog.post(format_exc( ))
            _return_result_ = False
        try:
            # Promote the journal to the '.last' file (best-effort).
            os.rename(_prefile,_postfile)
        except: pass
        return _return_result_

# Singleton task object exposed to CASAshell users.
hsd_k2jycal = _hsd_k2jycal( )