
#!/usr/bin/env python
#

"""
This file is part of ESA's XMM-Newton Scientific Analysis System (SAS).
SAS is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

SAS is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with SAS.  If not, see <http://www.gnu.org/licenses/>.


Name    = 'plotOMTimeSeries'
VERSION = '1.8';
author  = 'Jose Vicente Perea';
date    = '2021-03-10';


ChangeLog
=========

Version 1.8 - 2021-03-10 (JVP)
------------

+ Update Fast srcLists:
  Re-building the header to keep the original correct keyword comments

Version 1.7 - 2021-03-08 (JVP)
------------

+ X-range for the plots changed to the same values for all the plots
  of the same observation:
    Tstart = T0 from the first exposure
    Tstop  = Tn from the last exposure

Version 1.6 - 2021-03-03 (JVP)
------------

+ Identify the first timeseries file in time order and
  select its header for the final merged timeseries FITS file.
  In this way we keep the DATE-OBS (+ other keywords) from the first timeseries.

Version 1.5 - 2021-02-26 (JVP)
------------

+ Cross-matching loop: Set again search_ox = search_ox_orig to avoid exiting the loop if SRC_ID = -1
  Minor bug fix in return

Version 1.4 - 2021-02-19 (JVP)
------------

+ Version output string fixed

Version 1.3 - 2021-02-18 (JVP)
------------

+ X-range for the plots changed to the same values for all the plots
  of the same observation in case the number of sources > 1
  New object : reftimes contains all the 1st and nth time values from
  all the timeseries, excluding those for src_id = -1

Version 1.2 - 2021-02-16 (JVP)
------------

+ Skip updating the fastSrcList if SRC_ID column already exists
  Cross-matching loop: Set again search_ox = False to avoid exiting the loop if SRC_ID = -1
  Merged timeseries plot for non-identified sources (SRC_ID = -1) produced. It is then kept only in the intermediate directory

Version 1.1 - 2021-01-18 (JVP)
------------

+ Timeseries plot in count/s instead of Fluxes if any fluxConvFactors are missing

Version 1.0 - 2020-07-13 (JVP)
------------

+ Initial version
  Task to merge OMFast timeseries:
  Fast and Image source cross-match
  Output merged timeseries as Plot and FITS files

"""

__version__   = "1.8"
__author__    = "Jose V. Perea"

__copyright__ = "ESA (C) 2000-2020"
__license__   = "GPL"



import os, datetime, time

#outputDir	= 'plots/'
outputDir       = 'intermediate/'

#fitsDir	= 'fits/'
##srcListsDir = 'sourceLists/'
##srcListsDir = 'sourceListsTest/'
srcListsDir = 'intermediate/'


filtersSort = ['V', 'B', 'U', 'UVW1', 'UVM2', 'UVW2']
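# OM filters ordered from optical (V, B, U) to ultraviolet (UVW1, UVM2, UVW2);
# this order is used for the per-filter subplots and the plotting loop in plotOMTimeSeries()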


#points_density = 100000
#points_density = 1024
#points_density = 512

# outplot = True
# outfits = True
# updateSrcList = True


#################################################################################################################
def sourceCrossMatch( obs_id=None, expid=None, osw_id=None, ox_exist=None, ox=None, omFastsrcLists=None, omImagesrcLists=None, max_dist=1.0, updateSrcList=None ):
	""" Fast - Image source cross-match
	"""

	#print(f'Source cross-matching for {expid}, osw_id: {osw_id}')

	sm = {}
	# Fast srcList SFSRLI for expid ( sf )
	#fastSrcList	= prods + 'P'+obs_id+'OM'+expid+'SFSRLI'+osw_id+'000.FTZ'
	fastMatches = [ x for x in omFastsrcLists if 'P'+obs_id+'OM'+expid+'SFSRLI'+osw_id+'000.F' in x ]

	if not fastMatches or not os.path.isfile(fastMatches[0]):
		print(f'\tsf: No Fast sourceList for expid: {expid}, osw_id: {osw_id}')
		return sm		# empty dict: nothing to cross-match here
	fastSrcList = fastMatches[0]

	with fits.open(fastSrcList) as fhdu:
		sf = fhdu['SRCLIST'].data
		h0 = fhdu[0].header

	# Image srcLists SWSRLI for the same exposure: expid
	imSrcListFiles = [ x for x in omImagesrcLists if 'OM'+expid+'SWSRLI' in x ]
	if imSrcListFiles:	imSrcListFiles.sort()

	#	Check which Image srcList has SRC_ID columns
	src_flg   = False	# Test: SRC_ID column
	search_ox = False	# Test: sources cross-match in the final merged srcList: ox = OBSMLI
	hi = None
	for f in imSrcListFiles:
		with fits.open(f) as imhdu:
			d = imhdu['SRCLIST'].data
			h = imhdu[0].header

		try:
			d.SRC_ID
			# if 'SRC_ID' in [x.name for x in d.columns]
			src_flg = True
			si		= d
			hi		= h
			imSrcListFile = f
			#print(f'\tDEBUG --- imSrcListFile with SRC_ID: {f}')
			break
		except (AttributeError, KeyError):	# SRC_ID column not present in this srcList
			pass

	#	Either there are no Image srcLists or the Image srcLists do not contain a SRC_ID column
	#		==> Try sources cross-match in the final merged srcList (ox = OBSMLI)
	if not src_flg:
		search_ox = True

	#		==> Try sources cross-match against the exposure's Image srcList (SWSRLI)
	else:
		### print(f'\tImg srcList with SRC_ID : {os.path.basename(imSrcListFile)}')
		search_ox = False

	# Source coordinates from the Summary srcList OMX000OBSMLI0000
	if ox_exist and (ox.RA.all() and ox.DEC.all()):
		allSrcCoords = SkyCoord(ra=ox.RA *u.degree, dec=ox.DEC *u.degree)


	# Loop over sources in the Fast srcList (SFSRLI) to search the corresponding SRC_ID
	search_ox_orig = search_ox

	smatch = {}
	nsf    = len(sf)
	messages = []
	for j in range(0, nsf):

		srcno = j+1
		if not search_ox:	# Try cross-match against the Image srcList (SWSRLI)
			#print(f'** Trying Fast-Image cross-match')
			if not hi:
				print('Img srcList was not correctly read. ERROR...')
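			# bx, by : Image-window binning factors (2**BINAX1/2 from the Image srcList header)
			# xf, yf : Fast-window source positions (XPOS, YPOS) shifted with the WINDOWX0/Y0
			#          offsets into the Image window's binned pixel frame, comparable to si.XPOS/YPOS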
			bx = 2.**hi['BINAX1']
			by = 2.**hi['BINAX2']
			xf = ( h0['WINDOWX0'] + sf.XPOS - hi['WINDOWX0'] )/bx
			yf = ( h0['WINDOWY0'] + sf.YPOS - hi['WINDOWY0'] )/by

			dd	= np.sqrt( (xf[j]-si.XPOS)**2+(yf[j]-si.YPOS)**2)
			t = 0.5
			# Position offset in units of the combined position error; the combined error
			# (sf.POSERR[j]*bx + si.POSERR) is clipped at t = 0.5 to avoid very small divisors
			dif = dd / [ x if x > t else t for x in (sf.POSERR[j]*bx + si.POSERR) ]
			###		dif=dd/((sf[j].poserr*bx+si.poserr)>0.1)
			mdif = min(dif)		# position sigma diff
			imin = np.argmin(dif)	# position of the minimum

			# rate sigma diff
			fdif = abs(sf.RATE[j]-si.RATE[imin]) / (sf.RATE_ERR[j]+si.RATE_ERR[imin])

			# rate corr sigma diff
			fcdif = abs(sf.CORR_RATE[j]-si.CORR_RATE[imin]) / (sf.CORR_RATE_ERR[j]+si.CORR_RATE_ERR[imin])

			# rate corr percentage diff
			rcdif = abs(sf.CORR_RATE[j]-si.CORR_RATE[imin]) / (sf.CORR_RATE[j]+si.CORR_RATE[imin])

			# Accept the match if at least one of the two position criteria holds
			# (the original 2-of-5 test, including the rate criteria, is kept commented out below)
			(dd_lim, mdif_lim, fdif_lim, fcdif_lim, rcdif_lim) = (1., 2., 3., 3., 0.05)
			#if ( int(dd[imin] < dd_lim) + int(mdif < mdif_lim) + int(fdif < fdif_lim) + int(fcdif < fcdif_lim) + int(rcdif < rcdif_lim) ) >= 2:
			if ( int(dd[imin] < dd_lim) + int(mdif < mdif_lim) ) >= 1:
				m = f"""Fast-Image match for srcno: {srcno}. SRC_ID: {si.SRC_ID[imin]}: \
dd<{dd_lim}: {dd[imin]:.2f}, \
mdif<{mdif_lim}: {mdif:.2f}, \
fdif<{fdif_lim}: {fdif:.2f}, \
fcdif<{fcdif_lim}: {fcdif:.2f}, \
rcdif<{rcdif_lim}: {rcdif:.2f}"""
				messages.append(m)
				# Cross-match in the Image Source list from the same exposure
				smatch[sf.SRCNUM[j]] = si.SRC_ID[imin]
				#
				sm[expid+'_'+osw_id+'_'+str(srcno)] = {'osw_id': osw_id, 'srcno': srcno, 'srcnum': sf.SRCNUM[j], 'src_id': si.SRC_ID[imin]}
			else:
				#print(f'** NO Fast-Image match. Trying match with the summary srcList OBSMLI')
				search_ox = True

		if search_ox and ox_exist:
			c = SkyCoord(ra=sf.RA[j] *u.degree, dec=sf.DEC[j] *u.degree)			# c = source coords in Fast srcList
			#allSrcCoords = SkyCoord(ra=ox.RA *u.degree, dec=ox.DEC *u.degree)		# coords from all sources in OBSMLI srcList
			idx, d2d, d3d = c.match_to_catalog_sky(allSrcCoords)
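			# match_to_catalog_sky returns the index of the nearest OBSMLI source (idx) and its
			# on-sky separation (d2d); the match is accepted only if d2d < max_dist (arcsec)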
			if d2d < max_dist:
				#print(f'Fast-Summary srcList match. SRC_ID: {ox.SRCID[idx]}, separation: {d2d[0].to(u.arcsec):.3f} < {max_dist}')
				messages.append(f'Fast-Summary match for srcno: {srcno}. SRC_ID: {ox.SRCID[idx]}, separation: {d2d[0].to(u.arcsec):.3f} < {max_dist}')
				SRCID = ox.SRCID[idx]
				# print(f'\t\tFast SRCNUM = {sf.SRCNUM[j]}, cross-match in OBSMLI, \
				# Distance, Obs. SRC_ID  = {d2d.to(u.arcsec)}, {SRCID}')
				smatch[sf.SRCNUM[j]] = SRCID
				#sm[expid+'_'+osw_id+'_'+str(srcno)] = {'osw_id': osw_id, 'srcno': srcno, 'srcnum': sf.SRCNUM[j], 'src_id': SRCID, 'coord': c}
				sm[expid+'_'+osw_id+'_'+str(srcno)] = {'osw_id': osw_id, 'srcno': srcno, 'srcnum': sf.SRCNUM[j], 'src_id': SRCID}
				#print(f'expid: {expid}, osw_id: {osw_id}, srcno: {j}, srcnum: {sf.SRCNUM[j]}, src_id: {SRCID}')
			else:
				#print(f'** NO Fast-Summary match. No Fast source identification.')
				# print(f'\t\tFast SRCNUM = {sf.SRCNUM[j]}	NO cross-match in OBSMLI')
				smatch[sf.SRCNUM[j]] = None
				#sm[expid+'_'+osw_id+'_'+str(srcno)] = {'osw_id': osw_id, 'srcno': srcno, 'srcnum': sf.SRCNUM[j], 'src_id': -1, 'coord': c}
				sm[expid+'_'+osw_id+'_'+str(srcno)] = {'osw_id': osw_id, 'srcno': srcno, 'srcnum': sf.SRCNUM[j], 'src_id': -1}
				#print(f'expid: {expid}, osw_id: {osw_id}, srcno: {j}, srcnum: {sf.SRCNUM[j]}, src_id: None')

		elif search_ox and not ox_exist:
			#print(f'** Try Fast-Summary match, but Summary srcList not found. No Fast source identification.')
			#sm[expid+'_'+osw_id+'_'+str(srcno)] = {'osw_id': osw_id, 'srcno': srcno, 'srcnum': sf.SRCNUM[j], 'src_id': -1, 'coord': c}
			sm[expid+'_'+osw_id+'_'+str(srcno)] = {'osw_id': osw_id, 'srcno': srcno, 'srcnum': sf.SRCNUM[j], 'src_id': -1}

		search_ox = search_ox_orig	# Reset to orig value to keep going with the next sources in the fastSrcList

	key = f'{expid}_{osw_id}'
	src_id_col = [ sm[s]['src_id'] for s in sm.keys() if key in s ]
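	# sm preserves insertion order (one entry per row of the Fast srcList), so src_id_col is
	# aligned with the SRCLIST rows when it is written as the SRC_ID column below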
	print(f'Source cross-matching for exp, win: {expid}, {osw_id} - SRC_ID: {src_id_col}')
	for m in messages:
	    print(f'  {m}')
	#print(f'SRC_ID: {set(src_id_col)}')

	#---------------------------------------------------------------------------
	#  Update the Fast source list with the Image source identification: SRC_ID
	with fits.open(fastSrcList) as temp:
	     t = temp['SRCLIST'].data
	     isSRC_ID = 'SRC_ID' in t.columns.names
	if isSRC_ID: print(f'  fastSrcList file {fastSrcList} already has a SRC_ID column. Skipping the update of that column.')
	#
	if updateSrcList and not isSRC_ID:

		# Make sure the FITS file is not compressed before opening it with fits.open(),
		# especially when the file is going to be modified
		#
		testGzip = ModuleUtil.testGzip(fastSrcList)
		if testGzip:
			srcList = ModuleUtil.decompress(fastSrcList, srcListsDir)
		else:
			srcList = fastSrcList

		with fits.open(srcList, mode='update') as fhdu:
			sf = fhdu['SRCLIST'].data
			h  = fhdu['SRCLIST'].header

			orig_cols	= sf.columns

			fitsColumnDefinitions = [ fits.Column( name='SRC_ID', format='J', array=src_id_col )  ]
			new_cols	= fits.ColDefs( fitsColumnDefinitions )
			sf = fits.BinTableHDU.from_columns( orig_cols + new_cols, header=h )	# header=h does not guarantee the header is kept,
			                                                                        # especially the original keyword comments
			fhdu['SRCLIST'] = sf

			# Re-build the header, including comments
			hn = fhdu['SRCLIST'].header

			# Values and comments from original header: h
			for k in h['TTYPE*']:
			    hn[k] = ( h[k], h.comments[k] )

			# Value and comment for the new included column: SRC_ID
			ttype_src_id = [ k for k,v in hn.items() if v == 'SRC_ID' ][0]  # Search for the keyword name: TTYPE40
			hn[ttype_src_id] = ( hn[ttype_src_id], 'Source identification' )

			fhdu.flush()				# Flush the updated HDUList (fhdu) to disk

		## ModuleUtil.compress(srcList, remove=True) # Compress again
	#.................................................................................


	return sm


#################################################################################################################

def handleFiles( omTimeSeriesFiles, omFastsrcLists, omImagesrcLists, summarySrcList, max_dist=1.0, threading=False, updateSrcList=None ):
	""" Handle input timeseries files:
		- Extract data and metadata from timeseries files
		- Try Fast-Image sources cross-matching
	"""
	from astropy.table import Table
	#from astropy.io import fits
	#from astropy.time import Time
	#from astropy.coordinates import SkyCoord
	#from astropy import units as u

	#print(f'Timeseries files handling...')
	#----------------------------
	#  Input timeseries handling
	#
	omTimeSeries = {}
	##timeEdges = []

	record = 0
	for file in omTimeSeriesFiles:
		if not os.path.isfile(file):
		   continue

		record +=1
		with fits.open(file) as hdl:
		   tsr	   = hdl['RATE'].data
		   hdr	   = hdl['RATE'].header
		   filter  = hdr['FILTER']
		   if filter == 'WHITE': continue

		   timedef = {
				   'timedel': hdr['TIMEDEL'],
				   'timesys': hdr['TIMESYS'],
				   'timeref': hdr['TIMEREF'],
				   'mjdref' : hdr['MJDREF']
				   }

		   minTime = min(tsr['TIME'])
		   maxTime = max(tsr['TIME'])
		   ##timeEdges.append(minTime)
		   ##timeEdges.append(maxTime)

		   obs_id	   = hdl[0].header['OBS_ID']
		   expid	   = hdl[0].header['EXPIDSTR']
		   osw_id	   = hdl[0].header['OSW_ID']
		   #objectID	= hdl[0].header['OBJECT']
		   ra_deg	   = hdl[1].header['RA_OBJ']
		   dec_deg	   = hdl[1].header['DEC_OBJ']
		   srcno	   = hdl[0].header['SRCNO']

		   filename = os.path.basename(file).split('.')[0]

		   label = expid+'_'+osw_id+'_'+str(srcno)
		   omTimeSeries[label] = {
			'filter' : filter,
			'expid'  : expid,
			'osw_id' : osw_id,
			'srcno'  : srcno,
			'data'	 : Table(tsr),
			'Dt'	 : timedef['timedel'],
			'timedef': timedef,
			't0n'    : {'t0': minTime, 'tn': maxTime},
			'file'   : file
			}

	if not omTimeSeries:
	   print(f'No TimeSeries with valid filters. Probably all are WHITE or there are no timeseries files.')
	   return None, 0,0,0, None

	# Identify the first timeseries in time order
	t0_file = { x['file'] : x['t0n']['t0'] for x in omTimeSeries.values() }
	firstTimeseriesFile = min(t0_file, key=t0_file.get)	# key of the minimum value
	#print(f'DEBUG -------- firstTimeseriesFile : {firstTimeseriesFile}')


	#--------------------------------------
	#  Fast - Image sources cross-matching
	#
	print('Fast-Image sources cross-matching')

	# Final merged srcList (OBSMLI) source positions (RA, DEC)
	##### summarySrcList	= prods + 'P'+obs_id+'OMX000OBSMLI0000.FTZ'
	fluxConvFactors   = {}
	if os.path.isfile(summarySrcList):
	   ox_exist = True
	   with fits.open(summarySrcList) as shdu:
	       ox  = shdu['SRCLIST'].data
	       hox = shdu['SRCLIST'].header
	   allSrcCoords = SkyCoord(ra=ox.RA *u.degree, dec=ox.DEC *u.degree)

	   # Flux conversion factors : AB flux conversion constant (erg/s/cm**2/A)
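	   #   Header keywords are named 'ABF0' + filter name (e.g. ABF0V ... ABF0UVW2),
	   #   matching the lookup fluxConvFactors['ABF0'+filter] used in plotOMTimeSeries()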
	   facts = [ s for s in hox.keys() if s.startswith('ABF0') ]
	   for fact in facts:
		   fluxConvFactors[fact] = hox[fact]
	else:
	   ox_exist = False
	   ox	    = None
	   fluxConvFactors = None

	# Exposures and OSW_ID with source timeseries:
	#print(f'\nExposure, window with sources:')
	exp_osw = [ (x['expid'],x['osw_id']) for x in omTimeSeries.values()  ]

	#--------------------------------------------------------------------------------
	#  OM Fast - Image sources cross-match : sourceCrossMatch()
	#	 Cross-match and Fast srcLists update with SRC_ID
	#
	#	 * Using multi-threading or sequential workflow
	#	 * Run only over different expid, osw_id pairs where TIMESERIES were created
	#
	sm	= {}

	#	With multi-threading :
	if threading:
		#time0 = time.time()
		from concurrent.futures import ThreadPoolExecutor, as_completed
		pool = ThreadPoolExecutor( max_workers=100 )
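		# max_workers is only an upper bound: at most one task is submitted per unique
		# (expid, osw_id) pair below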
		futures = []
		for expid,osw_id in set(exp_osw):
			futures.append( pool.submit( sourceCrossMatch, obs_id=obs_id, expid=expid, osw_id=osw_id,
						     ox_exist=ox_exist, ox=ox, omFastsrcLists=omFastsrcLists, omImagesrcLists=omImagesrcLists, 
						     max_dist=max_dist, updateSrcList=updateSrcList ) )

		for x in as_completed(futures):
			sm.update( x.result() )
		#time1 = time.time()

	#	No multi-threading :
	else:
		#time0 = time.time()
		for expid,osw_id in set(exp_osw):
			crossMatch = sourceCrossMatch( obs_id=obs_id, expid=expid, osw_id=osw_id, ox_exist=ox_exist, ox=ox,
				     omFastsrcLists=omFastsrcLists, omImagesrcLists=omImagesrcLists, max_dist=max_dist, 
				     updateSrcList=updateSrcList )
			sm.update(crossMatch)
		#time1 = time.time()


	#-----------------------------------------------
	#  Prepare the output dictionary : omTimeSeries
	#
	# Include sources cross-match in omTimeSeries and grouped by SRC_ID
	out = {}		# src_id1: [omTimeSeries1, omTimeSeries2, etc]
					# src_id2: [omTimeSeries1]
					# ...
	timeEdges = []
	sources = []
	for k, v in omTimeSeries.items():		# k = '<expid>_<osw_id>_<srcno>'
		omTimeSeries[k]['match'] = sm[k]
		src_id = v['match']['src_id']
		#match = v['match']
		#expid = v['expid']
		#win   = v['osw_id']
		#srcno = v['srcno']
		#print(f'-------------------------------------------------------------->>>>> {expid}, {win}, {srcno}  {match}')
		t0n = v['t0n']
		#if src_id != -1: timeEdges.append(t0n)	# Exclude timeEdges for src = -1
		timeEdges.append(t0n)
		#print(f'-------------------------------------------------------------->>>>> k:  {k}, src_id: {src_id}, t0n: {t0n}')
		if src_id not in sources:
			sources.append(src_id)
			out[src_id] = [ omTimeSeries[k] ]
		else:
			out[src_id].append(omTimeSeries[k])

	#for k,v in out.items():
	#	 print(f'\tsrc_id: {k}')
	#	 for i in v:
	#		 print(k,i['expid'], i['osw_id'], i['match'])

	#print(f'Number of omTimeSeries : {len(omTimeSeries)}, n_timeEdges after removing src_id=-1 : {len(timeEdges)}')
	if len(timeEdges) != 0:
	   gt0 = [ x['t0'] for x in timeEdges  ]
	   gtn = [ x['tn'] for x in timeEdges  ]
	   mingt0n = min(gt0)
	   maxgt0n = max(gtn)
	   reftimes = {'T0': mingt0n, 'TN': maxgt0n}	# Minimum and maximum times over all timeseries
	else:
	   reftimes = None


	return reftimes, obs_id, out, fluxConvFactors, firstTimeseriesFile

#------------------------------------------------------------------------------------------------------------------


####################################################################################################

def plotOMTimeSeries( omTimeSeries, points_density=None, src_id='', fluxConvFactors={}, obs_id=None, reftimes=None, outfile=None, plotformat=None ):
	""" Plot all timeseries from the same identified source SRC_ID
		- All timeseries in the same time frame plot
	"""

	import matplotlib
	matplotlib.use('Agg')   # Non-interactive backend: avoids
	                        # "RuntimeError: Invalid DISPLAY variable" when no display is available
	import matplotlib.pyplot as plt
	#
	plt.style.use('classic')
	#
	#from matplotlib.ticker import FormatStrFormatter

	#-----------------------------------------------------------------------
	# Get the minimum TIME entry from all timeseries. Reference time = t_ref
	#
	timeEdges = []
	for i in omTimeSeries:

	   if i['match']['src_id'] != src_id:
		   print(f'ERROR: Some of the input omTimeSeries do not correspond to SRC_ID: {src_id}')
		   return 1

	   minTime = min(i['data']['TIME'])
	   maxTime = max(i['data']['TIME'])
	   timeEdges.append(minTime)
	   timeEdges.append(maxTime)

	if not timeEdges:
		print(f'No data for SRC_ID: {src_id}. Returning')
		return 0

	T0, Tn = [ min(timeEdges), max(timeEdges) ]
	t_ref = T0
	#if reftimes and src_id != -1: t_ref = reftimes['T0']
	if reftimes: t_ref = reftimes['T0']

	mjdref	= i['timedef']['mjdref']
	timesys = i['timedef']['timesys']

	# Reference Time in UTC:
	#	 Reference values from the first Exposure EPIC LC
	#	 timesys, timeref, mjdref = TT LOCAL 50814.0
	from astropy.time import Time

	if 'mjdref' in locals() and 'timesys' in locals():
	   t_ref_utc = Time(mjdref + t_ref/86400,format='mjd',scale=(timesys).lower(), precision=0).utc.iso		# Conversion to UTC
	   #print(f'\tMinimum TIME from all Timeseries: Reference Time (UTC) = {t_ref_utc} ({t_ref:.2f})')
	else:
	   t_ref_utc = t_ref												# No UTC conversion
	   print(f'\tMinimum TIME from all Timeseries: Reference Time could not be converted to UTC = {t_ref}')

	#--------------------------------------------------------
	#  Start plotting timeseries only from Filters with data
	#
	filters_with_data = set([ s['filter'] for s in omTimeSeries if len(s['data']) !=0 ])
	n_filters_with_data = len(filters_with_data)
	#print(f'\tOM Filters with data {n_filters_with_data}: {filters_with_data}')

	# Filter colors
	ecol = {'V': 'red',
		'B': 'orange',
		'U': 'yellow',
		'UVW1': 'green',
		'UVM2': 'blue',
		'UVW2': 'violet'}
	#
	plt.rcParams['font.size'] = 10.0
	font= 10

	fig = plt.figure()
	fig_size = fig.get_size_inches()
	fig.set_size_inches( 8, 10 )   # 14.0, 25.0

	# Multi-grid plot and axis definitions
	gs	= fig.add_gridspec(5, 2)

	fig.suptitle('OM merged TimeSeries', fontsize=font)

	axAll  = fig.add_subplot(gs[:-3, :])
	axAll_pos = list( axAll.get_position().bounds )

	shift = 0.04
	axAll_pos[0] = axAll_pos[0] - 0.03
	axAll_pos[1] = axAll_pos[1] + shift
	axAll_pos[2] = axAll_pos[2] * 1.1
	axAll.set_position( axAll_pos )		# [left, bottom, width, height]

	# Individual Filter plots when #Filters > 1
	if n_filters_with_data != 1:
		#ax = {
		#  'V':    fig.add_subplot(gs[2, 0]),
		#  'U':    fig.add_subplot(gs[2, 1]),
		#  'B':    fig.add_subplot(gs[3, 0]),
		#  'UVW1': fig.add_subplot(gs[3, 1]),
		#  'UVM2': fig.add_subplot(gs[4, 0]),
		#  'UVW2': fig.add_subplot(gs[4, 1])
		#  }
		ax = {}
		xlabel_filters = {'0': 0, '1': 0, 'filters': {}}
		pos_order = [ [2,0], [2,1], [3,0], [3,1], [4,0], [4,1]	]
		i=-1
		for f in filtersSort:
		   if f in filters_with_data:
			   i +=1
			   pos = pos_order[i]
			   ax[f] = fig.add_subplot( gs[pos[0], pos[1]] )

			   if pos[0] == 1:
				   ax[f].set_ylabel(f'Flux', fontsize=font-2)

			   if pos[1] == 0:
				   if pos[0] > xlabel_filters['0']:
					   xlabel_filters['0'] = pos[0]
					   xlabel_filters['filters']['0'] = f

			   if pos[1] == 1:
				   if pos[0] > xlabel_filters['1']:
					   xlabel_filters['1'] = pos[0]
					   xlabel_filters['filters']['1'] = f

			   ax[f].annotate(f'{f}', xy=(0.05, 0.88), xycoords=('axes fraction'), fontsize=font)
			   ax[f].tick_params(labelsize=font-3)

		for f in xlabel_filters['filters'].values():
			ax[f].set_xlabel(f'time (s)', fontsize=font-2)

	# New time bin size for re-bining the plot
	new_time_bin = (Tn - T0) / ( points_density - 1 )  # (Total Exposure time. All Exposures included) / ( # points in plot )
	new_time_bin = int(np.round( new_time_bin ))
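	# Illustrative example (hypothetical numbers): for a 10 ks total span and points_density = 512,
	# new_time_bin = 10000 / 511 ~ 20 s, so timeseries with a finer TIMEDEL are re-binned below to ~20 s bins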

	n_ts = len(omTimeSeries)
	if src_id == -1:
	  axAll.annotate(f'WARNING: {n_ts} timeseries of non-identified sources (SRC_ID: {src_id})', xy=(0.05, 0.05), xycoords=('axes fraction'), fontsize=font-2)
	else:
	  axAll.annotate(f'Merged timeseries: {n_ts}', xy=(0.05, 0.05), xycoords=('axes fraction'), fontsize=font-2)

	# fluxConvFactors => count/s or Flux
	if not fluxConvFactors:
	   print('No OM Flux Conversion Factors found. Plotting rates instead of fluxes.')
	   countpersec = True
	   y_flux = False
	else:
	   if all(name in fluxConvFactors for name in ['ABF0' + s for s in filters_with_data]):
	      y_flux = True
	      countpersec = False
	   else:
	      print(f'No OM Flux Conversion Factor found for some filter. Plotting rates instead of fluxes.')
	      countpersec = True
	      y_flux = False


	times = {}
	for filter in filtersSort:
	   #tsrList = [ s['data'] for s in omTimeSeries if s['filter'] == filter ]

	   #k = i['filter'] + '_' + i['expid']+'_'+i['osw_id']+'_'+str(i['srcno'])
	   #tsrList2 = [ { s['expid']+'_'+s['osw_id']+'_'+str(s['srcno']) : s['data']} for s in omTimeSeries if s['filter'] == filter ]
	   #for t in tsrList2:
	   #    print(f'\tSRC_ID: {src_id}\t{t.keys()}')

	   tsrList = { s['expid']+'_'+s['osw_id']+'_'+str(s['srcno']) : s['data'] for s in omTimeSeries if s['filter'] == filter }
	   #for k, v in tsrList3.items():
	   #    print(f'\tSRC_ID: {src_id}\t{k}')

	   #for t in tsrList:
	   #    print(len(t))
	   #c=-1
	   #for t3 in tsrList3.values():
	   #    c+=1
	   #    print(f'{len(t3)}      c: {c}   len(tsrList[c]): {len(tsrList[c])}')

	   if not tsrList:
		   continue

	   p = 0
	   times[filter] = []

	   #for tsr in tsrList:
	   for l, tsr in tsrList.items():
		# --------------------------------
		# Re-bin
		#time_bin = omTimeSeries[filter]['Dt']
                time_bin = [ s['Dt'] for s in omTimeSeries if s['filter'] == filter ][0]
                t_bin_frac   = int(np.round( new_time_bin / time_bin ))
                if t_bin_frac <= 1:
                    rebin = False
		    #print(f'* No re-binning needed for this Light Curve. Input / Output time bins ratio : {t_bin_frac}')
		    #ntsr = tsr
                else:
                    rebin = True
                    tsr = ModuleUtil.TSrebin(tsr, time_bin, new_time_bin)    # import module 'ModuleUtil' required
                    #print(f'Re-binning values. DT: {time_bin}, new_DT: {new_time_bin}\tpoints_density: {points_density}')
                print(f'Plotting exp_win_srcno : {l} = SRC_ID: {src_id} ...')
                # Change time reference point to t_ref
                ## tsr['TIME'] = tsr['TIME'] - t_ref
                ## times[filter].append( min(tsr['TIME']) )
                ## times[filter].append( max(tsr['TIME']) )

                TIME_REF = tsr['TIME'] - t_ref
                times[filter].append( min(TIME_REF) )
                times[filter].append( max(TIME_REF) )

                if countpersec:
                   flux     = tsr['RATE']
                   flux_err = tsr['ERROR']
                else:
                   flux     = tsr['RATE']  * fluxConvFactors['ABF0'+filter]                 # RATE * ABF0'filter'
                   flux_err = tsr['ERROR'] * fluxConvFactors['ABF0'+filter]

                ###axAll.errorbar( TIME_REF, tsr['RATE'], tsr['ERROR'], fmt='.', c=ecol[filter], ms=4, ecolor='k')
                axAll.errorbar( TIME_REF, flux, flux_err, fmt='.', c=ecol[filter], ms=4, ecolor='k')

                # Exposure, window, srcno labels in plot over each exposure
                #if src_id == -1:
                #  e, o, s = l.split('_')
                #  lab = f'{e} osw: {o} srcno: {s}' 
                #  cen = (max(TIME_REF)+min(TIME_REF))/2
                #  axAll.annotate(f'{lab}', xy=(cen, 0.90 - p*0.05), xycoords=('data','axes fraction'), fontsize=font-2)
                #  p+=1


                # Subplots per filter when more than one filter has data
                if n_filters_with_data != 1:
                    ###ax[filter].errorbar( TIME_REF, tsr['RATE'], tsr['ERROR'], fmt='.', c=ecol[filter], ms=4, ecolor='k')
                    ax[filter].errorbar( TIME_REF, flux, flux_err, fmt='.', c=ecol[filter], ms=4, ecolor='k')
                    #### format_label_string_with_exponent(ax[filter], axis='y')
                    ax[filter].yaxis.offsetText.set_visible(False)

	   center_time = ( min(times[filter]) + max(times[filter]) ) / 2
	   axAll.annotate(f'{filter}', xy=(center_time, 0.95), xycoords=('data','axes fraction'), fontsize=font)


	# Offset factor aside of the axis
	#	- Get the upper value of the axes
	#	- Remove that tag label
	#
	#	Alternative from axAll.yaxis.get_offset_text().get_text()
	#	but that approach is much slower
	#	See: http://greg-ashton.physics.monash.edu/setting-nice-axes-labels-in-matplotlib.html
	newLabel = False

	# Get offset factor from upper ybound scientific notation
	up = axAll.get_ybound()[1]
	if str(up).lower().find('e') != -1:
		newLabel = True
		expo = str(up).split('e')[1]		# e-14, e-15, etc

	if newLabel:
		offsetScale = r'$10^{{{}}}$'.format(expo)
	else:
		offsetScale = ''

	axAll.yaxis.offsetText.set_visible(False)					# Remove the offset factor in the axis

	# X range
	t0 = min( [ y for x in times.values() for y in x ] )
	tn = max( [ y for x in times.values() for y in x ] )

	#if reftimes and src_id != -1:	# Fix the same X range for all the plots (in case #src's > 1)
	if reftimes:   # Fix the same X range for all the plots
	   t0 = reftimes['T0'] - t_ref
	   tn = reftimes['TN'] - t_ref
	print(f'First and Last times = {t0} - {tn}')

	###DT = axAll.get_xlim()[1] - axAll.get_xlim()[0]
	###offset = 0.02
	###axAll.set_xlim( t0 - DT*offset, tn + DT*offset)
	axAll.set_xlim( t0 - 200, tn + 200)

	axAll.set_xlabel(f'time (s) after {t_ref_utc} (UTC)', fontsize=font)

	if y_flux:
	   axAll.set_ylabel(f'Flux ' + r'$(erg/s/cm^2/A)$' + f' [x {offsetScale}]', fontsize=font)
	else:
	   axAll.set_ylabel(f'Rate (counts/s)', fontsize=font)

	axAll.annotate(f'OBS_ID: {obs_id}', xy=(0.00, 1.03), xycoords=('axes fraction'), fontsize=font)
	axAll.annotate(f'SRC_ID: {src_id}', xy=(0.85, 1.03), xycoords=('axes fraction'), fontsize=font)

	axAll.grid()

	if rebin:
		label='rebinned' + '_points' + str(points_density)
	else:
		label='norebinned'

	# OMSourceProducts-om-1-OM_PHA_spectrum-X00000000000055.ftz
	#
	# -om-<#exp>--X000< src_num  >
	# OMFast_merged_timeseries-
	#plotfile = f'{outputDir}OMTimeSeries_{obs_id}_{seqid}_{src_id:04d}_{label}.png'
	#plotfile = f'{outputDir}OMTimeSeries_{obs_id}_{src_id:04d}_{label}.png'
	#plotfile = f'{outputDir}OMFast_merged_timeseries-X000{src_id:011d}.png'
	plotfile = outfile + '.' + plotformat

	print(f'Merged timeseries PLOT for SRC_ID : {src_id} - {plotfile}')

	plt.savefig( plotfile )
	plt.close('all')

	return 0

####################################################################################################

def mergeOMTimeSeries( omTimeSeries, src_id=None, obs_id=None, firstTimeseriesFile=None, args=None, outfile=None ):
	""" Merged all timeseries from the same identified source SRC_ID in the same FITS file """

	# Sort timeseries by start time
	times	  = {}
	allTables = []
	for i in omTimeSeries:

		if i['match']['src_id'] != src_id:
			print(f'ERROR: Some of the input omTimeSeries do not correspond to SRC_ID: {src_id}')
			return 1

		k = i['filter'] + '_' + i['expid']+'_'+i['osw_id']+'_'+str(i['srcno'])
		times[k] = ( i['data']['TIME'][0], i['data']['TIME'][-1] )	 # First and Last time values

		mjdref = i['timedef']['mjdref']

	# Vertical stack of all Exposure tables
	# 1) Sort the exposure names ( dict keys ) by Start Time ( times dict values )
	sortedExposures = sorted( times, key=times.get )
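	# e.g. sortedExposures might look like ['V_S001_1_1', 'UVW1_S002_1_1', ...]
	# where each label is '<filter>_<expid>_<osw_id>_<srcno>' (values shown are placeholders)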

	# 2) Vertical stacking of the tables ( table[exp] )
	for label in sortedExposures:
		filter, expid, osw_id, srcno = label.split('_')
		#print(f'{label} == {expid}, {osw_id}, {srcno}')
		t = [ s['data'] for s in omTimeSeries if ( s['expid'] == expid and s['osw_id'] == osw_id and str(s['srcno']) == srcno ) ][0]

		# Columns units
		t['TIME'].unit	= u.s
		t['RATE'].unit, t['ERROR'].unit, t['BACKV'].unit, t['BACKE'].unit = 4 * ['count/s']

		# Add some extra columns

		t['MJD']	= mjdref + (t['TIME']/86400.0)
		#t['MJD']	 = Time( mjdref + (t['TIME']/86400.0), format='mjd' )
		t['FILTER'] = filter
		t['EXPID']	= expid
		t['OSWID']	= osw_id
		t['SRCNO']	= srcno
		allTables.append( t )

	from astropy.table import vstack

	allTable = vstack( allTables )					# Optionally: metadata_conflicts='silent'

	TSTART = allTable['TIME'][0]					# First and last time values
	TSTOP  = allTable['TIME'][-1]

	# Convert 'Table object' to 'BinTableHDU object'
	allTableHDU = fits.table_to_hdu( allTable )

	# Improving Header information
	# (a BinTableHDU already has 'header')
	allTableHDU.header['EXTNAME'] = ('RATE', 'Table with the source and background count rate')

	# Write down the output FITS file
	#	Headers (h0 and h1) based on a copy of the 1st timeseries and then modified
	#
	#       fitsfile = f'{fitsDir}OMTimeSeries_{obs_id}_{src_id:04d}.FTZ'
#	fitsfile = outfile + '.FTZ'

	# Copy one of the original OMFast timeseries and modify its headers and data.
	# In this way we keep part of the original header information.
	from shutil import copy
#	copy(firstTimeseriesFile, fitsfile)

	# Make sure the FITS file is not compressed before opening it with fits.open(),
	# especially when the file is going to be modified
	testGzip = ModuleUtil.testGzip( firstTimeseriesFile )
	if testGzip:
		fitsfile = outfile + '.ftz'
		copy(firstTimeseriesFile, fitsfile)
		mergedTS = ModuleUtil.decompress( fitsfile, outputDir, remove=True )
	else:
		fitsfile = outfile + '.fits'
		copy(firstTimeseriesFile, fitsfile)
		mergedTS = fitsfile

	with fits.open(mergedTS, mode='update') as hdul:

		# Remove and include some keywords to h0
		h0 = hdul[0].header
		remove_from_h0 = ['XPROC1', 'XPROC2', 'XDAL0', 'XDAL1', 'XDAL2',  'EXP_ID', 'EXPIDSTR', 'EXPOSURE', 'OSW_ID', 'SRCNO']
		for r in remove_from_h0:
			try:
				h0.remove(r)
			except KeyError:		# keyword not present in the primary header
				pass

		h0['XPROC0']  = ' '.join(args)
		h0['CREATOR'] = args[0]
		h0['DATE']    = datetime.datetime.utcnow().isoformat(timespec='milliseconds')
		h0['SRC_ID']  = (src_id, 'Source identification')

		# Copy some keywords from h0 to h1
		keys_copy = [ 'TELESCOP','INSTRUME','TIMESYS','TIMEUNIT','TIMEREF','TASSIGN','CLOCKAPP','MJDREF']
		for k in keys_copy:
			allTableHDU.header[k]		   = h0[k]
			allTableHDU.header.comments[k] = h0.comments[k]

		TIMEDEL		= hdul[1].header['TIMEDEL']
		TIMEDELcomm = hdul[1].header.comments['TIMEDEL']
		allTableHDU.header['TIMEDEL']  = (TIMEDEL, TIMEDELcomm)
		allTableHDU.header['TSTART']   = (TSTART, 'Light curve start time')
		allTableHDU.header['TSTOP']    = (TSTOP , 'Light curve end time')
		allTableHDU.header['SRC_ID']   = (src_id, 'Source identification')

		# Write the Final Table into the FITS HDU extension 'RATE' = hdul['RATE']
		hdul['RATE'] = allTableHDU
		hdul.flush()

	### ModuleUtil.compress(mergedTS, remove=True)
	print(f'Merged timeseries FITS for SRC_ID : {src_id} - {mergedTS}')


########################################################################


import sys
import argparse
import ModuleUtil


version_num = __version__


def input():
	# Input options
	parser = argparse.ArgumentParser(description='Merge OMFast light curves',
			 formatter_class=argparse.ArgumentDefaultsHelpFormatter)

	parser.add_argument('--version', '-v', action='version', version=f'%(prog)s (%(prog)s-{version_num}) [-]')

	parser.add_argument('--ratesets', help='Input OMFast timeseries files', default='PobsidOMexpTIMESR1001.FTZ,PobsidOMexpTIMESR1002.FTZ')

	parser.add_argument('--fastsrclists', help='Input OM Fast-mode source lists files', default='PobsidOMexpSFSRLI1000.FTZ,PobsidOMexpSFSRLI2000.FTZ')
	parser.add_argument('--imgsrclists', help='Input OM Image-mode source lists files', default='PobsidOMexpSWSRLI0000.FTZ,PobsidOMexpSWSRLI1000.FTZ')
	parser.add_argument('--sumsrclist', help='Input OM Summary source list file', default='PobsidOMX000OBSMLI0000.FTZ')
	parser.add_argument('--max_dist', help='Maximum angular distance for searching in OBSMLI srcList (arcsec)', default=1.0, type=float)

	parser.add_argument("--outplot", type=ModuleUtil.str2bool, nargs='?',
						const=True, default=True,
						help="Output plot (yes/no)")

	parser.add_argument("--outfits", type=ModuleUtil.str2bool, nargs='?',
						const=True, default=True,
						help="Output merged FITS (yes/no)")

	parser.add_argument("--updateSrcList", type=ModuleUtil.str2bool, nargs='?',
						const=True, default=True,
						help="Update OMFast source list (yes/no)")

	parser.add_argument("--threading", type=ModuleUtil.str2bool, nargs='?',
						const=True, default=True,
						help="Source cross-match in multi-threading mode (yes/no)")

	parser.add_argument('--pointsDen', help='Total plot points', default=512, type=int)

	#parser.add_argument('--plotfiles', help='Plot files of the identified sources.', default='plotfile1,plotfile2')
	parser.add_argument('--plotformat', help='Plot format (png/pdf)', choices=['png', 'pdf'], default='png')

	#parser.print_help()
	#parser.add_argument('--fitsfiles', help='Merged FITS light curves of the identified sources.', default='fitsfiles')

	parser.add_argument('--outfiles', help='Output filenames seed', default='OMFast_merged_timeseries')
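	# Illustrative command line (file names are placeholders following the default patterns above;
	# boolean options take the values accepted by ModuleUtil.str2bool):
	#
	#   plotOMTimeSeries --ratesets P0123456789OMS005TIMESR1001.FTZ,P0123456789OMS005TIMESR1002.FTZ \
	#                    --fastsrclists P0123456789OMS005SFSRLI1000.FTZ \
	#                    --imgsrclists P0123456789OMS005SWSRLI0000.FTZ \
	#                    --sumsrclist P0123456789OMX000OBSMLI0000.FTZ \
	#                    --outplot yes --outfits yes --updateSrcList yes --threading yes \
	#                    --pointsDen 512 --plotformat png --outfiles OMFast_merged_timeseries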

	args = parser.parse_args()

	# All Command line string ?
	# Program name ?

	return args


def newFile(src_id=None, format='fit'):
    import subprocess
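    # This helper shells out to './run_newFile.pl'; it is not called from main() in this module
    # and is kept as a standalone utility.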
    
    cmd = f'./run_newFile.pl {src_id} {format}'.split()

    #chk = subprocess.Popen( cmd, stdout = subprocess.PIPE, stderr = subprocess.STDOUT, shell=True)
    chk = subprocess.run(cmd, capture_output=True, text=True)

    chk.check_returncode()
    
    return chk.stdout



def main(args):

	omTimeSeriesFiles = args.ratesets.split(',')
	omFastsrcLists	  = args.fastsrclists.split(',')
	omImagesrcLists   = args.imgsrclists.split(',')
	summarySrcList	  = args.sumsrclist
	max_dist          = args.max_dist * u.arcsec

	outplot0		  = args.outplot
	outfits0		  = args.outfits
	updateSrcList	  	  = args.updateSrcList
	threading		  = args.threading
	pointsDen		  = args.pointsDen
	#plotfiles		  = args.plotfiles
	plotformat		  = args.plotformat
	#fitsfiles		  = args.fitsfiles
	outfilesSeed		  = args.outfiles

	for f in omTimeSeriesFiles:
	   if not os.path.isfile(f):
	      print(f'Input omTimeSeries file {f} does not exist. Exiting')
	      sys.exit(1)

	#if omTimeSeriesFiles:
	th0 = time.time()
	reftimes, obs_id, omTimeSeries, fluxConvFactors, firstTimeseriesFile = handleFiles(
			omTimeSeriesFiles, 
			omFastsrcLists, 
			omImagesrcLists, 
			summarySrcList, 
			max_dist=max_dist, 
			threading=threading,
			updateSrcList=updateSrcList )
	#print(f'Reftimes excluding SRC_ID = -1 : {reftimes}')
	th1 = time.time()
	print(f'* Files and cross-match\t: {th1 - th0:.2f} seconds')
	#				 src_id1: [omTimeSeries1, omTimeSeries2, etc]
	#				 src_id2: [omTimeSeries1]
	#				 ...
	#				 handleFiles() includes sourceCrossMatch()
	#
	if not obs_id and not omTimeSeries and not fluxConvFactors:
		print(f'No valid timeseries for obs_id {obs_id}. Skipping')
		sys.exit(0)

	if outplot0 or outfits0:
	   print(f'OMFast merged timeseries ...')

	tf0 = time.time()

	print(f'Original timeseries correspond to SRC_IDs: {list(omTimeSeries.keys())}')
	for src_id, v in omTimeSeries.items():
	   outfits = outfits0
	   outplot = outplot0
	   srcLabel = src_id

	   if src_id == -1:
	      print(f'Non-identified source (SRC_ID = -1). No FITS merged timeseries. Only create plot.')
	      #print(f'Non-identified source (SRC_ID = -1). No merged timeseries.')
	      outfits  = False
	      outplot  = True
	      srcLabel = 0
	      #print(f'Merged timeseries skipped for SRC_ID = -1')
	      #continue

	   outfile = f'{outputDir}{outfilesSeed}_{obs_id}-X000{srcLabel:011d}'
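	   # e.g. intermediate/OMFast_merged_timeseries_<obs_id>-X00000000000001.<png|pdf|fits|ftz>
	   #      for SRC_ID = 1 (the extension is added by plotOMTimeSeries / mergeOMTimeSeries)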

	   #print(f'DEBUG ------- firstTimeseriesFile: {firstTimeseriesFile}')

	   if outplot and outfits and threading:
	      from concurrent.futures import ProcessPoolExecutor

	      with ProcessPoolExecutor(max_workers=2) as ex:
                                plot  = ex.submit( plotOMTimeSeries,
                                omTimeSeries[src_id],
                                points_density=pointsDen,
                                src_id=src_id,
                                fluxConvFactors=fluxConvFactors,
                                obs_id=obs_id,
                                reftimes=reftimes,
                                outfile=outfile,
                                plotformat=plotformat)

                                plot.result()


                                merge = ex.submit( mergeOMTimeSeries,
                                omTimeSeries[src_id],
                                src_id=src_id,
                                obs_id=obs_id,
                                firstTimeseriesFile=firstTimeseriesFile,
                                args=sys.argv,
                                outfile=outfile)

                                merge.result()

	   if not (outplot and outfits and threading):	# sequential path; also used when only one output is requested with threading
	      if outplot:
	         plot  = plotOMTimeSeries( 
                                   omTimeSeries[src_id]
                                   , points_density=pointsDen
                                   , src_id=src_id
                                   , fluxConvFactors=fluxConvFactors
                                   , obs_id=obs_id
                                   , reftimes=reftimes
                                   , outfile=outfile
                                   , plotformat=plotformat )
	      if outfits:
	         merge = mergeOMTimeSeries( 
                                   omTimeSeries[src_id]
                                   , src_id=src_id
                                   , obs_id=obs_id
                                   , firstTimeseriesFile=firstTimeseriesFile
                                   , args=sys.argv
                                   , outfile=outfile )




	tf1 = time.time()
	print(f'* Output files\t: {tf1 - tf0:.2f} seconds')

########################################################################

if __name__=='__main__':

   argsObj = input()

   import numpy as np
   from astropy.io import fits
   from astropy.coordinates import SkyCoord
   from astropy import units as u
   import ModuleUtil

   main(argsObj)


   sys.exit(0)

########################################################################