pro metis_l1_prep

; keyword defining if the detector reference frame must be used for the output
ref_detector = 1

; start the log
journal, 'output/metis_l1_prep_log.txt'

; some definitions
metis_datatype = list( $
  'VL image',                       $ ; 0
  'UV image',                       $ ; 1
  'PCU accumulation matrix',        $ ; 2
  'VL temporal standard deviation', $ ; 3
  'UV temporal standard deviation', $ ; 4
  'VL cosmic-ray matrix',           $ ; 5
  'UV cosmic-ray matrix',           $ ; 6
  'PCU event list',                 $ ; 7
  'PCU test event list',            $ ; 8
  'VL light-curve'                  $ ; 9
  )

; read the auxiliary input file
input = json_parse('input/contents.json', /toarray, /tostruct)

journal, 'File ' + input.file_name

; load the spice kernels
load_spice_kernels, input.spice_kernels, kernel_list = kernel_list, kernel_version = kernel_version

journal, 'SPICE kernel files correctly loaded:'
journal, '  SKD version = ' + kernel_version

; import the calibration package index file
cal_pack = json_parse(input.cal_pack_path + '/index.json', /toarray, /tostruct)

; include the calibration package path into the cal_pack structure
cal_pack = create_struct('path', input.cal_pack_path + path_sep(), cal_pack)

journal, 'Calibration package correctly imported:'
journal, '  version = ' + cal_pack.version
journal, '  validity range = ' + string(cal_pack.validity_range.start, cal_pack.validity_range._end, format = '(A, "-", A)')

; read the primary hdu
data = mrdfits(input.file_name, 0, primary_header, /silent)

naxis = fxpar(primary_header, 'NAXIS')
naxis1 = fxpar(primary_header, 'NAXIS1', missing = 0)
naxis2 = fxpar(primary_header, 'NAXIS2', missing = 0)

; read the metadata extension
metadata_bin_table = mrdfits(input.file_name, 'metis metadata', metadata_extension_header, /silent)

; identify the data product and its nominal size
datatype = fxpar(metadata_extension_header, 'DATATYPE')

journal, 'L0 FITS file correctly read:'
journal, '  datatype = ' + string(datatype, format = '(I0)')

; if the data product is a light curve or a PCU event list, read the data binary-table extension
if datatype gt 6 then data_bin_table = mrdfits(input.file_name, 1, data_extension_header, /silent) $
else data_bin_table = !null

; if the data product is an accumulation matrix, look for the accumulation-vector extension and read it if it exists
if datatype eq 2 then $
  if fxpar(metadata_extension_header, 'M') eq 256 then $
    accumul_vector = mrdfits(input.file_name, 'accumulation vector', vector_extension_header, /silent) $
  else accumul_vector = !null

; NOTE - the radialization extension is ignored

; read the planning data
planning_data = json_parse(input.planning_file_name, /toarray, /tostruct)

; definitions for the primary header

; version of the fits file
version = string(input.l1_version + 1, format = '(I02)')

; creation and acquisition times in utc
date = date_conv(systime(/julian, /utc), 'FITS')

obt_beg = fxpar(primary_header, 'OBT_BEG')
obt_end = fxpar(primary_header, 'OBT_END')
obt_avg = (obt_beg + obt_end)/2.0D

date_beg = solo_obt2utc(obt_beg)
date_end = solo_obt2utc(obt_end)
date_avg = solo_obt2utc(obt_avg)

date_beg_string = strmid(date_beg, 0, 19)
date_beg_string = date_beg_string.replace('-', '')
date_beg_string = date_beg_string.replace(':', '')

journal, 'Calculated UTC time of acquisition start:'
journal, '  time = ' + date_beg

; name of the fits file
file_name_fields = strsplit(fxpar(primary_header, 'FILENAME'), '_', /extract)
file_name = 'solo_L1_' + file_name_fields[2] + '_' + date_beg_string + '_V' + version + '.fits'
out_file_name = 'output/' + file_name
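; Illustrative example of the naming above (hypothetical values): if the L0
; header has FILENAME = 'solo_L0_metis-vl-image_0707411138_V01.fits', date_beg
; is '2022-03-21T05:30:12.345' and input.l1_version = 0, then
; date_beg_string = '20220321T053012' and the output file name becomes
; 'solo_L1_metis-vl-image_20220321T053012_V01.fits'.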
; filter keyword
case datatype of
  0: filter = 'VL'
  1: filter = 'UV'
  2: filter = 'UV'
  3: filter = 'VL'
  4: filter = 'UV'
  5: filter = 'VL'
  6: filter = 'UV'
  7: filter = 'UV'
  8: filter = 'UV'
  9: filter = 'VL'
endcase

; choose the correct calibration-package structure based on the image filter
if filter eq 'VL' then channel = cal_pack.vl_channel else channel = cal_pack.uv_channel

; instrument/telescope/detector keywords
detector = filter + 'D'
telescope = 'SOLO/Metis/' + detector
wavelnth = channel.bandpass.value[1]
wavemin = channel.bandpass.value[0]
wavemax = channel.bandpass.value[2]
waveband = channel.name

; campaign keywords
obs_id_fields = strsplit(planning_data.obs_id, '_', /extract)
soop_type = obs_id_fields[2]
obs_type = obs_id_fields[4]

; build the fits file extensions

; join the metadata extension header to the primary header removing useless keywords
i = where(strmid(primary_header, 0, 8) eq 'COMP_RAT')
j = where(strmid(metadata_extension_header, 0, 8) eq 'EXTNAME ')
k = where(strmid(metadata_extension_header, 0, 8) eq 'TFORM1  ')
primary_header = [ $
  primary_header[0 : i], $
  metadata_extension_header[j + 1 : k - 1], $
  primary_header[i + 1 : *] $
  ]
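; Illustrative example of the binning correction below (hypothetical values):
; bin_type = 2 gives bin_fact = 2^2 = 4, so a 2048×2048 frame is rebinned to
; 512×512 and, for images and accumulation matrices (datatype le 2), the DN
; values are multiplied by bin_fact^2 = 16 so that each rebinned pixel holds
; the sum, rather than the average, of the original pixels.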
; rebin the image if binning was applied during the acquisition and check the data quality
; NOTE - this is done only for image data products
if datatype le 6 then begin
  bin_type = fxpar(primary_header, 'B0_BIN', missing = 0)

  ; check for wrong binning type in fixed-polarization filtered images
  pol_id = fxpar(primary_header, 'POL_ID', missing = -1)
  vlfpfilt = fxpar(primary_header, 'VLFPFILT', missing = -1)
  if vlfpfilt eq 0 and pol_id eq 0 and bin_type eq 0 then bin_type = 1

  bin_fact = 2^bin_type ; binning factor = 1, 2, or 4

  if bin_fact gt 1 then begin
    naxis1 = naxis1/bin_fact
    naxis2 = naxis2/bin_fact
    data = rebin(data, naxis1, naxis2)

    ; evaluate the quality matrix
    quality_matrix = check_quality(data, cal_pack, filter)

    ; correct values including binning factor only for vl and uv images and accumulation matrices
    if datatype le 2 then data = long(data) * bin_fact^2

    if ~ isa(comment) then comment = !null
    comment = [comment, 'Image was rebinned according to the commanded binning factor.']

    journal, 'Data was binned during acquisition:'
    journal, '  binning = ' + string(bin_fact, format = '(I1)') + '×' + string(bin_fact, format = '(I1)')
    journal, '  data was rebinned:'
    journal, '    new size = ' + string(naxis1, format = '(I0)') + '×' + string(naxis2, format = '(I0)')
  endif else begin
    quality_matrix = check_quality(data, cal_pack, filter)

    journal, 'Data was not binned during acquisition.'
  endelse

  ; read the bad-pixel map
  bad_pixel_map = readfits(cal_pack.path + channel.bad_pixel_map[0].file_name, /silent)

  ; rebin the bad-pixel map to the size of the image to be corrected
  bad_pixel_map = rebin(bad_pixel_map, naxis1, naxis2) * bin_fact^2

  ; correct the quality matrix according to the bad-pixel map
  bad_pixels = where(bad_pixel_map gt 0, count)
  if count gt 0 then quality_matrix[bad_pixels] = 0
endif else quality_matrix = !null

; correct for the effective exposure time
; NOTE - this is done only for vl and uv images and accumulation matrices
telapse = float(obt_end - obt_beg)

dit = fxpar(metadata_extension_header, 'DIT')
ndit = fxpar(metadata_extension_header, 'NDIT', missing = 1)
ndit1 = fxpar(metadata_extension_header, 'NDIT1', missing = 1)
ndit2 = fxpar(metadata_extension_header, 'NDIT2', missing = 1)

if datatype le 2 then begin
  nsumexp = ndit * ndit1 * ndit2
  xposure = dit/1000.D0 * nsumexp
  data = long(data) * nsumexp

  if ~ isa(comment) then comment = !null
  comment = [comment, 'Image values were corrected for the total exposure time.']

  journal, 'Image values were corrected for the total exposure time:'
  journal, '  dit = ' + string(dit, format = '(I0)')
  journal, '  nsumexp = ' + string(nsumexp, format = '(I0)')
endif else begin
  nsumexp = 1
  xposure = dit/1000.D0
endelse
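; Illustrative example of the exposure-time correction above (hypothetical
; values): DIT = 15000 ms, NDIT = 10, NDIT1 = 3, NDIT2 = 1 give NSUMEXP = 30
; and XPOSURE = 15000/1000 * 30 = 450 s; the DN values are likewise multiplied
; by NSUMEXP so that they refer to the total, not the average, exposure.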
; adjust the primary header (it is almost the same for all data product types)
fxaddpar, primary_header, 'FILENAME', file_name
fxaddpar, primary_header, 'PARENT', file_basename(input.file_name), 'Name of the parent file', before = 'APID'
fxaddpar, primary_header, 'DATE', date, 'Date and time of FITS file creation', before = 'OBT_BEG'
fxaddpar, primary_header, 'DATE-OBS', date_beg, 'Same as DATE-BEG', before = 'OBT_BEG'
fxaddpar, primary_header, 'DATE-BEG', date_beg, 'Start time of observation', before = 'OBT_BEG'
fxaddpar, primary_header, 'DATE-AVG', date_avg, 'Average time of observation', before = 'OBT_BEG'
fxaddpar, primary_header, 'DATE-END', date_end, 'End time of observation', before = 'OBT_BEG'
fxaddpar, primary_header, 'TIMESYS', 'UTC', 'System used for time keywords', before = 'OBT_BEG'
fxaddpar, primary_header, 'TIMRDER', 0.0, '[s] Estimated random error in time values', before = 'OBT_BEG'
fxaddpar, primary_header, 'TIMSYER', 0.0, '[s] Estimated systematic error in time values', before = 'OBT_BEG'
fxaddpar, primary_header, 'LEVEL', 'L1'
fxaddpar, primary_header, 'CREATOR', 'metis_l1_prep.pro'
fxaddpar, primary_header, 'VERS_SW', input.sw_version
fxaddpar, primary_header, 'VERS_CAL', cal_pack.version, 'Version of the calibration package', after = 'VERS_SW'
fxaddpar, primary_header, 'OBSRVTRY', 'Solar Orbiter', 'Satellite name', before = 'INSTRUME'
fxaddpar, primary_header, 'TELESCOP', telescope, 'Telescope that took the measurement', before = 'INSTRUME'
fxaddpar, primary_header, 'DETECTOR', detector, 'Subunit/sensor', before = 'DATAMIN'
fxaddpar, primary_header, 'OBJECT', 'TBD', 'The use of the keyword OBJECT is [TBD]', before = 'DATAMIN'
fxaddpar, primary_header, 'OBS_MODE', planning_data.obs_mode, 'Observation mode', before = 'DATAMIN'
fxaddpar, primary_header, 'OBS_TYPE', obs_type, 'Encoded version of OBS_MODE', before = 'DATAMIN'
fxaddpar, primary_header, 'FILTER', filter, 'Filter used to acquire this image', before = 'DATAMIN'
fxaddpar, primary_header, 'WAVELNTH', wavelnth, '[nm] Characteristic wavelength of observation', before = 'DATAMIN'
fxaddpar, primary_header, 'WAVEMIN', wavemin, '[nm] Min. wavelength where response > 0.05 of max.', before = 'DATAMIN'
fxaddpar, primary_header, 'WAVEMAX', wavemax, '[nm] Max. wavelength where response > 0.05 of max.', before = 'DATAMIN'
fxaddpar, primary_header, 'WAVEBAND', waveband, 'Bandpass description', before = 'DATAMIN'
fxaddpar, primary_header, 'XPOSURE', xposure, '[s] Total effective exposure time', before = 'DATAMIN'
fxaddpar, primary_header, 'NSUMEXP', nsumexp, 'Number of detector readouts summed together', before = 'DATAMIN'
fxaddpar, primary_header, 'TELAPSE', telapse, '[s] Elapsed time between beginning and end of observation', before = 'DATAMIN'
fxaddpar, primary_header, 'SOOPNAME', planning_data.soop_name, 'Name of the SOOP campaign that the data belong to', before = 'DATAMIN'
fxaddpar, primary_header, 'SOOPTYPE', soop_type, 'Campaign ID(s) that the data belong to', before = 'DATAMIN'
fxaddpar, primary_header, 'OBS_ID', planning_data.obs_id, 'Unique ID of the individual observation', before = 'DATAMIN'
fxaddpar, primary_header, 'TARGET', 'TBD', 'Type of target from planning', before = 'DATAMIN'
fxaddpar, primary_header, 'BSCALE', 1, 'Ratio of physical to array value at 0 offset', before = 'DATAMIN'
fxaddpar, primary_header, 'BZERO', 0, 'Physical value for the array value 0', before = 'DATAMIN'
fxaddpar, primary_header, 'BTYPE', metis_datatype[datatype], 'Science data object type', before = 'DATAMIN'
fxaddpar, primary_header, 'BUNIT', 'DN', 'Units of physical value, after application of BSCALE and BZERO', before = 'DATAMIN'
fxaddpar, primary_header, 'BLANK', 0
fxaddpar, primary_header, 'DATAMIN', min(data, /nan)
fxaddpar, primary_header, 'DATAMAX', max(data, /nan)
fxaddpar, primary_header, 'IDB_VERS', input.idb_version, '', before = 'HDR_VERS'
fxaddpar, primary_header, 'INFO_URL', 'http://metis.oato.inaf.it', 'Link to more information on the instrument data', before = 'HISTORY'

if datatype le 6 then begin
  fxaddpar, primary_header, 'NBIN1', bin_fact, 'Binning factor in the dimension 1', before = 'COMPRESS'
  fxaddpar, primary_header, 'NBIN2', bin_fact, 'Binning factor in the dimension 2', before = 'COMPRESS'
  fxaddpar, primary_header, 'NBIN', bin_fact * bin_fact, 'Product of all NBIN values above', before = 'COMPRESS'
  fxaddpar, primary_header, 'PXBEG1', 1, 'First pixel that has been read out in dimension 1', before = 'COMPRESS'
  fxaddpar, primary_header, 'PXBEG2', 1, 'First pixel that has been read out in dimension 2', before = 'COMPRESS'
  fxaddpar, primary_header, 'PXEND1', naxis1, 'Last pixel that has been read out in dimension 1', before = 'COMPRESS'
  fxaddpar, primary_header, 'PXEND2', naxis2, 'Last pixel that has been read out in dimension 2', before = 'COMPRESS'
endif

; read the house-keeping telemetry
hk_table = json_parse(input.hk_file_name, /toarray, /tostruct)

; replace raw values with calibrated values in the primary header
empty_params = !null
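; NOTE - interpol_param is presumed to interpolate the given HK parameter to the
;        reference time date_avg and to collect in empty_params the mnemonics of
;        parameters for which no HK samples are available (empty_params is later
;        reported in the output contents.json).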
if datatype eq 0 or datatype eq 3 or datatype eq 5 then begin
  ; NOTE - DACPOL parameters are not calibrated since a calibration curve does not exist
  ;        in the IDB. Their calibration in physical units (e.g., voltages or angles)
  ;        should be done later.
  ; fxaddpar, primary_header, 'DAC1POL1', interpol_param(hk_table, 'NIT0E061', date_avg, empty_params = empty_params)
  ; fxaddpar, primary_header, 'DAC2POL1', interpol_param(hk_table, 'NIT0E062', date_avg, empty_params = empty_params)
  ; fxaddpar, primary_header, 'DAC1POL2', interpol_param(hk_table, 'NIT0E064', date_avg, empty_params = empty_params)
  ; fxaddpar, primary_header, 'DAC2POL2', interpol_param(hk_table, 'NIT0E065', date_avg, empty_params = empty_params)
  ; fxaddpar, primary_header, 'DAC1POL3', interpol_param(hk_table, 'NIT0E067', date_avg, empty_params = empty_params)
  ; fxaddpar, primary_header, 'DAC2POL3', interpol_param(hk_table, 'NIT0E068', date_avg, empty_params = empty_params)
  ; fxaddpar, primary_header, 'DAC1POL4', interpol_param(hk_table, 'NIT0E06A', date_avg, empty_params = empty_params)
  ; fxaddpar, primary_header, 'DAC2POL4', interpol_param(hk_table, 'NIT0E06B', date_avg, empty_params = empty_params)
  fxaddpar, primary_header, 'TSENSOR', interpol_param(hk_table, 'NIT0E0E0', date_avg, empty_params = empty_params), '[degC] VLDA temperature'
  fxaddpar, primary_header, 'PMPTEMP', interpol_param(hk_table, 'NIT0L00D', date_avg, empty_params = empty_params), '[degC] PMP temperature'

  journal, 'Header keywords were calibrated using HK parameters:'
  journal, '  TSENSOR = ' + string(fxpar(primary_header, 'TSENSOR'), format = '(F0)')
  journal, '  PMPTEMP = ' + string(fxpar(primary_header, 'PMPTEMP'), format = '(F0)')
endif

if datatype eq 1 or datatype eq 4 or datatype eq 6 then begin
  fxaddpar, primary_header, 'HVU_SCR', interpol_param(hk_table, 'NIT0E070', date_avg, empty_params = empty_params), '[raw] HVU Screen commanded voltage'
  fxaddpar, primary_header, 'HVU_MCP', interpol_param(hk_table, 'NIT0E071', date_avg, empty_params = empty_params), '[raw] HVU MCP commanded voltage'
  fxaddpar, primary_header, 'HV_SCR_V', interpol_param(hk_table, 'NIT0E0B7', date_avg, empty_params = empty_params), '[V] HVU Screen + MCP read voltage', after = 'HVU_MCP'
  fxaddpar, primary_header, 'HV_MCP_V', interpol_param(hk_table, 'NIT0E0B6', date_avg, empty_params = empty_params), '[V] HVU MCP read voltage', after = 'HV_SCR_V'
  fxaddpar, primary_header, 'HV_MCP_I', interpol_param(hk_table, 'NIT0E0BF', date_avg, empty_params = empty_params), '[uA] HVU MCP current', after = 'HV_MCP_V'
  fxaddpar, primary_header, 'HV_TEMP', interpol_param(hk_table, 'NIT0E0B5', date_avg, empty_params = empty_params), '[degC] HVU temperature', after = 'HV_MCP_I'
  fxaddpar, primary_header, 'TSENSOR', interpol_param(hk_table, 'NIT0E050', date_avg, empty_params = empty_params), '[degC] UVDA temperature'

  journal, 'Header keywords were calibrated using HK parameters:'
  journal, '  HVU_SCR = ' + string(fxpar(primary_header, 'HVU_SCR'), format = '(F0)')
  journal, '  HVU_MCP = ' + string(fxpar(primary_header, 'HVU_MCP'), format = '(F0)')
  journal, '  HV_SCR_V = ' + string(fxpar(primary_header, 'HV_SCR_V'), format = '(F0)')
  journal, '  HV_MCP_V = ' + string(fxpar(primary_header, 'HV_MCP_V'), format = '(F0)')
  journal, '  HV_MCP_I = ' + string(fxpar(primary_header, 'HV_MCP_I'), format = '(F0)')
  journal, '  HV_TEMP = ' + string(fxpar(primary_header, 'HV_TEMP'), format = '(F0)')
  journal, '  TSENSOR = ' + string(fxpar(primary_header, 'TSENSOR'), format = '(F0)')
endif

if empty_params eq !null then empty_params = ''
; replace with text keyword values
key = fxpar(primary_header, 'MEASKIND', count = count)
if count gt 0 then begin
  pol_id = fxpar(primary_header, 'POL_ID')
  if key eq 0 and pol_id eq 0 then fxaddpar, primary_header, 'MEASKIND', 'Fixed pol.'
  if key eq 0 and pol_id ne 0 then fxaddpar, primary_header, 'MEASKIND', 'pB'
  if key eq 1 then fxaddpar, primary_header, 'MEASKIND', 'tB'
endif

key = fxpar(primary_header, 'FRAMEMOD', count = count)
if count gt 0 then begin
  if key eq 0 then fxaddpar, primary_header, 'FRAMEMOD', 'Single'
  if key eq 1 then fxaddpar, primary_header, 'FRAMEMOD', 'Multiple'
endif

key = fxpar(primary_header, 'VLFPFILT', count = count)
if count gt 0 then begin
  pol_id = fxpar(primary_header, 'POL_ID')
  if pol_id ne 0 then fxaddpar, primary_header, 'VLFPFILT', 'Not applicable' else begin
    if key eq 0 then fxaddpar, primary_header, 'VLFPFILT', 'Binning'
    if key eq 1 then fxaddpar, primary_header, 'VLFPFILT', 'Masking'
    if key eq 2 then fxaddpar, primary_header, 'VLFPFILT', 'No filter'
  endelse
endif

key = fxpar(primary_header, 'PMPSTAB', count = count)
if count gt 0 then begin
  if key eq 0 then fxaddpar, primary_header, 'PMPSTAB', 'Not stable'
  if key eq 1 then fxaddpar, primary_header, 'PMPSTAB', 'Stable'
endif

key = fxpar(primary_header, 'REF_ROWS', count = count)
if count gt 0 then begin
  if key eq 0 then fxaddpar, primary_header, 'REF_ROWS', 'Not included'
  if key eq 1 then fxaddpar, primary_header, 'REF_ROWS', 'Included'
endif

keys = ['PRESUM', 'CME_OBS', 'SUNDISK', 'CR_SEP', 'SP_NOISE', 'COMPR', 'MASKING', 'RADIAL']
foreach key_name, keys do begin
  key = fxpar(primary_header, key_name, count = count)
  if count gt 0 then begin
    if key eq 0 then fxaddpar, primary_header, key_name, 'Disabled'
    if key eq 1 then fxaddpar, primary_header, key_name, 'Enabled'
  endif
endforeach

; convert the fits header into an idl structure
header = fits_hdr2struct(primary_header)

; append wcs keywords
if datatype le 6 then begin
  wcs = metis_wcs(header, cal_pack, ref_detector = ref_detector)
  foreach element, wcs do fxaddpar, primary_header, element.name, element.value, element.comment, before = 'DATATYPE'
endif

; append solar keywords
ephemeris = solo_get_ephemeris(header, cal_pack)
foreach element, ephemeris do fxaddpar, primary_header, element.name, element.value, element.comment, before = 'DATATYPE'

fxaddpar, primary_header, 'HISTORY', 'WCS and solar ephemeris:'
fxaddpar, primary_header, 'HISTORY', '  SKD version = ' + kernel_version

; remove redundant and misleading keywords
sxdelpar, primary_header, 'WIDTH'
sxdelpar, primary_header, 'HEIGHT'
sxdelpar, primary_header, 'X_SIZE'
sxdelpar, primary_header, 'Y_SIZE'
sxdelpar, primary_header, 'Z_SIZE'
sxdelpar, primary_header, 'P_BANDS'
sxdelpar, primary_header, 'N_BANDS'
sxdelpar, primary_header, 'ORIG_X'
sxdelpar, primary_header, 'ORIG_Y'

; modify keywords for file history
fxaddpar, primary_header, 'HISTORY', 'L1 FITS file created on ' + date
; modify keywords for comments
old_comment = fxpar(primary_header, 'COMMENT', count = count)
if count eq 0 then old_comment = ''
sxdelpar, primary_header, 'COMMENT'

if max(old_comment.startswith('Warning: correction of the OBT for pB acquisitions was applied.')) then begin
  if ~ isa(comment) then comment = !null
  comment = ['Correction of the OBT for pB acquisitions was applied at L0 level.', comment]
endif

if max(old_comment.startswith('Warning: the OBT of the acquisition is missing.')) then begin
  if ~ isa(comment) then comment = !null
  comment = ['Warning: the OBT of the acquisition is missing.', $
    ' The generation time of the first science packet was used', $
    ' to compute OBT_BEG. Polarimetric keywords DAC*POL*', $
    ' may be wrong.', comment]
endif

if isa(comment) then begin
  for k = 0, n_elements(comment) - 1 do $
    fxaddpar, primary_header, 'COMMENT', comment[k]
endif

; rectify the image if requested
if not ref_detector then begin
  data = metis_rectify(data, filter)
  quality_matrix = metis_rectify(quality_matrix, filter)
endif

; add checksum and datasum to the fits header and save
fits_add_checksum, primary_header, data
mwrfits, data, out_file_name, primary_header, /no_comment, /create, /silent

journal, 'FITS file created:'
journal, '  file name = ' + out_file_name

; if applicable, save the data binary-table extension as it is
if isa(data_bin_table) then begin
  fits_add_checksum, data_extension_header, data_bin_table
  mwrfits, data_bin_table, out_file_name, data_extension_header, /no_comment, /silent
endif

; save the quality matrix
if datatype le 6 and isa(quality_matrix) then begin
  quality_matrix_header = !null
  fxaddpar, quality_matrix_header, 'PCOUNT', 0, 'Parameter count'
  fxaddpar, quality_matrix_header, 'GCOUNT', 1, 'Group count'
  fxaddpar, quality_matrix_header, 'EXTNAME', 'Quality matrix', 'Extension name'
  fits_add_checksum, quality_matrix_header, quality_matrix
  mwrfits, quality_matrix, out_file_name, quality_matrix_header, /no_comment, /silent

  journal, 'Quality-matrix extension correctly added.'
endif

; build the telemetry extension
hk_extension_header = !null
fxaddpar, hk_extension_header, 'PCOUNT', 0, 'Parameter count'
fxaddpar, hk_extension_header, 'GCOUNT', 1, 'Group count'
fxaddpar, hk_extension_header, 'EXTNAME', 'House-keeping', 'Extension name'

hk_bin_table = make_bin_table(hk_table)
mwrfits, hk_bin_table, out_file_name, hk_extension_header, /no_comment, /silent

journal, 'HK binary-table extension correctly added.'

; unload the spice kernels
load_spice_kernels, kernel_list = kernel_list, /unload

journal, 'SPICE kernel files unloaded.'

; write the auxiliary information file
output = { $
  file_name: out_file_name, $
  l0_file_name: input.file_name, $
  log_file_name: 'output/metis_l1_prep_log.txt', $
  empty_params: empty_params[uniq(empty_params)] $
  }
json_write, output, 'output/contents.json'

journal, 'Output saved.'

; close the log
journal, 'Exiting without errors.'
journal

exit, status = 0

end
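; -----------------------------------------------------------------------------
; Summary of the auxiliary files referenced by this procedure (derived from the
; code above; only fields actually read or written here are listed):
;   input/contents.json  - file_name, spice_kernels, cal_pack_path,
;                          planning_file_name, hk_file_name, l1_version,
;                          sw_version, idb_version
;   output/contents.json - file_name, l0_file_name, log_file_name, empty_params
; -----------------------------------------------------------------------------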