pro metis_l1_prep, ref_detector = ref_detector
    ; keyword defining if the detector reference frame must be used for the output

    journal, 'output/metis_l1_prep_log.txt'

    ; some definitions

    metis_datatype = list( $
        'VL image', $ ; 0
        'UV image', $ ; 1
        'PCU accumulation matrix', $ ; 2
        'VL temporal standard deviation', $ ; 3
        'UV temporal standard deviation', $ ; 4
        'VL cosmic-ray matrix', $ ; 5
        'UV cosmic-ray matrix', $ ; 6
        'PCU event list', $ ; 7
        'PCU test event list', $ ; 8
        'VL light-curve' $ ; 9
        )

    ; read the auxiliary input file

    input = json_parse('input/contents.json', /toarray, /tostruct)

    journal, 'File ' + input.file_name

    ; load the spice kernels

    load_spice_kernels, input.spice_kernels, kernel_list = kernel_list, kernel_version = kernel_version

    journal, 'SPICE kernel files correctly loaded:'
    journal, '  SKD version = ' + kernel_version

    ; import the calibration package index file

    cal_pack = json_parse(input.cal_pack_path + '/index.json', /toarray, /tostruct)

    ; add the calibration package path to the cal_pack structure

    cal_pack = create_struct('path', input.cal_pack_path + path_sep(), cal_pack)

    journal, 'Calibration package correctly imported:'
    journal, '  version = ' + cal_pack.version
    journal, '  validity range = ' + string(cal_pack.validity_range.start, cal_pack.validity_range._end, format = '(A, "-", A)')

    ; read the primary hdu

    data = mrdfits(input.file_name, 0, primary_header, /silent)

    naxis = fxpar(primary_header, 'NAXIS')
    naxis1 = fxpar(primary_header, 'NAXIS1', missing = 0)
    naxis2 = fxpar(primary_header, 'NAXIS2', missing = 0)

    ; read the metadata extension

    metadata_bin_table = mrdfits(input.file_name, 'metis metadata', metadata_extension_header, /silent)

    ; identify the data product and its nominal size

    datatype = fxpar(metadata_extension_header, 'DATATYPE')

    journal, 'L0 FITS file correctly read:'
    journal, '  datatype = ' + string(datatype, format = '(I0)')

    ; if the data product is a light curve or a PCU event list, read the data binary table

    if datatype gt 6 then data_bin_table = mrdfits(input.file_name, 1, data_extension_header, /silent) else data_bin_table = !null

    ; if the data product is an accumulation matrix, look for the accumulation vector extension and read it if it exists

    if datatype eq 2 then $
        if fxpar(metadata_extension_header, 'M') eq 256 then $
            accumul_vector = mrdfits(input.file_name, 'accumulation vector', vector_extension_header, /silent) else accumul_vector = !null

    ; NOTE - the radialization extension is ignored

    ; read the planning data

    planning_data = json_parse(input.planning_file_name, /toarray, /tostruct)

    ; definitions for the primary header
    ; version of the fits file

    version = string(input.l1_version, format = '(I02)')

    ; creation and acquisition times in utc

    date = date_conv(systime(/julian, /utc), 'FITS')

    obt_beg = fxpar(primary_header, 'OBT_BEG')
    obt_end = fxpar(primary_header, 'OBT_END')
    obt_avg = (obt_beg + obt_end) / 2.0d

    date_beg = solo_obt2utc(obt_beg)
    date_end = solo_obt2utc(obt_end)
    date_avg = solo_obt2utc(obt_avg)

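    ; build a compact time string (dashes and colons removed from the UTC start time) used in the output file name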
    date_beg_string = strmid(date_beg, 0, 19)
    date_beg_string = date_beg_string.replace('-', '')
    date_beg_string = date_beg_string.replace(':', '')

    journal, 'Calculated UTC time of acquisition start:'
    journal, '  time = ' + date_beg

    ; name of the fits file

    file_name_fields = strsplit(fxpar(primary_header, 'FILENAME'), '_', /extract)
    file_name = 'solo_L1_' + file_name_fields[2] + '_' + date_beg_string + '_V' + version + '.fits'
    out_file_name = 'output/' + file_name

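    ; assign the filter from the data product type (PCU products belong to the UV channel)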
    case datatype of
        0: filter = 'VL'
        1: filter = 'UV'
        2: filter = 'UV'
        3: filter = 'VL'
        4: filter = 'UV'
        5: filter = 'VL'
        6: filter = 'UV'
        7: filter = 'UV'
        8: filter = 'UV'
        9: filter = 'VL'
    endcase

    ; choose the correct calibration-package structure based on the image filter

    if filter eq 'VL' then channel = cal_pack.vl_channel else channel = cal_pack.uv_channel

    ; instrument/telescope/detector keywords

    detector = filter + 'D'
    telescope = 'SOLO/Metis/' + detector
    wavelnth = channel.bandpass.value[1]
    wavemin = channel.bandpass.value[0]
    wavemax = channel.bandpass.value[2]
    waveband = channel.name

    ; campaign keywords

    if planning_data.obs_id ne 'none' then begin
        obs_id_fields = strsplit(planning_data.obs_id, '_', /extract)
        if obs_id_fields[2] eq '000' then sooptype = 'none' else sooptype = obs_id_fields[2]
        if obs_id_fields[4] eq '000' then obs_type = 'none' else obs_type = obs_id_fields[4]
    endif else begin
        sooptype = 'none'
        obs_type = 'none'
    endelse

    ; build the fits file extensions

    ; merge the metadata extension header into the primary header, removing structural keywords

    del_tags = list( $
        'XTENSION', $
        'BITPIX', $
        'NAXIS', $
        'NAXIS1', $
        'NAXIS2', $
        'PCOUNT', $
        'GCOUNT', $
        'TFIELDS', $
        'TTYPE1', $
        'TFORM1', $
        'TTYPE2', $
        'TFORM2', $
        'EXTNAME', $
        'CHECKSUM', $
        'DATASUM', $
        'END' $
        )

    foreach item, metadata_extension_header do begin
        tag = strmid(item, 0, 8)
        tag = tag.trim()
        if ~isa(del_tags.where(tag)) then begin
            value = fxpar(metadata_extension_header, tag, comment = comment)
            fxaddpar, primary_header, tag, value, comment, before = 'CHECKSUM'
        endif
    endforeach

    compr = fxpar(primary_header, 'COMPR', missing = -1)
    xsize = fxpar(primary_header, 'X_SIZE', missing = -1)

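    ; if the data are compressed and the size keywords are missing, rebuild the geometry keywords from the commanded binning (full-frame 2048x2048 acquisition assumed)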
    if compr and xsize lt 0 then begin
        bin_type = fxpar(primary_header, 'BIN_TYPE')
        fxaddpar, primary_header, 'X_SIZE', 2048 / 2 ^ bin_type, before = 'CHECKSUM'
        fxaddpar, primary_header, 'Y_SIZE', 2048 / 2 ^ bin_type, before = 'CHECKSUM'
        fxaddpar, primary_header, 'Z_SIZE', 1, before = 'CHECKSUM'
        fxaddpar, primary_header, 'P_BANDS', 0, before = 'CHECKSUM'
        fxaddpar, primary_header, 'N_BANDS', 1, before = 'CHECKSUM'
        fxaddpar, primary_header, 'ORIG_X', 2048, before = 'CHECKSUM'
        fxaddpar, primary_header, 'ORIG_Y', 2048, before = 'CHECKSUM'
        fxaddpar, primary_header, 'FIRSTROW', 0, before = 'CHECKSUM'
        fxaddpar, primary_header, 'B0_BIN', bin_type, before = 'CHECKSUM'
        fxaddpar, primary_header, 'B0_DQ', 0, before = 'CHECKSUM'
        fxaddpar, primary_header, 'B0_STOP', 2048, before = 'CHECKSUM'
        fxaddpar, primary_header, 'B1_BIN', 0, before = 'CHECKSUM'
        fxaddpar, primary_header, 'B1_DQ', 0, before = 'CHECKSUM'
        fxaddpar, primary_header, 'B1_STOP', 0, before = 'CHECKSUM'
        fxaddpar, primary_header, 'B2_BIN', 0, before = 'CHECKSUM'
        fxaddpar, primary_header, 'B2_DQ', 0, before = 'CHECKSUM'
        fxaddpar, primary_header, 'B2_STOP', 0, before = 'CHECKSUM'
    endif

    ; rebin the image if binning was applied during the acquisition and evaluate the data quality
    ; NOTE - this is done only for image data products

    comment = !null
    if datatype le 6 then begin
        bin_type = fxpar(primary_header, 'B0_BIN', missing = 0)

        ; check for wrong binning type in fixed-polarization filtered images

        pol_id = fxpar(primary_header, 'POL_ID', missing = -1)
        vlfpfilt = fxpar(primary_header, 'VLFPFILT', missing = -1)
        if vlfpfilt eq 0 and pol_id eq 0 and bin_type eq 0 then bin_type = 1

        bin_fact = 2 ^ bin_type ; binning factor = 1, 2, or 4

        if bin_fact gt 1 then begin
            naxis1 = naxis1 / bin_fact
            naxis2 = naxis2 / bin_fact
            data = rebin(data, naxis1, naxis2)

            ; evaluate the quality matrix

            quality_matrix = check_quality(data, cal_pack, filter)

            ; correct values including binning factor only for vl and uv images and accumulation matrices

            if datatype le 2 then data = long(data) * bin_fact ^ 2

            if ~isa(comment) then comment = !null
            comment = [comment, 'Image was rebinned according to the commanded binning factor.']

            journal, 'Data was binned during acquisition:'
            journal, '  binning = ' + string(bin_fact, format = '(I1)') + '×' + string(bin_fact, format = '(I1)')
            journal, '  data was rebinned:'
            journal, '    new size = ' + string(naxis1, format = '(I0)') + '×' + string(naxis2, format = '(I0)')
        endif else begin
            quality_matrix = check_quality(data, cal_pack, filter)

            journal, 'Data was not binned during acquisition.'
        endelse

        ; read the bad-pixel map

        bad_pixel_map = readfits(cal_pack.path + channel.bad_pixel_map[0].file_name, /silent)

        ; adjust the map based on the presence of the reference rows

        map_size = size(bad_pixel_map)
        ref_rows = fxpar(primary_header, 'REF_ROWS', missing = -1)
        if ref_rows ge 0 then first_row = ref_rows ? 2 : 0 else first_row = 0
        bad_pixel_map = bad_pixel_map[*, first_row : first_row + map_size[1] - 1]

        ; rebin the bad-pixel map to the size of the image to be corrected

        bad_pixel_map = rebin(bad_pixel_map, naxis1, naxis2) * bin_fact ^ 2

        ; correct the quality matrix according to the bad-pixel map

        bad_pixels = where(bad_pixel_map gt 0, count)
        if count gt 0 then quality_matrix[bad_pixels] = 0
    endif else quality_matrix = !null

    ; correct for the effective exposure time
    ; NOTE - this is done only for vl and uv images and accumulation matrices

    telapse = float(obt_end - obt_beg)

    dit = fxpar(metadata_extension_header, 'DIT')
    ndit = fxpar(metadata_extension_header, 'NDIT', missing = 1)
    ndit1 = fxpar(metadata_extension_header, 'NDIT1', missing = 1)
    ndit2 = fxpar(metadata_extension_header, 'NDIT2', missing = 1)

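    ; the total effective exposure time is the single integration time (DIT, in ms) multiplied by the number of summed frames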
    if datatype le 2 then begin
        nsumexp = ndit * ndit1 * ndit2
        xposure = dit / 1000.d0 * nsumexp
        data = long(data) * nsumexp

        if ~isa(comment) then comment = !null
        comment = [comment, 'Image values were corrected for the total exposure time.']

        journal, 'Image values were corrected for the total exposure time:'
        journal, '  dit = ' + string(dit, format = '(I0)')
        journal, '  nsumexp = ' + string(nsumexp, format = '(I0)')
    endif else begin
        nsumexp = 1
        xposure = dit / 1000.d0
    endelse

    ; adjust the primary header (it is almost the same for all data product types)

    fxaddpar, primary_header, 'FILENAME', file_name
    fxaddpar, primary_header, 'PARENT', file_basename(input.file_name), 'name of the parent file', before = 'APID'
    fxaddpar, primary_header, 'DATE', date, 'date and time of FITS file creation', before = 'OBT_BEG'
    fxaddpar, primary_header, 'DATE-OBS', date_beg, 'same as DATE-BEG', before = 'OBT_BEG'
    fxaddpar, primary_header, 'DATE-BEG', date_beg, 'start time of observation', before = 'OBT_BEG'
    fxaddpar, primary_header, 'DATE-AVG', date_avg, 'average time of observation', before = 'OBT_BEG'
    fxaddpar, primary_header, 'DATE-END', date_end, 'end time of observation', before = 'OBT_BEG'
    fxaddpar, primary_header, 'TIMESYS', 'UTC', 'system used for time keywords', before = 'OBT_BEG'
    fxaddpar, primary_header, 'TIMRDER', 0.0, '[s] estimated random error in time values', before = 'OBT_BEG'
    fxaddpar, primary_header, 'TIMSYER', 0.0, '[s] estimated systematic error in time values', before = 'OBT_BEG'
    fxaddpar, primary_header, 'LEVEL', 'L1'
    fxaddpar, primary_header, 'CREATOR', 'metis_l1_prep.pro'
    fxaddpar, primary_header, 'VERS_SW', input.sw_version
    fxaddpar, primary_header, 'VERS_CAL', cal_pack.version, 'version of the calibration package', after = 'VERS_SW'
    fxaddpar, primary_header, 'VERSION', version
    fxaddpar, primary_header, 'OBSRVTRY', 'Solar Orbiter', 'satellite name', before = 'INSTRUME'
    fxaddpar, primary_header, 'TELESCOP', telescope, 'telescope that took the measurement', before = 'INSTRUME'
    fxaddpar, primary_header, 'DETECTOR', detector, 'subunit/sensor', before = 'DATAMIN'
    fxaddpar, primary_header, 'OBJECT', 'TBD', 'the use of the keyword OBJECT is [TBD]', before = 'DATAMIN'
    fxaddpar, primary_header, 'OBS_MODE', planning_data.obs_mode, 'observation mode', before = 'DATAMIN'
    fxaddpar, primary_header, 'OBS_TYPE', obs_type, 'encoded version of OBS_MODE', before = 'DATAMIN'
    fxaddpar, primary_header, 'FILTER', filter, 'filter used to acquire this image', before = 'DATAMIN'
    fxaddpar, primary_header, 'WAVELNTH', wavelnth, '[nm] characteristic wavelength of observation', before = 'DATAMIN'
    fxaddpar, primary_header, 'WAVEMIN', wavemin, '[nm] min. bandpass wavelength', before = 'DATAMIN'
    fxaddpar, primary_header, 'WAVEMAX', wavemax, '[nm] max. bandpass wavelength', before = 'DATAMIN'
    fxaddpar, primary_header, 'WAVEBAND', waveband, 'bandpass description', before = 'DATAMIN'
    fxaddpar, primary_header, 'XPOSURE', xposure, '[s] total effective exposure time', before = 'DATAMIN'
    fxaddpar, primary_header, 'NSUMEXP', nsumexp, 'number of detector readouts summed together', before = 'DATAMIN'
    fxaddpar, primary_header, 'TELAPSE', telapse, '[s] elapsed time during observation', before = 'DATAMIN'
    fxaddpar, primary_header, 'SOOPNAME', planning_data.soop_name, 'name of the SOOP that the data belong to', before = 'DATAMIN'
    fxaddpar, primary_header, 'SOOPTYPE', sooptype, 'campaign ID(s) that the data belong to', before = 'DATAMIN'
    fxaddpar, primary_header, 'OBS_ID', planning_data.obs_id, 'unique ID of the individual observation', before = 'DATAMIN'
    fxaddpar, primary_header, 'TARGET', 'TBD', 'type of target from planning', before = 'DATAMIN'
    fxaddpar, primary_header, 'BSCALE', 1, 'ratio of physical to array value at 0 offset', before = 'DATAMIN'
    fxaddpar, primary_header, 'BZERO', 0, 'physical value for the array value 0', before = 'DATAMIN'
    fxaddpar, primary_header, 'BTYPE', metis_datatype[datatype], 'science data object type', before = 'DATAMIN'
    fxaddpar, primary_header, 'BUNIT', 'DN', 'units of physical value', before = 'DATAMIN'
    fxaddpar, primary_header, 'BLANK', 0, 'data value used to mark undefined pixels', after = 'BUNIT'
    fxaddpar, primary_header, 'DATAMIN', min(data, /nan)
    fxaddpar, primary_header, 'DATAMAX', max(data, /nan)
    fxaddpar, primary_header, 'IDB_VERS', input.idb_version, '', before = 'HDR_VERS'
    fxaddpar, primary_header, 'INFO_URL', 'http://metis.oato.inaf.it', 'link to more information on the instrument', before = 'HISTORY'

    if datatype le 6 then begin
        fxaddpar, primary_header, 'NBIN1', bin_fact, 'binning factor in the dimension 1', before = 'COMPRESS'
        fxaddpar, primary_header, 'NBIN2', bin_fact, 'binning factor in the dimension 2', before = 'COMPRESS'
        fxaddpar, primary_header, 'NBIN', bin_fact * bin_fact, 'product of all NBIN values above', before = 'COMPRESS'
        fxaddpar, primary_header, 'PXBEG1', 1, 'first pixel read out in dimension 1', before = 'COMPRESS'
        fxaddpar, primary_header, 'PXBEG2', 1, 'first pixel read out in dimension 2', before = 'COMPRESS'
        fxaddpar, primary_header, 'PXEND1', naxis1, 'last pixel read out in dimension 1', before = 'COMPRESS'
        fxaddpar, primary_header, 'PXEND2', naxis2, 'last pixel read out in dimension 2', before = 'COMPRESS'
    endif

    ; read the house-keeping telemetry

    hk_table = json_parse(input.hk_file_name, /toarray, /tostruct)

    ; replace raw values with calibrated values in the primary header

    empty_params = !null

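    ; VL data products (datatypes 0, 3, and 5)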
    if datatype eq 0 or datatype eq 3 or datatype eq 5 then begin
        ; patch to fix the lack of the keyword SEQ_NUM

        pol_id = fxpar(primary_header, 'POL_ID', missing = 0)
        if pol_id ge 1 and pol_id le 4 then begin
            seq_num = fxpar(primary_header, 'SEQ_NUM', missing = 0)
            if seq_num eq 0 then begin
                obj_cnt = fxpar(primary_header, 'OBJ_CNT')
                n_pol = fxpar(primary_header, 'N_POL', missing = 4)
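                ; derive the sequence number from the overall object counter and the number of polarization states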
                seq_num = ((obj_cnt - 1) / n_pol + 1)
                fxaddpar, primary_header, 'SEQ_NUM', seq_num, before = 'POL_ID'
            endif
        endif

        ; NOTE - DACPOL parameters are not calibrated since a calibration curve does not exist in the IDB. Their calibration in physical units (e.g., voltages or angles) should be done later

        ; fxaddpar, primary_header, 'DAC1POL1', interpol_param(hk_table, 'NIT0E061', date_avg, empty_params = empty_params)
        ; fxaddpar, primary_header, 'DAC2POL1', interpol_param(hk_table, 'NIT0E062', date_avg, empty_params = empty_params)
        ; fxaddpar, primary_header, 'DAC1POL2', interpol_param(hk_table, 'NIT0E064', date_avg, empty_params = empty_params)
        ; fxaddpar, primary_header, 'DAC2POL2', interpol_param(hk_table, 'NIT0E065', date_avg, empty_params = empty_params)
        ; fxaddpar, primary_header, 'DAC1POL3', interpol_param(hk_table, 'NIT0E067', date_avg, empty_params = empty_params)
        ; fxaddpar, primary_header, 'DAC2POL3', interpol_param(hk_table, 'NIT0E068', date_avg, empty_params = empty_params)
        ; fxaddpar, primary_header, 'DAC1POL4', interpol_param(hk_table, 'NIT0E06A', date_avg, empty_params = empty_params)
        ; fxaddpar, primary_header, 'DAC2POL4', interpol_param(hk_table, 'NIT0E06B', date_avg, empty_params = empty_params)

        fxaddpar, primary_header, 'TSENSOR', interpol_param(hk_table, 'NIT0E0E0', date_avg, empty_params = empty_params), '[degC] VLDA temperature'
        fxaddpar, primary_header, 'PMPTEMP', interpol_param(hk_table, 'NIT0L00D', date_avg, empty_params = empty_params), '[degC] PMP temperature'

        journal, 'Header keywords were calibrated using HK parameters:'
        journal, '  TSENSOR = ' + string(fxpar(primary_header, 'TSENSOR'), format = '(F0)')
        journal, '  PMPTEMP = ' + string(fxpar(primary_header, 'PMPTEMP'), format = '(F0)')
    endif

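    ; UV data products (datatypes 1, 4, and 6)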
    if datatype eq 1 or datatype eq 4 or datatype eq 6 then begin
        fxaddpar, primary_header, 'HVU_SCR', interpol_param(hk_table, 'NIT0E070', date_avg, empty_params = empty_params), '[raw] HVU Screen commanded voltage'
        fxaddpar, primary_header, 'HVU_MCP', interpol_param(hk_table, 'NIT0E071', date_avg, empty_params = empty_params), '[raw] HVU MCP commanded voltage'
        fxaddpar, primary_header, 'HV_SCR_V', interpol_param(hk_table, 'NIT0E0B7', date_avg, empty_params = empty_params), '[V] HVU Screen + MCP read voltage', after = 'HVU_MCP'
        fxaddpar, primary_header, 'HV_MCP_V', interpol_param(hk_table, 'NIT0E0B6', date_avg, empty_params = empty_params), '[V] HVU MCP read voltage', after = 'HV_SCR_V'
        fxaddpar, primary_header, 'HV_MCP_I', interpol_param(hk_table, 'NIT0E0BF', date_avg, empty_params = empty_params), '[uA] HVU MCP current', after = 'HV_MCP_V'
        fxaddpar, primary_header, 'HV_TEMP', interpol_param(hk_table, 'NIT0E0B5', date_avg, empty_params = empty_params), '[degC] HVU temperature', after = 'HV_MCP_I'
        fxaddpar, primary_header, 'TSENSOR', interpol_param(hk_table, 'NIT0E050', date_avg, empty_params = empty_params), '[degC] UVDA temperature'

        journal, 'Header keywords were calibrated using HK parameters:'
        journal, '  HVU_SCR  = ' + string(fxpar(primary_header, 'HVU_SCR'), format = '(F0)')
        journal, '  HVU_MCP  = ' + string(fxpar(primary_header, 'HVU_MCP'), format = '(F0)')
        journal, '  HV_SCR_V = ' + string(fxpar(primary_header, 'HV_SCR_V'), format = '(F0)')
        journal, '  HV_MCP_V = ' + string(fxpar(primary_header, 'HV_MCP_V'), format = '(F0)')
        journal, '  HV_MCP_I = ' + string(fxpar(primary_header, 'HV_MCP_I'), format = '(F0)')
        journal, '  HV_TEMP = ' + string(fxpar(primary_header, 'HV_TEMP'), format = '(F0)')
        journal, '  TSENSOR  = ' + string(fxpar(primary_header, 'TSENSOR'), format = '(F0)')
    endif

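    ; make sure empty_params is defined before it is written to the output auxiliary file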
    if empty_params eq !null then empty_params = ''

    ; replace with text keyword values

    key = fxpar(primary_header, 'MEASKIND', count = count)
    if count gt 0 then begin
        pol_id = fxpar(primary_header, 'POL_ID')
        if key eq 0 and pol_id eq 0 then fxaddpar, primary_header, 'MEASKIND', 'Fixed pol.'
        if key eq 0 and pol_id ne 0 then fxaddpar, primary_header, 'MEASKIND', 'pB'
        if key eq 1 then fxaddpar, primary_header, 'MEASKIND', 'tB'
    endif

    key = fxpar(primary_header, 'FRAMEMOD', count = count)
    if count gt 0 then begin
        if key eq 0 then fxaddpar, primary_header, 'FRAMEMOD', 'Single'
        if key eq 1 then fxaddpar, primary_header, 'FRAMEMOD', 'Multiple'
    endif

    key = fxpar(primary_header, 'VLFPFILT', count = count)
    if count gt 0 then begin
        pol_id = fxpar(primary_header, 'POL_ID')
        if pol_id ne 0 then fxaddpar, primary_header, 'VLFPFILT', 'Not applicable' else begin
            if key eq 0 then fxaddpar, primary_header, 'VLFPFILT', 'Binning'
            if key eq 1 then fxaddpar, primary_header, 'VLFPFILT', 'Masking'
            if key eq 2 then fxaddpar, primary_header, 'VLFPFILT', 'No filter'
        endelse
    endif

    key = fxpar(primary_header, 'PMPSTAB', count = count)
    if count gt 0 then begin
        if key eq 0 then fxaddpar, primary_header, 'PMPSTAB', 'Not stable'
        if key eq 1 then fxaddpar, primary_header, 'PMPSTAB', 'Stable'
    endif

    key = fxpar(primary_header, 'REF_ROWS', count = count)
    if count gt 0 then begin
        if key eq 0 then fxaddpar, primary_header, 'REF_ROWS', 'Not included'
        if key eq 1 then fxaddpar, primary_header, 'REF_ROWS', 'Included'
    endif

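    ; flag keywords sharing the same Disabled/Enabled text mapping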
    keys = ['PRESUM', 'CME_OBS', 'SUNDISK', 'CR_SEP', 'SP_NOISE', 'COMPR', 'MASKING', 'RADIAL']
    foreach key_name, keys do begin
        key = fxpar(primary_header, key_name, count = count)
        if count gt 0 then begin
            if key eq 0 then fxaddpar, primary_header, key_name, 'Disabled'
            if key eq 1 then fxaddpar, primary_header, key_name, 'Enabled'
        endif
    endforeach

    ; convert the fits header into an idl structure

    header = fits_hdr2struct(primary_header)

    ; append wcs keywords

    if datatype le 6 then begin
        wcs = metis_wcs(header, cal_pack, ref_detector = ref_detector)
        foreach element, wcs do fxaddpar, primary_header, element.name, element.value, element.comment, before = 'DATATYPE'
    endif

    ; append solar keywords

    ephemeris = solo_get_ephemeris(header, cal_pack)
    foreach element, ephemeris do fxaddpar, primary_header, element.name, element.value, element.comment, before = 'DATATYPE'

    fxaddpar, primary_header, 'HISTORY', 'WCS and solar ephemeris:'
    fxaddpar, primary_header, 'HISTORY', '  SKD version = ' + kernel_version

    ; remove redundant and misleading keywords

    sxdelpar, primary_header, 'WIDTH'
    sxdelpar, primary_header, 'HEIGHT'
    sxdelpar, primary_header, 'X_SIZE'
    sxdelpar, primary_header, 'Y_SIZE'
    sxdelpar, primary_header, 'Z_SIZE'
    sxdelpar, primary_header, 'P_BANDS'
    sxdelpar, primary_header, 'N_BANDS'
    sxdelpar, primary_header, 'ORIG_X'
    sxdelpar, primary_header, 'ORIG_Y'

    ; modify keywords for file history

    fxaddpar, primary_header, 'HISTORY', 'L1 FITS file created on ' + date

    ; modify keywords for comments

    old_comment = fxpar(primary_header, 'COMMENT', count = count)
    if count eq 0 then old_comment = ''
    sxdelpar, primary_header, 'COMMENT'
    if max(old_comment.startswith('Warning: correction of the OBT for pB acquisitions was applied.')) then begin
        if ~isa(comment) then comment = !null
        comment = ['Correction of the OBT for pB acquisitions was applied at L0 level.', comment]
    endif
    if max(old_comment.startswith('Warning: the OBT of the acquisition is missing.')) then begin
        if ~isa(comment) then comment = !null
        comment = ['Warning: the OBT of the acquisition is missing.', '  The generation time of the first science packet was used', '  to compute OBT_BEG. Polarimetric keywords DAC*POL*', '  may be wrong.', comment]
    endif
    if isa(comment) then begin
        for k = 0, n_elements(comment) - 1 do $
            fxaddpar, primary_header, 'COMMENT', comment[k]
    endif

    ; rectify the image if requested

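    ; when the detector reference frame is not requested, rectify both the data and the quality matrix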
    if not ref_detector then begin
        data = metis_rectify(data, filter)
        quality_matrix = metis_rectify(quality_matrix, filter)
    endif

    ; add checksum and datasum to the fits header and save

    fits_add_checksum, primary_header, data
    mwrfits, data, out_file_name, primary_header, /no_comment, /create, /silent

    journal, 'FITS file created:'
    journal, '  file name = ' + out_file_name

    ; if applicable, save the data binary-table extension as it is

    if isa(data_bin_table) then begin
        if datatype lt 9 then fits_add_checksum, data_extension_header, data_bin_table
        mwrfits, data_bin_table, out_file_name, data_extension_header, /no_comment, /silent
    endif

    ; save the quality matrix

    if datatype le 6 and isa(quality_matrix) then begin
        quality_matrix_header = !null
        fxaddpar, quality_matrix_header, 'PCOUNT', 0, 'parameter count'
        fxaddpar, quality_matrix_header, 'GCOUNT', 1, 'group count'
        fxaddpar, quality_matrix_header, 'EXTNAME', 'Quality matrix', 'extension name'
        fits_add_checksum, quality_matrix_header, quality_matrix
        mwrfits, quality_matrix, out_file_name, quality_matrix_header, /no_comment, /silent

        journal, 'Quality-matrix extension correctly added.'
    endif

    ; build the telemetry extension

    hk_extension_header = !null
    fxaddpar, hk_extension_header, 'PCOUNT', 0, 'parameter count'
    fxaddpar, hk_extension_header, 'GCOUNT', 1, 'group count'
    fxaddpar, hk_extension_header, 'EXTNAME', 'House-keeping', 'extension name'

    hk_bin_table = make_bin_table(hk_table)
    mwrfits, hk_bin_table, out_file_name, hk_extension_header, /no_comment, /silent

    journal, 'HK binary-table extension correctly added.'

    ; fix some header keywords

    fix_fits_header, out_file_name

    ; unload the spice kernels

    load_spice_kernels, kernel_list = kernel_list, /unload

    journal, 'SPICE kernel files unloaded.'

    ; write the auxiliary information file

    output = { $
        file_name: out_file_name, $
        l0_file_name: input.file_name, $
        log_file_name: 'output/metis_l1_prep_log.txt', $
        empty_params: empty_params[uniq(empty_params, sort(empty_params))] $
        }

    json_write, output, 'output/contents.json'

    journal, 'Output saved.'

    ; close the log

    journal, 'Exiting without errors.'
    journal
    exit, status = 0
end