Commit 660d2af0 authored by Roberto Susino

Version 1.1

parent 10519d7c
-function interpol_param, hk_table, par_name, time
+function interpol_param, table, par_name, date, empty_params = empty_params
-  i = where(hk_table.par_name eq par_name, count)
+  i = where(table.par_name eq par_name, count)
-  hk_time = dblarr(count)
+  if count lt 1 then return, fix(32768) ; this should not be necessary
+  par_date = dblarr(count)
+  par_val = fltarr(count)
   for j = 0, count - 1 do begin
-    times = strsplit(hk_table.gen_time[i[j]], ":", /extract)
-    hk_time[j] = double(times[0]) + double(times[1])/65536.0D
+    k = i[j]
+    if table.eng_val[k] eq 'N/A' then begin
+      table.eng_val[k] = table.raw_val[k]
+      if ~ isa(empty_params) then $
+        empty_params = table.par_name[k] else $
+        empty_params = [empty_params, table.par_name[k]]
+    endif
+    par_date[j] = date_conv(table.gen_time[k], 'JULIAN')
+    par_val[j] = float(table.eng_val[k])
   endfor
-  value = interpol(float(hk_table.eng_val[i]), hk_time, time)
+  value = interpol(par_val, par_date, date_conv(date, 'JULIAN'))
   return, value
 end
\ No newline at end of file
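A minimal usage sketch of the revised function (not from the repository): it assumes a house-keeping table with the fields used above (par_name, gen_time, raw_val, eng_val); the parameter name is one of the mnemonics referenced later in the commit, and the times and values are hypothetical.

; mock house-keeping table with the fields interpol_param expects
mock_table = { $
  par_name: ['NIT0E0E0', 'NIT0E0E0'], $
  gen_time: ['2022-03-21T12:00:00.000', '2022-03-21T13:00:00.000'], $
  raw_val: ['1000', '1100'], $
  eng_val: ['20.5', '21.5'] $
}
; interpolate the engineering value at a (hypothetical) average acquisition time
tsensor = interpol_param(mock_table, 'NIT0E0E0', '2022-03-21T12:30:00.000', empty_params = empty_params)
print, tsensor   ; -> 21.0, linear interpolation between the two samples
; empty_params stays undefined here because both samples carry a valid eng_val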
@@ -29,7 +29,7 @@ pro metis_l1_prep
     1024L $ ; 6
   )

-  ; read the auxiliary file - here we have all the inputs we need
+  ; read the auxiliary input file
   input = json_parse('input/contents.json', /toarray, /tostruct)
@@ -105,7 +105,7 @@ pro metis_l1_prep
   ; creation and acquisition times in utc
-  date = date_conv(systime(/julian, /utc), 'FITS') + 'Z'
+  date = date_conv(systime(/julian, /utc), 'FITS')
   obt_beg = fxpar(primary_header, 'OBT_BEG')
   obt_end = fxpar(primary_header, 'OBT_END')
@@ -120,9 +120,16 @@ pro metis_l1_prep
   ; name of the fits file
   file_name_fields = strsplit(fxpar(primary_header, 'FILENAME'), '_', /extract)
   file_name = 'solo_l1_' + file_name_fields[2] + '_' + date_beg_string + '_v' + version + '.fits'
   out_file_name = 'output/' + file_name

+  ; exposure times
+  dit = fxpar(metadata_extension_header, 'DIT')
+  telapse = obt_end - obt_beg
+  xposure = dit/1000.
+
   ; instrument keywords
   ; TODO - complete with filter information
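A worked example of the exposure-time arithmetic added above, with hypothetical values; it assumes DIT is stored in milliseconds (as the division by 1000 suggests) and that OBT_BEG/OBT_END are expressed in seconds.

dit = 30000L                  ; hypothetical detector integration time, in ms
obt_beg = 646584000.0d        ; hypothetical on-board times
obt_end = 646584120.0d
xposure = dit/1000.           ; -> 30.0 s
telapse = obt_end - obt_beg   ; -> 120.0 s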
@@ -164,7 +171,7 @@ pro metis_l1_prep
   ; adjust the primary header (it is almost the same for all data product types)
-  fxaddpar, primary_header, 'FILENAME', file_name, 'FITS filename'
+  fxaddpar, primary_header, 'FILENAME', file_name
   fxaddpar, primary_header, 'PARENT', file_basename(input.file_name), 'Name of the parent file that got processed to the current one', before = 'APID'
   fxaddpar, primary_header, 'DATE', date, 'Date and time of FITS file creation', before = 'OBT_BEG'
   fxaddpar, primary_header, 'DATE-OBS', date_beg, 'Same as DATE-BEG', before = 'OBT_BEG'
@@ -174,8 +181,8 @@ pro metis_l1_prep
   fxaddpar, primary_header, 'TIMESYS', 'UTC', 'System used for time keywords', before = 'OBT_BEG'
   fxaddpar, primary_header, 'TIMRDER', 0.0, 'Estimated random error in time values', before = 'OBT_BEG'
   fxaddpar, primary_header, 'TIMSYER', 0.0, 'Estimated systematic error in time values', before = 'OBT_BEG'
-  fxaddpar, primary_header, 'LEVEL', 'L1', 'Data processing level'
-  fxaddpar, primary_header, 'CREATOR', 'metis_l1_prep.pro', 'FITS creation software'
+  fxaddpar, primary_header, 'LEVEL', 'L1'
+  fxaddpar, primary_header, 'CREATOR', 'metis_l1_prep.pro'
   fxaddpar, primary_header, 'OBSRVTRY', 'Solar Orbiter', 'Satellite name', before = 'INSTRUME'
   fxaddpar, primary_header, 'TELESCOP', telescope, 'Telescope that took the measurement', before = 'INSTRUME'
   fxaddpar, primary_header, 'DETECTOR', detector, 'Subunit/sensor', before = 'DATAMIN'
@@ -187,9 +194,9 @@ pro metis_l1_prep
   fxaddpar, primary_header, 'WAVEMIN', wavemin, 'The shortest wavelength at which the net (approximate) response function becomes 0.05 times the maximum response', before = 'DATAMIN'
   fxaddpar, primary_header, 'WAVEMAX', wavemax, 'The longest wavelength at which the net (approximate) response function becomes 0.05 times the maximum response', before = 'DATAMIN'
   fxaddpar, primary_header, 'WAVEBAND', waveband, 'Description of the wavelength band', before = 'DATAMIN'
-  fxaddpar, primary_header, 'XPOSURE', 0, 'Total effective exposure time of the observation, in seconds', before = 'DATAMIN'
-  fxaddpar, primary_header, 'NSUMEXP', 0, 'Number of images summed together to form the observation', before = 'DATAMIN'
-  fxaddpar, primary_header, 'TELAPSE', 0, 'Total elapsed time between the beginning and end of the complete observation in seconds, including any dead times between exposures', before = 'DATAMIN'
+  fxaddpar, primary_header, 'XPOSURE', xposure, 'Total effective exposure time of the observation, in seconds', before = 'DATAMIN'
+  fxaddpar, primary_header, 'NSUMEXP', 1, 'Number of images summed together to form the observation', before = 'DATAMIN'
+  fxaddpar, primary_header, 'TELAPSE', telapse, 'Total elapsed time between the beginning and end of the complete observation in seconds, including any dead times between exposures', before = 'DATAMIN'
   fxaddpar, primary_header, 'SOOPNAME', planning_data.soop_name, 'SOOP(s) that this observation belongs to', before = 'DATAMIN'
   fxaddpar, primary_header, 'SOOPTYPE', soop_type, before = 'DATAMIN'
   fxaddpar, primary_header, 'OBS_ID', planning_data.obs_id, 'Unique identifier for the observation that is associated with the data acquisition', before = 'DATAMIN'
@@ -210,14 +217,41 @@ pro metis_l1_prep
   fxaddpar, primary_header, 'IDB_VERS', input.idb_version, '', before = 'HDR_VERS'
   fxaddpar, primary_header, 'INFO_URL', 'http://metis.oato.inaf.it', 'Link to more information on the instrument data', before = 'HISTORY'

-  ; add checksum and datasum to the fits header
-  ; WARN - should this be done at the end? i don't know
-  fits_add_checksum, primary_header, image
+  ; read the house-keeping telemetry
+  hk_table = json_parse(input.hk_file_name, /toarray, /tostruct)
+
+  ; replace raw values with calibrated values in the primary header
+  ; WARN - must be done here?
+  if datatype eq 0 or datatype eq 3 or datatype eq 5 then begin
+    ; WARN - DACPOL parameters are not calibrated since a calibration curve does not exist in the IDB. Their calibration in physical units (e.g., voltages or angles) should be done later
+    ; fxaddpar, primary_header, 'DAC1POL1', interpol_param(hk_table, 'NIT0E061', date_avg, empty_param = empty_param)
+    ; fxaddpar, primary_header, 'DAC2POL1', interpol_param(hk_table, 'NIT0E062', date_avg, empty_param = empty_param)
+    ; fxaddpar, primary_header, 'DAC1POL2', interpol_param(hk_table, 'NIT0E064', date_avg, empty_param = empty_param)
+    ; fxaddpar, primary_header, 'DAC2POL2', interpol_param(hk_table, 'NIT0E065', date_avg, empty_param = empty_param)
+    ; fxaddpar, primary_header, 'DAC1POL3', interpol_param(hk_table, 'NIT0E067', date_avg, empty_param = empty_param)
+    ; fxaddpar, primary_header, 'DAC2POL3', interpol_param(hk_table, 'NIT0E068', date_avg, empty_param = empty_param)
+    ; fxaddpar, primary_header, 'DAC1POL4', interpol_param(hk_table, 'NIT0E06A', date_avg, empty_param = empty_param)
+    ; fxaddpar, primary_header, 'DAC2POL4', interpol_param(hk_table, 'NIT0E06B', date_avg, empty_param = empty_param)
+    fxaddpar, primary_header, 'TSENSOR ', interpol_param(hk_table, 'NIT0E0E0', date_avg, empty_params = empty_params)
+    fxaddpar, primary_header, 'PMPTEMP ', interpol_param(hk_table, 'NIT0L00D', date_avg, empty_params = empty_params)
+  endif
+
+  if datatype eq 1 or datatype eq 4 or datatype eq 6 then begin
+    fxaddpar, primary_header, 'HVU_SCR ', interpol_param(hk_table, 'NIT0E070', date_avg, empty_params = empty_params)
+    fxaddpar, primary_header, 'HVU_MCP ', interpol_param(hk_table, 'NIT0E071', date_avg, empty_params = empty_params)
+    fxaddpar, primary_header, 'TSENSOR ', interpol_param(hk_table, 'NIT0E050', date_avg, empty_params = empty_params)
+  endif
+
+  if ~ isa(empty_params) then empty_params = ''

   ; add keywords for file history
-  ; fxaddpar, primary_header, 'HISTORY', ''
+  fxaddpar, primary_header, 'HISTORY', ''

   ; remove unused keywords
@@ -236,40 +270,17 @@ pro metis_l1_prep
   sxdelpar, primary_header, 'ORIG_Y'
   sxdelpar, primary_header, 'FIRSTROW'

+  ; add checksum and datasum to the fits header
+  ; WARN - should this be done at the end? i don't know
+  fits_add_checksum, primary_header, image
+
   mwrfits, image, out_file_name, primary_header, /no_comment, /create, /silent

   ; if applicable, save the data binary-table extension as it is
   if isa(data_bin_table) then mwrfits, data_bin_table, 'output/' + file_name, data_extension_header, /no_comment, /silent

-  ; read the house-keeping telemetry
-  hk_bin_table = json_parse(input.hk_file_name, /toarray, /tostruct)
-
-  ; replace raw values with calibrated values in the primary header
-  ; WARN - must be done here?
-  if datatype eq 0 or datatype eq 3 or datatype eq 5 then begin
-    fxaddpar, primary_header, 'DAC1POL1', interpol_param(hk_bin_table, 'NIT0E061', obt_avg)
-    fxaddpar, primary_header, 'DAC2POL1', interpol_param(hk_bin_table, 'NIT0E062', obt_avg)
-    fxaddpar, primary_header, 'DAC1POL2', interpol_param(hk_bin_table, 'NIT0E064', obt_avg)
-    fxaddpar, primary_header, 'DAC2POL2', interpol_param(hk_bin_table, 'NIT0E065', obt_avg)
-    fxaddpar, primary_header, 'DAC1POL3', interpol_param(hk_bin_table, 'NIT0E067', obt_avg)
-    fxaddpar, primary_header, 'DAC2POL3', interpol_param(hk_bin_table, 'NIT0E068', obt_avg)
-    fxaddpar, primary_header, 'DAC1POL4', interpol_param(hk_bin_table, 'NIT0E06A', obt_avg)
-    fxaddpar, primary_header, 'DAC2POL4', interpol_param(hk_bin_table, 'NIT0E06B', obt_avg)
-    fxaddpar, primary_header, 'TSENSOR', interpol_param(hk_bin_table, 'NIT0E0E0', obt_avg)
-    fxaddpar, primary_header, 'PMPTEMP', interpol_param(hk_bin_table, 'NIT0L00D', obt_avg)
-  endif
-
-  if datatype eq 1 or datatype eq 4 or datatype eq 6 then begin
-    fxaddpar, primary_header, 'HVU_SCR', interpol_param(hk_bin_table, 'NIT0E070', obt_avg)
-    fxaddpar, primary_header, 'HVU_MCP', interpol_param(hk_bin_table, 'NIT0E071', obt_avg)
-    fxaddpar, primary_header, 'TSENSOR', interpol_param(hk_bin_table, 'NIT0E050', obt_avg)
-  endif
-
   ; build the telemetry extension
   hk_extension_header = !null
@@ -282,6 +293,20 @@ pro metis_l1_prep
   fxaddpar, hk_extension_header, 'GCOUNT', 1, 'Group count'
   fxaddpar, hk_extension_header, 'EXTNAME', 'House-keeping', 'Extension name'

+  for i = 0, n_elements(hk_table.par_name) - 1 do begin
+    param = create_struct( $
+      'PAR_NAME', hk_table.par_name[i], $
+      'PACKET', hk_table.packet[i], $
+      'GEN_TIME', hk_table.gen_time[i], $
+      'REC_TIME', hk_table.rec_time[i], $
+      'RAW_VAL', hk_table.raw_val[i], $
+      'ENG_VAL', hk_table.eng_val[i], $
+      'UNIT', hk_table.unit[i], $
+      'DESCR', hk_table.desc[i] $
+    )
+    if ~ isa(hk_bin_table) then hk_bin_table = param else hk_bin_table = [hk_bin_table, param]
+  endfor
+
   mwrfits, hk_bin_table, out_file_name, hk_extension_header, /no_comment, /silent

   ; write the auxiliary information file
@@ -289,15 +314,14 @@ pro metis_l1_prep
   output = { $
     file_name: out_file_name, $
     l0_file_name: input.file_name, $
-    log_file_name: 'output/metis_l1_prep_log.txt' $
+    log_file_name: 'output/metis_l1_prep_log.txt', $
+    empty_params : empty_params[uniq(empty_params)] $
   }

   openw, unit, 'output/contents.json', /get_lun
   printf, unit, output, /implied_print
   free_lun, unit
-  ; json_write, output, 'output/contents.json'

   ; unload the spice kernels
   cspice_unload, kernels
   ...
 function solo_obt2utc, obt
   solo_id = -144
   cspice_scs2e, solo_id, obt, et
-  cspice_et2utc, et, 'ISOC', 2, utc
+  cspice_et2utc, et, 'ISOC', 3, utc
-  return, utc + 'Z'
+  return, utc
 end
\ No newline at end of file
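A minimal usage sketch for solo_obt2utc (not from the repository): it assumes the SPICE/Icy DLM is available and that the Solar Orbiter leap-second and spacecraft-clock kernels have already been loaded with cspice_furnsh; the on-board time string below is hypothetical.

; cspice_furnsh, 'solo_meta_kernel.tm'     ; hypothetical meta-kernel, loaded beforehand
utc = solo_obt2utc('1/0652189214:21034')   ; hypothetical on-board clock string
print, utc   ; -> ISO calendar string 'YYYY-MM-DDTHH:MM:SS.SSS' (three decimals, no trailing 'Z' after this change)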