Skip to content
Snippets Groups Projects
Commit 88b4acee authored by Tyler Thatcher's avatar Tyler Thatcher
Browse files

Added covariance matrix

parent e2fbdf2e
No related branches found
No related tags found
No related merge requests found
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
import os import os
import sys import sys
from functools import singledispatch from functools import singledispatch
import warnings import warnings
import pandas as pd import pandas as pd
import numpy as np import numpy as np
import math import math
import pyproj import pyproj
# sys.path.insert(0, "/home/tthatcher/Desktop/Projects/Plio/plio") # Path to local plio if wanted
sys.path.insert(0, "/home/tthatcher/Desktop/Projects/Plio/plio")
from plio.examples import get_path from plio.examples import get_path
from plio.io.io_bae import read_gpf, read_ipf from plio.io.io_bae import read_gpf, read_ipf
import plio.io.io_controlnetwork as cn import plio.io.io_controlnetwork as cn
import plio.io.isis_serial_number as sn import plio.io.isis_serial_number as sn
``` ```
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
# Reads a .atf file and outputs all of the
# .ipf, .gpf, .sup, .prj, and path to locate the
# .apf file (should be the same as all others)
def read_atf(atf_file):
    """Parse a Socet Set .atf project file.

    Parameters
    ----------
    atf_file : str
        Path to the .atf project file.

    Returns
    -------
    dict
        Keyword -> filename mapping taken from the .atf file, plus:
        'IMAGE_IPF' : list of all .ipf filenames,
        'IMAGE_SUP' : list of all .sup filenames,
        'PATH'      : absolute directory containing the .atf file.
    """
    with open(atf_file) as f:
        ipf = []
        sup = []
        files_dict = {}

        # Grab every line that references a PRJ, GPF, SUP, IPF, or ATF file.
        # strip() + endswith() is robust to a final line without a trailing
        # newline and to Windows '\r\n' endings; the original sliced
        # line[-4:-1], which silently misses both of those cases.
        for line in f:
            line = line.strip()
            if line.endswith(('prj', 'gpf', 'sup', 'ipf', 'atf')):
                # split() with no argument tolerates runs of spaces or tabs
                # (the original split(' ') produced empty tokens on those).
                tokens = line.split()
                keyword, filename = tokens[0], tokens[-1]
                # Collect all the IPF and SUP images
                if filename.endswith('.ipf'):
                    ipf.append(filename)
                if filename.endswith('.sup'):
                    sup.append(filename)
                files_dict[keyword] = filename

        # Sets the value of IMAGE_IPF to all IPF images
        files_dict['IMAGE_IPF'] = ipf
        # Sets the value of IMAGE_SUP to all SUP images
        files_dict['IMAGE_SUP'] = sup
        # Sets the value of PATH to the path of the ATF file
        files_dict['PATH'] = os.path.dirname(os.path.abspath(atf_file))

        return files_dict
# converts columns l. and s. to isis
def line_sample_size(record, path):
    """Convert a record's Socet l./s. offsets to ISIS sample/line coordinates.

    Reads the record's .sup file (lines 3-5: image path, line count,
    sample count) and shifts the centre-relative offsets into one-based
    ISIS pixel coordinates.

    Returns
    -------
    tuple
        (isis_sample, isis_line, image_index_string)
    """
    sup_path = os.path.join(path, record['ipf_file'] + '.sup')
    with open(sup_path) as sup:
        for idx, text in enumerate(sup):
            if idx == 2:
                # Third line holds a backslash-delimited image path;
                # keep the basename without its extension.
                img_index = text.split('\\')[-1].strip().split('.')[0]
            elif idx == 3:
                line_size = text.split(' ')[-1].strip()
                assert int(line_size) > 0, "Line number {} from {} is a negative number: Invalid Data".format(line_size, record['ipf_file'])
            elif idx == 4:
                sample_size = text.split(' ')[-1].strip()
                assert int(sample_size) > 0, "Sample number {} from {} is a negative number: Invalid Data".format(sample_size, record['ipf_file'])
                break

        # Image centre + Socet offset, shifted to one-based pixel indices.
        isis_line = int(line_size) / 2.0 + record['l.'] + 1
        isis_sample = int(sample_size) / 2.0 + record['s.'] + 1
        return isis_sample, isis_line, img_index
# converts known to ISIS keywords
def known(record):
    """Map a record's Socet 'known' flag onto the ISIS point type.

    0 -> 'Free'; 1, 2 or 3 -> 'Constrained'; any other value -> None.
    """
    flag = record['known']
    if flag == 0:
        return 'Free'
    if flag in (1, 2, 3):
        return 'Constrained'
# converts +/- 180 system to 0 - 360 system
def to_360(num):
    """Wrap an angle from the +/-180 degree system into [0, 360)."""
    wrapped = num % 360
    return wrapped
# ocentric to ographic latitudes # ocentric to ographic latitudes
def oc2og(dlat, dMajorRadius, dMinorRadius): def oc2og(dlat, dMajorRadius, dMinorRadius):
try: try:
dlat = math.radians(dlat) dlat = math.radians(dlat)
dlat = math.atan(((dMajorRadius / dMinorRadius)**2) * (math.tan(dlat))) dlat = math.atan(((dMajorRadius / dMinorRadius)**2) * (math.tan(dlat)))
dlat = math.degrees(dlat) dlat = math.degrees(dlat)
except: except:
print ("Error in oc2og conversion") print ("Error in oc2og conversion")
return dlat return dlat
# ographic to ocentric latitudes # ographic to ocentric latitudes
def og2oc(dlat, dMajorRadius, dMinorRadius): def og2oc(dlat, dMajorRadius, dMinorRadius):
try: try:
dlat = math.radians(dlat) dlat = math.radians(dlat)
dlat = math.atan((math.tan(dlat) / ((dMajorRadius / dMinorRadius)**2))) dlat = math.atan((math.tan(dlat) / ((dMajorRadius / dMinorRadius)**2)))
dlat = math.degrees(dlat) dlat = math.degrees(dlat)
except: except:
print ("Error in og2oc conversion") print ("Error in og2oc conversion")
return dlat return dlat
# gets eRadius and pRadius from a .prj file
def get_axis(file):
    """Read the equatorial and polar radii from a Socet .prj file.

    The polar radius is derived from the A_EARTH radius and the E_EARTH
    eccentricity value found in the file.

    Returns
    -------
    tuple
        (eRadius, pRadius)
    """
    from collections import defaultdict
    with open(file) as prj:
        values = defaultdict(list)
        for line in prj:
            tokens = line.strip().split(' ')
            # first token is the keyword, last token is its value
            values[tokens[0]].append(tokens[-1])

        eRadius = float(values['A_EARTH'][0])
        pRadius = eRadius * (1 - float(values['E_EARTH'][0]))
        return eRadius, pRadius
# function to convert lat_Y_North to ISIS_lat
def lat_ISIS_coord(record, semi_major, semi_minor):
    """Convert a record's ographic lat_Y_North into an ocentric ISIS latitude in [0, 360)."""
    return to_360(og2oc(record['lat_Y_North'], semi_major, semi_minor))
# function to convert long_X_East to ISIS_lon
def lon_ISIS_coord(record, semi_major, semi_minor):
    """Convert a record's ographic long_X_East into an ocentric ISIS longitude in [0, 360)."""
    return to_360(og2oc(record['long_X_East'], semi_major, semi_minor))
def body_fix(record, semi_major, semi_minor, inverse=False):
    """
    Transform coordinates between lon/lat/height and body-fixed ECEF x/y/z.

    Parameters
    ----------
    record : ndarray
        (n, 3) where rows are x, y, height (inverse=True) or
        lon, lat, alt (inverse=False).
    semi_major : float
        Semi-major (equatorial) radius used for both projections.
    semi_minor : float
        Semi-minor (polar) radius used for both projections.
    inverse : bool
        When True, convert body-fixed ECEF back to lon/lat/height;
        otherwise convert lon/lat/alt to ECEF.

    Returns
    -------
    tuple
        Three coordinate arrays in the target frame.
    """
    # NOTE(review): pyproj.transform is deprecated in pyproj 2+; consider
    # the Transformer API when the pyproj pin allows it.
    ecef = pyproj.Proj(proj='geocent', a=semi_major, b=semi_minor)
    lla = pyproj.Proj(proj='latlon', a=semi_major, b=semi_minor)

    if inverse:
        lon, lat, height = pyproj.transform(ecef, lla, record[0], record[1], record[2])
        return lon, lat, height
    y, x, z = pyproj.transform(lla, ecef, record[0], record[1], record[2])
    return y, x, z
def ignore_toggle(record):
    """Return True when the record's 'stat' flag is 0, else False."""
    return record['stat'] == 0
# TODO: Does isis cnet need a convariance matrix for sigmas? Even with a static matrix of 1,1,1,1 no output
def compute_sigma_covariance_matrix(lat, lon, rad, latsigma, lonsigma, radsigma, semimajor_axis):
    """
    Given geospatial coordinates, desired accuracy sigmas, and an equatorial
    radius, compute a 2x3 sigma covariance matrix.

    Parameters
    ----------
    lat : float
        A point's latitude in degrees
    lon : float
        A point's longitude in degrees
    rad : float
        The radius (z-value) of the point in meters
    latsigma : float
        The desired latitude accuracy in meters (Default 10.0)
    lonsigma : float
        The desired longitude accuracy in meters (Default 10.0)
    radsigma : float
        The desired radius accuracy in meters (Default: 15.0)
    semimajor_axis : float
        The semi-major or equatorial radius in meters (Default: 1737400.0 - Moon)

    Returns
    -------
    rectcov : ndarray
        (2,3) covariance matrix holding the upper triangle of the
        symmetric 3x3 rectangular covariance.
    """
    phi = math.radians(lat)
    lam = math.radians(lon)

    # SetSphericalSigmasDistance: metre sigmas -> angular sigmas
    scaled_lat_sigma = latsigma / semimajor_axis
    # Longitude scale shrinks with cos(lat); specific to each point.
    scaled_lon_sigma = lonsigma * math.cos(phi) / semimajor_axis

    # SetSphericalSigmas: diagonal covariance in (lat, lon, radius)
    cov = np.diag([scaled_lat_sigma ** 2, scaled_lon_sigma ** 2, radsigma ** 2])

    cosphi = math.cos(phi)
    sinphi = math.sin(phi)
    coslambda = math.cos(lam)
    sinlambda = math.sin(lam)
    rcosphi = rad * cosphi
    rsinphi = rad * sinphi

    # Jacobian of rectangular (x, y, z) with respect to (lat, lon, radius)
    j = np.array([[-rsinphi * coslambda, -rcosphi * sinlambda, cosphi * coslambda],
                  [-rsinphi * sinlambda,  rcosphi * coslambda, cosphi * sinlambda],
                  [rcosphi,               0.,                  sinphi]])

    # Propagate: J * C * J^T
    mat = j.dot(cov).dot(j.T)

    # Pack the symmetric matrix's upper triangle into a 2x3 array:
    # (0,0) (0,1) (0,2) / (1,1) (1,2) (2,2).
    rectcov = np.array([[mat[0, 0], mat[0, 1], mat[0, 2]],
                        [mat[1, 1], mat[1, 2], mat[2, 2]]])
    return rectcov
def compute_cov_matrix(record, semimajor_axis):
    """Flatten one ground point's 2x3 apriori covariance into a 6-element list."""
    cov = compute_sigma_covariance_matrix(record['lat_Y_North'],
                                          record['long_X_East'],
                                          record['ht'],
                                          record['sig0'],
                                          record['sig1'],
                                          record['sig2'],
                                          semimajor_axis)
    return cov.ravel().tolist()
# applys transformations to columns
def apply_transformations(atf_dict, df):
    """Apply the Socet -> ISIS column transformations to df in place.

    Converts l./s. offsets to ISIS sample/line, maps the 'known' flag,
    replaces lon/lat/height with body-fixed XYZ, and adds the
    'aprioriCovar' and 'ignore' columns.
    """
    prj_file = os.path.join(atf_dict['PATH'], atf_dict['PROJECT'].split('\\')[-1])
    eRadius, pRadius = get_axis(prj_file)

    # Body-fix the ground coordinates before those columns are overwritten.
    lla = np.array([[df['long_X_East']], [df['lat_Y_North']], [df['ht']]])
    ecef = body_fix(lla, semi_major=eRadius, semi_minor=pRadius, inverse=False)

    df['s.'], df['l.'], df['image_index'] = (zip(*df.apply(line_sample_size, path=atf_dict['PATH'], axis=1)))
    df['known'] = df.apply(known, axis=1)
    df['long_X_East'] = ecef[0][0]
    df['lat_Y_North'] = ecef[1][0]
    df['ht'] = ecef[2][0]
    # NOTE(review): compute_cov_matrix reads lat_Y_North/long_X_East/ht,
    # which at this point already hold body-fixed XYZ rather than
    # lat/lon/height -- confirm this ordering is intended.
    df['aprioriCovar'] = df.apply(compute_cov_matrix, semimajor_axis=eRadius, axis=1)
    df['ignore'] = df.apply(ignore_toggle, axis=1)
def socet2isis(prj_file):
    """Read a Socet .atf project and return a dataframe ready for ISIS cnet export.

    Merges the project's gpf and ipf data on point id, applies the
    Socet -> ISIS transformations, and renames columns to the ISIS
    control-network names.
    """
    # Read in and set up the atf dict of information
    atf_dict = read_atf(prj_file)

    # Locate the gpf and ipf files using the atf dict
    gpf_file = os.path.join(atf_dict['PATH'], atf_dict['GP_FILE'])
    ipf_list = [os.path.join(atf_dict['PATH'], i) for i in atf_dict['IMAGE_IPF']]

    # Read the gpf file and ipf file(s) into separate dataframes
    gpf_df = read_gpf(gpf_file)
    ipf_df = read_ipf(ipf_list)

    # Warn about point ids present in the ipf files but absent from the gpf file
    gpf_pt_idx = pd.Index(pd.unique(gpf_df['point_id']))
    ipf_pt_idx = pd.Index(pd.unique(ipf_df['pt_id']))
    point_diff = ipf_pt_idx.difference(gpf_pt_idx)
    if len(point_diff) != 0:
        warnings.warn("The following points found in ipf files missing from gpf file: \n\n{}. \
        \n\nContinuing, but these points will be missing from the control network".format(list(point_diff)))

    # Merge the two dataframes on their point id columns and transform
    socet_df = ipf_df.merge(gpf_df, left_on='pt_id', right_on='point_id')
    apply_transformations(atf_dict, socet_df)

    # Map Socet column names onto the ISIS control-network names
    column_remap = {'l.': 'y', 's.': 'x',
                    'res_l': 'lineResidual', 'res_s': 'sampleResidual', 'known': 'Type',
                    'lat_Y_North': 'AprioriY', 'long_X_East': 'AprioriX', 'ht': 'AprioriZ',
                    'sig0': 'AprioriLatitudeSigma', 'sig1': 'AprioriLongitudeSigma', 'sig2': 'AprioriRadiusSigma',
                    'sig_l': 'linesigma', 'sig_s': 'samplesigma'}
    socet_df.rename(columns=column_remap, inplace=True)

    # Return the socet dataframe to be converted to a control net
    return socet_df
# creates a dict of serial numbers with the cub being the key
def serial_numbers(images, path, extension):
    """Build an {image name: ISIS serial number} dict for the given cubes."""
    serial_dict = {}
    for image in images:
        serial = sn.generate_serial_number(os.path.join(path, image + extension))
        # Shorten the spacecraft name in the generated serial
        serial_dict[image] = serial.replace('Mars_Reconnaissance_Orbiter', 'MRO')
    return serial_dict
``` ```
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
# Set up the cub information, namely the path and extension
path = '/home/tthatcher/Desktop/Projects/plio_imgs/correct_imgs/'
extension = '.8bit.cub'
atf_file = ('/where/y')

# Build the Socet dataframe and the image -> serial-number mapping
socet_df = socet2isis(atf_file)
images = pd.unique(socet_df['ipf_file'])
serial_dict = serial_numbers(images, path, extension)

# creates the control network
cn.to_isis('/path/you/want/the/cnet/to/be/in/cn.net', socet_df, serial_dict)
```
/Users/adampaquette/anaconda/envs/pysat/lib/python3.6/site-packages/ipykernel_launcher.py:173: UserWarning: The following points found in ipf files missing from gpf file:
['P03_002226_1895_XI_09N203W_15', 'P03_002226_1895_XI_09N203W_16', 'P03_002226_1895_XI_09N203W_17', 'P03_002226_1895_XI_09N203W_18', 'P03_002226_1895_XI_09N203W_19', 'P03_002226_1895_XI_09N203W_20', 'P03_002226_1895_XI_09N203W_21', 'P03_002226_1895_XI_09N203W_22', 'P03_002226_1895_XI_09N203W_24', 'P03_002226_1895_XI_09N203W_26', 'P03_002226_1895_XI_09N203W_30', 'P03_002226_1895_XI_09N203W_31', 'P03_002226_1895_XI_09N203W_32', 'P03_002226_1895_XI_09N203W_34', 'P03_002226_1895_XI_09N203W_36', 'P03_002226_1895_XI_09N203W_37', 'P03_002226_1895_XI_09N203W_44', 'P03_002226_1895_XI_09N203W_48', 'P03_002226_1895_XI_09N203W_49', 'P03_002226_1895_XI_09N203W_56', 'P03_002226_1895_XI_09N203W_57', 'P03_002226_1895_XI_09N203W_61', 'P03_002226_1895_XI_09N203W_62', 'P03_002226_1895_XI_09N203W_63', 'P03_002226_1895_XI_09N203W_65', 'P19_008344_1894_XN_09N203W_4', 'P20_008845_1894_XN_09N203W_15'].
Continuing, but these points will be missing from the control network
%% Cell type:code id: tags: %% Cell type:code id: tags:
``` python ``` python
``` ```
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment