Commit 0b25407f authored by Adam Paquette

Moved all transformations, updated bin script, and notebook.

parent 5da2071a
import os
import warnings
import numpy as np
from plio.examples import get_path
from plio.io.io_bae import read_atf, read_gpf, read_ipf
from plio.spatial.transformations import *
import plio.io.io_controlnetwork as cn
import pandas as pd

# TODO: Change script to potentially handle configuration files
# Setup the at_file and path to cubes
cub_path = '/Volumes/Blueman/'
at_file = get_path('CTX_Athabasca_Middle_step0.atf')

# Define ipf mapping to cubes
image_dict = {'P01_001540_1889_XI_08N204W' : 'P01_001540_1889_XI_08N204W.lev1.cub',
              'P01_001606_1897_XI_09N203W' : 'P01_001606_1897_XI_09N203W.lev1.cub',
              'P02_001804_1889_XI_08N204W' : 'P02_001804_1889_XI_08N204W.lev1.cub',
              'P03_002226_1895_XI_09N203W' : 'P03_002226_1895_XI_09N203W.lev1.cub',
              'P03_002371_1888_XI_08N204W' : 'P03_002371_1888_XI_08N204W.lev1.cub',
              'P19_008344_1894_XN_09N203W' : 'P19_008344_1894_XN_09N203W.lev1.cub',
              'P20_008845_1894_XN_09N203W' : 'P20_008845_1894_XN_09N203W.lev1.cub'}
##
# End Config
##
# Read in and setup the atf dict of information
atf_dict = read_atf(at_file)
# Get the gpf and ipf files using atf dict
gpf_file = os.path.join(atf_dict['PATH'], atf_dict['GP_FILE'])
ipf_list = [os.path.join(atf_dict['PATH'], i) for i in atf_dict['IMAGE_IPF']]
# Read in the gpf file and ipf file(s) into separate dataframes
gpf_df = read_gpf(gpf_file)
ipf_df = read_ipf(ipf_list)
# Check for differences between point ids, using each dataframe's
# point ids as a reference
gpf_pt_idx = pd.Index(pd.unique(gpf_df['point_id']))
ipf_pt_idx = pd.Index(pd.unique(ipf_df['pt_id']))
point_diff = ipf_pt_idx.difference(gpf_pt_idx)
if len(point_diff) != 0:
    warnings.warn("The following points found in ipf files missing from gpf file: \n\n{}. \
\n\nContinuing, but these points will be missing from the control network".format(list(point_diff)))
# Merge the two dataframes on their point id columns
socet_df = ipf_df.merge(gpf_df, left_on='pt_id', right_on='point_id')
# Apply the transformations
apply_transformations(atf_dict, socet_df)
# Define column remap for socet dataframe
column_remap = {'l.': 'y', 's.': 'x',
                'res_l': 'LineResidual', 'res_s': 'SampleResidual', 'known': 'Type',
                'lat_Y_North': 'AprioriY', 'long_X_East': 'AprioriX', 'ht': 'AprioriZ',
                'sig0': 'AprioriLatitudeSigma', 'sig1': 'AprioriLongitudeSigma', 'sig2': 'AprioriRadiusSigma'}
# Rename the columns using the column remap above
socet_df.rename(columns = column_remap, inplace=True)
images = pd.unique(socet_df['ipf_file'])
serial_dict = serial_numbers(image_dict, cub_path)
# creates the control network
cn.to_isis('/Volumes/Blueman/test.net', socet_df, serial_dict)
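The script above hard-codes the cube path, the atf file, and the output network name. As a minimal sketch of the configuration handling mentioned in the TODO, the same steps could sit behind a small argparse front end; the argument names and the `__main__` wrapper below are illustrative assumptions, not part of plio or of this commit.

``` python
# Illustrative sketch only: the argument names are hypothetical and the
# image_dict would still need to come from a config file or similar.
import argparse

def parse_args():
    parser = argparse.ArgumentParser(description='Convert a Socet Set project to an ISIS control network.')
    parser.add_argument('at_file', help='Path to the Socet Set .atf file')
    parser.add_argument('cub_path', help='Directory containing the ISIS .cub files')
    parser.add_argument('outnet', help='Path for the output ISIS control network (.net)')
    return parser.parse_args()

if __name__ == '__main__':
    args = parse_args()
    # The body of the script above would run here, substituting
    # args.at_file, args.cub_path, and args.outnet for the hard-coded literals.
```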
%% Cell type:code id: tags:

``` python
import os
import sys
from functools import singledispatch
import warnings
import pandas as pd
import numpy as np
import math
import pyproj

# sys.path.insert(0, "/home/tthatcher/Desktop/Projects/Plio/plio")

from plio.examples import get_path
from plio.io.io_bae import read_gpf, read_ipf
import plio.io.io_controlnetwork as cn
import plio.io.isis_serial_number as sn
```
%% Cell type:code id: tags:

``` python
# Reads a .atf file and returns the .ipf, .gpf, .sup, and .prj file names,
# plus the path to the .atf file (which should be the same for all of them)
def read_atf(atf_file):
    with open(atf_file) as f:
        files = []
        ipf = []
        sup = []
        files_dict = []

        # Grabs every PRJ, GPF, SUP, and IPF image from the ATF file
        for line in f:
            if line[-4:-1] == 'prj' or line[-4:-1] == 'gpf' or line[-4:-1] == 'sup' or line[-4:-1] == 'ipf' or line[-4:-1] == 'atf':
                files.append(line)

        files = np.array(files)

        # Creates appropriate arrays for certain files in the right format
        for file in files:
            file = file.strip()
            file = file.split(' ')

            # Grabs all the IPF files
            if file[1].endswith('.ipf'):
                ipf.append(file[1])

            # Grabs all the SUP files
            if file[1].endswith('.sup'):
                sup.append(file[1])

            files_dict.append(file)

        # Creates a dict out of file lists for GPF, PRJ, IPF, and ATF
        files_dict = (dict(files_dict))

        # Sets the value of IMAGE_IPF to all IPF images
        files_dict['IMAGE_IPF'] = ipf

        # Sets the value of IMAGE_SUP to all SUP images
        files_dict['IMAGE_SUP'] = sup

        # Sets the value of PATH to the path of the ATF file
        files_dict['PATH'] = os.path.dirname(os.path.abspath(atf_file))

        return files_dict
# converts columns l. and s. to isis
def line_sample_size(record, path):
    with open(os.path.join(path, record['ipf_file'] + '.sup')) as f:
        for i, line in enumerate(f):
            if i == 2:
                img_index = line.split('\\')
                img_index = img_index[-1].strip()
                img_index = img_index.split('.')[0]

            if i == 3:
                line_size = line.split(' ')
                line_size = line_size[-1].strip()
                assert int(line_size) > 0, "Line number {} from {} is a negative number: Invalid Data".format(line_size, record['ipf_file'])

            if i == 4:
                sample_size = line.split(' ')
                sample_size = sample_size[-1].strip()
                assert int(sample_size) > 0, "Sample number {} from {} is a negative number: Invalid Data".format(sample_size, record['ipf_file'])
                break

        line_size = int(line_size)/2.0 + record['l.'] + 1
        sample_size = int(sample_size)/2.0 + record['s.'] + 1
        return sample_size, line_size, img_index
# converts known to ISIS keywords
def known(record):
    if record['known'] == 0:
        return 'Free'
    elif record['known'] == 1 or record['known'] == 2 or record['known'] == 3:
        return 'Constrained'

# converts +/- 180 system to 0 - 360 system
def to_360(num):
    return num % 360

# ocentric to ographic latitudes
def oc2og(dlat, dMajorRadius, dMinorRadius):
    try:
        dlat = math.radians(dlat)
        dlat = math.atan(((dMajorRadius / dMinorRadius)**2) * (math.tan(dlat)))
        dlat = math.degrees(dlat)
    except:
        print ("Error in oc2og conversion")
    return dlat

# ographic to ocentric latitudes
def og2oc(dlat, dMajorRadius, dMinorRadius):
    try:
        dlat = math.radians(dlat)
        dlat = math.atan((math.tan(dlat) / ((dMajorRadius / dMinorRadius)**2)))
        dlat = math.degrees(dlat)
    except:
        print ("Error in og2oc conversion")
    return dlat
# gets eRadius and pRadius from a .prj file
def get_axis(file):
    with open(file) as f:
        from collections import defaultdict

        files = defaultdict(list)

        for line in f:
            ext = line.strip().split(' ')
            files[ext[0]].append(ext[-1])

        eRadius = float(files['A_EARTH'][0])
        pRadius = eRadius * (1 - float(files['E_EARTH'][0]))
        return eRadius, pRadius

# function to convert lat_Y_North to ISIS_lat
def lat_ISIS_coord(record, semi_major, semi_minor):
    ocentric_coord = og2oc(record['lat_Y_North'], semi_major, semi_minor)
    coord_360 = to_360(ocentric_coord)
    return coord_360

# function to convert long_X_East to ISIS_lon
def lon_ISIS_coord(record, semi_major, semi_minor):
    ocentric_coord = og2oc(record['long_X_East'], semi_major, semi_minor)
    coord_360 = to_360(ocentric_coord)
    return coord_360

def body_fix(record, semi_major, semi_minor):
    ecef = pyproj.Proj(proj='geocent', a=semi_major, b=semi_minor)
    lla = pyproj.Proj(proj='latlon', a=semi_major, b=semi_minor)
    lon, lat, height = pyproj.transform(lla, ecef, record['long_X_East'], record['lat_Y_North'], record['ht'])
    return lon, lat, height
# applies transformations to columns
def apply_transformations(atf_dict, df):
    prj_file = os.path.join(atf_dict['PATH'], atf_dict['PROJECT'].split('\\')[-1])

    eRadius, pRadius = get_axis(prj_file)

    df['s.'], df['l.'], df['image_index'] = (zip(*df.apply(line_sample_size, path = atf_dict['PATH'], axis=1)))
    df['known'] = df.apply(known, axis=1)
    df['lat_Y_North'] = df.apply(lat_ISIS_coord, semi_major = eRadius, semi_minor = pRadius, axis=1)
    df['long_X_East'] = df.apply(lon_ISIS_coord, semi_major = eRadius, semi_minor = pRadius, axis=1)
    df['long_X_East'], df['lat_Y_North'], df['ht'] = zip(*df.apply(body_fix, semi_major = eRadius, semi_minor = pRadius, axis = 1))

def socet2isis(prj_file):
    # Read in and setup the atf dict of information
    atf_dict = read_atf(prj_file)

    # Get the gpf and ipf files using atf dict
    gpf_file = os.path.join(atf_dict['PATH'], atf_dict['GP_FILE'])
    ipf_list = [os.path.join(atf_dict['PATH'], i) for i in atf_dict['IMAGE_IPF']]

    # Read in the gpf file and ipf file(s) into separate dataframes
    gpf_df = read_gpf(gpf_file)
    ipf_df = read_ipf(ipf_list)

    # Check for differences between point ids, using each dataframe's
    # point ids as a reference
    gpf_pt_idx = pd.Index(pd.unique(gpf_df['point_id']))
    ipf_pt_idx = pd.Index(pd.unique(ipf_df['pt_id']))

    point_diff = ipf_pt_idx.difference(gpf_pt_idx)

    if len(point_diff) != 0:
        warnings.warn("The following points found in ipf files missing from gpf file: \n\n{}. \
\n\nContinuing, but these points will be missing from the control network".format(list(point_diff)))

    # Merge the two dataframes on their point id columns
    socet_df = ipf_df.merge(gpf_df, left_on='pt_id', right_on='point_id')

    # Apply the transformations
    apply_transformations(atf_dict, socet_df)

    # Define column remap for socet dataframe
    column_remap = {'l.': 'y', 's.': 'x',
                    'res_l': 'LineResidual', 'res_s': 'SampleResidual', 'known': 'Type',
                    'lat_Y_North': 'AprioriY', 'long_X_East': 'AprioriX', 'ht': 'AprioriZ',
                    'sig0': 'AprioriLatitudeSigma', 'sig1': 'AprioriLongitudeSigma', 'sig2': 'AprioriRadiusSigma'}

    # Rename the columns using the column remap above
    socet_df.rename(columns = column_remap, inplace=True)

    # Return the socet dataframe to be converted to a control net
    return socet_df
# creates a dict of serial numbers keyed by image (ipf) name
def serial_numbers(image_dict, path):
    serial_dict = dict()

    for key in image_dict:
        snum = sn.generate_serial_number(os.path.join(path, image_dict[key]))
        snum = snum.replace('Mars_Reconnaissance_Orbiter', 'MRO')
        serial_dict[key] = snum
    return serial_dict
```
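The ographic/ocentric helpers above implement the usual tangent relation between the two latitude systems. As a small hedged sketch, the round trip below checks that oc2og undoes og2oc; it assumes the cell above has been run so both functions are in scope, and the Mars-like radii are illustrative values, not numbers read from this project's .prj file.

``` python
# Illustrative round-trip check for og2oc/oc2og; radii are assumed values.
semi_major = 3396190.0   # assumed equatorial radius in meters
semi_minor = 3376200.0   # assumed polar radius in meters

ographic_lat = 10.0
ocentric_lat = og2oc(ographic_lat, semi_major, semi_minor)

# og2oc applies atan(tan(lat) / (a/b)**2); oc2og applies the inverse,
# so converting back should recover the input latitude.
assert abs(oc2og(ocentric_lat, semi_major, semi_minor) - ographic_lat) < 1e-9
print(ographic_lat, ocentric_lat)
```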
%% Cell type:code id: tags:

``` python
# Set up the cub information: the path to the cubes and the ipf-to-cub mapping
path = '/Volumes/Blueman/'
atf_file = get_path('CTX_Athabasca_Middle_step0.atf')

image_dict = {'P01_001540_1889_XI_08N204W' : 'P01_001540_1889_XI_08N204W.lev1.cub',
              'P01_001606_1897_XI_09N203W' : 'P01_001606_1897_XI_09N203W.lev1.cub',
              'P02_001804_1889_XI_08N204W' : 'P02_001804_1889_XI_08N204W.lev1.cub',
              'P03_002226_1895_XI_09N203W' : 'P03_002226_1895_XI_09N203W.lev1.cub',
              'P03_002371_1888_XI_08N204W' : 'P03_002371_1888_XI_08N204W.lev1.cub',
              'P19_008344_1894_XN_09N203W' : 'P19_008344_1894_XN_09N203W.lev1.cub',
              'P20_008845_1894_XN_09N203W' : 'P20_008845_1894_XN_09N203W.lev1.cub'}

socet_df = socet2isis(atf_file)
images = pd.unique(socet_df['ipf_file'])
serial_dict = serial_numbers(image_dict, path)

# creates the control network
cn.to_isis('/Volumes/Blueman/banana.net', socet_df, serial_dict)
```
%% Output
/Users/adampaquette/anaconda/envs/pysat/lib/python3.6/site-packages/ipykernel_launcher.py:173: UserWarning: The following points found in ipf files missing from gpf file:
['P03_002226_1895_XI_09N203W_15', 'P03_002226_1895_XI_09N203W_16', 'P03_002226_1895_XI_09N203W_17', 'P03_002226_1895_XI_09N203W_18', 'P03_002226_1895_XI_09N203W_19', 'P03_002226_1895_XI_09N203W_20', 'P03_002226_1895_XI_09N203W_21', 'P03_002226_1895_XI_09N203W_22', 'P03_002226_1895_XI_09N203W_24', 'P03_002226_1895_XI_09N203W_26', 'P03_002226_1895_XI_09N203W_30', 'P03_002226_1895_XI_09N203W_31', 'P03_002226_1895_XI_09N203W_32', 'P03_002226_1895_XI_09N203W_34', 'P03_002226_1895_XI_09N203W_36', 'P03_002226_1895_XI_09N203W_37', 'P03_002226_1895_XI_09N203W_44', 'P03_002226_1895_XI_09N203W_48', 'P03_002226_1895_XI_09N203W_49', 'P03_002226_1895_XI_09N203W_56', 'P03_002226_1895_XI_09N203W_57', 'P03_002226_1895_XI_09N203W_61', 'P03_002226_1895_XI_09N203W_62', 'P03_002226_1895_XI_09N203W_63', 'P03_002226_1895_XI_09N203W_65', 'P19_008344_1894_XN_09N203W_4', 'P20_008845_1894_XN_09N203W_15'].
Continuing, but these points will be missing from the control network
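Before writing the network it can help to spot-check the merged dataframe and the serial number mapping. The sketch below uses only pandas and the objects built in the cells above; the set of expected column names comes from the column_remap defined earlier and is otherwise an assumption about what the writer needs.

``` python
# Quick, illustrative sanity checks on socet_df and serial_dict.
# Confirm every image referenced in the dataframe has a serial number.
missing_serials = set(pd.unique(socet_df['ipf_file'])) - set(serial_dict)
print('images without serial numbers:', missing_serials)

# Confirm the renamed columns from column_remap are present.
expected = {'x', 'y', 'Type', 'AprioriX', 'AprioriY', 'AprioriZ'}
print('missing columns:', expected - set(socet_df.columns))
```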
%% Cell type:code id: tags:

``` python
```
......
import os
import math
import pyproj
import plio.io.isis_serial_number as sn
def line_sample_size(record, path):
    """
    Converts columns l. and s. to sample size, line size, and generates an
    image index

    Parameters
    ----------
    record : object
             Pandas series object

    path : str
           Path to the associated sup files for a socet project

    Returns
    -------
    : list
      A list of sample_size, line_size, and img_index
    """
    with open(os.path.join(path, record['ipf_file'] + '.sup')) as f:
        for i, line in enumerate(f):
            if i == 2:
                img_index = line.split('\\')
                img_index = img_index[-1].strip()
                img_index = img_index.split('.')[0]

            if i == 3:
                line_size = line.split(' ')
                line_size = line_size[-1].strip()
                assert int(line_size) > 0, "Line number {} from {} is a negative number: Invalid Data".format(line_size, record['ipf_file'])

            if i == 4:
                sample_size = line.split(' ')
                sample_size = sample_size[-1].strip()
                assert int(sample_size) > 0, "Sample number {} from {} is a negative number: Invalid Data".format(sample_size, record['ipf_file'])
                break

        line_size = int(line_size)/2.0 + record['l.'] + 1
        sample_size = int(sample_size)/2.0 + record['s.'] + 1
        return sample_size, line_size, img_index
# converts known to ISIS keywords
def known(record):
    """
    Converts the known field from a socet dataframe into the
    isis point_type column

    Parameters
    ----------
    record : object
             Pandas series object

    Returns
    -------
    : str
      String representation of a known field
    """
    if record['known'] == 0:
        return 'Free'
    elif record['known'] == 1 or record['known'] == 2 or record['known'] == 3:
        return 'Constrained'

# converts +/- 180 system to 0 - 360 system
def to_360(num):
    """
    Transforms a given number into 0 - 360 space

    Parameters
    ----------
    num : int
          A given integer

    Returns
    -------
    : int
      num moduloed by 360
    """
    return num % 360
def oc2og(dlat, dMajorRadius, dMinorRadius):
    """
    Ocentric to ographic latitudes

    Parameters
    ----------
    dlat : float
           Latitude to convert

    dMajorRadius : float
                   Radius from the center of the body to the equator

    dMinorRadius : float
                   Radius from the pole to the center of mass

    Returns
    -------
    dlat : float
           Converted latitude into ographic space
    """
    try:
        dlat = math.radians(dlat)
        dlat = math.atan(((dMajorRadius / dMinorRadius)**2) * (math.tan(dlat)))
        dlat = math.degrees(dlat)
    except:
        print ("Error in oc2og conversion")
    return dlat

def og2oc(dlat, dMajorRadius, dMinorRadius):
    """
    Ographic to ocentric latitudes

    Parameters
    ----------
    dlat : float
           Latitude to convert

    dMajorRadius : float
                   Radius from the center of the body to the equator

    dMinorRadius : float
                   Radius from the pole to the center of mass

    Returns
    -------
    dlat : float
           Converted latitude into ocentric space
    """
    try:
        dlat = math.radians(dlat)
        dlat = math.atan((math.tan(dlat) / ((dMajorRadius / dMinorRadius)**2)))
        dlat = math.degrees(dlat)
    except:
        print ("Error in og2oc conversion")
    return dlat
# gets eRadius and pRadius from a .prj file
def get_axis(file):
    """
    Gets eRadius and pRadius from a .prj file

    Parameters
    ----------
    file : str
           file with path to a given socet project file

    Returns
    -------
    : list
      A list of the eRadius and pRadius of the project file
    """
    with open(file) as f:
        from collections import defaultdict

        files = defaultdict(list)

        for line in f:
            ext = line.strip().split(' ')
            files[ext[0]].append(ext[-1])

        eRadius = float(files['A_EARTH'][0])
        pRadius = eRadius * (1 - float(files['E_EARTH'][0]))
        return eRadius, pRadius
def lat_ISIS_coord(record, semi_major, semi_minor):
    """
    Function to convert lat_Y_North to ISIS_lat

    Parameters
    ----------
    record : object
             Pandas series object

    semi_major : float
                 Radius from the center of the body to the equator

    semi_minor : float
                 Radius from the pole to the center of mass

    Returns
    -------
    coord_360 : float
                Converted latitude into ocentric space, and mapped
                into 0 to 360
    """
    ocentric_coord = og2oc(record['lat_Y_North'], semi_major, semi_minor)
    coord_360 = to_360(ocentric_coord)
    return coord_360

def lon_ISIS_coord(record, semi_major, semi_minor):
    """
    Function to convert long_X_East to ISIS_lon

    Parameters
    ----------
    record : object
             Pandas series object

    semi_major : float
                 Radius from the center of the body to the equator

    semi_minor : float
                 Radius from the pole to the center of mass

    Returns
    -------
    coord_360 : float
                Converted longitude into ocentric space, and mapped
                into 0 to 360
    """
    ocentric_coord = og2oc(record['long_X_East'], semi_major, semi_minor)
    coord_360 = to_360(ocentric_coord)
    return coord_360
def body_fix(record, semi_major, semi_minor):
    """
    Transforms latitude, longitude, and height of a socet point into
    a body fixed point

    Parameters
    ----------
    record : object
             Pandas series object

    semi_major : float
                 Radius from the center of the body to the equator

    semi_minor : float
                 Radius from the pole to the center of mass

    Returns
    -------
    : list
      Body fixed (x, y, z) coordinates of the point
    """
    ecef = pyproj.Proj(proj='geocent', a=semi_major, b=semi_minor)
    lla = pyproj.Proj(proj='latlon', a=semi_major, b=semi_minor)
    lon, lat, height = pyproj.transform(lla, ecef, record['long_X_East'], record['lat_Y_North'], record['ht'])
    return lon, lat, height
def apply_transformations(atf_dict, df):
    """
    Takes an atf dictionary and a socet dataframe and applies the necessary
    transformations to convert that dataframe into an isis compatible
    dataframe

    Parameters
    ----------
    atf_dict : dict
               Dictionary containing information from an atf file

    df : object
         Pandas dataframe object
    """
    prj_file = os.path.join(atf_dict['PATH'], atf_dict['PROJECT'].split('\\')[-1])

    eRadius, pRadius = get_axis(prj_file)

    df['s.'], df['l.'], df['image_index'] = (zip(*df.apply(line_sample_size, path = atf_dict['PATH'], axis=1)))
    df['known'] = df.apply(known, axis=1)
    df['lat_Y_North'] = df.apply(lat_ISIS_coord, semi_major = eRadius, semi_minor = pRadius, axis=1)
    df['long_X_East'] = df.apply(lon_ISIS_coord, semi_major = eRadius, semi_minor = pRadius, axis=1)
    df['long_X_East'], df['lat_Y_North'], df['ht'] = zip(*df.apply(body_fix, semi_major = eRadius, semi_minor = pRadius, axis = 1))
def serial_numbers(image_dict, path):
    """
    Creates a dict of serial numbers keyed by image (ipf) name

    Parameters
    ----------
    image_dict : dict
                 Dictionary mapping ipf file names to their associated cub files

    path : str
           Path to the directory containing the cub files

    Returns
    -------
    serial_dict : dict
                  Dictionary mapping ipf file names to ISIS serial numbers
    """
    serial_dict = dict()

    for key in image_dict:
        snum = sn.generate_serial_number(os.path.join(path, image_dict[key]))
        snum = snum.replace('Mars_Reconnaissance_Orbiter', 'MRO')
        serial_dict[key] = snum
    return serial_dict
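Because body_fix returns body-fixed x, y, z despite its lon/lat/height variable names, a quick sanity check is that a point at latitude 0, longitude 0, height 0 lands roughly at (semi_major, 0, 0). The sketch below is illustrative only: the radii and the hand-built pandas Series are assumed values, not data from the CTX Athabasca project.

``` python
# Minimal sanity-check sketch for body_fix; radii and record are assumptions.
import pandas as pd

semi_major = 3396190.0  # assumed equatorial radius in meters
semi_minor = 3376200.0  # assumed polar radius in meters

record = pd.Series({'long_X_East': 0.0, 'lat_Y_North': 0.0, 'ht': 0.0})
x, y, z = body_fix(record, semi_major, semi_minor)

# A point on the equator at longitude 0 and height 0 should sit on the +X axis,
# about one equatorial radius from the body center.
print(x, y, z)  # expect roughly (3396190.0, 0.0, 0.0)
```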