Commit 398f1178, authored by acpaquette and committed by jlaura

Socet clean up (#58)

* Small updates to get the scripts working.

* Fixed pradius calculations, and made some small changes.

* Removed coord transforms and made body_fix func more generic.

* Updated version.

* Updated reproj doc string.

* General refactor of the socet scripts and clean up. Simplified input for both scripts.
parent 0017d486
 #!/usr/bin/env python
+import argparse
 import os
 import math
-import argparse
 import pandas as pd
 from plio.io.io_bae import save_gpf, save_ipf
+from plio.spatial.transformations import apply_isis_transformations
 import plio.io.io_controlnetwork as cn
 import plio.io.isis_serial_number as sn
-from plio.spatial.transformations import apply_isis_transformations
+from plio.utils.utils import split_all_ext
 def parse_args():
     parser = argparse.ArgumentParser()
-    # Add args here
     parser.add_argument('cnet_file', help='Path to an isis control network.')
     parser.add_argument('e_radius', type=float, help='The semimajor radius of a given target.')
     parser.add_argument('eccentricity', type=float, help='The eccentricity of a given target.')
-    parser.add_argument('cub_path', help='Path to the cub files associated with a control network.')
-    parser.add_argument('cub_extension', help='Extension for all cubes.')
-    parser.add_argument('cub_list', help='Path to a list file of all cubes being used')
+    parser.add_argument('cub_list', help='Path to a list file of all cubes being used. This \
+                                          includes file paths and extensions.')
     parser.add_argument('out_gpf', help='Path to save location of gpf file and new ipf files.')
     parser.add_argument('--adjusted', help='Flag for saving apriori values or adjusted values',
                         default=False, required = False)
@@ -34,9 +33,6 @@ def main(args):
     e_radius = args.e_radius
     p_radius = e_radius * math.sqrt((1 - (args.eccentricity ** 2)))
-    cub_path = args.cub_path
-    extension = args.cub_extension
     with open(args.cub_list, 'r') as f:
         lines = f.readlines()
         cub_list = [cub.replace('\n', '') for cub in lines]
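
The polar radius computed above is just the semiminor axis of the reference ellipsoid, b = a * sqrt(1 - e^2). A minimal standalone sketch of the same computation, using approximate Mars-like values (the numbers are illustrative, not taken from the commit):

import math

e_radius = 3396.19                                      # semimajor (equatorial) radius in km
eccentricity = 0.1084                                   # first eccentricity of the ellipsoid
p_radius = e_radius * math.sqrt(1 - eccentricity ** 2)  # semiminor (polar) radius
print(p_radius)                                         # roughly 3376 km
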
@@ -46,10 +42,10 @@ def main(args):
     adjusted_flag = args.adjusted
     # Create cub dict to map ipf to cub
-    cub_dict = {i: i + extension for i in cub_list}
+    cub_dict = {split_all_ext(os.path.split(i)[1]): i for i in cub_list}
     # Create serial dict to match serial to ipf
-    serial_dict = {sn.generate_serial_number(os.path.join(cub_path, i + extension)): i for i in cub_list}
+    serial_dict = {sn.generate_serial_number(i): split_all_ext(os.path.split(i)[-1]) for i in cub_list}
     # Remove duplicate columns
     # There are better ways to do this but pandas was not having it
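
With the cube list now carrying full paths, cub_dict maps each cube's extension-stripped base name to its absolute path, and serial_dict maps the cube's ISIS serial number to that same base name. A small sketch of the keying, assuming a plio version that includes the split_all_ext helper added by this commit (the paths are hypothetical):

import os
from plio.utils.utils import split_all_ext

# Hypothetical entries; in the script these come from the cub_list file.
cub_list = ['/data/apollo/AS15_M_0414.lev1.cub',
            '/data/apollo/AS15_M_0585.lev1.cub']

# Base name with all extensions stripped -> absolute cube path
cub_dict = {split_all_ext(os.path.split(i)[1]): i for i in cub_list}
print(cub_dict)
# {'AS15_M_0414': '/data/apollo/AS15_M_0414.lev1.cub',
#  'AS15_M_0585': '/data/apollo/AS15_M_0585.lev1.cub'}

# serial_dict additionally calls sn.generate_serial_number on each real cube
# file, which needs the cubes on disk, so it is not reproduced here.
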
@@ -82,7 +78,7 @@ def main(args):
     df.rename(columns = column_map, inplace=True)
-    apply_isis_transformations(df, e_radius, p_radius, serial_dict, extension, cub_path)
+    apply_isis_transformations(df, e_radius, p_radius, serial_dict, cub_dict)
     # Save the ipf(s)
     save_ipf(df, os.path.split(out_gpf)[0])
......
@@ -4,19 +4,20 @@ import sys
 import argparse
 import warnings
+import pandas as pd
 from plio.io.io_bae import read_atf, read_gpf, read_ipf
-from plio.spatial.transformations import apply_socet_transformations, serial_numbers
 import plio.io.io_controlnetwork as cn
-import pandas as pd
+import plio.io.isis_serial_number as sn
+from plio.spatial.transformations import apply_socet_transformations
+from plio.utils.utils import split_all_ext
 def parse_args():
     parser = argparse.ArgumentParser()
-    # Add args here
     parser.add_argument('at_file', help='Path to the .atf file for a project.')
-    parser.add_argument('cub_file_path', help='Path to cube files related to ipf files.')
-    parser.add_argument('extension', help='Extension for all cubes being used.')
+    parser.add_argument('cub_list', help='Path to a list file containing paths to the associated \
+                                          Isis cubes.')
     parser.add_argument('target_name', help='Name of the target body used in the control net')
     parser.add_argument('--outpath', help='Directory for the control network to be output to.')
@@ -26,8 +27,12 @@ def parse_args():
 def main(args):
     # Setup the at_file, path to cubes, and control network out path
     at_file = args.at_file
+    with open(args.cub_list, 'r') as f:
+        lines = f.readlines()
+        cub_list = [cub.replace('\n', '') for cub in lines]
     cnet_out = os.path.split(os.path.splitext(at_file)[0])[1]
-    cub_path = args.cub_file_path
     if( args.outpath ):
         outpath = args.outpath
@@ -72,9 +77,8 @@ def main(args):
     # Rename the columns using the column remap above
     socet_df.rename(columns = column_map, inplace=True)
-    # Build an image and serial dict assuming the cubes will be named as the IPFs are
-    image_dict = {i: i + args.extension for i in pd.unique(socet_df['ipf_file'])}
-    serial_dict = serial_numbers(image_dict, cub_path)
+    # Build a serial dict assuming the cubes will be named as the IPFs are
+    serial_dict = {split_all_ext(os.path.split(i)[-1]): sn.generate_serial_number(i) for i in cub_list}
     # creates the control network
     cn.to_isis(os.path.join(outpath, cnet_out + '.net'), socet_df, serial_dict, targetname = args.target_name)
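
Note the orientation of the mapping: this script keys serial_dict by the extension-stripped base name (matching the ipf_file values in socet_df) and points it at the ISIS serial number, while the first script above builds the inverse (serial number -> base name). A toy illustration with a made-up serial number, purely to show the two orientations:

# Made-up serial; real values come from sn.generate_serial_number on actual cubes.
serial_dict = {'AS15_M_0414': 'APOLLO15/METRIC/1971-07-31T14:02:27.179'}

# The cnet-to-socet script builds the reverse mapping
inverse = {v: k for k, v in serial_dict.items()}
print(inverse)  # {'APOLLO15/METRIC/1971-07-31T14:02:27.179': 'AS15_M_0414'}
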
......
@@ -218,7 +218,7 @@ def stat_toggle(record):
     else:
         return False
-def apply_isis_transformations(df, eRadius, pRadius, serial_dict, extension, cub_path):
+def apply_isis_transformations(df, eRadius, pRadius, serial_dict, cub_dict):
     """
     Takes a atf dictionary and a socet dataframe and applies the necessary
     transformations to convert that dataframe into a isis compatible
@@ -261,8 +261,7 @@ def apply_isis_transformations(df, eRadius, pRadius, serial_dict, extension, cub
     df['known'] = df.apply(reverse_known, axis = 1)
     df['ipf_file'] = df['serialnumber'].apply(lambda serial_number: serial_dict[serial_number])
     df['l.'], df['s.'] = zip(*df.apply(fix_sample_line, serial_dict = serial_dict,
-                                       extension = extension,
-                                       cub_path = cub_path, axis = 1))
+                                       cub_dict = cub_dict, axis = 1))
     # Add dummy for generic value setting
     x_dummy = lambda x: np.full(len(df), x)
@@ -313,28 +312,6 @@ def apply_socet_transformations(atf_dict, df):
     df['aprioriCovar'] = df.apply(compute_cov_matrix, semimajor_axis = eRadius, axis=1)
     df['stat'] = df.apply(stat_toggle, axis=1)
-def serial_numbers(image_dict, path):
-    """
-    Creates a dict of serial numbers with the cub being the key
-    Parameters
-    ----------
-    images : list
-    path : str
-    extension : str
-    Returns
-    -------
-    serial_dict : dict
-    """
-    serial_dict = dict()
-    for key in image_dict:
-        serial_dict[key] = sn.generate_serial_number(os.path.join(path, image_dict[key]))
-    return serial_dict
 # TODO: Does isis cnet need a convariance matrix for sigmas? Even with a static matrix of 1,1,1,1
 def compute_sigma_covariance_matrix(lat, lon, rad, latsigma, lonsigma, radsigma, semimajor_axis):
@@ -438,7 +415,7 @@ def reverse_known(record):
     elif record_type == 1 or record_type == 3 or record_type == 4:
         return 3
-def fix_sample_line(record, serial_dict, extension, cub_path):
+def fix_sample_line(record, serial_dict, cub_dict):
     """
     Extracts the sample, line data from a cube and computes deviation from the
     center of the image
@@ -451,11 +428,8 @@ def fix_sample_line(record, serial_dict, extension, cub_path):
     serial_dict : dict
                   Maps serial numbers to images
-    extension : str
-                Extension for cube being looked at
-    cub_path : str
-               Path to a given cube being looked at
+    cub_dict : dict
+               Maps basic cub names to their associated absolute path cubs
     Returns
     -------
@@ -467,7 +441,7 @@ def fix_sample_line(record, serial_dict, extension, cub_path):
     """
     # Cube location to load
-    cube = pvl.load(os.path.join(cub_path, serial_dict[record['serialnumber']] + extension))
+    cube = pvl.load(cub_dict[serial_dict[record['serialnumber']]])
     line_size = find_in_dict(cube, 'Lines')
     sample_size = find_in_dict(cube, 'Samples')
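
fix_sample_line now resolves the cube directly through cub_dict and reads its label with pvl. A runnable sketch of the label access, using a minimal inline PVL label in place of a real .cub file (the dimensions are made up, and the find_in_dict import path is assumed to be plio.utils.utils):

import pvl
from plio.utils.utils import find_in_dict

# Minimal stand-in for an ISIS cube label; only the pieces read here are included.
label = pvl.loads("""
Object = IsisCube
  Object = Core
    Group = Dimensions
      Samples = 5056
      Lines   = 1056
      Bands   = 1
    End_Group
  End_Object
End_Object
End
""")

line_size = find_in_dict(label, 'Lines')
sample_size = find_in_dict(label, 'Samples')
print(line_size, sample_size)  # 1056 5056
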
......
@@ -181,3 +181,24 @@ def lookup(df, lookupfile=None, lookupdf=None, sep=',', skiprows=1, left_on='scl
     # combine the df and the new metadata
     df = pd.concat([metadata, df], axis=1)
     return df
+def split_all_ext(path):
+    """
+    General function for removing all potential extensions from a file.
+    Parameters
+    ----------
+    path : str
+           Path or file name with potential extension
+    Returns
+    -------
+    base : str
+           Path or file name with all potential extensions removed
+    """
+    base, ext = os.path.splitext(path)
+    while len(ext) != 0:
+        base, ext = os.path.splitext(base)
+    return base
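
A quick usage sketch of the new helper with hypothetical file names, showing that it strips every trailing extension rather than only the last one:

from plio.utils.utils import split_all_ext

print(split_all_ext('AS15_M_0414.lev1.cub'))               # AS15_M_0414
print(split_all_ext('/data/apollo/AS15_M_0414.lev1.cub'))  # /data/apollo/AS15_M_0414
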