Commit 6f1d655e authored by Adam Paquette

Merge branch 'master' of https://github.com/tthatcher95/plio

parents f099e46b be9a72a3
%% Cell type:code id: tags:
``` python
import os
import sys
from functools import singledispatch
import warnings
import pandas as pd
import numpy as np
sys.path.insert(0, "/home/tthatcher/Desktop/Projects/Plio/plio")
from plio.examples import get_path
from plio.io.io_bae import read_gpf, read_ipf
```
%% Cell type:code id: tags:
``` python
# Reads an .atf file and returns the .ipf, .gpf, .sup, and .prj file names
# it references, along with the path of the .atf file itself
# (which should be the same for all of them)
def read_atf(atf_file):
    with open(atf_file) as f:
        files = []
        ipf = []
        sup = []
        files_dict = []
        # Grabs every PRJ, GPF, SUP, and IPF entry from the ATF file
        for line in f:
            if line[-4:-1] in ('prj', 'gpf', 'sup', 'ipf', 'atf'):
                files.append(line)
        files = np.array(files)
        # Creates appropriate arrays for certain files in the right format
        for file in files:
            file = file.strip()
            file = file.split(' ')
            # Grabs all the IPF files
            if file[1].endswith('.ipf'):
                ipf.append(file[1])
            # Grabs all the SUP files
            if file[1].endswith('.sup'):
                sup.append(file[1])
            files_dict.append(file)
        # Creates a dict out of the file lists for GPF, PRJ, IPF, and ATF
        files_dict = dict(files_dict)
        # Sets the value of IMAGE_IPF to all IPF images
        files_dict['IMAGE_IPF'] = ipf
        # Sets the value of IMAGE_SUP to all SUP images
        files_dict['IMAGE_SUP'] = sup
        # Sets the value of PATH to the path of the ATF file
        files_dict['PATH'] = os.path.dirname(os.path.abspath(atf_file))
        return files_dict
```
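%% Cell type:markdown id: tags:
A quick, illustrative look at what `read_atf` produces (not part of the original notebook). `IMAGE_IPF`, `IMAGE_SUP`, and `PATH` are set explicitly in the function above; every other key comes straight from the key/value pairs in the .atf file. The same example file is loaded again in the next cell, so this is purely a sanity check.
%% Cell type:code id: tags:
``` python
# Illustrative only: inspect the dictionary returned by read_atf.
example_dict = read_atf(get_path('CTX_Athabasca_Middle_step0.atf'))
for key, value in example_dict.items():
    print(key, ':', value)
```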
%% Cell type:code id: tags:
``` python
atf_dict = read_atf(get_path('CTX_Athabasca_Middle_step0.atf'))
gpf_file = os.path.join(atf_dict['PATH'], atf_dict['GP_FILE'])
ipf_list = [os.path.join(atf_dict['PATH'], i) for i in atf_dict['IMAGE_IPF']]

gpf_df = read_gpf(gpf_file).set_index('point_id')
ipf_df = read_ipf(ipf_list).set_index('pt_id')

point_diff = ipf_df.index.difference(gpf_df.index)
if len(point_diff) != 0:
    warnings.warn("The following points found in ipf files missing from gpf file: \n\n{}. \
\n\nContinuing, but these points will be missing from the control network".format(list(point_diff)))

new_df = ipf_df.merge(gpf_df, left_index=True, right_index=True)
```
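%% Cell type:markdown id: tags:
A minimal sanity check on the merge (not in the original notebook): the inner merge on the point id drops any IPF measures whose point is missing from the GPF file, so the difference between the row counts below should correspond to the points listed in the warning above, if any.
%% Cell type:code id: tags:
``` python
# Illustrative sanity check: compare row counts before and after the merge.
print('ipf rows:', len(ipf_df), '| gpf points:', len(gpf_df), '| merged rows:', len(new_df))
new_df.head()
```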
%% Cell type:code id: tags:
``` python
import math
import pyproj
image_dict = {'P01_001540_1889_XI_08N204W' : 'P01_001540_1889_XI_08N204W.lev1.cub',
              'P01_001606_1897_XI_09N203W' : 'P01_001606_1897_XI_09N203W.lev1.cub',
              'P02_001804_1889_XI_08N204W' : 'P02_001804_1889_XI_08N204W.lev1.cub',
              'P03_002226_1895_XI_09N203W' : 'P03_002226_1895_XI_09N203W.lev1.cub',
              'P03_002371_1888_XI_08N204W' : 'P03_002371_1888_XI_08N204W.lev1.cub',
              'P19_008344_1894_XN_09N203W' : 'P19_008344_1894_XN_09N203W.lev1.cub',
              'P20_008845_1894_XN_09N203W' : 'P20_008845_1894_XN_09N203W.lev1.cub'}
# converts the l. and s. columns to ISIS line/sample coordinates
def line_sample_size(record):
    with open(atf_dict['PATH'] + '/' + record['ipf_file'] + '.sup') as f:
        for i, line in enumerate(f):
            if i == 2:
                img_index = line.split('\\')
                img_index = img_index[-1].strip()
                img_index = img_index.split('.')[0]
                img_index = image_dict[img_index]
            if i == 3:
                line_size = line.split(' ')
                line_size = line_size[-1].strip()
                assert int(line_size) > 0, "Line number {} from {} is a negative number: Invalid Data".format(line_size, record['ipf_file'])
            if i == 4:
                sample_size = line.split(' ')
                sample_size = sample_size[-1].strip()
                assert int(sample_size) > 0, "Sample number {} from {} is a negative number: Invalid Data".format(sample_size, record['ipf_file'])
                break

        line_size = int(line_size)/2.0 + record['l.'] + 1
        sample_size = int(sample_size)/2.0 + record['s.'] + 1
        return sample_size, line_size, img_index
# converts known to ISIS keywords
def known(record):
    if record['known'] == 0:
        return 'Free'
    elif record['known'] == 1 or record['known'] == 2 or record['known'] == 3:
        return 'Constrained'

# converts +/- 180 system to 0 - 360 system
def to_360(num):
    return num % 360

# ocentric to ographic latitudes
def oc2og(dlat, dMajorRadius, dMinorRadius):
    try:
        dlat = math.radians(dlat)
        dlat = math.atan(((dMajorRadius / dMinorRadius)**2) * (math.tan(dlat)))
        dlat = math.degrees(dlat)
    except:
        print("Error in oc2og conversion")
    return dlat

# ographic to ocentric latitudes
def og2oc(dlat, dMajorRadius, dMinorRadius):
    try:
        dlat = math.radians(dlat)
        dlat = math.atan((math.tan(dlat) / ((dMajorRadius / dMinorRadius)**2)))
        dlat = math.degrees(dlat)
    except:
        print("Error in og2oc conversion")
    return dlat

# gets eRadius and pRadius from a .prj file
def get_axis(file):
    with open(atf_dict['PATH'] + '/' + file) as f:
        from collections import defaultdict
        files = defaultdict(list)
        for line in f:
            ext = line.strip().split(' ')
            files[ext[0]].append(ext[-1])
        eRadius = float(files['A_EARTH'][0])
        pRadius = eRadius * (1 - float(files['E_EARTH'][0]))
        return eRadius, pRadius
# function to convert lat_Y_North to ISIS_lat
def lat_ISIS_coord(record, semi_major, semi_minor):
    ocentric_coord = og2oc(record['lat_Y_North'], semi_major, semi_minor)
    coord_360 = to_360(ocentric_coord)
    return coord_360

# function to convert long_X_East to ISIS_lon
def lon_ISIS_coord(record, semi_major, semi_minor):
    ocentric_coord = og2oc(record['long_X_East'], semi_major, semi_minor)
    coord_360 = to_360(ocentric_coord)
    return coord_360

# converts lat_Y_North, long_X_East, and ht to body-fixed coordinates via pyproj
def body_fix(record, semi_major, semi_minor):
    ecef = pyproj.Proj(proj='geocent', a=semi_major, b=semi_minor)
    lla = pyproj.Proj(proj='latlon', a=semi_major, b=semi_minor)
    lon, lat, height = pyproj.transform(lla, ecef, record['long_X_East'], record['lat_Y_North'], record['ht'])
    return lon, lat, height

# applies the transformations above to the merged dataframe columns
def socet2isis(prj_file):
    eRadius, pRadius = get_axis(prj_file)
    new_df['s.'], new_df['l.'], new_df['image_index'] = (zip(*new_df.apply(line_sample_size, axis=1)))
    new_df['known'] = new_df.apply(known, axis=1)
    new_df['lat_Y_North'] = new_df.apply(lat_ISIS_coord, semi_major = eRadius, semi_minor = pRadius, axis=1)
    new_df['long_X_East'] = new_df.apply(lon_ISIS_coord, semi_major = eRadius, semi_minor = pRadius, axis=1)
    new_df['long_X_East'], new_df['lat_Y_North'], new_df['ht'] = zip(*new_df.apply(body_fix, semi_major = eRadius, semi_minor = pRadius, axis = 1))

socet2isis('CTX_Athabasca_Middle.prj')
```
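%% Cell type:markdown id: tags:
For reference, `oc2og` and `og2oc` above implement the standard planetocentric/planetographic latitude conversions for an equatorial radius $a$ and polar radius $b$, and `to_360` wraps longitudes into the 0 - 360 system used by ISIS:

$$\phi_{og} = \arctan\!\left[\left(\frac{a}{b}\right)^{2}\tan\phi_{oc}\right], \qquad \phi_{oc} = \arctan\!\left[\left(\frac{b}{a}\right)^{2}\tan\phi_{og}\right], \qquad \lambda_{360} = \lambda \bmod 360$$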
%% Cell type:code id: tags:
``` python
new_df['image_index']
```
%% Cell type:code id: tags:
``` python
column_remap = {'l.': 'x', 's.': 'y',
                'res_l': 'LineResidual', 'res_s': 'SampleResidual', 'known': 'Type',
                'lat_Y_North': 'AprioriY', 'long_X_East': 'AprioriX', 'ht': 'AprioriZ',
                'sig0': 'AprioriLatitudeSigma', 'sig1': 'AprioriLongitudeSigma', 'sig2': 'AprioriRadiusSigma'}

new_df.rename(columns=column_remap, inplace=True)
new_df
```
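%% Cell type:markdown id: tags:
A quick, illustrative peek (not in the original notebook) at the remapped ISIS-style columns, assuming the rename above succeeded; these are the names the control network writer consumes below.
%% Cell type:code id: tags:
``` python
# Illustrative only: show the renamed apriori coordinate and type columns.
new_df[['x', 'y', 'Type', 'AprioriX', 'AprioriY', 'AprioriZ']].head()
```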
%% Cell type:code id: tags:
``` python
import plio.io.io_controlnetwork as cn
import plio.io.isis_serial_number as sn
# creates a dict of serial numbers with the cub being the key
def serial_numbers():
    serial_dict = {}
    image_dict = {'P01_001540_1889_XI_08N204W' : 'P01_001540_1889_XI_08N204W.lev1.cub',
                  'P01_001606_1897_XI_09N203W' : 'P01_001606_1897_XI_09N203W.lev1.cub',
                  'P02_001804_1889_XI_08N204W' : 'P02_001804_1889_XI_08N204W.lev1.cub',
                  'P03_002226_1895_XI_09N203W' : 'P03_002226_1895_XI_09N203W.lev1.cub',
                  'P03_002371_1888_XI_08N204W' : 'P03_002371_1888_XI_08N204W.lev1.cub',
                  'P19_008344_1894_XN_09N203W' : 'P19_008344_1894_XN_09N203W.lev1.cub',
                  'P20_008845_1894_XN_09N203W' : 'P20_008845_1894_XN_09N203W.lev1.cub'}

    for key in image_dict:
        serial_dict[image_dict[key]] = sn.generate_serial_number('/home/tthatcher/Desktop/Projects/Plio/' + image_dict[key])
    return serial_dict

# serial number dictionary
serial_dict = serial_numbers()
print(serial_dict)

# creates the control network
cnet = cn.to_isis('/home/tthatcher/Desktop/Projects/Plio/cn.csv', new_df, serial_dict)
```
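%% Cell type:markdown id: tags:
A small, illustrative consistency check (not in the original notebook): every `ipf_file` referenced in `new_df` should map through `image_dict` (defined in the conversion cell above) to a cube name, and therefore to an entry in `serial_dict`, for the control network written by `cn.to_isis` to be complete.
%% Cell type:code id: tags:
``` python
# Illustrative only: any ipf_file without an image_dict entry would have no
# serial number in serial_dict and would be missing from the network.
missing = set(new_df['ipf_file']) - set(image_dict.keys())
print('images without a serial number mapping:', missing)
```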
%% Cell type:code id: tags:
``` python
@singledispatch
def read_ipf(arg):
    return str(arg)

@read_ipf.register(str)
def read_ipf_str(input_data):
    """
    Read a socet ipf file into a pandas data frame

    Parameters
    ----------
    input_data : str
                 path to an input data file

    Returns
    -------
    df : pd.DataFrame
         containing the ipf data with appropriate column names and indices
    """
    # Check that the number of rows matches the expected number
    with open(input_data, 'r') as f:
        for i, l in enumerate(f):
            if i == 1:
                cnt = int(l)
            elif i == 2:
                col = l
                break

    columns = np.genfromtxt(input_data, skip_header=2, dtype='unicode',
                            max_rows = 1, delimiter = ',')

    # TODO: Add unicode conversion
    d = [line.split() for line in open(input_data, 'r')]
    d = np.hstack(np.array(d[3:]))
    d = d.reshape(-1, 12)

    df = pd.DataFrame(d, columns=columns)
    file = os.path.split(os.path.splitext(input_data)[0])[1]
    df['ipf_file'] = pd.Series(np.full((len(df['pt_id'])), file), index = df.index)

    assert int(cnt) == len(df), 'Dataframe length {} does not match point length {}.'.format(int(cnt), len(df))

    # Soft conversion of numeric types to numerics, allows str in first col for point_id
    df = df.apply(pd.to_numeric, errors='ignore')

    return df

@read_ipf.register(list)
def read_ipf_list(input_data_list):
    """
    Read a set of socet ipf files into a single pandas data frame

    Parameters
    ----------
    input_data_list : list
                      list of paths to a set of input data files

    Returns
    -------
    df : pd.DataFrame
         containing the ipf data with appropriate column names and indices
    """
    frames = []
    for input_file in input_data_list:
        frames.append(read_ipf(input_file))

    df = pd.concat(frames)
    return df
```
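%% Cell type:markdown id: tags:
A short usage sketch of the dispatch behaviour defined above (illustrative, not in the original notebook): a single path string is handled by `read_ipf_str`, while a list of paths is handled by `read_ipf_list`, which concatenates the per-file frames. It reuses `ipf_list`, built from the .atf file earlier in the notebook.
%% Cell type:code id: tags:
``` python
# Dispatch demo: str -> read_ipf_str, list -> read_ipf_list.
single_df = read_ipf(ipf_list[0])
combined_df = read_ipf(ipf_list)
print(len(single_df), len(combined_df))
```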