Skip to content
Snippets Groups Projects
Commit 3477f2b2 authored by Tyler Thatcher's avatar Tyler Thatcher
Browse files

Added covariance matrix and generic paths

parents bebb21e5 dbc1207a
No related branches found
No related tags found
No related merge requests found
...@@ -37,7 +37,7 @@ install: ...@@ -37,7 +37,7 @@ install:
- conda config --add channels conda-forge - conda config --add channels conda-forge
- conda install -q gdal h5py pandas sqlalchemy pyyaml networkx affine protobuf scipy pvl - conda install -q gdal h5py pandas sqlalchemy pyyaml networkx affine protobuf scipy pvl
# Development installation # Development installation
- conda install -q pytest pytest-cov sh - conda install -q pytest pytest-cov sh coveralls nbsphinx
script: script:
- pytest --cov=plio - pytest --cov=plio
...@@ -51,7 +51,8 @@ after_success: ...@@ -51,7 +51,8 @@ after_success:
- conda build --token $CONDA_UPLOAD_TOKEN --python $PYTHON_VERSION recipe -q - conda build --token $CONDA_UPLOAD_TOKEN --python $PYTHON_VERSION recipe -q
# Docs to gh-pages # Docs to gh-pages
- source activate test_env # Reactivate the env to have all deps installed. - source activate test # Reactivate the env to have all deps installed.
- pip install travis-sphinx
- travis-sphinx build --source=docs --nowarn # The sphinx build script - travis-sphinx build --source=docs --nowarn # The sphinx build script
- travis-sphinx deploy --branches=dev - travis-sphinx deploy --branches=dev
......
import os
import numpy as np
# Reads a .atf file and outputs all of the
# .ipf, .gpf, .sup, .prj, and path to locate the
# .atf file (should be the same as all others)
def read_atf(atf_file):
    """
    Read a socet .atf project file and collect the .ipf, .gpf, .sup,
    .prj, and .atf file references it contains.

    Parameters
    ----------
    atf_file : str
        Full path to a socet .atf file

    Returns
    -------
    files_dict : dict
        Mapping of atf keywords to their file values, plus:
        'IMAGE_IPF' -> list of all .ipf files,
        'IMAGE_SUP' -> list of all .sup files,
        'PATH'      -> absolute directory containing the atf file
    """
    # Extensions of interest; tested on the stripped line so the final
    # line of the file is matched even without a trailing newline
    # (the previous ``line[-4:-1]`` check silently missed that case).
    extensions = ('prj', 'gpf', 'sup', 'ipf', 'atf')

    ipf = []
    sup = []
    pairs = []

    with open(atf_file) as f:
        for line in f:
            if not line.strip().endswith(extensions):
                continue
            # Each matching line is "<KEYWORD> <filename>".
            tokens = line.strip().split(' ')
            # Collect all the IPF files
            if tokens[1].endswith('.ipf'):
                ipf.append(tokens[1])
            # Collect all the SUP files
            if tokens[1].endswith('.sup'):
                sup.append(tokens[1])
            pairs.append(tokens)

    # Build a dict of the keyword/file pairs for GPF, PRJ, IPF, and ATF
    files_dict = dict(pairs)
    # IMAGE_IPF/IMAGE_SUP map to the full lists rather than the last entry
    files_dict['IMAGE_IPF'] = ipf
    files_dict['IMAGE_SUP'] = sup
    # Record where the atf lives so relative file names can be resolved
    files_dict['PATH'] = os.path.dirname(os.path.abspath(atf_file))
    return files_dict
#!/usr/bin/env python
import argparse
import os
import sys
import warnings
import csv
import numpy as np
from plio.examples import get_path
from plio.io.io_bae import read_atf, read_gpf, read_ipf
from plio.spatial.transformations import *
import plio.io.io_controlnetwork as cn
import pandas as pd
def parse_args():
    """Build the socet2isis argument parser and parse the command line."""
    parser = argparse.ArgumentParser()

    # Required positional arguments, declared as (name, help) pairs.
    positional = (
        ('at_file', 'Path to the .atf file for a project.'),
        ('cub_file_path', 'Path to cube files related to ipf files.'),
        ('cub_ipf_map', 'Path to map file for all ipfs and cubes.'),
    )
    for name, help_text in positional:
        parser.add_argument(name, help=help_text)

    # Optional output directory; defaults to None when omitted.
    parser.add_argument('--outpath',
                        help='Directory for the control network to be output to.',
                        required=False)

    return parser.parse_args()
def main(args):
    """
    Convert a socet project described by an .atf file into an ISIS
    control network (.net) file.

    Parameters
    ----------
    args : object
        Parsed argparse namespace with at_file, cub_file_path,
        cub_ipf_map, and optional outpath attributes.
    """
    atf_path = args.at_file
    # The control network file name is the atf name without its extension.
    cnet_name = os.path.split(os.path.splitext(atf_path)[0])[1]
    cube_dir = args.cub_file_path

    # Fall back to the atf file's directory when no outpath was given.
    out_dir = args.outpath if args.outpath else os.path.split(atf_path)[0]

    # Load the ipf -> cube mapping, one "ipf,cub" pair per csv row.
    with open(args.cub_ipf_map) as map_file:
        rows = csv.reader(map_file, delimiter=',')
        image_dict = dict((row[0], row[1]) for row in rows)

    # Read in and set up the atf dict of information.
    atf_dict = read_atf(atf_path)

    # Resolve the gpf file and every ipf file relative to the project path.
    gpf_file = os.path.join(atf_dict['PATH'], atf_dict['GP_FILE'])
    ipf_list = [os.path.join(atf_dict['PATH'], name)
                for name in atf_dict['IMAGE_IPF']]

    # Read the gpf file and ipf file(s) into separate dataframes.
    gpf_df = read_gpf(gpf_file)
    ipf_df = read_ipf(ipf_list)

    # Warn about any ipf point ids that have no gpf counterpart.
    gpf_points = pd.Index(pd.unique(gpf_df['point_id']))
    ipf_points = pd.Index(pd.unique(ipf_df['pt_id']))
    missing = ipf_points.difference(gpf_points)
    if len(missing) != 0:
        warnings.warn("The following points found in ipf files missing from gpf file: " +
                      "\n\n{}\n\n".format("\n".join(missing)) +
                      "Continuing, but these points will be missing from the control " +
                      "network.", stacklevel=3)

    # Join the measures (ipf) onto their ground points (gpf).
    socet_df = ipf_df.merge(gpf_df, left_on='pt_id', right_on='point_id')

    # Convert the merged dataframe into ISIS-compatible values in place.
    apply_transformations(atf_dict, socet_df)

    # Rename socet columns to the names io_controlnetwork expects.
    column_remap = {'l.': 'y', 's.': 'x',
                    'res_l': 'LineResidual', 'res_s': 'SampleResidual', 'known': 'Type',
                    'lat_Y_North': 'AprioriY', 'long_X_East': 'AprioriX', 'ht': 'AprioriZ',
                    'sig0': 'AprioriLatitudeSigma', 'sig1': 'AprioriLongitudeSigma',
                    'sig2': 'AprioriRadiusSigma'}
    socet_df.rename(columns=column_remap, inplace=True)

    images = pd.unique(socet_df['ipf_file'])
    serial_dict = serial_numbers(image_dict, cube_dir)

    # Write the control network.
    cn.to_isis(os.path.join(out_dir, cnet_name + '.net'), socet_df, serial_dict)
# Script entry point: parse the CLI arguments and build the control network.
if __name__ == '__main__':
    main(parse_args())
No preview for this file type
Object = IsisCube
Object = Core
StartByte = 65537
Format = Tile
TileSamples = 1000
TileLines = 1024
Group = Dimensions
Samples = 5000
Lines = 7168
Bands = 1
End_Group
Group = Pixels
Type = SignedWord
ByteOrder = Lsb
Base = 0.0
Multiplier = 1.0
End_Group
End_Object
Group = Instrument
SpacecraftName = Mars_Reconnaissance_Orbiter
InstrumentId = CTX
TargetName = Mars
MissionPhaseName = PSP
StartTime = 2008-09-17T05:08:10.820
SpacecraftClockCount = 0906095311:038
OffsetModeId = 196/190/181
LineExposureDuration = 1.877 <MSEC>
FocalPlaneTemperature = 293.4 <K>
SampleBitModeId = SQROOT
SpatialSumming = 1
SampleFirstPixel = 0
End_Group
Group = Archive
DataSetId = MRO-M-CTX-2-EDR-L0-V1.0
ProductId = B01_010045_1878_XN_07N205W
ProducerId = MRO_CTX_TEAM
ProductCreationTime = 2009-04-13T19:24:46
OrbitNumber = 10045
End_Group
Group = BandBin
FilterName = BroadBand
Center = 0.65 <micrometers>
Width = 0.15 <micrometers>
End_Group
Group = Kernels
NaifFrameCode = -74021
LeapSecond = $base/kernels/lsk/naif0012.tls
TargetAttitudeShape = $base/kernels/pck/pck00009.tpc
TargetPosition = (Table, $base/kernels/spk/de405.bsp)
InstrumentPointing = (Table,
$mro/kernels/ck/mro_sc_psp_080916_080922.bc,
$mro/kernels/fk/mro_v15.tf)
Instrument = Null
SpacecraftClock = $mro/kernels/sclk/MRO_SCLKSCET.00064.65536.tsc
InstrumentPosition = (Table, $mro/kernels/spk/mro_psp8.bsp)
InstrumentAddendum = $mro/kernels/iak/mroctxAddendum005.ti
ShapeModel = $base/dems/molaMarsPlanetaryRadius0005.cub
InstrumentPositionQuality = Reconstructed
InstrumentPointingQuality = Reconstructed
CameraVersion = 1
End_Group
End_Object
Object = Label
Bytes = 65536
End_Object
P01_001540_1889_XI_08N204W,P01_001540_1889_XI_08N204W.lev1.cub
P01_001606_1897_XI_09N203W,P01_001606_1897_XI_09N203W.lev1.cub
P02_001804_1889_XI_08N204W,P02_001804_1889_XI_08N204W.lev1.cub
P03_002226_1895_XI_09N203W,P03_002226_1895_XI_09N203W.lev1.cub
P03_002371_1888_XI_08N204W,P03_002371_1888_XI_08N204W.lev1.cub
P19_008344_1894_XN_09N203W,P19_008344_1894_XN_09N203W.lev1.cub
P20_008845_1894_XN_09N203W,P20_008845_1894_XN_09N203W.lev1.cub
...@@ -271,3 +271,63 @@ def save_gpf(df, output_file): ...@@ -271,3 +271,63 @@ def save_gpf(df, output_file):
outGPF.close() outGPF.close()
return return
def read_atf(atf_file):
    """
    Reads a .atf file and outputs all of the
    .ipf, .gpf, .sup, .prj, and path to locate the
    .atf file (should be the same as all others)

    Parameters
    ----------
    atf_file : str
        Full path to a socet atf file

    Returns
    -------
    files_dict : dict
        Dict of files and data associated with a socet
        project
    """
    matched = []
    with open(atf_file) as f:
        # Keep every line that references a PRJ, GPF, SUP, IPF or ATF file.
        for line in f:
            if line[-4:-1] in ('prj', 'gpf', 'sup', 'ipf', 'atf'):
                matched.append(line)

    ipf = []
    sup = []
    entries = []
    # Split each matched line into its keyword/filename tokens.
    for raw in matched:
        tokens = raw.strip().split(' ')
        # Collect the IPF files.
        if tokens[1].endswith('.ipf'):
            ipf.append(tokens[1])
        # Collect the SUP files.
        if tokens[1].endswith('.sup'):
            sup.append(tokens[1])
        entries.append(tokens)

    # Dict of keyword -> file for GPF, PRJ, IPF, and ATF entries.
    files_dict = dict(entries)
    # IMAGE_IPF / IMAGE_SUP map to the full lists of images.
    files_dict['IMAGE_IPF'] = ipf
    files_dict['IMAGE_SUP'] = sup
    # Absolute directory of the atf file itself.
    files_dict['PATH'] = os.path.dirname(os.path.abspath(atf_file))
    return files_dict
...@@ -41,6 +41,10 @@ class Translations(Base): # pragma: no cover ...@@ -41,6 +41,10 @@ class Translations(Base): # pragma: no cover
self.instrument = instrument self.instrument = instrument
self.translation = translation self.translation = translation
def __repr__(self):
    # Human-readable summary of this translation row.
    template = 'Mission: {}\nInstrument: {}\nTranslation: {}'
    return template.format(self.mission,
                           self.instrument,
                           self.translation)
class StringToMission(Base): # pragma: no cover class StringToMission(Base): # pragma: no cover
""" """
......
...@@ -82,6 +82,7 @@ def generate_serial_number(label): ...@@ -82,6 +82,7 @@ def generate_serial_number(label):
# Sort the keys to ensure proper iteration order # Sort the keys to ensure proper iteration order
keys = sorted(translation.keys()) keys = sorted(translation.keys())
for k in keys: for k in keys:
try: try:
group = translation[k] group = translation[k]
......
import os import os
import sys import sys
import unittest
from plio.examples import get_path import pytest
sys.path.insert(0, os.path.abspath('..'))
from plio.io import isis_serial_number from plio.io import isis_serial_number
from plio.examples import get_path
@pytest.fixture
def apollo_lbl():
return get_path('Test_PVL.lbl')
@pytest.fixture
def ctx_lbl():
return get_path('ctx.pvl')
class TestIsisSerials(unittest.TestCase): @pytest.mark.parametrize("label, expected", [(apollo_lbl(), 'APOLLO15/METRIC/1971-07-31T14:02:27.179'),
(ctx_lbl(),'MRO/CTX/0906095311:038')
])
def test_generate_serial_number(label, expected):
serial = isis_serial_number.generate_serial_number(label)
assert serial == expected
def test_generate_serial_number(self):
label = get_path('Test_PVL.lbl') \ No newline at end of file
serial = isis_serial_number.generate_serial_number(label)
self.assertEqual('APOLLO15/METRIC/1971-07-31T14:02:27.179', serial)
\ No newline at end of file
import os
import math
import pyproj
import plio.io.isis_serial_number as sn
def line_sample_size(record, path):
    """
    Converts columns l. and s. to sample size, line size, and generates an
    image index

    Parameters
    ----------
    record : object
        Pandas series object

    path : str
        Path to the associated sup files for a socet project

    Returns
    -------
    : list
        A list of sample_size, line_size, and img_index

    Raises
    ------
    ValueError
        If the line or sample count read from the sup file is not
        a positive integer.
    """
    with open(os.path.join(path, record['ipf_file'] + '.sup')) as f:
        for i, line in enumerate(f):
            if i == 2:
                # Third line holds a backslash-separated image path;
                # keep only the file name stem as the image index.
                img_index = line.split('\\')
                img_index = img_index[-1].strip()
                img_index = img_index.split('.')[0]

            if i == 3:
                line_size = line.split(' ')[-1].strip()
                # Explicit raise instead of assert: asserts are stripped
                # under ``python -O`` and the old message mislabeled zero
                # as "negative".
                if int(line_size) <= 0:
                    raise ValueError("Line number {} from {} is not a positive number: Invalid Data"
                                     .format(line_size, record['ipf_file']))

            if i == 4:
                sample_size = line.split(' ')[-1].strip()
                if int(sample_size) <= 0:
                    raise ValueError("Sample number {} from {} is not a positive number: Invalid Data"
                                     .format(sample_size, record['ipf_file']))
                break

    # Convert the counts to center-based coordinates offset by the record.
    line_size = int(line_size) / 2.0 + record['l.'] + 1
    sample_size = int(sample_size) / 2.0 + record['s.'] + 1
    return sample_size, line_size, img_index
def known(record):
    """
    Converts the known field from a socet dataframe into the
    isis point_type column

    Parameters
    ----------
    record : object
        Pandas series object

    Returns
    -------
    : str
        String representation of a known field
    """
    point_type = record['known']
    if point_type == 0:
        return 'Free'
    # Values 1, 2, and 3 all map to a constrained point.
    if point_type in (1, 2, 3):
        return 'Constrained'
def to_360(num):
    """
    Transforms a given number into 0 - 360 space

    Parameters
    ----------
    num : int
        A given integer

    Returns
    -------
    : int
        num moduloed by 360
    """
    wrapped = num % 360
    return wrapped
def oc2og(dlat, dMajorRadius, dMinorRadius):
    """
    Ocentric to ographic latitudes

    Parameters
    ----------
    dlat : float
        Latitude to convert

    dMajorRadius : float
        Radius from the center of the body to the equater

    dMinorRadius : float
        Radius from the pole to the center of mass

    Returns
    -------
    dlat : float
        Converted latitude into ographic space; on conversion failure
        the (possibly partially converted) input value is returned.
    """
    try:
        dlat = math.radians(dlat)
        dlat = math.atan(((dMajorRadius / dMinorRadius) ** 2) * (math.tan(dlat)))
        dlat = math.degrees(dlat)
    # Narrowed from a bare ``except`` so KeyboardInterrupt/SystemExit and
    # unexpected programming errors are no longer silently swallowed.
    except (TypeError, ValueError, ZeroDivisionError, OverflowError):
        print ("Error in oc2og conversion")
    return dlat
def og2oc(dlat, dMajorRadius, dMinorRadius):
    """
    Ographic to ocentric latitudes

    Parameters
    ----------
    dlat : float
        Latitude to convert

    dMajorRadius : float
        Radius from the center of the body to the equater

    dMinorRadius : float
        Radius from the pole to the center of mass

    Returns
    -------
    dlat : float
        Converted latitude into ocentric space; on conversion failure
        the (possibly partially converted) input value is returned.
    """
    try:
        dlat = math.radians(dlat)
        dlat = math.atan((math.tan(dlat) / ((dMajorRadius / dMinorRadius) ** 2)))
        dlat = math.degrees(dlat)
    # Narrowed from a bare ``except`` so KeyboardInterrupt/SystemExit and
    # unexpected programming errors are no longer silently swallowed.
    except (TypeError, ValueError, ZeroDivisionError, OverflowError):
        print ("Error in og2oc conversion")
    return dlat
def get_axis(file):
    """
    Gets eRadius and pRadius from a .prj file

    Parameters
    ----------
    file : str
        file with path to a given socet project file

    Returns
    -------
    : list
        A list of the eRadius and pRadius of the project file
    """
    from collections import defaultdict

    keyword_values = defaultdict(list)
    with open(file) as f:
        # Each prj line is "<KEYWORD> ... <value>"; index by keyword.
        for line in f:
            parts = line.strip().split(' ')
            keyword_values[parts[0]].append(parts[-1])

    eRadius = float(keyword_values['A_EARTH'][0])
    # Polar radius derived from the equatorial radius and E_EARTH term.
    pRadius = eRadius * (1 - float(keyword_values['E_EARTH'][0]))
    return eRadius, pRadius
def lat_ISIS_coord(record, semi_major, semi_minor):
    """
    Function to convert lat_Y_North to ISIS_lat

    Parameters
    ----------
    record : object
        Pandas series object

    semi_major : float
        Radius from the center of the body to the equater

    semi_minor : float
        Radius from the pole to the center of mass

    Returns
    -------
    coord_360 : float
        Converted latitude into ocentric space, and mapped
        into 0 to 360
    """
    ographic_lat = record['lat_Y_North']
    # Convert to ocentric latitude, then wrap into the 0-360 domain.
    return to_360(og2oc(ographic_lat, semi_major, semi_minor))
def lon_ISIS_coord(record, semi_major, semi_minor):
    """
    Function to convert long_X_East to ISIS_lon

    Parameters
    ----------
    record : object
        Pandas series object

    semi_major : float
        Radius from the center of the body to the equater

    semi_minor : float
        Radius from the pole to the center of mass

    Returns
    -------
    coord_360 : float
        Converted longitude into ocentric space, and mapped
        into 0 to 360
    """
    ographic_lon = record['long_X_East']
    # Convert to ocentric space, then wrap into the 0-360 domain.
    return to_360(og2oc(ographic_lon, semi_major, semi_minor))
def body_fix(record, semi_major, semi_minor):
    """
    Transforms latitude, longitude, and height of a socet point into
    a body fixed point

    Parameters
    ----------
    record : object
        Pandas series object

    semi_major : float
        Radius from the center of the body to the equater

    semi_minor : float
        Radius from the pole to the center of mass

    Returns
    -------
    : list
        Body fixed x, y, z coordinates of the point
    """
    # Geocentric (body-fixed) and lat/lon projections for this ellipsoid.
    ecef = pyproj.Proj(proj='geocent', a=semi_major, b=semi_minor)
    lla = pyproj.Proj(proj='latlon', a=semi_major, b=semi_minor)
    x, y, z = pyproj.transform(lla, ecef,
                               record['long_X_East'],
                               record['lat_Y_North'],
                               record['ht'])
    return x, y, z
def apply_transformations(atf_dict, df):
    """
    Takes a atf dictionary and a socet dataframe and applies the necessary
    transformations to convert that dataframe into a isis compatible
    dataframe

    Parameters
    ----------
    atf_dict : dict
        Dictionary containing information from an atf file

    df : object
        Pandas dataframe object
    """
    # The PROJECT entry uses backslash separators; split on '\\' to get
    # the bare prj file name and resolve it against the project PATH.
    prj_file = os.path.join(atf_dict['PATH'], atf_dict['PROJECT'].split('\\')[-1])
    eRadius, pRadius = get_axis(prj_file)
    # NOTE: order matters below — line_sample_size reads the raw 'l.'/'s.'
    # values before this assignment overwrites them, and the lat/long
    # columns must be converted to ocentric 0-360 space before body_fix
    # consumes them on the last line.
    df['s.'], df['l.'], df['image_index'] = (zip(*df.apply(line_sample_size, path = atf_dict['PATH'], axis=1)))
    df['known'] = df.apply(known, axis=1)
    df['lat_Y_North'] = df.apply(lat_ISIS_coord, semi_major = eRadius, semi_minor = pRadius, axis=1)
    df['long_X_East'] = df.apply(lon_ISIS_coord, semi_major = eRadius, semi_minor = pRadius, axis=1)
    df['long_X_East'], df['lat_Y_North'], df['ht'] = zip(*df.apply(body_fix, semi_major = eRadius, semi_minor = pRadius, axis = 1))
def serial_numbers(image_dict, path):
    """
    Creates a dict of serial numbers with the cub being the key

    Parameters
    ----------
    image_dict : dict
        Maps each key to a cube file name
    path : str
        Directory containing the cube files

    Returns
    -------
    serial_dict : dict
        Maps each key of image_dict to the ISIS serial number
        generated from its cube file
    """
    return {key: sn.generate_serial_number(os.path.join(path, cube))
            for key, cube in image_dict.items()}
...@@ -36,6 +36,7 @@ def setup_package(): ...@@ -36,6 +36,7 @@ def setup_package():
package_data={'plio' : list(examples) + ['data/*.db', 'data/*.py'] +\ package_data={'plio' : list(examples) + ['data/*.db', 'data/*.py'] +\
['sqlalchemy_json/*.py', 'sqlalchemy_json/LICENSE']}, ['sqlalchemy_json/*.py', 'sqlalchemy_json/LICENSE']},
zip_safe=False, zip_safe=False,
scripts=['bin/socet2isis'],
install_requires=[ install_requires=[
'gdal', 'gdal',
'numpy', 'numpy',
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment