Commit 1525218b authored by Adam Paquette

Merge branch 'ipf' of https://github.com/acpaquette/plio into ipf

parents 083c74e4 b4b37c37
@@ -37,7 +37,7 @@ install:
- conda config --add channels conda-forge
- conda install -q gdal h5py pandas sqlalchemy pyyaml networkx affine protobuf scipy pvl
# Development installation
- conda install -q pytest pytest-cov sh
- conda install -q pytest pytest-cov sh coveralls nbsphinx
script:
- pytest --cov=plio
@@ -51,7 +51,8 @@ after_success:
- conda build --token $CONDA_UPLOAD_TOKEN --python $PYTHON_VERSION recipe -q
# Docs to gh-pages
- source activate test_env # Reactivate the env to have all deps installed.
- source activate test # Reactivate the env to have all deps installed.
- pip install travis-sphinx
- travis-sphinx build --source=docs --nowarn # The sphinx build script
- travis-sphinx deploy --branches=dev
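For context: the pytest --cov=plio run above produces the coverage data; coveralls, newly added to the development install, is presumably what reports that coverage, and nbsphinx is a Sphinx extension that allows the documentation build to render Jupyter notebooks.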
import os
import warnings
import numpy as np
from plio.examples import get_path
from plio.io.io_bae import read_atf, read_gpf, read_ipf
from plio.spatial.transformations import *
import plio.io.io_controlnetwork as cn
import pandas as pd
# TODO: Change script to potentially handle configuration files
# Setup the at_file and path to cubes
cub_path = '/Volumes/Blueman/'
at_file = get_path('CTX_Athabasca_Middle_step0.atf')
# Define the ipf-to-cube mapping
image_dict = {'P01_001540_1889_XI_08N204W' : 'P01_001540_1889_XI_08N204W.lev1.cub',
'P01_001606_1897_XI_09N203W' : 'P01_001606_1897_XI_09N203W.lev1.cub',
'P02_001804_1889_XI_08N204W' : 'P02_001804_1889_XI_08N204W.lev1.cub',
'P03_002226_1895_XI_09N203W' : 'P03_002226_1895_XI_09N203W.lev1.cub',
'P03_002371_1888_XI_08N204W' : 'P03_002371_1888_XI_08N204W.lev1.cub',
'P19_008344_1894_XN_09N203W' : 'P19_008344_1894_XN_09N203W.lev1.cub',
'P20_008845_1894_XN_09N203W' : 'P20_008845_1894_XN_09N203W.lev1.cub'}
##
# End Config
##
# Read in and setup the atf dict of information
atf_dict = read_atf(at_file)
# Get the gpf and ipf files using atf dict
gpf_file = os.path.join(atf_dict['PATH'], atf_dict['GP_FILE'])
ipf_list = [os.path.join(atf_dict['PATH'], i) for i in atf_dict['IMAGE_IPF']]
# Read in the gpf file and ipf file(s) into separate dataframes
gpf_df = read_gpf(gpf_file)
ipf_df = read_ipf(ipf_list)
# Check for differences between point ids, using each dataframe's
# point ids as a reference
gpf_pt_idx = pd.Index(pd.unique(gpf_df['point_id']))
ipf_pt_idx = pd.Index(pd.unique(ipf_df['pt_id']))
point_diff = ipf_pt_idx.difference(gpf_pt_idx)
if len(point_diff) != 0:
    warnings.warn("The following points found in the ipf files are missing from the gpf file: \n\n{}. \
\n\nContinuing, but these points will be missing from the control network".format(list(point_diff)))
# Merge the two dataframes on their point id columns
socet_df = ipf_df.merge(gpf_df, left_on='pt_id', right_on='point_id')
# Apply the transformations
apply_transformations(atf_dict, socet_df)
# Define column remap for socet dataframe
column_remap = {'l.': 'y', 's.': 'x',
'res_l': 'LineResidual', 'res_s': 'SampleResidual', 'known': 'Type',
'lat_Y_North': 'AprioriY', 'long_X_East': 'AprioriX', 'ht': 'AprioriZ',
'sig0': 'AprioriLatitudeSigma', 'sig1': 'AprioriLongitudeSigma', 'sig2': 'AprioriRadiusSigma'}
# Rename the columns using the column remap above
socet_df.rename(columns = column_remap, inplace=True)
images = pd.unique(socet_df['ipf_file'])
serial_dict = serial_numbers(image_dict, cub_path)
# creates the control network
cn.to_isis('/Volumes/Blueman/test.net', socet_df, serial_dict)
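To make the point-id check and merge in the script above concrete, here is a minimal sketch on synthetic dataframes (the point ids and values are invented for illustration; only the column names come from the script):

import pandas as pd

# Ground points (gpf) and image points (ipf) reference the same ids
# under different column names.
gpf = pd.DataFrame({'point_id': ['pt_a', 'pt_b'], 'ht': [10.0, 12.5]})
ipf = pd.DataFrame({'pt_id': ['pt_a', 'pt_b', 'pt_c'], 'l.': [1.0, 2.0, 3.0]})

# Any ipf point id with no gpf counterpart is reported before merging.
missing = pd.Index(pd.unique(ipf['pt_id'])).difference(pd.Index(pd.unique(gpf['point_id'])))
print(list(missing))   # ['pt_c']

# The inner merge keeps only points present in both files.
merged = ipf.merge(gpf, left_on='pt_id', right_on='point_id')
print(len(merged))     # 2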
#!/usr/bin/env python
import argparse
import os
def parse_args():
    parser = argparse.ArgumentParser()
    # Add args here
    return parser.parse_args()

def main(args):
    print('Do some stuff')

if __name__ == '__main__':
    main(parse_args())
#!/usr/bin/env python
import argparse
import os
import sys
import warnings
import csv
import numpy as np
from plio.examples import get_path
from plio.io.io_bae import read_atf, read_gpf, read_ipf
from plio.spatial.transformations import *
import plio.io.io_controlnetwork as cn
import pandas as pd
def parse_args():
    parser = argparse.ArgumentParser()

    # Add args here
    parser.add_argument('at_file', help='Path to the .atf file for a project.')
    parser.add_argument('cub_file_path', help='Path to cube files related to ipf files.')
    parser.add_argument('cub_ipf_map', help='Path to map file for all ipfs and cubes.')
    parser.add_argument('target_name', help='Name of the target body used in the control net')
    parser.add_argument('--outpath', help='Directory for the control network to be output to.',
                        required=False)
    return parser.parse_args()
def main(args):
    print('Getting some work done')
    # Setup the at_file, path to cubes, and control network out path
    at_file = args.at_file
    cnet_out = os.path.split(os.path.splitext(at_file)[0])[1]
    cub_path = args.cub_file_path

    if args.outpath:
        outpath = args.outpath
    else:
        outpath = os.path.split(at_file)[0]

    with open(args.cub_ipf_map) as cub_ipf_map:
        reader = csv.reader(cub_ipf_map, delimiter=',')
        image_dict = dict([(row[0], row[1]) for row in reader])

    # Read in and setup the atf dict of information
    atf_dict = read_atf(at_file)

    # Get the gpf and ipf files using the atf dict
    gpf_file = os.path.join(atf_dict['PATH'], atf_dict['GP_FILE'])
    ipf_list = [os.path.join(atf_dict['PATH'], i) for i in atf_dict['IMAGE_IPF']]

    # Read in the gpf file and ipf file(s) into separate dataframes
    gpf_df = read_gpf(gpf_file)
    ipf_df = read_ipf(ipf_list)

    # Check for differences between point ids, using each dataframe's
    # point ids as a reference
    gpf_pt_idx = pd.Index(pd.unique(gpf_df['point_id']))
    ipf_pt_idx = pd.Index(pd.unique(ipf_df['pt_id']))
    point_diff = ipf_pt_idx.difference(gpf_pt_idx)

    if len(point_diff) != 0:
        warnings.warn("The following points found in the ipf files are missing from the gpf file: " +
                      "\n\n{}\n\n".format("\n".join(point_diff)) +
                      "Continuing, but these points will be missing from the control " +
                      "network.", stacklevel=3)

    # Merge the two dataframes on their point id columns
    socet_df = ipf_df.merge(gpf_df, left_on='pt_id', right_on='point_id')

    # Apply the transformations
    apply_transformations(atf_dict, socet_df)

    # Define column remap for socet dataframe
    column_remap = {'l.': 'y', 's.': 'x',
                    'res_l': 'LineResidual', 'res_s': 'SampleResidual', 'known': 'Type',
                    'lat_Y_North': 'AprioriY', 'long_X_East': 'AprioriX', 'ht': 'AprioriZ',
                    'sig0': 'AprioriLatitudeSigma', 'sig1': 'AprioriLongitudeSigma',
                    'sig2': 'AprioriRadiusSigma'}

    # Rename the columns using the column remap above
    socet_df.rename(columns=column_remap, inplace=True)

    images = pd.unique(socet_df['ipf_file'])
    serial_dict = serial_numbers(image_dict, cub_path)

    # Create the control network
    cn.to_isis(os.path.join(outpath, cnet_out + '.net'), socet_df, serial_dict, targetname=args.target_name)
if __name__ == '__main__':
    main(parse_args())
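A hypothetical invocation of the script above, using the positional arguments defined in parse_args (the script name, paths, and target body are placeholders): python socet2isis.py CTX_Athabasca_Middle_step0.atf /path/to/cubes cub_map.csv Mars --outpath /path/to/output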
P01_001540_1889_XI_08N204W,P01_001540_1889_XI_08N204W.lev1.cub
P01_001606_1897_XI_09N203W,P01_001606_1897_XI_09N203W.lev1.cub
P02_001804_1889_XI_08N204W,P02_001804_1889_XI_08N204W.lev1.cub
P03_002226_1895_XI_09N203W,P03_002226_1895_XI_09N203W.lev1.cub
P03_002371_1888_XI_08N204W,P03_002371_1888_XI_08N204W.lev1.cub
P19_008344_1894_XN_09N203W,P19_008344_1894_XN_09N203W.lev1.cub
P20_008845_1894_XN_09N203W,P20_008845_1894_XN_09N203W.lev1.cub
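This two-column mapping (ipf image id, cube filename) is the file the cub_ipf_map argument above expects; csv.reader turns each row into a key/value pair of image_dict.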
from time import gmtime, strftime
import pandas as pd
import numpy as np
import pvl
from plio.io import ControlNetFileV0002_pb2 as cnf
@@ -186,22 +187,23 @@ class IsisStore(object):
header_bytes = find_in_dict(pvl_header, 'HeaderBytes')
point_start_byte = find_in_dict(pvl_header, 'PointsStartByte')
version = find_in_dict(pvl_header, 'Version')
if version == 2:
point_attrs = [i for i in cnf._CONTROLPOINTFILEENTRYV0002.fields_by_name if i != 'measures']
measure_attrs = [i for i in cnf._CONTROLPOINTFILEENTRYV0002_MEASURE.fields_by_name]
cols = point_attrs + measure_attrs
cp = cnf.ControlPointFileEntryV0002()
self._handle.seek(header_start_byte)
pbuf_header = cnf.ControlNetFileHeaderV0002()
pbuf_header.ParseFromString(self._handle.read(header_bytes))
self._handle.seek(point_start_byte)
cp = cnf.ControlPointFileEntryV0002()
pts = []
for s in pbuf_header.pointMessageSizes:
cp.ParseFromString(self._handle.read(s))
pt = [getattr(cp, i) for i in point_attrs if i != 'measures']
for measure in cp.measures:
@@ -267,24 +269,24 @@ class IsisStore(object):
# As per protobuf docs for assigning to a repeated field.
if attr == 'aprioriCovar':
arr = g.iloc[0]['aprioriCovar']
point_spec.aprioriCovar.extend(arr.ravel().tolist())
if isinstance(arr, np.ndarray):
arr = arr.ravel().tolist()
point_spec.aprioriCovar.extend(arr)
else:
setattr(point_spec, attr, attrtype(g.iloc[0][attr]))
point_spec.type = 2 # Hardcoded to free
point_spec.type = 2 # Hardcoded to free; this is bad
# The reference index should always be the image with the lowest index
point_spec.referenceIndex = 0
# A single extend call is cheaper than many add calls to pack points
measure_iterable = []
for node_id, m in g.iterrows():
measure_spec = point_spec.Measure()
# For all of the attributes, set them if they are a dict-accessible attr of the obj.
for attr, attrtype in self.measure_attrs:
if attr in g.columns:
setattr(measure_spec, attr, attrtype(m[attr]))
measure_spec.serialnumber = serials[m.image_index]
measure_spec.sample = m.x
measure_spec.line = m.y
@@ -298,7 +300,6 @@ class IsisStore(object):
point_message = point_spec.SerializeToString()
point_sizes.append(point_spec.ByteSize())
point_messages.append(point_message)
return point_messages, point_sizes
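The aprioriCovar handling above guards the protobuf repeated field's extend() against being handed a numpy array directly. A minimal standalone sketch of the same normalization (the covariance values are invented):

import numpy as np

cov = np.array([[1.0, 0.1], [0.1, 1.0]])
# Repeated scalar fields expect a flat iterable of scalars, so a 2-D
# covariance matrix is flattened to a plain list before extending.
if isinstance(cov, np.ndarray):
    cov = cov.ravel().tolist()
# point_spec.aprioriCovar.extend(cov)   # as in the change above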
def create_buffer_header(self, networkid, targetname,
@@ -45,7 +45,6 @@ def line_sample_size(record, path):
sample_size = int(sample_size)/2.0 + record['s.'] + 1
return sample_size, line_size, img_index
# converts known to ISIS keywords
def known(record):
"""
Converts the known field from a socet dataframe into the
@@ -67,7 +66,6 @@ def known(record):
elif record['known'] == 1 or record['known'] == 2 or record['known'] == 3:
return 'Constrained'
# converts +/- 180 system to 0 - 360 system
def to_360(num):
"""
Transforms a given number into 0 - 360 space
@@ -140,7 +138,6 @@ def og2oc(dlat, dMajorRadius, dMinorRadius):
print ("Error in og2oc conversion")
return dlat
# gets eRadius and pRadius from a .prj file
def get_axis(file):
"""
Gets eRadius and pRadius from a .prj file
@@ -291,7 +288,5 @@ def serial_numbers(image_dict, path):
serial_dict = dict()
for key in image_dict:
snum = sn.generate_serial_number(os.path.join(path, image_dict[key]))
snum = snum.replace('Mars_Reconnaissance_Orbiter', 'MRO')
serial_dict[key] = snum
serial_dict[key] = sn.generate_serial_number(os.path.join(path, image_dict[key]))
return serial_dict
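For context, a minimal usage sketch of serial_numbers as the conversion scripts above call it (the cube directory is hypothetical, and the cube files must exist on disk for ISIS serial number generation to succeed):

from plio.spatial.transformations import serial_numbers

# Map each ipf image id to an ISIS serial number generated from its cube file.
image_dict = {'P01_001540_1889_XI_08N204W': 'P01_001540_1889_XI_08N204W.lev1.cub'}
serial_dict = serial_numbers(image_dict, '/path/to/cubes')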