diff --git a/.travis.yml b/.travis.yml index 2d2e09d128b335233fa9baaf909e17292fe3f9d1..d05d2d04c1dfbb63fcf1d23242f483701dea28ba 100644 --- a/.travis.yml +++ b/.travis.yml @@ -37,7 +37,7 @@ install: - conda config --add channels conda-forge - conda install -q gdal h5py pandas sqlalchemy pyyaml networkx affine protobuf scipy pvl # Development installation - - conda install -q pytest pytest-cov sh + - conda install -q pytest pytest-cov sh coveralls nbsphinx script: - pytest --cov=plio @@ -51,7 +51,8 @@ after_success: - conda build --token $CONDA_UPLOAD_TOKEN --python $PYTHON_VERSION recipe -q # Docs to gh-pages - - source activate test_env # Reactivate the env to have all deps installed. + - source activate test # Reactivate the env to have all deps installed. + - pip install travis-sphinx - travis-sphinx build --source=docs --nowarn # The sphinx build script - travis-sphinx deploy --branches=dev diff --git a/bin/Socetnet2ISIS.py b/bin/Socetnet2ISIS.py deleted file mode 100644 index a410606abc65d9ec3039aae5d7091ba25a5af9de..0000000000000000000000000000000000000000 --- a/bin/Socetnet2ISIS.py +++ /dev/null @@ -1,73 +0,0 @@ -import os -import warnings -import numpy as np - -from plio.examples import get_path -from plio.io.io_bae import read_atf, read_gpf, read_ipf -from plio.spatial.transformations import * -import plio.io.io_controlnetwork as cn - -import pandas as pd - -# TODO: Change script to potentially handle configuration files - -# Setup the at_file and path to cubes -cub_path = '/Volumes/Blueman/' -at_file = get_path('CTX_Athabasca_Middle_step0.atf') - -# Define ipf mapping to cubs -image_dict = {'P01_001540_1889_XI_08N204W' : 'P01_001540_1889_XI_08N204W.lev1.cub', - 'P01_001606_1897_XI_09N203W' : 'P01_001606_1897_XI_09N203W.lev1.cub', - 'P02_001804_1889_XI_08N204W' : 'P02_001804_1889_XI_08N204W.lev1.cub', - 'P03_002226_1895_XI_09N203W' : 'P03_002226_1895_XI_09N203W.lev1.cub', - 'P03_002371_1888_XI_08N204W' : 'P03_002371_1888_XI_08N204W.lev1.cub', - 'P19_008344_1894_XN_09N203W' : 'P19_008344_1894_XN_09N203W.lev1.cub', - 'P20_008845_1894_XN_09N203W' : 'P20_008845_1894_XN_09N203W.lev1.cub'} - -## -# End Config -## - -# Read in and setup the atf dict of information -atf_dict = read_atf(at_file) - -# Get the gpf and ipf files using atf dict -gpf_file = os.path.join(atf_dict['PATH'], atf_dict['GP_FILE']); -ipf_list = [os.path.join(atf_dict['PATH'], i) for i in atf_dict['IMAGE_IPF']] - -# Read in the gpf file and ipf file(s) into seperate dataframes -gpf_df = read_gpf(gpf_file) -ipf_df = read_ipf(ipf_list) - -# Check for differences between point ids using each dataframes -# point ids as a reference -gpf_pt_idx = pd.Index(pd.unique(gpf_df['point_id'])) -ipf_pt_idx = pd.Index(pd.unique(ipf_df['pt_id'])) - -point_diff = ipf_pt_idx.difference(gpf_pt_idx) - -if len(point_diff) != 0: - warnings.warn("The following points found in ipf files missing from gpf file: \n\n{}.
\ - \n\nContinuing, but these points will be missing from the control network".format(list(point_diff))) - -# Merge the two dataframes on their point id columns -socet_df = ipf_df.merge(gpf_df, left_on='pt_id', right_on='point_id') - -# Apply the transformations -apply_transformations(atf_dict, socet_df) - -# Define column remap for socet dataframe -column_remap = {'l.': 'y', 's.': 'x', - 'res_l': 'LineResidual', 'res_s': 'SampleResidual', 'known': 'Type', - 'lat_Y_North': 'AprioriY', 'long_X_East': 'AprioriX', 'ht': 'AprioriZ', - 'sig0': 'AprioriLatitudeSigma', 'sig1': 'AprioriLongitudeSigma', 'sig2': 'AprioriRadiusSigma'} - -# Rename the columns using the column remap above -socet_df.rename(columns = column_remap, inplace=True) - -images = pd.unique(socet_df['ipf_file']) - -serial_dict = serial_numbers(image_dict, cub_path) - -# creates the control network -cn.to_isis('/Volumes/Blueman/test.net', socet_df, serial_dict) diff --git a/bin/isis2socet b/bin/isis2socet new file mode 100644 index 0000000000000000000000000000000000000000..29b75d891a6bad84c9dd2749e5659c0c94a64598 --- /dev/null +++ b/bin/isis2socet @@ -0,0 +1,17 @@ +#!/usr/bin/env python +import argparse +import os + +def parse_args(): + parser = argparse.ArgumentParser() + + # Add args here + + return parser.parse_args() + + +def main(args): + print('Do some stuff') + +if __name__ == '__main__': + main(parse_args()) diff --git a/bin/socet2isis b/bin/socet2isis index 44255aec300c8ad9f4aed798258a8a9cf8fb89c6..a32aa0fab4d7982fd28a3596f33a2dd6fe061279 100644 --- a/bin/socet2isis +++ b/bin/socet2isis @@ -1,15 +1,93 @@ #!/usr/bin/env python import argparse +import os +import sys +import warnings +import csv +import numpy as np + +from plio.examples import get_path +from plio.io.io_bae import read_atf, read_gpf, read_ipf +from plio.spatial.transformations import * +import plio.io.io_controlnetwork as cn + +import pandas as pd def parse_args(): parser = argparse.ArgumentParser() + # Add args here + parser.add_argument('at_file', help='Path to the .atf file for a project.') + parser.add_argument('cub_file_path', help='Path to cube files related to ipf files.') + parser.add_argument('cub_ipf_map', help='Path to map file for all ipfs and cubes.') + parser.add_argument('target_name', help='Name of the target body used in the control net.') + parser.add_argument('--outpath', help='Directory for the control network to be output to.', + required = False) return parser.parse_args() def main(args): - print('Getting some work done') + # Setup the at_file, path to cubes, and control network out path + at_file = args.at_file + cnet_out = os.path.split(os.path.splitext(at_file)[0])[1] + cub_path = args.cub_file_path + + if args.outpath: + outpath = args.outpath + else: + outpath = os.path.split(at_file)[0] + + with open(args.cub_ipf_map) as cub_ipf_map: + reader = csv.reader(cub_ipf_map, delimiter = ',') + image_dict = dict([(row[0], row[1]) for row in reader]) + + # Read in and setup the atf dict of information + atf_dict = read_atf(at_file) + + # Get the gpf and ipf files using atf dict + gpf_file = os.path.join(atf_dict['PATH'], atf_dict['GP_FILE']) + ipf_list = [os.path.join(atf_dict['PATH'], i) for i in atf_dict['IMAGE_IPF']] + + # Read in the gpf file and ipf file(s) into separate dataframes + gpf_df = read_gpf(gpf_file) + ipf_df = read_ipf(ipf_list) + + # Check for differences between point ids using each dataframe's + # point ids as a reference + gpf_pt_idx = pd.Index(pd.unique(gpf_df['point_id'])) + ipf_pt_idx =
pd.Index(pd.unique(ipf_df['pt_id'])) + + point_diff = ipf_pt_idx.difference(gpf_pt_idx) + + if len(point_diff) != 0: + warnings.warn("The following points found in ipf files missing from gpf file: " + + "\n\n{}\n\n".format("\n".join(point_diff)) + + "Continuing, but these points will be missing from the control " + + "network.", stacklevel=3) + + # Merge the two dataframes on their point id columns + socet_df = ipf_df.merge(gpf_df, left_on='pt_id', right_on='point_id') + + # Apply the transformations + apply_transformations(atf_dict, socet_df) + + # Define column remap for socet dataframe + column_remap = {'l.': 'y', 's.': 'x', + 'res_l': 'LineResidual', 'res_s': 'SampleResidual', 'known': 'Type', + 'lat_Y_North': 'AprioriY', 'long_X_East': 'AprioriX', 'ht': 'AprioriZ', + 'sig0': 'AprioriLatitudeSigma', 'sig1': 'AprioriLongitudeSigma', + 'sig2': 'AprioriRadiusSigma'} + + # Rename the columns using the column remap above + socet_df.rename(columns = column_remap, inplace=True) + + images = pd.unique(socet_df['ipf_file']) + + serial_dict = serial_numbers(image_dict, cub_path) + + # creates the control network + cn.to_isis(os.path.join(outpath, cnet_out + '.net'), socet_df, serial_dict, targetname = args.target_name) if __name__ == '__main__': - main(parse_args()) \ No newline at end of file + main(parse_args()) diff --git a/notebooks/Socet2ISIS.ipynb b/notebooks/Socet2ISIS.ipynb index f9ff8e2dd784c0c36eca6a96d8133d847e531657..a75564baa800ed5f4ac3e93f8935b1fb843635fe 100644 --- a/notebooks/Socet2ISIS.ipynb +++ b/notebooks/Socet2ISIS.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": null, + "execution_count": 56, "metadata": {}, "outputs": [], "source": [ @@ -16,17 +16,16 @@ "import math\n", "import pyproj\n", "\n", - "# sys.path.insert(0, \"/home/tthatcher/Desktop/Projects/Plio/plio\")\n", - "\n", "from plio.examples import get_path\n", "from plio.io.io_bae import read_gpf, read_ipf\n", + "from collections import defaultdict\n", "import plio.io.io_controlnetwork as cn\n", "import plio.io.isis_serial_number as sn" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 85, "metadata": {}, "outputs": [], "source": [ "# Reads a .atf file and outputs all of the \n", "# .apf file (should be the same as all others) \n", "def read_atf(atf_file):\n", " with open(atf_file) as f:\n", - "\n", - " files = []\n", - " ipf = []\n", - " sup = []\n", - " files_dict = []\n", " \n", - " # Grabs every PRJ, GPF, SUP, and IPF image from the ATF file\n", + " # Extensions of files we want\n", + " files_ext = ['.prj', '.sup', '.ipf', '.gpf']\n", + " files_dict = []\n", + " files = defaultdict(list)\n", "\n", " for line in f:\n", - " if line[-4:-1] == 'prj' or line[-4:-1] == 'gpf' or line[-4:-1] == 'sup' or line[-4:-1] == 'ipf' or line[-4:-1] == 'atf':\n", - " files.append(line)\n", + " ext = os.path.splitext(line)[-1].strip()\n", + " \n", + " # A check is needed for the split as not all lines have a space\n", + " if ext in files_ext:\n", + " \n", + " # If it is the .prj file, it strips the directory away and grabs the file name\n", + " if ext == '.prj':\n", + " files[ext].append(line.strip().split(' ')[1].split('\\\\')[-1])\n", + " \n", + " # If the ext is in the list of files we care about, it adds to the dict\n", + " files[ext].append(line.strip().split(' ')[-1])\n", + " \n", + " else:\n", + " \n", + " # Adds to the dict even if it is not a file we care about\n", + " files[ext.strip()].append(line)\n", " \n", - " files = np.array(files)\n", + " # Gets the base filepath\n", + " files['basepath'] =
os.path.dirname(os.path.abspath(atf_file))\n", " \n", - " # Creates appropriate arrays for certain files in the right format\n", - " for file in files:\n", - " file = file.strip()\n", - " file = file.split(' ')\n", - "\n", - " # Grabs all the IPF files\n", - " if file[1].endswith('.ipf'):\n", - " ipf.append(file[1])\n", - "\n", - " # Grabs all the SUP files\n", - " if file[1].endswith('.sup'):\n", - " sup.append(file[1])\n", - "\n", - " files_dict.append(file)\n", - "\n", " # Creates a dict out of file lists for GPF, PRJ, IPF, and ATF\n", " files_dict = (dict(files_dict))\n", " \n", " # Sets the value of IMAGE_IPF to all IPF images\n", - " files_dict['IMAGE_IPF'] = ipf\n", + " files_dict['IMAGE_IPF'] = files['.ipf']\n", " \n", " # Sets the value of IMAGE_SUP to all SUP images\n", - " files_dict['IMAGE_SUP'] = sup\n", + " files_dict['IMAGE_SUP'] = files['.sup']\n", + " \n", + " # Sets value for GPF file\n", + " files_dict['GP_FILE'] = files['.gpf'][0]\n", + " \n", + " # Sets value for PRJ file\n", + " files_dict['PROJECT'] = files['.prj'][0]\n", " \n", " # Sets the value of PATH to the path of the ATF file\n", - " files_dict['PATH'] = os.path.dirname(os.path.abspath(atf_file))\n", + " files_dict['PATH'] = files['basepath']\n", " \n", " return files_dict\n", "\n", "# converts columns l. and s. to isis\n", + "# no transform applied\n", "def line_sample_size(record, path):\n", " with open(os.path.join(path, record['ipf_file'] + '.sup')) as f:\n", " for i, line in enumerate(f):\n", @@ -103,6 +108,7 @@ " return sample_size, line_size, img_index\n", " \n", "# converts known to ISIS keywords\n", + "# transform\n", "def known(record):\n", " if record['known'] == 0:\n", " return 'Free'\n", @@ -115,6 +121,7 @@ " return num % 360\n", "\n", "# ocentric to ographic latitudes\n", + "# transform but unsure how to handle\n", "def oc2og(dlat, dMajorRadius, dMinorRadius):\n", " try: \n", " dlat = math.radians(dlat)\n", @@ -125,6 +132,7 @@ " return dlat\n", "\n", "# ographic to ocentric latitudes\n", + "# transform but unsure how to handle\n", "def og2oc(dlat, dMajorRadius, dMinorRadius):\n", " try:\n", " dlat = math.radians(dlat)\n", @@ -163,25 +171,134 @@ " coord_360 = to_360(ocentric_coord)\n", " return coord_360\n", "\n", - "def body_fix(record, semi_major, semi_minor):\n", + "def body_fix(record, semi_major, semi_minor, inverse=False):\n", + " \"\"\"\n", + " Parameters\n", + " ----------\n", + " record : ndarray\n", + " (n,3) where columns are x, y, height or lon, lat, alt\n", + " \"\"\"\n", + " \n", " ecef = pyproj.Proj(proj='geocent', a=semi_major, b=semi_minor)\n", " lla = pyproj.Proj(proj='latlon', a=semi_major, b=semi_minor)\n", - " lon, lat, height = pyproj.transform(lla, ecef, record['long_X_East'], record['lat_Y_North'], record['ht'])\n", - " return lon, lat, height\n", " \n", + " if inverse:\n", + " lon, lat, height = pyproj.transform(ecef, lla, record[0], record[1], record[2])\n", + " return lon, lat, height\n", + " else:\n", + " y, x, z = pyproj.transform(lla, ecef, record[0], record[1], record[2])\n", + " return y, x, z\n", + "\n", + "def ignore_toggle(record):\n", + " if record['stat'] == 0:\n", + " return True\n", + " else:\n", + " return False\n", + "\n", + "# TODO: Does isis cnet need a covariance matrix for sigmas?
Even with a static matrix of 1,1,1,1 \n", + "def compute_sigma_covariance_matrix(lat, lon, rad, latsigma, lonsigma, radsigma, semimajor_axis):\n", + " \n", + " \"\"\"\n", + " Given geospatial coordinates, desired accuracy sigmas, and an equatorial radius, compute a 2x3\n", + " sigma covariance matrix.\n", + " \n", + " Parameters\n", + " ----------\n", + " lat : float\n", + " A point's latitude in degrees\n", + " \n", + " lon : float\n", + " A point's longitude in degrees\n", + " \n", + " rad : float\n", + " The radius (z-value) of the point in meters\n", + " \n", + " latsigma : float\n", + " The desired latitude accuracy in meters (Default: 10.0)\n", + " \n", + " lonsigma : float\n", + " The desired longitude accuracy in meters (Default: 10.0)\n", + " \n", + " radsigma : float\n", + " The desired radius accuracy in meters (Default: 15.0)\n", + " \n", + " semimajor_axis : float\n", + " The semi-major or equatorial radius in meters (Default: 1737400.0 - Moon)\n", + " \n", + " Returns\n", + " -------\n", + " rectcov : ndarray\n", + " (2,3) covariance matrix\n", + " \"\"\"\n", + " \n", + " lat = math.radians(lat)\n", + " lon = math.radians(lon)\n", + " \n", + " # SetSphericalSigmasDistance\n", + " scaled_lat_sigma = latsigma / semimajor_axis\n", + "\n", + " # This is specific to each lon.\n", + " scaled_lon_sigma = lonsigma * math.cos(lat) / semimajor_axis\n", + " \n", + " # SetSphericalSigmas\n", + " cov = np.eye(3,3)\n", + " cov[0,0] = scaled_lat_sigma ** 2\n", + " cov[1,1] = scaled_lon_sigma ** 2\n", + " cov[2,2] = radsigma ** 2\n", + " \n", + " # Approximate the Jacobian\n", + " j = np.zeros((3,3))\n", + " cosphi = math.cos(lat)\n", + " sinphi = math.sin(lat)\n", + " coslambda = math.cos(lon)\n", + " sinlambda = math.sin(lon)\n", + " rcosphi = rad * cosphi\n", + " rsinphi = rad * sinphi\n", + " j[0,0] = -rsinphi * coslambda\n", + " j[0,1] = -rcosphi * sinlambda\n", + " j[0,2] = cosphi * coslambda\n", + " j[1,0] = -rsinphi * sinlambda\n", + " j[1,1] = rcosphi * coslambda\n", + " j[1,2] = cosphi * sinlambda\n", + " j[2,0] = rcosphi\n", + " j[2,1] = 0.\n", + " j[2,2] = sinphi\n", + " mat = j.dot(cov)\n", + " mat = mat.dot(j.T)\n", + " rectcov = np.zeros((2,3))\n", + " rectcov[0,0] = mat[0,0]\n", + " rectcov[0,1] = mat[0,1]\n", + " rectcov[0,2] = mat[0,2]\n", + " rectcov[1,0] = mat[1,1]\n", + " rectcov[1,1] = mat[1,2]\n", + " rectcov[1,2] = mat[2,2]\n", + " \n", + " return np.array(rectcov)\n", "# return np.array([[1.0, 1.0, 1.0], [1.0, 1.0, 1.0]])\n", + "\n", + "\n", + "def compute_cov_matrix(record, semimajor_axis):\n", + " cov_matrix = compute_sigma_covariance_matrix(record['lat_Y_North'], record['long_X_East'], record['ht'], record['sig0'], record['sig1'], record['sig2'], semimajor_axis)\n", + " return cov_matrix.ravel().tolist()\n", "\n", "# applys transformations to columns\n", - "def apply_transformations(atf_dict, df):\n", + "def apply_two_isis_transformations(atf_dict, df):\n", - " prj_file = os.path.join(atf_dict['PATH'], atf_dict['PROJECT'].split('\\\\')[-1])\n", + " prj_file = os.path.join(atf_dict['PATH'], atf_dict['PROJECT'])\n", " \n", " eRadius, pRadius = get_axis(prj_file)\n", " \n", + " lla = np.array([[df['long_X_East']], [df['lat_Y_North']], [df['ht']]])\n", + " \n", + " ecef = body_fix(lla, semi_major = eRadius, semi_minor = pRadius, inverse=False)\n", + " \n", " df['s.'], df['l.'], df['image_index'] = (zip(*df.apply(line_sample_size, path = atf_dict['PATH'], axis=1)))\n", " df['known'] = df.apply(known, axis=1)\n", - " df['lat_Y_North'] = df.apply(lat_ISIS_coord,
semi_major = eRadius, semi_minor = pRadius, axis=1)\n", - " df['long_X_East'] = df.apply(lon_ISIS_coord, semi_major = eRadius, semi_minor = pRadius, axis=1)\n", - " df['long_X_East'], df['lat_Y_North'], df['ht'] = zip(*df.apply(body_fix, semi_major = eRadius, semi_minor = pRadius, axis = 1))\n", - " \n", + " df['long_X_East'] = ecef[0][0]\n", + " df['lat_Y_North'] = ecef[1][0]\n", + " df['ht'] = ecef[2][0] \n", + " df['aprioriCovar'] = df.apply(compute_cov_matrix, semimajor_axis = eRadius, axis=1)\n", + "# df['ignore'] = df.apply(ignore_toggle, axis=1)\n", + " \n", "def socet2isis(prj_file):\n", " # Read in and setup the atf dict of information\n", " atf_dict = read_atf(prj_file)\n", @@ -209,59 +326,639 @@ " socet_df = ipf_df.merge(gpf_df, left_on='pt_id', right_on='point_id')\n", " \n", " # Apply the transformations\n", - " apply_transformations(atf_dict, socet_df)\n", + "# apply_two_isis_transformations(atf_dict, socet_df)\n", " \n", " # Define column remap for socet dataframe\n", - " column_remap = {'l.': 'y', 's.': 'x',\n", - " 'res_l': 'LineResidual', 'res_s': 'SampleResidual', 'known': 'Type',\n", - " 'lat_Y_North': 'AprioriY', 'long_X_East': 'AprioriX', 'ht': 'AprioriZ',\n", - " 'sig0': 'AprioriLatitudeSigma', 'sig1': 'AprioriLongitudeSigma', 'sig2': 'AprioriRadiusSigma'}\n", + "# column_map = {'pt_id': 'id', 'l.': 'y', 's.': 'x',\n", + "# 'res_l': 'lineResidual', 'res_s': 'sampleResidual', 'known': 'Type',\n", + "# 'lat_Y_North': 'aprioriY', 'long_X_East': 'aprioriX', 'ht': 'aprioriZ',\n", + "# 'sig0': 'aprioriLatitudeSigma', 'sig1': 'aprioriLongitudeSigma', 'sig2': 'aprioriRadiusSigma',\n", + "# 'sig_l': 'linesigma', 'sig_s': 'samplesigma'}\n", " \n", " # Rename the columns using the column remap above\n", - " socet_df.rename(columns = column_remap, inplace=True)\n", + "# socet_df.rename(columns = column_map, inplace=True)\n", " \n", " # Return the socet dataframe to be converted to a control net\n", " return socet_df\n", "\n", "# creates a dict of serial numbers with the cub being the key\n", - "def serial_numbers(image_dict, path):\n", + "def serial_numbers(images, path, extension):\n", " serial_dict = dict()\n", - "\n", - " for key in image_dict:\n", - " snum = sn.generate_serial_number(os.path.join(path, image_dict[key]))\n", + " \n", + " for image in images:\n", + " snum = sn.generate_serial_number(os.path.join(path, image + extension))\n", " snum = snum.replace('Mars_Reconnaissance_Orbiter', 'MRO')\n", - " serial_dict[key] = snum\n", + " serial_dict[image] = snum\n", " return serial_dict" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 86, "metadata": { - "scrolled": false + "scrolled": true }, "outputs": [], "source": [ "# Setup stuffs for the cub information namely the path and extension\n", - "path = '/Volumes/Blueman/'\n", - "atf_file = get_path('CTX_Athabasca_Middle_step0.atf')\n", + "path = '/home/acpaquette/repos/plio/test_cubes'\n", + "targetname = 'Mars'\n", + "# Extension of your cub files\n", + "extension = '.8bit.cub'\n", "\n", - "image_dict = {'P01_001540_1889_XI_08N204W' : 'P01_001540_1889_XI_08N204W.lev1.cub',\n", - " 'P01_001606_1897_XI_09N203W' : 'P01_001606_1897_XI_09N203W.lev1.cub',\n", - " 'P02_001804_1889_XI_08N204W' : 'P02_001804_1889_XI_08N204W.lev1.cub',\n", - " 'P03_002226_1895_XI_09N203W' : 'P03_002226_1895_XI_09N203W.lev1.cub',\n", - " 'P03_002371_1888_XI_08N204W' : 'P03_002371_1888_XI_08N204W.lev1.cub',\n", - " 'P19_008344_1894_XN_09N203W' : 'P19_008344_1894_XN_09N203W.lev1.cub',\n", - " 'P20_008845_1894_XN_09N203W' :
'P20_008845_1894_XN_09N203W.lev1.cub'}\n", + "# Path to atf file\n", + "atf_file = ('/home/acpaquette/repos/plio/plio/examples/SocetSet/Relative.atf')\n", "\n", "socet_df = socet2isis(atf_file)\n", "\n", - "images = pd.unique(socet_df['ipf_file'])\n", + "# images = pd.unique(socet_df['ipf_file'])\n", "\n", - "serial_dict = serial_numbers(image_dict, path)\n", + "# serial_dict = serial_numbers(images, path, extension)\n", "\n", "# creates the control network\n", - "cn.to_isis('/Volumes/Blueman/banana.net', socet_df, serial_dict)" + "# cn.to_isis('/home/acpaquette/repos/plio/plio/examples/SocetSet/cn.net', socet_df, serial_dict, targetname = targetname)" + ] + }, + { + "cell_type": "code", + "execution_count": 116, + "metadata": {}, + "outputs": [], + "source": [ + "return_df = cn.from_isis(\"/home/acpaquette/repos/plio/plio/examples/SocetSet/cn.net\")\n", + "\n", + "columns = []\n", + "column_index = []\n", + "\n", + "for i, column in enumerate(list(return_df.columns)):\n", + " if column not in columns:\n", + " column_index.append(i)\n", + " columns.append(column)\n", + "\n", + "return_df = return_df.iloc[:, column_index]" + ] + }, + { + "cell_type": "code", + "execution_count": 117, + "metadata": {}, + "outputs": [], + "source": [ + "column_map = {'pt_id': 'id', 'l.': 'y', 's.': 'x',\n", + " 'res_l': 'lineResidual', 'res_s': 'sampleResidual', 'known': 'Type',\n", + " 'lat_Y_North': 'aprioriY', 'long_X_East': 'aprioriX', 'ht': 'aprioriZ',\n", + " 'sig0': 'aprioriLatitudeSigma', 'sig1': 'aprioriLongitudeSigma', 'sig2': 'aprioriRadiusSigma',\n", + " 'sig_l': 'linesigma', 'sig_s': 'samplesigma'}\n", + "\n", + "column_map = {k: v for v, k in column_map.items()}\n", + "return_df.rename(columns = column_map, inplace=True)\n", + "return_df.drop(['chooserName', 'datetime', 'referenceIndex', 'jigsawRejected', 'editLock', 'aprioriSurfPointSource', 'aprioriSurfPointSourceFile','aprioriRadiusSource', 'aprioriRadiusSourceFile'] , axis = 1, inplace=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 129, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "<div>\n", + "<style scoped>\n", + " .dataframe tbody tr th:only-of-type {\n", + " vertical-align: middle;\n", + " }\n", + "\n", + " .dataframe tbody tr th {\n", + " vertical-align: top;\n", + " }\n", + "\n", + " .dataframe thead th {\n", + " text-align: right;\n", + " }\n", + "</style>\n", + "<table border=\"1\" class=\"dataframe\">\n", + " <thead>\n", + " <tr style=\"text-align: right;\">\n", + " <th></th>\n", + " <th>lat_Y_North</th>\n", + " <th>long_X_East</th>\n", + " <th>ht</th>\n", + " </tr>\n", + " </thead>\n", + " <tbody>\n", + " <tr>\n", + " <th>0</th>\n", + " <td>139525.230749</td>\n", + " <td>3.390974e+06</td>\n", + " <td>4506.496945</td>\n", + " </tr>\n", + " <tr>\n", + " <th>1</th>\n", + " <td>139525.230749</td>\n", + " <td>3.390974e+06</td>\n", + " <td>4506.496945</td>\n", + " </tr>\n", + " <tr>\n", + " <th>2</th>\n", + " <td>139489.278045</td>\n", + " <td>3.390969e+06</td>\n", + " <td>4516.454802</td>\n", + " </tr>\n", + " <tr>\n", + " <th>3</th>\n", + " <td>139489.278045</td>\n", + " <td>3.390969e+06</td>\n", + " <td>4516.454802</td>\n", + " </tr>\n", + " <tr>\n", + " <th>4</th>\n", + " <td>139823.489797</td>\n", + " <td>3.390990e+06</td>\n", + " <td>4536.274914</td>\n", + " </tr>\n", + " <tr>\n", + " <th>5</th>\n", + " <td>139823.489797</td>\n", + " <td>3.390990e+06</td>\n", + " <td>4536.274914</td>\n", + " </tr>\n", + " <tr>\n", + " <th>6</th>\n", + " <td>139772.738004</td>\n", + " <td>3.390936e+06</td>\n", 
+ " <td>4518.050219</td>\n", + " </tr>\n", + " <tr>\n", + " <th>7</th>\n", + " <td>139772.738004</td>\n", + " <td>3.390936e+06</td>\n", + " <td>4518.050219</td>\n", + " </tr>\n", + " <tr>\n", + " <th>8</th>\n", + " <td>139575.914815</td>\n", + " <td>3.390952e+06</td>\n", + " <td>3816.666542</td>\n", + " </tr>\n", + " <tr>\n", + " <th>9</th>\n", + " <td>139575.914815</td>\n", + " <td>3.390952e+06</td>\n", + " <td>3816.666542</td>\n", + " </tr>\n", + " <tr>\n", + " <th>10</th>\n", + " <td>139614.756296</td>\n", + " <td>3.390953e+06</td>\n", + " <td>3791.232717</td>\n", + " </tr>\n", + " <tr>\n", + " <th>11</th>\n", + " <td>139614.756296</td>\n", + " <td>3.390953e+06</td>\n", + " <td>3791.232717</td>\n", + " </tr>\n", + " <tr>\n", + " <th>12</th>\n", + " <td>139912.041374</td>\n", + " <td>3.390914e+06</td>\n", + " <td>3875.608660</td>\n", + " </tr>\n", + " <tr>\n", + " <th>13</th>\n", + " <td>139912.041374</td>\n", + " <td>3.390914e+06</td>\n", + " <td>3875.608660</td>\n", + " </tr>\n", + " <tr>\n", + " <th>14</th>\n", + " <td>139909.452033</td>\n", + " <td>3.390930e+06</td>\n", + " <td>3845.361327</td>\n", + " </tr>\n", + " <tr>\n", + " <th>15</th>\n", + " <td>139909.452033</td>\n", + " <td>3.390930e+06</td>\n", + " <td>3845.361327</td>\n", + " </tr>\n", + " <tr>\n", + " <th>16</th>\n", + " <td>139669.826849</td>\n", + " <td>3.391120e+06</td>\n", + " <td>3270.672620</td>\n", + " </tr>\n", + " <tr>\n", + " <th>17</th>\n", + " <td>139669.826849</td>\n", + " <td>3.391120e+06</td>\n", + " <td>3270.672620</td>\n", + " </tr>\n", + " <tr>\n", + " <th>18</th>\n", + " <td>139694.517017</td>\n", + " <td>3.391205e+06</td>\n", + " <td>3289.744506</td>\n", + " </tr>\n", + " <tr>\n", + " <th>19</th>\n", + " <td>139694.517017</td>\n", + " <td>3.391205e+06</td>\n", + " <td>3289.744506</td>\n", + " </tr>\n", + " <tr>\n", + " <th>20</th>\n", + " <td>139968.793338</td>\n", + " <td>3.391126e+06</td>\n", + " <td>3274.711397</td>\n", + " </tr>\n", + " <tr>\n", + " <th>21</th>\n", + " <td>139968.793338</td>\n", + " <td>3.391126e+06</td>\n", + " <td>3274.711397</td>\n", + " </tr>\n", + " <tr>\n", + " <th>22</th>\n", + " <td>139979.200780</td>\n", + " <td>3.391138e+06</td>\n", + " <td>3298.297228</td>\n", + " </tr>\n", + " <tr>\n", + " <th>23</th>\n", + " <td>139979.200780</td>\n", + " <td>3.391138e+06</td>\n", + " <td>3298.297228</td>\n", + " </tr>\n", + " <tr>\n", + " <th>24</th>\n", + " <td>139688.031217</td>\n", + " <td>3.391041e+06</td>\n", + " <td>4253.956077</td>\n", + " </tr>\n", + " <tr>\n", + " <th>25</th>\n", + " <td>139688.031217</td>\n", + " <td>3.391041e+06</td>\n", + " <td>4253.956077</td>\n", + " </tr>\n", + " <tr>\n", + " <th>26</th>\n", + " <td>139686.910823</td>\n", + " <td>3.391089e+06</td>\n", + " <td>4216.743792</td>\n", + " </tr>\n", + " <tr>\n", + " <th>27</th>\n", + " <td>139686.910823</td>\n", + " <td>3.391089e+06</td>\n", + " <td>4216.743792</td>\n", + " </tr>\n", + " <tr>\n", + " <th>28</th>\n", + " <td>139786.205284</td>\n", + " <td>3.390979e+06</td>\n", + " <td>3579.127600</td>\n", + " </tr>\n", + " <tr>\n", + " <th>29</th>\n", + " <td>139786.205284</td>\n", + " <td>3.390979e+06</td>\n", + " <td>3579.127600</td>\n", + " </tr>\n", + " <tr>\n", + " <th>30</th>\n", + " <td>139785.010997</td>\n", + " <td>3.391002e+06</td>\n", + " <td>3546.549796</td>\n", + " </tr>\n", + " <tr>\n", + " <th>31</th>\n", + " <td>139785.010997</td>\n", + " <td>3.391002e+06</td>\n", + " <td>3546.549796</td>\n", + " </tr>\n", + " </tbody>\n", + "</table>\n", + "</div>" + ], + "text/plain": [ + " lat_Y_North 
long_X_East ht\n", + "0 139525.230749 3.390974e+06 4506.496945\n", + "1 139525.230749 3.390974e+06 4506.496945\n", + "2 139489.278045 3.390969e+06 4516.454802\n", + "3 139489.278045 3.390969e+06 4516.454802\n", + "4 139823.489797 3.390990e+06 4536.274914\n", + "5 139823.489797 3.390990e+06 4536.274914\n", + "6 139772.738004 3.390936e+06 4518.050219\n", + "7 139772.738004 3.390936e+06 4518.050219\n", + "8 139575.914815 3.390952e+06 3816.666542\n", + "9 139575.914815 3.390952e+06 3816.666542\n", + "10 139614.756296 3.390953e+06 3791.232717\n", + "11 139614.756296 3.390953e+06 3791.232717\n", + "12 139912.041374 3.390914e+06 3875.608660\n", + "13 139912.041374 3.390914e+06 3875.608660\n", + "14 139909.452033 3.390930e+06 3845.361327\n", + "15 139909.452033 3.390930e+06 3845.361327\n", + "16 139669.826849 3.391120e+06 3270.672620\n", + "17 139669.826849 3.391120e+06 3270.672620\n", + "18 139694.517017 3.391205e+06 3289.744506\n", + "19 139694.517017 3.391205e+06 3289.744506\n", + "20 139968.793338 3.391126e+06 3274.711397\n", + "21 139968.793338 3.391126e+06 3274.711397\n", + "22 139979.200780 3.391138e+06 3298.297228\n", + "23 139979.200780 3.391138e+06 3298.297228\n", + "24 139688.031217 3.391041e+06 4253.956077\n", + "25 139688.031217 3.391041e+06 4253.956077\n", + "26 139686.910823 3.391089e+06 4216.743792\n", + "27 139686.910823 3.391089e+06 4216.743792\n", + "28 139786.205284 3.390979e+06 3579.127600\n", + "29 139786.205284 3.390979e+06 3579.127600\n", + "30 139785.010997 3.391002e+06 3546.549796\n", + "31 139785.010997 3.391002e+06 3546.549796" + ] + }, + "execution_count": 129, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "return_df[['lat_Y_North', 'long_X_East', 'ht']]" + ] + }, + { + "cell_type": "code", + "execution_count": 128, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "<div>\n", + "<style scoped>\n", + " .dataframe tbody tr th:only-of-type {\n", + " vertical-align: middle;\n", + " }\n", + "\n", + " .dataframe tbody tr th {\n", + " vertical-align: top;\n", + " }\n", + "\n", + " .dataframe thead th {\n", + " text-align: right;\n", + " }\n", + "</style>\n", + "<table border=\"1\" class=\"dataframe\">\n", + " <thead>\n", + " <tr style=\"text-align: right;\">\n", + " <th></th>\n", + " <th>lat_Y_North</th>\n", + " <th>long_X_East</th>\n", + " <th>ht</th>\n", + " </tr>\n", + " </thead>\n", + " <tbody>\n", + " <tr>\n", + " <th>0</th>\n", + " <td>0.095708</td>\n", + " <td>2.356167</td>\n", + " <td>-2342.889214</td>\n", + " </tr>\n", + " <tr>\n", + " <th>1</th>\n", + " <td>0.095708</td>\n", + " <td>2.356167</td>\n", + " <td>-2342.889214</td>\n", + " </tr>\n", + " <tr>\n", + " <th>2</th>\n", + " <td>0.095920</td>\n", + " <td>2.355564</td>\n", + " <td>-2349.638414</td>\n", + " </tr>\n", + " <tr>\n", + " <th>3</th>\n", + " <td>0.095920</td>\n", + " <td>2.355564</td>\n", + " <td>-2349.638414</td>\n", + " </tr>\n", + " <tr>\n", + " <th>4</th>\n", + " <td>0.096339</td>\n", + " <td>2.361186</td>\n", + " <td>-2314.316425</td>\n", + " </tr>\n", + " <tr>\n", + " <th>5</th>\n", + " <td>0.096339</td>\n", + " <td>2.361186</td>\n", + " <td>-2314.316425</td>\n", + " </tr>\n", + " <tr>\n", + " <th>6</th>\n", + " <td>0.095954</td>\n", + " <td>2.360368</td>\n", + " <td>-2370.502882</td>\n", + " </tr>\n", + " <tr>\n", + " <th>7</th>\n", + " <td>0.095954</td>\n", + " <td>2.360368</td>\n", + " <td>-2370.502882</td>\n", + " </tr>\n", + " <tr>\n", + " <th>8</th>\n", + " <td>0.081058</td>\n", + " <td>2.357037</td>\n", + " <td>-2363.989968</td>\n", + " 
</tr>\n", + " <tr>\n", + " <th>9</th>\n", + " <td>0.081058</td>\n", + " <td>2.357037</td>\n", + " <td>-2363.989968</td>\n", + " </tr>\n", + " <tr>\n", + " <th>10</th>\n", + " <td>0.080518</td>\n", + " <td>2.357691</td>\n", + " <td>-2360.922571</td>\n", + " </tr>\n", + " <tr>\n", + " <th>11</th>\n", + " <td>0.080518</td>\n", + " <td>2.357691</td>\n", + " <td>-2360.922571</td>\n", + " </tr>\n", + " <tr>\n", + " <th>12</th>\n", + " <td>0.082311</td>\n", + " <td>2.362733</td>\n", + " <td>-2388.123298</td>\n", + " </tr>\n", + " <tr>\n", + " <th>13</th>\n", + " <td>0.082311</td>\n", + " <td>2.362733</td>\n", + " <td>-2388.123298</td>\n", + " </tr>\n", + " <tr>\n", + " <th>14</th>\n", + " <td>0.081668</td>\n", + " <td>2.362678</td>\n", + " <td>-2371.973499</td>\n", + " </tr>\n", + " <tr>\n", + " <th>15</th>\n", + " <td>0.081668</td>\n", + " <td>2.362678</td>\n", + " <td>-2371.973499</td>\n", + " </tr>\n", + " <tr>\n", + " <th>16</th>\n", + " <td>0.069458</td>\n", + " <td>2.358505</td>\n", + " <td>-2193.309629</td>\n", + " </tr>\n", + " <tr>\n", + " <th>17</th>\n", + " <td>0.069458</td>\n", + " <td>2.358505</td>\n", + " <td>-2193.309629</td>\n", + " </tr>\n", + " <tr>\n", + " <th>18</th>\n", + " <td>0.069861</td>\n", + " <td>2.358862</td>\n", + " <td>-2106.769773</td>\n", + " </tr>\n", + " <tr>\n", + " <th>19</th>\n", + " <td>0.069861</td>\n", + " <td>2.358862</td>\n", + " <td>-2106.769773</td>\n", + " </tr>\n", + " <tr>\n", + " <th>20</th>\n", + " <td>0.069543</td>\n", + " <td>2.363543</td>\n", + " <td>-2174.971745</td>\n", + " </tr>\n", + " <tr>\n", + " <th>21</th>\n", + " <td>0.069543</td>\n", + " <td>2.363543</td>\n", + " <td>-2174.971745</td>\n", + " </tr>\n", + " <tr>\n", + " <th>22</th>\n", + " <td>0.070044</td>\n", + " <td>2.363710</td>\n", + " <td>-2162.103231</td>\n", + " </tr>\n", + " <tr>\n", + " <th>23</th>\n", + " <td>0.070044</td>\n", + " <td>2.363710</td>\n", + " <td>-2162.103231</td>\n", + " </tr>\n", + " <tr>\n", + " <th>24</th>\n", + " <td>0.090342</td>\n", + " <td>2.358866</td>\n", + " <td>-2269.610862</td>\n", + " </tr>\n", + " <tr>\n", + " <th>25</th>\n", + " <td>0.090342</td>\n", + " <td>2.358866</td>\n", + " <td>-2269.610862</td>\n", + " </tr>\n", + " <tr>\n", + " <th>26</th>\n", + " <td>0.089550</td>\n", + " <td>2.358814</td>\n", + " <td>-2222.328983</td>\n", + " </tr>\n", + " <tr>\n", + " <th>27</th>\n", + " <td>0.089550</td>\n", + " <td>2.358814</td>\n", + " <td>-2222.328983</td>\n", + " </tr>\n", + " <tr>\n", + " <th>28</th>\n", + " <td>0.076012</td>\n", + " <td>2.360565</td>\n", + " <td>-2328.281125</td>\n", + " </tr>\n", + " <tr>\n", + " <th>29</th>\n", + " <td>0.076012</td>\n", + " <td>2.360565</td>\n", + " <td>-2328.281125</td>\n", + " </tr>\n", + " <tr>\n", + " <th>30</th>\n", + " <td>0.075320</td>\n", + " <td>2.360529</td>\n", + " <td>-2305.362047</td>\n", + " </tr>\n", + " <tr>\n", + " <th>31</th>\n", + " <td>0.075320</td>\n", + " <td>2.360529</td>\n", + " <td>-2305.362047</td>\n", + " </tr>\n", + " </tbody>\n", + "</table>\n", + "</div>" + ], + "text/plain": [ + " lat_Y_North long_X_East ht\n", + "0 0.095708 2.356167 -2342.889214\n", + "1 0.095708 2.356167 -2342.889214\n", + "2 0.095920 2.355564 -2349.638414\n", + "3 0.095920 2.355564 -2349.638414\n", + "4 0.096339 2.361186 -2314.316425\n", + "5 0.096339 2.361186 -2314.316425\n", + "6 0.095954 2.360368 -2370.502882\n", + "7 0.095954 2.360368 -2370.502882\n", + "8 0.081058 2.357037 -2363.989968\n", + "9 0.081058 2.357037 -2363.989968\n", + "10 0.080518 2.357691 -2360.922571\n", + "11 0.080518 2.357691 
-2360.922571\n", + "12 0.082311 2.362733 -2388.123298\n", + "13 0.082311 2.362733 -2388.123298\n", + "14 0.081668 2.362678 -2371.973499\n", + "15 0.081668 2.362678 -2371.973499\n", + "16 0.069458 2.358505 -2193.309629\n", + "17 0.069458 2.358505 -2193.309629\n", + "18 0.069861 2.358862 -2106.769773\n", + "19 0.069861 2.358862 -2106.769773\n", + "20 0.069543 2.363543 -2174.971745\n", + "21 0.069543 2.363543 -2174.971745\n", + "22 0.070044 2.363710 -2162.103231\n", + "23 0.070044 2.363710 -2162.103231\n", + "24 0.090342 2.358866 -2269.610862\n", + "25 0.090342 2.358866 -2269.610862\n", + "26 0.089550 2.358814 -2222.328983\n", + "27 0.089550 2.358814 -2222.328983\n", + "28 0.076012 2.360565 -2328.281125\n", + "29 0.076012 2.360565 -2328.281125\n", + "30 0.075320 2.360529 -2305.362047\n", + "31 0.075320 2.360529 -2305.362047" + ] + }, + "execution_count": 128, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "socet_df[['lat_Y_North', 'long_X_East', 'ht']]" ] }, { @@ -288,7 +985,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.3" + "version": "3.6.4" } }, "nbformat": 4, diff --git a/plio/examples/SocetSet/cub_map.csv b/plio/examples/SocetSet/cub_map.csv new file mode 100644 index 0000000000000000000000000000000000000000..cca0f2c17b832f4df5ac001bafde185ca679cdd7 --- /dev/null +++ b/plio/examples/SocetSet/cub_map.csv @@ -0,0 +1,7 @@ +P01_001540_1889_XI_08N204W,P01_001540_1889_XI_08N204W.lev1.cub +P01_001606_1897_XI_09N203W,P01_001606_1897_XI_09N203W.lev1.cub +P02_001804_1889_XI_08N204W,P02_001804_1889_XI_08N204W.lev1.cub +P03_002226_1895_XI_09N203W,P03_002226_1895_XI_09N203W.lev1.cub +P03_002371_1888_XI_08N204W,P03_002371_1888_XI_08N204W.lev1.cub +P19_008344_1894_XN_09N203W,P19_008344_1894_XN_09N203W.lev1.cub +P20_008845_1894_XN_09N203W,P20_008845_1894_XN_09N203W.lev1.cub diff --git a/plio/io/io_controlnetwork.py b/plio/io/io_controlnetwork.py index 87b877a604648018d03daedd4f8695890c28ea40..cebe7e6c946a039bd09b64de7f0cad8c0b73edba 100644 --- a/plio/io/io_controlnetwork.py +++ b/plio/io/io_controlnetwork.py @@ -1,6 +1,7 @@ from time import gmtime, strftime import pandas as pd +import numpy as np import pvl from plio.io import ControlNetFileV0002_pb2 as cnf @@ -186,22 +187,23 @@ class IsisStore(object): header_bytes = find_in_dict(pvl_header, 'HeaderBytes') point_start_byte = find_in_dict(pvl_header, 'PointsStartByte') version = find_in_dict(pvl_header, 'Version') + if version == 2: point_attrs = [i for i in cnf._CONTROLPOINTFILEENTRYV0002.fields_by_name if i != 'measures'] measure_attrs = [i for i in cnf._CONTROLPOINTFILEENTRYV0002_MEASURE.fields_by_name] - + cols = point_attrs + measure_attrs cp = cnf.ControlPointFileEntryV0002() self._handle.seek(header_start_byte) pbuf_header = cnf.ControlNetFileHeaderV0002() pbuf_header.ParseFromString(self._handle.read(header_bytes)) - + self._handle.seek(point_start_byte) cp = cnf.ControlPointFileEntryV0002() pts = [] for s in pbuf_header.pointMessageSizes: - cp.ParseFromString(self._handle.read(s)) + cp.ParseFromString(self._handle.read(s)) pt = [getattr(cp, i) for i in point_attrs if i != 'measures'] for measure in cp.measures: @@ -267,24 +269,24 @@ class IsisStore(object): # As per protobuf docs for assigning to a repeated field. 
if attr == 'aprioriCovar': arr = g.iloc[0]['aprioriCovar'] - point_spec.aprioriCovar.extend(arr.ravel().tolist()) + if isinstance(arr, np.ndarray): + arr = arr.ravel().tolist() + + point_spec.aprioriCovar.extend(arr) else: setattr(point_spec, attr, attrtype(g.iloc[0][attr])) - point_spec.type = 2 # Hardcoded to free + point_spec.type = 2 # Hardcoded to free; this is bad # The reference index should always be the image with the lowest index point_spec.referenceIndex = 0 - # A single extend call is cheaper than many add calls to pack points measure_iterable = [] - for node_id, m in g.iterrows(): measure_spec = point_spec.Measure() # For all of the attributes, set if they are an dict accessible attr of the obj. for attr, attrtype in self.measure_attrs: if attr in g.columns: setattr(measure_spec, attr, attrtype(m[attr])) - measure_spec.serialnumber = serials[m.image_index] measure_spec.sample = m.x measure_spec.line = m.y @@ -298,7 +300,6 @@ point_message = point_spec.SerializeToString() point_sizes.append(point_spec.ByteSize()) point_messages.append(point_message) - return point_messages, point_sizes def create_buffer_header(self, networkid, targetname, diff --git a/plio/spatial/transformations.py b/plio/spatial/transformations.py index 18160eea9860e78f8ea19d6a73584d809e12b67f..efc4ad609424a1c88e4ca778370b647249655e4c 100644 --- a/plio/spatial/transformations.py +++ b/plio/spatial/transformations.py @@ -45,7 +45,6 @@ def line_sample_size(record, path): sample_size = int(sample_size)/2.0 + record['s.'] + 1 return sample_size, line_size, img_index -# converts known to ISIS keywords def known(record): """ Converts the known field from a socet dataframe into the @@ -67,7 +66,6 @@ elif record['known'] == 1 or record['known'] == 2 or record['known'] == 3: return 'Constrained' -# converts +/- 180 system to 0 - 360 system def to_360(num): """ Transforms a given number into 0 - 360 space @@ -140,7 +138,6 @@ def og2oc(dlat, dMajorRadius, dMinorRadius): print ("Error in og2oc conversion") return dlat -# gets eRadius and pRadius from a .prj file def get_axis(file): """ Gets eRadius and pRadius from a .prj file @@ -291,7 +288,5 @@ serial_dict = dict() for key in image_dict: - snum = sn.generate_serial_number(os.path.join(path, image_dict[key])) - snum = snum.replace('Mars_Reconnaissance_Orbiter', 'MRO') - serial_dict[key] = snum + serial_dict[key] = sn.generate_serial_number(os.path.join(path, image_dict[key])) return serial_dict
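Reviewer note: the sketch below summarizes the end-to-end flow this PR assembles in bin/socet2isis. It is a minimal, untested sketch written only against the functions and signatures visible in this diff (read_atf, read_gpf, read_ipf, apply_transformations, serial_numbers, cn.to_isis); the file paths, map file name, and target name are hypothetical placeholders, not part of the change.

import csv
import os

from plio.io.io_bae import read_atf, read_gpf, read_ipf
from plio.spatial.transformations import apply_transformations, serial_numbers
import plio.io.io_controlnetwork as cn

# Map each ipf basename to its cube file, in the shape of plio/examples/SocetSet/cub_map.csv
with open('cub_map.csv') as f:  # hypothetical path
    image_dict = {row[0]: row[1] for row in csv.reader(f, delimiter=',')}

# Parse the Socet Set project file, then load the ground and image point files it references
atf_dict = read_atf('MyProject.atf')  # hypothetical path
gpf_df = read_gpf(os.path.join(atf_dict['PATH'], atf_dict['GP_FILE']))
ipf_df = read_ipf([os.path.join(atf_dict['PATH'], i) for i in atf_dict['IMAGE_IPF']])

# Merge image points onto ground points, then convert Socet values to ISIS ones
socet_df = ipf_df.merge(gpf_df, left_on='pt_id', right_on='point_id')
apply_transformations(atf_dict, socet_df)

# Rename Socet columns to the names io_controlnetwork expects (remap taken verbatim from the diff)
column_remap = {'l.': 'y', 's.': 'x',
                'res_l': 'LineResidual', 'res_s': 'SampleResidual', 'known': 'Type',
                'lat_Y_North': 'AprioriY', 'long_X_East': 'AprioriX', 'ht': 'AprioriZ',
                'sig0': 'AprioriLatitudeSigma', 'sig1': 'AprioriLongitudeSigma',
                'sig2': 'AprioriRadiusSigma'}
socet_df.rename(columns=column_remap, inplace=True)

# Build ISIS serial numbers keyed like image_dict, then write the control network
serial_dict = serial_numbers(image_dict, '/path/to/cubes/')  # hypothetical cube directory
cn.to_isis('out.net', socet_df, serial_dict, targetname='Mars')

Replacing the hardcoded image_dict with the cub_map.csv lookup is the main usability win in this PR: the same script now runs against any project, and the checked-in example map file gives the tests a stable fixture.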