From be9a72a36bfca0bfb97b673a3ad4add35bea763b Mon Sep 17 00:00:00 2001
From: Tyler Thatcher <tdt62@nau.edu>
Date: Thu, 3 May 2018 17:30:18 -0700
Subject: [PATCH] Added isisCNET creation

---
 notebooks/Socet2ISIS.ipynb | 96 ++++++++++++++++++++++++++++++++------
 1 file changed, 82 insertions(+), 14 deletions(-)

diff --git a/notebooks/Socet2ISIS.ipynb b/notebooks/Socet2ISIS.ipynb
index d4068c8..e4c8101 100644
--- a/notebooks/Socet2ISIS.ipynb
+++ b/notebooks/Socet2ISIS.ipynb
@@ -87,16 +87,18 @@
     "gpf_file = os.path.join(atf_dict['PATH'], atf_dict['GP_FILE']);\n",
     "ipf_list = [os.path.join(atf_dict['PATH'], i) for i in atf_dict['IMAGE_IPF']]\n",
     "\n",
-    "gpf_df = read_gpf(gpf_file).set_index('point_id')\n",
-    "ipf_df = read_ipf(ipf_list).set_index('pt_id')\n",
+    "gpf_df = read_gpf(gpf_file)\n",
+    "ipf_df = read_ipf(ipf_list)\n",
     "\n",
     "point_diff = ipf_df.index.difference(gpf_df.index)\n",
     "\n",
+    "\n",
+    "\n",
     "if len(point_diff) != 0:\n",
     "    warnings.warn(\"The following points found in ipf files missing from gpf file: \\n\\n{}. \\\n",
     "    \\n\\nContinuing, but these points will be missing from the control network\".format(list(point_diff)))\n",
     "\n",
-    "new_df = ipf_df.merge(gpf_df, left_index=True, right_index=True)"
+    "new_df = ipf_df.merge(gpf_df, left_on='pt_id', right_on='point_id')"
    ]
   },
   {
@@ -108,10 +110,24 @@
    "cell_type": "code",
    "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
     "import math\n",
     "import pyproj\n",
     "\n",
+    "image_dict = {'P01_001540_1889_XI_08N204W' : 'P01_001540_1889_XI_08N204W.lev1.cub',\n",
+    "              'P01_001606_1897_XI_09N203W' : 'P01_001606_1897_XI_09N203W.lev1.cub',\n",
+    "              'P02_001804_1889_XI_08N204W' : 'P02_001804_1889_XI_08N204W.lev1.cub',\n",
+    "              'P03_002226_1895_XI_09N203W' : 'P03_002226_1895_XI_09N203W.lev1.cub',\n",
+    "              'P03_002371_1888_XI_08N204W' : 'P03_002371_1888_XI_08N204W.lev1.cub',\n",
+    "              'P19_008344_1894_XN_09N203W' : 'P19_008344_1894_XN_09N203W.lev1.cub',\n",
+    "              'P20_008845_1894_XN_09N203W' : 'P20_008845_1894_XN_09N203W.lev1.cub'}\n",
+    "\n",
     "# converts columns l. and s. to isis\n",
     "def line_sample_size(record):\n",
     "    with open(atf_dict['PATH'] + '/' + record['ipf_file'] + '.sup') as f:\n",
     "        for i, line in enumerate(f):\n",
+    "            if i == 2:\n",
+    "                img_index = line.split('\\\\')\n",
+    "                img_index = img_index[-1].strip()\n",
+    "                img_index = img_index.split('.')[0]\n",
+    "                img_index = image_dict[img_index]\n",
+    "                \n",
     "            if i == 3:\n",
     "                line_size = line.split(' ')\n",
     "                line_size = line_size[-1].strip()\n",
@@ -126,7 +142,7 @@
     "    \n",
     "    line_size = int(line_size)/2.0 + record['l.'] + 1\n",
     "    sample_size = int(sample_size)/2.0 + record['s.'] + 1\n",
-    "    return sample_size, line_size\n",
+    "    return sample_size, line_size, img_index\n",
     "    \n",
     "# converts known to ISIS keywords\n",
     "def known(record):\n",
@@ -179,15 +195,15 @@
     "    \n",
     "# function to convert lat_Y_North to ISIS_lat\n",
     "def lat_ISIS_coord(record, semi_major, semi_minor):\n",
-    "    ocentric_coord = og2oc(record['lat_Y_North'], semi_major, semi_minor)\n",
-    "    coord_360 = to_360(ocentric_coord)\n",
-    "    return coord_360\n",
+    "    ocentric_coord = og2oc(record['lat_Y_North'], semi_major, semi_minor)\n",
+    "    coord_360 = to_360(ocentric_coord)\n",
+    "    return coord_360\n",
     "\n",
     "# function to convert long_X_East to ISIS_lon\n",
     "def lon_ISIS_coord(record, semi_major, semi_minor):\n",
-    "    ocentric_coord = og2oc(record['long_X_East'], semi_major, semi_minor)\n",
-    "    coord_360 = to_360(ocentric_coord)\n",
-    "    return coord_360\n",
+    "    ocentric_coord = og2oc(record['long_X_East'], semi_major, semi_minor)\n",
+    "    coord_360 = to_360(ocentric_coord)\n",
+    "    return coord_360\n",
     "\n",
     "def body_fix(record, semi_major, semi_minor):\n",
     "    ecef = pyproj.Proj(proj='geocent', a=semi_major, b=semi_minor)\n",
@@ -195,14 +211,14 @@
     "    lon, lat, height = pyproj.transform(lla, ecef, record['long_X_East'], record['lat_Y_North'], record['ht'])\n",
     "    return lon, lat, height\n",
     "\n",
+    "# applies transformations to columns\n",
     "def socet2isis(prj_file):\n",
     "    eRadius, pRadius = get_axis(prj_file)\n",
-    "    new_df['s.'], new_df['l.'] = (zip(*new_df.apply(sample_size, axis=1)))\n",
+    "    new_df['s.'], new_df['l.'], new_df['image_index'] = (zip(*new_df.apply(line_sample_size, axis=1)))\n",
     "    new_df['known'] = new_df.apply(known, axis=1)\n",
     "    new_df['lat_Y_North'] = new_df.apply(lat_ISIS_coord, semi_major = eRadius, semi_minor = pRadius, axis=1)\n",
     "    new_df['long_X_East'] = new_df.apply(lon_ISIS_coord, semi_major = eRadius, semi_minor = pRadius, axis=1)\n",
     "    new_df['long_X_East'], new_df['lat_Y_North'], new_df['ht'] = zip(*new_df.apply(body_fix, semi_major = eRadius, semi_minor = pRadius, axis = 1))\n",
-    "\n",
     "socet2isis('CTX_Athabasca_Middle.prj')"
    ]
   },
@@ -212,9 +228,60 @@
    "metadata": {},
    "outputs": [],
    "source": [
+    "new_df['image_index']"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "scrolled": true
+   },
+   "outputs": [],
+   "source": [
+    "column_remap = {'l.': 'x', 's.': 'y',\n",
+    "'res_l': 'LineResidual', 'res_s': 'SampleResidual', 'known': 'Type',\n",
+    "'lat_Y_North': 'AprioriY', 'long_X_East': 'AprioriX', 'ht': 'AprioriZ',\n",
+    "'sig0': 'AprioriLatitudeSigma', 'sig1': 'AprioriLongitudeSigma', 'sig2': 'AprioriRadiusSigma'}\n",
+    "\n",
+    "new_df.rename(columns=column_remap, inplace=True)\n",
+    "\n",
     "new_df"
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import plio.io.io_controlnetwork as cn\n",
+    "import plio.io.isis_serial_number as sn\n",
+    "\n",
+    "# creates a dict of serial numbers with the cub being the key\n",
+    "def serial_numbers():\n",
+    "    serial_dict = {}\n",
+    "    image_dict = {'P01_001540_1889_XI_08N204W' : 'P01_001540_1889_XI_08N204W.lev1.cub',\n",
+    "                  'P01_001606_1897_XI_09N203W' : 'P01_001606_1897_XI_09N203W.lev1.cub',\n",
+    "                  'P02_001804_1889_XI_08N204W' : 'P02_001804_1889_XI_08N204W.lev1.cub',\n",
+    "                  'P03_002226_1895_XI_09N203W' : 'P03_002226_1895_XI_09N203W.lev1.cub',\n",
+    "                  'P03_002371_1888_XI_08N204W' : 'P03_002371_1888_XI_08N204W.lev1.cub',\n",
+    "                  'P19_008344_1894_XN_09N203W' : 'P19_008344_1894_XN_09N203W.lev1.cub',\n",
+    "                  'P20_008845_1894_XN_09N203W' : 'P20_008845_1894_XN_09N203W.lev1.cub'}\n",
+    "    \n",
+    "    for key in image_dict:\n",
+    "        serial_dict[image_dict[key]] = sn.generate_serial_number('/home/tthatcher/Desktop/Projects/Plio/' + image_dict[key])\n",
+    "    return serial_dict\n",
+    "\n",
+    "# serial number dictionary\n",
+    "serial_dict = serial_numbers()\n",
+    "\n",
+    "print(serial_dict)\n",
+    "\n",
+    "# creates the control network\n",
+    "cnet = cn.to_isis('/home/tthatcher/Desktop/Projects/Plio/cn.csv', new_df, serial_dict)"
+   ]
+  },
   {
    "cell_type": "code",
    "execution_count": null,
@@ -228,7 +295,8 @@
     "\n",
     "@read_ipf.register(str)\n",
     "def read_ipf_str(input_data):\n",
-    "    \"\"\"\n",
+    "    \"\"\"\n",
+    "\n",
     "    Read a socet ipf file into a pandas data frame\n",
     "\n",
     "    Parameters\n",
@@ -245,7 +313,7 @@
     "    # Check that the number of rows is matching the expected number\n",
     "    with open(input_data, 'r') as f:\n",
     "        for i, l in enumerate(f):\n",
-    "            if i == 1:\n",
+    "            if i == 1:\n",
     "                cnt = int(l)\n",
     "            elif i == 2:\n",
     "                col = l\n",
-- 
GitLab
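
A note on the first hunk: with the .set_index('point_id') / .set_index('pt_id') calls removed, gpf_df and ipf_df keep their default integer indices, so point_diff = ipf_df.index.difference(gpf_df.index) now compares row positions rather than point ids, and the warning can miss real mismatches. A minimal sketch of an id-based check, assuming the 'pt_id' and 'point_id' columns used by the merge in the same hunk (the helper name check_missing_points is illustrative, not part of plio):

    import warnings
    import pandas as pd

    def check_missing_points(ipf_df: pd.DataFrame, gpf_df: pd.DataFrame) -> list:
        # point ids that appear in the ipf measures but not in the gpf ground points
        missing = sorted(set(ipf_df['pt_id']) - set(gpf_df['point_id']))
        if missing:
            warnings.warn("The following points found in ipf files missing from gpf file: "
                          "{}. Continuing, but these points will be missing from the "
                          "control network".format(missing))
        return missing

The merge itself (left_on='pt_id', right_on='point_id') is unaffected; only the pre-merge warning relies on the index.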
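A note on the coordinate hunks: socet2isis() converts each ground point from SOCET's planetographic latitude / positive-east longitude / height into the planetocentric, body-fixed values ISIS expects, using og2oc and to_360 (defined earlier in the notebook, outside this diff) plus pyproj for the body-fixed XYZ. A standalone sketch of the same two steps, assuming og2oc implements the usual ographic-to-ocentric relation tan(lat_c) = (b^2/a^2) * tan(lat_g); the function names below are illustrative only:

    import math
    import pyproj

    def ographic_to_ocentric(lat_deg, semi_major, semi_minor):
        # planetographic -> planetocentric latitude on an ellipsoid with radii a, b
        lat_g = math.radians(lat_deg)
        lat_c = math.atan((semi_minor ** 2 / semi_major ** 2) * math.tan(lat_g))
        return math.degrees(lat_c)

    def to_body_fixed(lon_deg, lat_deg, height, semi_major, semi_minor):
        # lon/lat/height -> body-fixed XYZ, mirroring the body_fix() helper in the diff
        ecef = pyproj.Proj(proj='geocent', a=semi_major, b=semi_minor)
        lla = pyproj.Proj(proj='latlong', a=semi_major, b=semi_minor)
        return pyproj.transform(lla, ecef, lon_deg, lat_deg, height)

to_360 would then normalize longitudes into the 0-360 degree domain (lon % 360), as the notebook does after the ocentric conversion.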
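A note on the new control-network cell: cn.to_isis, as called in the cell, takes an output path, the measure dataframe, and a mapping from each cube to its ISIS serial number, which serial_numbers() builds with sn.generate_serial_number over a hard-coded cube list and path. A sketch of the same step with the paths factored out (build_serial_dict and the example paths are illustrative, not plio API):

    import os
    import plio.io.io_controlnetwork as cn
    import plio.io.isis_serial_number as sn

    def build_serial_dict(cube_dir, cube_names):
        # map each ISIS cube file name to the serial number derived from its labels
        return {name: sn.generate_serial_number(os.path.join(cube_dir, name))
                for name in cube_names}

    # usage, following the call pattern in the new cell:
    # serial_dict = build_serial_dict('/path/to/cubes', ['P01_001540_1889_XI_08N204W.lev1.cub'])
    # cnet = cn.to_isis('cn.csv', new_df, serial_dict)

Keeping the image-name-to-cube mapping in one place (it currently appears both in the line_sample_size cell and inside serial_numbers()) would also make the notebook easier to retarget to another project.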