{
 "cells": [
  {
   "cell_type": "code",
  •    "execution_count": null,
   "metadata": {},
       "outputs": [],
   "source": [
        "import sys\n",
        "\n",
        "import pandas as pd\n",
    
        "import math\n",
    
        "from plio.io.io_bae import read_gpf, read_ipf, read_atf, save_gpf, save_ipf\n",
    
        "from plio.utils.utils import find_in_dict\n",
    
        "from plio.spatial.transformations import apply_isis_transformations, apply_socet_transformations, serial_numbers\n",
    
        "import plio.io.io_controlnetwork as cn\n",
        "import plio.io.isis_serial_number as sn"
    
    Tyler Thatcher's avatar
    Tyler Thatcher committed
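  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The two cells below define the converters: `socet2isis` builds an ISIS control network from a Socet Set project (an .atf file and its associated .gpf/.ipf files), and `isis2socet` goes the other way, writing .ipf files and a .gpf from an ISIS control network."
   ]
  },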
      {
       "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
        "def socet2isis(at_file, cub_file_path, extension, target_name, outpath=None):\n",
    
        "    # Setup the at_file, path to cubes, and control network out path\n",
        "    at_file = at_file\n",
        "    cnet_out = os.path.split(os.path.splitext(at_file)[0])[1]\n",
        "    cub_path = cub_file_path\n",
    
    Tyler Thatcher's avatar
    Tyler Thatcher committed
        "\n",
    
        "    if(outpath):\n",
        "        outpath = outpath\n",
    
    Tyler Thatcher's avatar
    Tyler Thatcher committed
        "    else:\n",
    
        "        outpath = os.path.split(at_file)[0]\n",
    
    Tyler Thatcher's avatar
    Tyler Thatcher committed
        "        \n",
    
        "    # Read in and setup the atf dict of information\n",
    
        "    atf_dict = read_atf(at_file)\n",
    
        "    \n",
        "    # Get the gpf and ipf files using atf dict\n",
        "    gpf_file = os.path.join(atf_dict['PATH'], atf_dict['GP_FILE']);\n",
        "    ipf_list = [os.path.join(atf_dict['PATH'], i) for i in atf_dict['IMAGE_IPF']]\n",
        "    \n",
        "    # Read in the gpf file and ipf file(s) into seperate dataframes\n",
        "    gpf_df = read_gpf(gpf_file)\n",
        "    ipf_df = read_ipf(ipf_list)\n",
        "\n",
        "    # Check for differences between point ids using each dataframes\n",
        "    # point ids as a reference\n",
        "    gpf_pt_idx = pd.Index(pd.unique(gpf_df['point_id']))\n",
        "    ipf_pt_idx = pd.Index(pd.unique(ipf_df['pt_id']))\n",
        "\n",
        "    point_diff = ipf_pt_idx.difference(gpf_pt_idx)\n",
        "\n",
        "    if len(point_diff) != 0:\n",
        "        warnings.warn(\"The following points found in ipf files missing from gpf file: \\n\\n{}. \\\n",
        "                      \\n\\nContinuing, but these points will be missing from the control network\".format(list(point_diff)))\n",
        "        \n",
        "    # Merge the two dataframes on their point id columns\n",
        "    socet_df = ipf_df.merge(gpf_df, left_on='pt_id', right_on='point_id')\n",
        "    \n",
        "    # Apply the transformations\n",
    
        "    apply_socet_transformations(atf_dict, socet_df)\n",
    
        "    \n",
        "    # Define column remap for socet dataframe\n",
    
        "    column_map = {'pt_id': 'id', 'l.': 'y', 's.': 'x',\n",
        "                  'res_l': 'lineResidual', 'res_s': 'sampleResidual', 'known': 'Type',\n",
        "                  'lat_Y_North': 'aprioriY', 'long_X_East': 'aprioriX', 'ht': 'aprioriZ',\n",
        "                  'sig0': 'aprioriLatitudeSigma', 'sig1': 'aprioriLongitudeSigma', 'sig2': 'aprioriRadiusSigma',\n",
        "                  'sig_l': 'linesigma', 'sig_s': 'samplesigma'}\n",
    
        "    \n",
        "    # Rename the columns using the column remap above\n",
    
        "    socet_df.rename(columns = column_map, inplace=True)\n",
    
        "    # Build an image and serial dict assuming the cubes will be named as the IPFs are\n",
        "    image_dict = {i: i + extension for i in pd.unique(socet_df['ipf_file'])}\n",
    
        "    serial_dict = serial_numbers(image_dict, cub_path)\n",
    
        "    # creates the control network\n",
    
        "    cn.to_isis(os.path.join(outpath, cnet_out + '.net'), socet_df, serial_dict, targetname = targetname)"
   ]
      },
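  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A minimal sketch of the point-id consistency check inside `socet2isis`, run on hypothetical toy dataframes (the `pt_id`/`point_id` column names match what `read_ipf`/`read_gpf` produce; the values here are made up for illustration):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Hypothetical example: two image points, one of which has no ground point\n",
    "example_ipf = pd.DataFrame({'pt_id': ['pt_1', 'pt_2'], 'l.': [10.0, 20.0]})\n",
    "example_gpf = pd.DataFrame({'point_id': ['pt_1'], 'ht': [100.0]})\n",
    "\n",
    "ipf_idx = pd.Index(pd.unique(example_ipf['pt_id']))\n",
    "gpf_idx = pd.Index(pd.unique(example_gpf['point_id']))\n",
    "\n",
    "# pt_2 shows up in the difference and is then dropped by the inner merge\n",
    "print(ipf_idx.difference(gpf_idx))\n",
    "print(example_ipf.merge(example_gpf, left_on='pt_id', right_on='point_id'))"
   ]
  },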
      {
       "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
        "def isis2socet(cnet_path, eRadius, eccentricity, cub_path, extension, cub_list, out_gpf, adjusted_flag = False):\n",
        "    pRadius = eRadius * math.sqrt(1 - (eccentricity ** 2))\n",
        "    \n",
        "    df = cn.from_isis(cnet_path)\n",
        "    # Create cub dict to map ipf to cub\n",
        "    cub_dict = {i: i + extension for i in cub_list}\n",
        "\n",
        "    # Create serial dict to match serial to ipf\n",
        "    serial_dict = {sn.generate_serial_number(os.path.join(cub_path, i + extension)): i for i in cub_list}\n",
        "\n",
        "    # Remove duplicate columns\n",
        "    # There are better ways to do this but pandas was not having it\n",
        "    columns = []\n",
        "    column_index = []\n",
        "\n",
        "    for i, column in enumerate(list(df.columns)):\n",
        "        if column not in columns:\n",
        "            column_index.append(i)\n",
        "            columns.append(column)\n",
        "\n",
        "    df = df.iloc[:, column_index]\n",
        "\n",
        "    # Begin translation\n",
        "    # Remap the ISIS columns to socet column names\n",
        "    column_map = {'id': 'pt_id', 'line': 'l.', 'sample': 's.', \n",
        "                  'lineResidual': 'res_l', 'sampleResidual': 'res_s', 'type': 'known', \n",
        "                  'aprioriLatitudeSigma': 'sig0', 'aprioriLongitudeSigma': 'sig1', 'aprioriRadiusSigma': 'sig2', \n",
        "                  'linesigma': 'sig_l', 'samplesigma': 'sig_s', 'ignore': 'stat'}\n",
        "\n",
        "    # Depending on the adjusted flag, set the renames for columns appropriately\n",
        "    if adjusted_flag:\n",
        "        column_map['adjustedY'] = 'lat_Y_North'\n",
        "        column_map['adjustedX'] = 'long_X_East'\n",
        "        column_map['adjustedZ'] = 'ht'\n",
        "    else:\n",
        "        column_map['aprioriY'] = 'lat_Y_North'\n",
        "        column_map['aprioriX'] = 'long_X_East'\n",
        "        column_map['aprioriZ'] = 'ht'\n",
    "\n",
        "    df.rename(columns = column_map, inplace=True)\n",
        "    \n",
        "    apply_isis_transformations(df, eRadius, pRadius, serial_dict, extension, cub_path)\n",
        "\n",
        "    # Save the ipf\n",
        "    save_ipf(df, os.path.split(out_gpf)[0])\n",
    "\n",
        "    # Get the first record from each group as there all the same, put them\n",
        "    # into a list, and sort it\n",
        "    points = [int(i[1].index[0]) for i in df.groupby('pt_id')]\n",
        "    points.sort()\n",
        "\n",
        "    # Set the gpf_df to only the values we need and do a small rename\n",
        "    gpf_df = df.iloc[points].copy()\n",
        "    gpf_df.rename(columns = {'pt_id': 'point_id'}, inplace=True)\n",
        "\n",
        "    # Save the gpf\n",
        "    save_gpf(gpf_df, out_gpf)"
    
       ]
      },
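  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A quick worked check of the polar-radius formula used by `isis2socet`, $pRadius = eRadius\\\\sqrt{1 - e^2}$, with the Mars values from the cells below:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Worked check using the Mars equatorial radius and eccentricity below\n",
    "eRadius = 3.39619000000000e+006\n",
    "eccentricity = 1.08339143554195e-001\n",
    "pRadius = eRadius * math.sqrt(1 - (eccentricity ** 2))\n",
    "print(pRadius)  # ~3.3762e6 m, i.e. the Martian polar radius in meters"
   ]
  },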
      {
       "cell_type": "code",
       "execution_count": null,
       "metadata": {},
       "outputs": [],
       "source": [
    
        "# Setup stuffs for the cub information namely the path and extension\n",
        "cub_path = '/Path/to/cubs'\n",
    
        "# Name of the target body\n",
        "targetname = 'Mars'\n",
        "extension = 'cub.-->extension<--'\n",
        "\n",
    "# Path to atf file\n",
    "atf_file = 'Path/to/socet/set/at_file.atf'\n",
    "\n",
    "socet2isis(atf_file, cub_path, extension, targetname)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    
        "# Setup stuffs for the cub information namely the path and extension\n",
        "# along with eRadius and eccentricity\n",
        "cnet = \"Path/to/control/network.net\"\n",
    
        "\n",
        "eRadius = 3.39619000000000e+006\n",
    
        "eccentricity = 1.08339143554195e-001\n",
    
        "cub_path = 'Path/to/cubs'\n",
        "extension = 'cub.-->extension<--'\n",
    
        "# List of cubes to use\n",
    
        "cub_list = ['D06_029601_1846_XN_04N224W', \n",
        "            'F05_037684_1857_XN_05N224W']\n",
        "\n",
    
        "out_gpf = \"/Users/adampaquette/Desktop/InSightE09_XW.gpf\"\n",
    
        "adjusted_flag = False\n",
    
        "isis2socet(cnet, eRadius, eccentricity, cub_path, extension, cub_list, out_gpf, adjusted_flag)"
   ]
  },
      {
       "cell_type": "code",
       "execution_count": null,
       "metadata": {},
       "outputs": [],
    
       "source": []
    
      }
     ],
     "metadata": {
      "kernelspec": {
       "display_name": "Python 3",
       "language": "python",
       "name": "python3"
      },
      "language_info": {
       "codemirror_mode": {
        "name": "ipython",
        "version": 3
       },
       "file_extension": ".py",
       "mimetype": "text/x-python",
       "name": "python",
       "nbconvert_exporter": "python",
       "pygments_lexer": "ipython3",