From e5983d442e71af099ce663f79f50285e5f8c56ad Mon Sep 17 00:00:00 2001 From: Austin Sanders <arsanders@usgs.gov> Date: Wed, 1 Nov 2023 14:04:20 -0600 Subject: [PATCH] Added tutorial for writing tests for ale --- .../writing-tests-for-ale-drivers.ipynb | 555 ++++++++++++++++++ mkdocs.yml | 2 + 2 files changed, 557 insertions(+) create mode 100644 docs/getting-started/writing-tests/writing-tests-for-ale-drivers.ipynb diff --git a/docs/getting-started/writing-tests/writing-tests-for-ale-drivers.ipynb b/docs/getting-started/writing-tests/writing-tests-for-ale-drivers.ipynb new file mode 100644 index 0000000..8b8bd1e --- /dev/null +++ b/docs/getting-started/writing-tests/writing-tests-for-ale-drivers.ipynb @@ -0,0 +1,555 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "83ffc4ff", + "metadata": {}, + "source": [ + "# Writing Tests for ALE Drivers\n", + "\n", + "Before merging a new driver into ALE, it is necessary to create automated tests to ensure that the driver functions properly. Writing a test for ALE drivers entails two separate stages -- preparing the test data and writing the test itself.\n", + "\n", + "\n", + "<div class=\"admonition info\">\n", + " <p class=\"admonition-title\">Prerequisites</p>\n", + " <p>This tutorial assumes that the user has:\n", + " <list>\n", + " <li>an active conda environment with ALE and its dependencies</li>\n", + " <li>base and mission-specific SPICE data (camera kernels)</li>\n", + " <li>access to ISIS applications (spiceinit)</li>\n", + " </list>\n", + " </p>\n", + "</div>\n", + "\n", + "## Prepare Test Data\n", + "\n", + "Naturally, ALE drivers require instrument-specific data. 
If you have not yet identified a source of test data for your driver, the [PDS Image Atlas](https://pds.nasa.gov/datasearch/data-search/) provides a catalog of data from which you can select relevant images.\n", + "\n", + "The data used in this tutorial can be found [here](https://wms.lroc.asu.edu/lroc/view_lroc/LRO-L-LROC-2-EDR-V1.0/M1435111335LE)\n", + "\n", + "### Create and Spiceinit a Cube\n", + "\n", + "After downloading image data, it is necessary to convert the image to ISIS cube format and spiceinit the data. The following command will convert and spiceinit an LRO NAC image, but the specific \"2isis\" command is mission-specific. Unlike other code segments in this notebook, these commands should be run in a terminal.\n", + "``` BASH\n", + "lronac2isis from=M1435111335LE.IMG to=M1435111335LE.cub\n", + "spiceinit from=M1435111335LE.cub\n", + "```\n", + "\n", + "### Save the Cube Label\n", + "After creating the ISIS formatted cube, it is necessary to pull the label off the cube. This is easily performed using ISIS's `catlab` utility via the command line:\n", + "\n", + "``` BASH\n", + "catlab from=M1435111335LE.cub to=M1435111335LE_isis3.lbl\n", + "```\n", + "\n", + "### Slice the Kernels\n", + "\n", + "Due to repository size limitations, it is necessary to 'slice' the kernels with *ckslicer*, which is a NAIF utility that can be downloaded [here](https://naif.jpl.nasa.gov/naif/utilities.html). Make sure that you download the utility that corresponds to your operating system and remember where you downloaded the program.\n", + "\n", + "\n", + "#### Import and Set Up Data Locations\n", + "This portion of the code is responsible for loading the libraries necessary for slicing and merging kernels. It's not necessary to alter any of the imports, but be sure to edit the cube locations so that they correspond to your directory structure and create the output_dir if necessary!" 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9f5ef5e4", + "metadata": {}, + "outputs": [], + "source": [ + "import spiceypy as spice\n", + "import pvl\n", + "import os\n", + "import re\n", + "import subprocess\n", + "from ale import util\n", + "from itertools import chain\n", + "import io\n", + "import networkx as nx\n", + "\n", + "\n", + "# These should be provided when running this script. \n", + "cube = \"M1435111335LE.cub\"\n", + "output_dir = \"kernels/\" # Output dir for created kernel files\n", + "data_dir = \"/Users/arsanders/isis_efs/isis_data/\" # Dir of where to pull original kernels from" + ] + }, + { + "cell_type": "markdown", + "id": "ce636b9b", + "metadata": {}, + "source": [ + "#### Create Utility Functions\n", + "\n", + "Next, it is necessary to create utility functions to merge intervals together and add light-time correction. There is generally no need to adjust these functions, but it is necessary to load them into memory -- just run this cell!" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6e3638b5", + "metadata": {}, + "outputs": [], + "source": [ + "def merge_intervals(intervals):\n", + " \"\"\"\n", + " Merge a set of intervals. 
The intervals are assumed to be closed, that is they include the end-points.\n", + " \n", + " Parameters\n", + " ----------\n", + " intervals : list\n", + " The input list of intervals where each interval is a tuple of (start, end)\n", + " \n", + " Returns\n", + " -------\n", + " : list\n", + " A sorted list of the merged intervals\n", + " \"\"\"\n", + " sorted_intervals = sorted(intervals, key=lambda tup: tup[0])\n", + " merged = [sorted_intervals[0]]\n", + " for interval in sorted_intervals[1:]:\n", + " # No intersection\n", + " if interval[0] > merged[-1][1]:\n", + " merged.append(interval)\n", + " # Intersection, but new interval isn't wholly contained\n", + " elif interval[1] > merged[-1][1]:\n", + " merged[-1] = (merged[-1][0], interval[1])\n", + " return merged\n", + "\n", + "def add_light_time_correction(cube_info, padding=120):\n", + " \"\"\"\n", + " Compute the time intervals for the image and any light time correction\n", + " \n", + " Parameters\n", + " ----------\n", + " cube_info : ordered dict\n", + " The cube info from ale.util.generate_kernels_from_cube\n", + " padding : float\n", + " Time padding in seconds to add to each interval\n", + " \n", + " Returns\n", + " -------\n", + " : list\n", + " A sorted list of the intervals as (start_et, stop_et)\n", + " \"\"\"\n", + " image_start_et = spice.scs2e(cube_info['SpacecraftID'], cube_info['SpacecraftClockCount'])\n", + " \n", + " image_end_et = image_start_et + cube_info['ExposureDuration'] * cube_info['Lines']\n", + "\n", + " inst_state, inst_lt = spice.spkez(cube_info['SpacecraftID'], image_start_et, 'J2000', 'NONE', 0)\n", + " target_state, target_lt = spice.spkez(cube_info['TargetID'], image_start_et, 'J2000', 'NONE', 0)\n", + " sun_state, sun_lt = spice.spkez(10, image_start_et, 'J2000', 'NONE', cube_info['TargetID'])\n", + " \n", + " intervals = [\n", + " (image_start_et - padding, image_end_et + padding),\n", + " (image_start_et - padding - inst_lt, image_end_et + padding - inst_lt),\n", + " 
(image_start_et - padding - target_lt, image_end_et + padding - target_lt),\n", + " (image_start_et - padding - sun_lt, image_end_et + padding - sun_lt)]\n", + " return merge_intervals(intervals)" + ] + }, + { + "cell_type": "markdown", + "id": "1c385749", + "metadata": {}, + "source": [ + "#### Read and Furnish Kernels\n", + "This section of the code is responsible for reading the list of kernels from a spiceinit'd cube and loading those kernels into memory. No edits are necessary to this portion of the notebook." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "62cdfe04", + "metadata": {}, + "outputs": [], + "source": [ + "# Get dictionary of kernel lists from cube\n", + "cube_info = util.generate_kernels_from_cube(cube, format_as = 'dict')\n", + "\n", + "# Replace path variables with absolute paths for kernels\n", + "for kernel_list in cube_info:\n", + " for index, kern in enumerate(cube_info[kernel_list]):\n", + " if kern is not None:\n", + " cube_info[kernel_list][index] = data_dir + kern.strip('$')\n", + " \n", + "# Create ordered list of kernels for furnishing\n", + "kernels = [kernel for kernel in chain.from_iterable(cube_info.values()) if isinstance(kernel, str)]\n", + "spice.furnsh(kernels)" + ] + }, + { + "cell_type": "markdown", + "id": "2857c02b", + "metadata": {}, + "source": [ + "#### Load and Update the Cube Information\n", + "This portion of the code is responsible for reading the .cub label and updating the dictionary with information relevant to the upcoming kernel slicing process.\n", + "\n", + "<div class=\"admonition note\">\n", + " <p class=\"admonition-title\">Updating Dictionary Elements</p>\n", + " <p>\n", + " In some circumstances, it may be necessary to update the lowest-level dictionary elements to match those found in your cube's label, i.e. \"SpacecraftClockStartCount\" or \"Line Exposure Duration\" may not be present in your label. 
It may also be necessary to add (or remove) a list index if your value contains a unit specifier. For example, the \"LineExposureDuration\" keyword specifies units, so we have to add the [0] to pull the value out of the tuple. If your LineExposureDuration does not contain units, simply remove the [0].\n", + " </p>\n", + "</div>" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9edd0925", + "metadata": {}, + "outputs": [], + "source": [ + "# Loads cube as pvl to extract rest of data\n", + "cube_pvl = pvl.load(cube)\n", + "\n", + "# Save other necessary info in cube_info dict\n", + "cube_info.update(Lines = cube_pvl['IsisCube']['Core']['Dimensions']['Lines'])\n", + "cube_info.update(SpacecraftClockCount = cube_pvl['IsisCube']['Instrument']['SpacecraftClockStartCount'])\n", + "cube_info.update(ExposureDuration = cube_pvl['IsisCube']['Instrument']['LineExposureDuration'][0])\n", + "cube_info.update(TargetID = spice.bods2c(cube_pvl['IsisCube']['Instrument']['TargetName']))\n", + "cube_info.update(SpacecraftID = spice.bods2c(cube_pvl['IsisCube']['Instrument']['SpacecraftName']))" + ] + }, + { + "cell_type": "markdown", + "id": "015f8f28", + "metadata": {}, + "source": [ + "#### Process and Slice Kernels\n", + "After collecting the necessary metadata, it is possible to 'slice' out the portions of kernels that are relevant to your specific image. The following code will slice the kernels and convert them to transfer format. This cell may require the user to specify the full path to the ckslicer executable. Alternatively, the user can place the utility in the same directory as the notebook." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c49ca1d2", + "metadata": {}, + "outputs": [], + "source": [ + "# Account for light time correction\n", + "intervals = add_light_time_correction(cube_info)\n", + "\n", + "# For each binary ck kernel specified in cube, run the ckslicer, comment and to-transfer commands\n", + "for ck in [k for k in kernels if k.lower().endswith('.bc')]:\n", + " ck_path, ck_file_extension = os.path.splitext(ck)\n", + " ck_basename = os.path.basename(ck_path)\n", + " for index, interval in enumerate(intervals):\n", + " for frame in util.get_ck_frames(ck):\n", + " output_basename = os.path.join(output_dir, ck_basename + '_' + str(index) + '_sliced_' + str(frame))\n", + " output_kern = output_basename + ck_file_extension\n", + " output_comments = output_basename + '.cmt'\n", + " start_sclk = spice.sce2s(cube_info['SpacecraftID'], interval[0])\n", + " end_sclk = spice.sce2s(cube_info['SpacecraftID'], interval[1])\n", + " # Create new sliced ck kernel\n", + " ckslicer_command = [\"./ckslicer\", \n", + " '-LSK {}'.format(cube_info['LeapSecond'][0]), \n", + " '-SCLK {}'.format(cube_info['SpacecraftClock'][0]), \n", + " '-INPUTCK {}'.format(ck), \n", + " '-OUTPUTCK {}'.format(output_kern),\n", + " '-ID {}'.format(str(frame)),\n", + " '-TIMETYPE {}'.format('SCLK'),\n", + " '-START {}'.format(start_sclk),\n", + " '-STOP {}'.format(end_sclk)]\n", + " subprocess.run(ckslicer_command, check=True)\n", + "\n", + " # Remove old comments from new ck kernel\n", + " commnt_command = ['commnt', '-d {}'.format(output_kern)]\n", + " subprocess.run(commnt_command, check=True)\n", + "\n", + " with open(output_comments, 'w+') as comment_file:\n", + " comment_file.write(\"This CK is for testing with the image: {}\\n\".format(cube))\n", + " comment_file.write(\"\\nThis CK was generated using the following command: {}\\n\")\n", + " comment_file.write(\" \".join(ckslicer_command))\n", + "\n", + " # Add new comments to new ck kernel\n", + " 
new_commnts_command = [\"commnt\", \"-a {}\".format(output_kern), output_comments]\n", + " subprocess.run(new_commnts_command, check=True)\n", + "\n", + " # Create the transfer file of the new ck kernel\n", + " subprocess.run([\"toxfr\", output_kern], check=True)" + ] + }, + { + "cell_type": "markdown", + "id": "305b31d1", + "metadata": {}, + "source": [ + "#### Merge the Kernels\n", + "After slicing the kernels, it is necessary to merge them. No edits are necessary for this portion of the notebook." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "76a3955b", + "metadata": {}, + "outputs": [], + "source": [ + "# Create the config file for the spkmerge command\n", + "for index, interval in enumerate(intervals):\n", + " output_spk_basename = os.path.join(output_dir, os.path.basename(os.path.splitext(cube)[0]) + '_' + str(index))\n", + " output_spk = output_spk_basename + '.bsp'\n", + " start_utc = spice.et2utc(interval[0], 'c', 3)\n", + " end_utc = spice.et2utc(interval[1], 'c', 3)\n", + " spk_dep_tree = util.create_spk_dependency_tree([k for k in kernels if k.lower().endswith('.bsp')])\n", + " config_string = util.spkmerge_config_string(spk_dep_tree,\n", + " output_spk,\n", + " [cube_info['TargetID'], cube_info['SpacecraftID'], 10],\n", + " cube_info['LeapSecond'][0],\n", + " start_utc,\n", + " end_utc)\n", + " with open(output_spk_basename + '.conf', 'w+') as spk_config:\n", + " spk_config.write(config_string)\n", + "\n", + " # Create the new SPK\n", + " spkmerge_command = [\"spkmerge\", spk_config.name]\n", + " subprocess.run(spkmerge_command, check=True)\n", + "\n", + " # Create the transfer file of the new SPK kernel\n", + " subprocess.run([\"toxfr\", output_spk], check=True)\n" + ] + }, + { + "cell_type": "markdown", + "id": "74afcc35", + "metadata": {}, + "source": [ + "### Generate an ISD\n", + "ALE provides the isd_generate.py utility within the 'ale' directory. 
To use this utility, the user must ensure that the ALESPICEROOT environment variable is set to the data directory containing kernels. This can be achieved using\n", + "\n", + "``` BASH\n", + "export ALESPICEROOT=<your_data_directory>\n", + "```\n", + "\n", + "After setting ALESPICEROOT, the user can use the python utility to generate an ISD. An example is as follows:\n", + "\n", + "``` BASH\n", + "python isd_generate.py M1435111335LE.cub -o lro_nac_isd.json\n", + "```" + ] + }, + { + "cell_type": "markdown", + "id": "a4c2d5db", + "metadata": {}, + "source": [ + "### Place Test Data in ALE\n", + "There are numerous data locations and naming conventions that must be followed in order for the tests to function properly. To correctly integrate the test data into ALE's structure, the user should:\n", + "\n", + "- Ensure that your files are named as follows:\n", + " - isd: \\<instrument_name\\>_isd.json\n", + " - cube label: \\<filename\\>_isis3.lbl\n", + "- Create a directory within ale/tests/pytests/data that shares a name with your test data\n", + "- Place the cube label into the directory that you created\n", + "- Place all transfer kernels into the directory that you created (*.xc, *.xsp, *.tls, *.tpc, *.ti, *.tsc)\n", + "- Place the isd within the existing ale/tests/pytests/data/isds directory" + ] + }, + { + "cell_type": "markdown", + "id": "fa6aefcf", + "metadata": {}, + "source": [ + "## Writing the Test\n", + "ALE uses the PyTest library for automated testing. This section will guide the user through the process of creating a PyTest for an ALE driver using the test data that was generated using the first part of this tutorial.\n", + "\n", + "### Create a Test Fixture\n", + "In PyTest, a test [fixture](https://docs.pytest.org/en/6.2.x/fixture.html) is an object that models test data, and it includes all the data, setup, and teardown instructions that are necessary to perform automated tests. 
The following cell demonstrates a test fixture that provides all the necessary test data for our automated tests." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2a00dbc9", + "metadata": {}, + "outputs": [], + "source": [ + "@pytest.fixture(scope=\"module\")\n", + "def test_kernels():\n", + " updated_kernels = {}\n", + " binary_kernels = {}\n", + " for image in image_dict.keys():\n", + " kernels = get_image_kernels(image)\n", + " updated_kernels[image], binary_kernels[image] = convert_kernels(kernels)\n", + " yield updated_kernels\n", + " for kern_list in binary_kernels.values():\n", + " for kern in kern_list:\n", + " os.remove(kern)" + ] + }, + { + "cell_type": "markdown", + "id": "c78dfc26", + "metadata": {}, + "source": [ + "### Create a 'Loads' Test\n", + "The first test that a driver needs to pass is whether or not it is able to parse a label as expected. The following cell performs the parsing, loading, and comparison of the ISD to a 'truth' ISD that comes from the ISIS Camera Model." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f50251ed", + "metadata": {}, + "outputs": [], + "source": [ + "# Test load of LROC labels\n", + "@pytest.mark.parametrize(\"label_type, kernel_type\", [('isis3', 'naif'), ('isis3', 'isis')])\n", + "@pytest.mark.parametrize(\"image\", ['M1435111335LE'])\n", + "def test_load(test_kernels, label_type, image, kernel_type):\n", + " if kernel_type == 'naif':\n", + " label_file = get_image_label(image, label_type)\n", + " isd_str = ale.loads(label_file, props={'kernels': test_kernels[image]})\n", + " compare_isd = image_dict[image]\n", + " else:\n", + " label_file = get_image(image)\n", + " isd_str = ale.loads(label_file)\n", + " compare_isd = get_isd('lro_isis')\n", + "\n", + " isd_obj = json.loads(isd_str)\n", + " comparison = compare_dicts(isd_obj, compare_isd)\n", + " assert comparison == []" + ] + }, + { + "cell_type": "markdown", + "id": "65da3144", + "metadata": {}, + "source": [ + "### Create Comparison Tests\n", + "\n", + "After verifying that the driver passes the 'loads' test, it is necessary to test each function within the driver. For this portion of testing, it is necessary to test any function that your driver overrides. In general, this step involves testing that the driver's method returns the expected value from the cube label.\n", + "\n", + "\n", + "<div class=\"admonition note\">\n", + " <p class=\"admonition-title\">Notes on 'patch'</p>\n", + " <p>The unittest library provides a means of mocking objects using 'patch'. 
All spice calls are mocked using this process. For more information on the use of 'patch,' visit the unittest <a href=https://docs.python.org/3/library/unittest.mock.html>documentation</a>.</p>\n", + "</div>\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "757de3a8", + "metadata": {}, + "outputs": [], + "source": [ + "# ========= Test isislabel and naifspice driver =========\n", + "class test_isis_naif(unittest.TestCase):\n", + "\n", + " def setUp(self):\n", + " label = get_image_label('M1435111335LE', 'isis3')\n", + " self.driver = LroLrocNacIsisLabelNaifSpiceDriver(label)\n", + "\n", + " def test_short_mission_name(self):\n", + " assert self.driver.short_mission_name == 'lro'\n", + "\n", + " def test_intrument_id(self):\n", + " assert self.driver.instrument_id == 'LRO_LROCNACL'\n", + "\n", + " def test_usgscsm_distortion_model(self):\n", + " with patch('ale.drivers.lro_drivers.spice.gdpool', return_value=np.array([1.0])) as gdpool, \\\n", + " patch('ale.drivers.lro_drivers.spice.bods2c', return_value=-12345) as bods2c:\n", + " distortion_model = self.driver.usgscsm_distortion_model\n", + " assert distortion_model['lrolrocnac']['coefficients'] == [1.0]\n", + " gdpool.assert_called_with('INS-12345_OD_K', 0, 1)\n", + " bods2c.assert_called_with('LRO_LROCNACL')\n", + "\n", + " def test_odtk(self):\n", + " with patch('ale.drivers.lro_drivers.spice.gdpool', return_value=np.array([1.0])) as gdpool, \\\n", + " patch('ale.drivers.lro_drivers.spice.bods2c', return_value=-12345) as bods2c:\n", + " assert self.driver.odtk == [1.0]\n", + " gdpool.assert_called_with('INS-12345_OD_K', 0, 1)\n", + " bods2c.assert_called_with('LRO_LROCNACL')\n", + "\n", + " def test_light_time_correction(self):\n", + " assert self.driver.light_time_correction == 'NONE'\n", + "\n", + " def test_detector_center_sample(self):\n", + " with patch('ale.drivers.lro_drivers.spice.gdpool', return_value=np.array([1.0])) as gdpool, \\\n", + " 
patch('ale.drivers.lro_drivers.spice.bods2c', return_value=-12345) as bods2c:\n", + " assert self.driver.detector_center_sample == 0.5\n", + " gdpool.assert_called_with('INS-12345_BORESIGHT_SAMPLE', 0, 1)\n", + " bods2c.assert_called_with('LRO_LROCNACL')\n", + "\n", + " def test_exposure_duration(self):\n", + " np.testing.assert_almost_equal(self.driver.exposure_duration, .0010334296)\n", + "\n", + " def test_ephemeris_start_time(self):\n", + " with patch('ale.drivers.lro_drivers.spice.scs2e', return_value=321) as scs2e:\n", + " np.testing.assert_almost_equal(self.driver.ephemeris_start_time, 322.05823191)\n", + " scs2e.assert_called_with(-85, '1/270649237:07208')\n", + "\n", + " def test_multiplicative_line_error(self):\n", + " assert self.driver.multiplicative_line_error == 0.0045\n", + "\n", + " def test_additive_line_error(self):\n", + " assert self.driver.additive_line_error == 0\n", + "\n", + " def test_constant_time_offset(self):\n", + " assert self.driver.constant_time_offset == 0\n", + "\n", + " def test_additional_preroll(self):\n", + " assert self.driver.additional_preroll == 1024\n", + "\n", + " def test_sampling_factor(self):\n", + " assert self.driver.sampling_factor == 1\n", + "\n", + " @patch('ale.transformation.FrameChain')\n", + " @patch('ale.transformation.FrameChain.from_spice', return_value=ale.transformation.FrameChain())\n", + " @patch('ale.transformation.FrameChain.compute_rotation', return_value=TimeDependentRotation([[0, 0, 1, 0]], [0], 0, 0))\n", + " def test_spacecraft_direction(self, compute_rotation, from_spice, frame_chain):\n", + " with patch('ale.drivers.lro_drivers.LroLrocNacIsisLabelNaifSpiceDriver.target_frame_id', \\\n", + " new_callable=PropertyMock) as target_frame_id, \\\n", + " patch('ale.drivers.lro_drivers.LroLrocNacIsisLabelNaifSpiceDriver.ephemeris_start_time', \\\n", + " new_callable=PropertyMock) as ephemeris_start_time, \\\n", + " patch('ale.drivers.lro_drivers.spice.cidfrm', return_value=[-12345]) as cidfrm, \\\n", + 
" patch('ale.drivers.lro_drivers.spice.scs2e', return_value=0) as scs2e, \\\n", + " patch('ale.drivers.lro_drivers.spice.bods2c', return_value=-12345) as bods2c, \\\n", + " patch('ale.drivers.lro_drivers.spice.spkezr', return_value=[[1, 1, 1, 1, 1, 1], 0]) as spkezr, \\\n", + " patch('ale.drivers.lro_drivers.spice.mxv', return_value=[1, 1, 1]) as mxv:\n", + " ephemeris_start_time.return_value = 0\n", + " assert self.driver.spacecraft_direction > 0\n", + " spkezr.assert_called_with(self.driver.spacecraft_name, 0, 'J2000', 'None', self.driver.target_name)\n", + " compute_rotation.assert_called_with(1, -12345)\n", + " np.testing.assert_array_equal(np.array([[-1.0, 0.0, 0.0], [0.0, -1.0, 0.0], [0.0, 0.0, 1.0]]), mxv.call_args[0][0])\n", + " np.testing.assert_array_equal(np.array([1, 1, 1]), mxv.call_args[0][1])\n", + "\n", + " def test_focal2pixel_lines(self):\n", + " with patch('ale.drivers.lro_drivers.spice.gdpool', return_value=[0, 1, 0]) as gdpool, \\\n", + " patch('ale.drivers.lro_drivers.LroLrocNacIsisLabelNaifSpiceDriver.ikid', \\\n", + " new_callable=PropertyMock) as ikid, \\\n", + " patch('ale.drivers.lro_drivers.LroLrocNacIsisLabelNaifSpiceDriver.spacecraft_direction', \\\n", + " new_callable=PropertyMock) as spacecraft_direction:\n", + " spacecraft_direction.return_value = -1\n", + " np.testing.assert_array_equal(self.driver.focal2pixel_lines, [0, -1, 0])\n", + " spacecraft_direction.return_value = 1\n", + " np.testing.assert_array_equal(self.driver.focal2pixel_lines, [0, 1, 0])" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.3" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/mkdocs.yml b/mkdocs.yml index 
4bf94f6..3eea389 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -44,6 +44,8 @@ nav: - Locating and Ingesting Image Data: "getting-started/Using ISIS: First Steps/Locating and Ingesting Image Data.md" - Adding SPICE: "getting-started/Using ISIS: First Steps/Adding Spice.md" - Exporting ISIS Data: "getting-started/Using ISIS: First Steps/Exporting ISIS Data.md" + - Writing Tests: + - Writing Tests for ALE Drivers: "getting-started/writing-tests/writing-tests-for-ale-drivers.ipynb" - How-To Guides: - Home: how-to-guides/index.md - Environment Setup and Maintenance: -- GitLab