diff --git a/.travis.yml b/.travis.yml
index 90f2e834f3850227eafa0da81e11d529dabf2d3d..ae8bb6f22461186ae5b6d8c7b578153f63f0a19c 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -53,8 +53,11 @@ after_success:
   # Need to do the build in the root
   - source deactivate
   - conda install -q conda-build anaconda-client
-  - conda config --set anaconda_upload yes
-  - conda build --token $CONDA_UPLOAD_TOKEN --python $PYTHON_VERSION recipe -q
+  - conda config --set anaconda_upload no
+  - conda build recipe -q
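+  # Upload the locally built package to anaconda.org under the dev label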
+  - builddir=$(conda build recipe --output)
+  - anaconda -t "$CONDA_UPLOAD_TOKEN" upload "$builddir" --label dev --force
 
   # Docs to gh-pages
   - source activate test  # Reactivate the env to have all deps installed.
diff --git a/ci_support/upload_or_check_non_existence.py b/ci_support/upload_or_check_non_existence.py
deleted file mode 100644
index 9cedfddd950207f2cb38706ec7a430f915f8dad5..0000000000000000000000000000000000000000
--- a/ci_support/upload_or_check_non_existence.py
+++ /dev/null
@@ -1,119 +0,0 @@
-#!/usr/bin/env python
-from __future__ import print_function
-
-import argparse
-import hashlib
-import os
-import subprocess
-import sys
-
-from binstar_client.utils import get_binstar
-import binstar_client.errors
-import conda.config
-from conda_build.metadata import MetaData
-from conda_build.build import bldpkg_path
-
-
-def built_distribution_already_exists(cli, meta, owner):
-    """
-    Checks to see whether the built recipe (aka distribution) already
-    exists on the owner/user's binstar account.
-    """
-    distro_name = '{}/{}.tar.bz2'.format(conda.config.subdir, meta.dist())
-    fname = bldpkg_path(meta)
-    try:
-        dist_info = cli.distribution(owner, meta.name(), meta.version(),
-                                     distro_name)
-    except binstar_client.errors.NotFound:
-        dist_info = {}
-
-    exists = bool(dist_info)
-    # Unfortunately, we cannot check the md5 quality of the built distribution, as
-    # this will depend on fstat information such as modification date (because
-    # distributions are tar files). Therefore we can only assume that the distribution
-    # just built, and the one on anaconda.org are the same.
-#    if exists:
-#        md5_on_binstar = dist_info.get('md5')
-#        with open(fname, 'rb') as fh:
-#            md5_of_build = hashlib.md5(fh.read()).hexdigest()
-#
-#        if md5_on_binstar != md5_of_build:
-#            raise ValueError('This build ({}), and the build already on binstar '
-#                             '({}) are different.'.format(md5_of_build, md5_on_binstar))
-    return exists
-
-
-def upload(cli, meta, owner, channels):
-    try:
-        with open('binstar.token', 'w') as fh:
-            fh.write(cli.token)
-        subprocess.check_call(['anaconda', '--quiet', '-t', 'binstar.token',
-                               'upload', bldpkg_path(meta),
-                               '--user={}'.format(owner),
-                               '--channel={}'.format(channels)],
-                              env=os.environ)
-    finally:
-        os.remove('binstar.token')
-
-
-def distribution_exists_on_channel(binstar_cli, meta, owner, channel='main'):
-    """
-    Determine whether a distribution exists on a specific channel.
-    Note from @pelson: As far as I can see, there is no easy way to do this on binstar.
-    """
-    fname = '{}/{}.tar.bz2'.format(conda.config.subdir, meta.dist())
-    distributions_on_channel = [dist['basename'] for dist in
-                                binstar_cli.show_channel(owner=owner, channel=channel)['files']]
-    return fname in distributions_on_channel
-
-
-def add_distribution_to_channel(binstar_cli, meta, owner, channel='main'):
-    """
-    Add a(n already existing) distribution on binstar to another channel.
-    Note - the addition is done based on name and version - no build strings etc.
-    so if you have a foo-0.1-np18 and foo-0.1-np19 *both* will be added to the channel.
-    """
-    package_fname = '{}/{}.tar.bz2'.format(conda.config.subdir, meta.dist())
-    binstar_cli.add_channel(channel, owner, meta.name(), meta.version())
-
-
-def main():
-    token = os.environ.get('BINSTAR_KEY')
-
-    description = ('Upload or check consistency of a built version of a '
-                   'conda recipe with binstar. Note: The existence of the '
-                   'BINSTAR_KEY environment variable determines '
-                   'whether the upload should actually take place.')
-    parser = argparse.ArgumentParser(description=description)
-    parser.add_argument('recipe_dir', help='the conda recipe directory')
-    parser.add_argument('owner', help='the binstar owner/user')
-    parser.add_argument('--channel', help='the binstar channel', default='main')
-    args = parser.parse_args()
-    recipe_dir, owner, channel = args.recipe_dir, args.owner, args.channel
-
-    cli = get_binstar(argparse.Namespace(token=token, site=None))
-    meta = MetaData(recipe_dir)
-    if meta.skip():
-        print("No upload to take place - this configuration was skipped in build/skip.")
-        return
-    exists = built_distribution_already_exists(cli, meta, owner)
-    if token:
-        on_channel = distribution_exists_on_channel(cli, meta, owner, channel)
-        if not exists:
-            upload(cli, meta, owner, channel)
-            print('Uploaded {}'.format(bldpkg_path(meta)))
-        elif not on_channel:
-            print('Adding distribution {} to {}\'s {} channel'
-                  ''.format(bldpkg_path(meta), owner, channel))
-            add_distribution_to_channel(cli, meta, owner, channel)
-        else:
-            print('Distribution {} already \nexists on {}\'s {} channel.'
-                  ''.format(bldpkg_path(meta), owner, channel))
-    else:
-        print("No BINSTAR_KEY present, so no upload is taking place. "
-              "The distribution just built {} already available on {}'s "
-              "{} channel.".format('is' if exists else 'is not',
-                                   owner, channel))
-
-if __name__ == '__main__':
-    main()
\ No newline at end of file
diff --git a/doc_requirements.txt b/doc_requirements.txt
deleted file mode 100644
index 601a01e46bdb39b9e9b3b2025370a9e937ba8670..0000000000000000000000000000000000000000
--- a/doc_requirements.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-nbsphinx
-jupyter_client
diff --git a/plio/examples/Apollo15/AS15-M-0295_SML.png b/plio/examples/Apollo15/AS15-M-0295_SML.png
index 39c9d38ad5f2f5496442a8bb9d0a9e15b97c1ac9..7da796f96ee096507a94769aa99cb2e81c33415f 100644
Binary files a/plio/examples/Apollo15/AS15-M-0295_SML.png and b/plio/examples/Apollo15/AS15-M-0295_SML.png differ
diff --git a/plio/examples/Apollo15/AS15-M-0295_SML_geo.tif b/plio/examples/Apollo15/AS15-M-0295_SML_geo.tif
deleted file mode 100644
index e62cfafbfe4535d73f81f20b611c3539a27c418d..0000000000000000000000000000000000000000
Binary files a/plio/examples/Apollo15/AS15-M-0295_SML_geo.tif and /dev/null differ
diff --git a/plio/examples/Apollo15/AS15-M-0296_SML.png b/plio/examples/Apollo15/AS15-M-0296_SML.png
index f9b3b44102e2d3f8b2f2a072a358bb5cca1fdcc4..d583f9fa59a2afd0a6322a52f8f995c3b679d065 100644
Binary files a/plio/examples/Apollo15/AS15-M-0296_SML.png and b/plio/examples/Apollo15/AS15-M-0296_SML.png differ
diff --git a/plio/examples/Apollo15/AS15-M-0296_SML_geo.tif b/plio/examples/Apollo15/AS15-M-0296_SML_geo.tif
deleted file mode 100644
index 46f1ba3bb7e9bc41ebf0404cd62bf93b963c33cd..0000000000000000000000000000000000000000
Binary files a/plio/examples/Apollo15/AS15-M-0296_SML_geo.tif and /dev/null differ
diff --git a/plio/examples/Apollo15/AS15-M-0297_SML.png b/plio/examples/Apollo15/AS15-M-0297_SML.png
index c6e0f4af973875fe159a7bfba3c19cd1ceb5f8af..09d42d2b9b4f29ecd032a3a374982e735759defb 100644
Binary files a/plio/examples/Apollo15/AS15-M-0297_SML.png and b/plio/examples/Apollo15/AS15-M-0297_SML.png differ
diff --git a/plio/examples/Apollo15/AS15-M-0298_SML.png b/plio/examples/Apollo15/AS15-M-0298_SML.png
index f25f7f4dc7fba5bf01ea39c4de7a0e9a2586ef43..ddd987ba351723a9c29120f66cb58411163c3db8 100644
Binary files a/plio/examples/Apollo15/AS15-M-0298_SML.png and b/plio/examples/Apollo15/AS15-M-0298_SML.png differ
diff --git a/plio/examples/Apollo15/AS15-M-0299_SML.png b/plio/examples/Apollo15/AS15-M-0299_SML.png
index 89b4e31f5342b73ae7cb0cec7825c9be331f0b87..57415e4dc17107d0e380356ebd4541d87dc897e1 100644
Binary files a/plio/examples/Apollo15/AS15-M-0299_SML.png and b/plio/examples/Apollo15/AS15-M-0299_SML.png differ
diff --git a/plio/examples/Apollo15/AS15-M-0300_SML.png b/plio/examples/Apollo15/AS15-M-0300_SML.png
index d7c6d324c46ed965b6bc4829da533e402a519370..c8ff320781a431f53f05b96092d2edf6ca161f6f 100644
Binary files a/plio/examples/Apollo15/AS15-M-0300_SML.png and b/plio/examples/Apollo15/AS15-M-0300_SML.png differ
diff --git a/plio/examples/Apollo15/test.png b/plio/examples/Apollo15/test.png
deleted file mode 100644
index 39c9d38ad5f2f5496442a8bb9d0a9e15b97c1ac9..0000000000000000000000000000000000000000
Binary files a/plio/examples/Apollo15/test.png and /dev/null differ
diff --git a/plio/examples/Projections/Mars_MGS_MOLA_ClrShade_MAP2_0.0N0.0_MERC.tif b/plio/examples/Projections/Mars_MGS_MOLA_ClrShade_MAP2_0.0N0.0_MERC.tif
index f0c86c311c974d2e8a4ee6c07f4b963f6041a547..a80928e4eb2232580af0bb7f320fddcddf44044f 100644
Binary files a/plio/examples/Projections/Mars_MGS_MOLA_ClrShade_MAP2_0.0N0.0_MERC.tif and b/plio/examples/Projections/Mars_MGS_MOLA_ClrShade_MAP2_0.0N0.0_MERC.tif differ
diff --git a/plio/examples/Projections/Mars_MGS_MOLA_ClrShade_MAP2_90.0N0.0_POLA.tif b/plio/examples/Projections/Mars_MGS_MOLA_ClrShade_MAP2_90.0N0.0_POLA.tif
index 27c0a230c89d929e81602f9d1f30d358e35686fd..e47b318148f7c268ebf1aa4e20ee7051695bf24c 100644
Binary files a/plio/examples/Projections/Mars_MGS_MOLA_ClrShade_MAP2_90.0N0.0_POLA.tif and b/plio/examples/Projections/Mars_MGS_MOLA_ClrShade_MAP2_90.0N0.0_POLA.tif differ
diff --git a/plio/io/__init__.py b/plio/io/__init__.py
index ce170f1a6a76dcee06ca96726abca1abbed72f5a..f7acda990e30eff5af63f6cd73cb1c54842664b6 100644
--- a/plio/io/__init__.py
+++ b/plio/io/__init__.py
@@ -1,23 +1,24 @@
 # Conditional imports for GDAL
 import importlib
 import warnings
+import importlib.util
 
-gdal = importlib.find_loader('gdal')
-ogr = importlib.find_loader('osgeo.ogr')
-osr = importlib.find_loader('osr')
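+# GDAL is optional; only expose the bindings when the osgeo package is importable.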
+if importlib.util.find_spec('osgeo') is not None:
+    from osgeo import gdal, ogr, osr
 
-if gdal:
-    gdal = gdal.load_module()
-    ogr = ogr.load_module()
-    osr = osr.load_module()
     gdal.UseExceptions() 
+else:
+    gdal = None
+    ogr = None
+    osr = None
 
 def conditional_gdal(func):
     def has_gdal(*args, **kwargs):
         if gdal:
             return func(*args, **kwargs)
         else:
-            warning.warn('Trying to call a GDAL method, but GDAL is not installed.')
+            warnings.warn('Trying to call a GDAL method, but GDAL is not installed.')
         return None
     return has_gdal
 
diff --git a/plio/io/io_gdal.py b/plio/io/io_gdal.py
index 49f12100520eff8a5dc14a4b9217bba02b0d9074..9573c0fc6b5c871a3a84644d8ac01aa2747b94c2 100644
--- a/plio/io/io_gdal.py
+++ b/plio/io/io_gdal.py
@@ -14,6 +14,7 @@ from plio.utils.utils import find_in_dict
 from plio.io import gdal, ogr, osr
 
 NP2GDAL_CONVERSION = {
+  "byte": 1,
   "uint8": 1,
   "int8": 1,
   "uint16": 2,
@@ -26,17 +27,12 @@ NP2GDAL_CONVERSION = {
   "complex128": 11,
 }
 
-GDAL2NP_CONVERSION = {}
+GDAL2NP_CONVERSION = {v: k for k, v in NP2GDAL_CONVERSION.items()}  # for duplicate GDAL codes the last NumPy name wins, so 1 -> 'int8'
 
 DEFAULT_PROJECTIONS = {'mars':'GEOGCS["Mars 2000",DATUM["D_Mars_2000",SPHEROID["Mars_2000_IAU_IAG",3396190.0,169.89444722361179]],PRIMEM["Greenwich",0],UNIT["Decimal_Degree",0.0174532925199433]]',
                        'moon':'GEOGCS["Moon 2000",DATUM["D_Moon_2000",SPHEROID["Moon_2000_IAU_IAG",1737400.0,0.0]],PRIMEM["Greenwich",0],UNIT["Decimal_Degree",0.0174532925199433]]'}
 DEFAULT_RADII = {'mars': 3396190.0}
 
-for k, v in iter(NP2GDAL_CONVERSION.items()):
-    GDAL2NP_CONVERSION[v] = k
-
-GDAL2NP_CONVERSION[1] = 'int8'
-
 
 class GeoDataset(object):
     """
@@ -488,7 +484,7 @@ class GeoDataset(object):
         px, py = map(int, self.inverse_affine * (lon, lat))
         return px, py
 
-    def read_array(self, band=1, pixels=None, dtype='float32'):
+    def read_array(self, band=1, pixels=None, dtype=None):
         """
         Extract the required data as a NumPy array
 
@@ -502,7 +498,7 @@ class GeoDataset(object):
                  [xstart, ystart, xstop, ystop]. Default pixels=None.
 
         dtype : str
-                The NumPy dtype for the output array. Default dtype='float32'.
+                The NumPy dtype for the output array. Defaults to the band dtype.
 
         Returns
         -------
@@ -512,6 +508,9 @@ class GeoDataset(object):
         """
         band = self.dataset.GetRasterBand(band)
 
+        if dtype is None:
+            dtype = GDAL2NP_CONVERSION[band.DataType]
+
         dtype = getattr(np, dtype)
 
         if not pixels:
diff --git a/plio/io/io_spectral_profiler.py b/plio/io/io_spectral_profiler.py
index dc31aa02c1dd6dfbff914d22c487459ec31acd14..a27e75ec82a3214a1e34fd77773ff805a8763bc8 100755
--- a/plio/io/io_spectral_profiler.py
+++ b/plio/io/io_spectral_profiler.py
@@ -77,7 +77,7 @@ class Spectral_Profiler(object):
                         ncols -= 1
             strbytes = map(str, bytelengths)
             rowdtype = list(zip(columns, map(''.join, zip(['>'] * ncols, datatypes, strbytes))))
-            d = np.fromstring(indata.read(rowbytes * nrows), dtype=rowdtype,
+            d = np.frombuffer(indata.read(rowbytes * nrows), dtype=rowdtype,
                               count=nrows)
             self.ancillary_data = pd.DataFrame(d, columns=columns,
                                                index=np.arange(nrows))
@@ -124,7 +124,7 @@ class Spectral_Profiler(object):
                 lines = d['LINES']
                 scaling_factor = d['SCALING_FACTOR']
 
-                arr = np.fromstring(indata.read(lines * 296*2), dtype='>H').astype(np.float64)
+                arr = np.frombuffer(indata.read(lines * 296*2), dtype='>H').astype(np.float64)
                 arr = arr.reshape(lines, -1)
 
                 # If the data is scaled, apply the scaling factor
diff --git a/plio/io/tests/test_io_gdal.py b/plio/io/tests/test_io_gdal.py
index 38ef04f619244b094284171607ea8f1d505f8d98..5bb1991e421196761cabc7ce78e18bc00255001d 100644
--- a/plio/io/tests/test_io_gdal.py
+++ b/plio/io/tests/test_io_gdal.py
@@ -97,12 +97,13 @@ class TestMercator(unittest.TestCase):
     def test_read_array(self):
         arr = self.dataset.read_array()
         self.assertEqual(arr.shape, (1694, 2304))
-        self.assertEqual(arr.dtype, np.float32)
+        self.assertEqual(arr.dtype, np.int8)
 
     def test_read_array_set_dtype(self):
-        arr = self.dataset.read_array(dtype='int8')
-        self.assertEqual(arr.dtype, np.int8)
-        self.assertAlmostEqual(np.mean(arr), 10.10353227, 6)
+        arr = self.dataset.read_array(dtype="float32")
+        self.assertEqual(arr.dtype, np.float32)
+        self.assertEqual(np.min(arr), 0)
+        self.assertEqual(np.max(arr), 255)
 
 @pytest.mark.skipif(gdal is None, reason="GDAL not installed")
 class TestLambert(unittest.TestCase):
diff --git a/recipe/meta.yaml b/recipe/meta.yaml
index d1c03491721c4cbc9adc663fdaf1b9f145825d8e..7f701ffcce92f41cc809fb6f91df8c3f039d567f 100644
--- a/recipe/meta.yaml
+++ b/recipe/meta.yaml
@@ -1,12 +1,15 @@
+{% set data = load_setup_py_data() %}
+
 package:
   name: plio
-  version: 0.1.4
+  version: {{ data.get('version') }}
 
 source:
   git_url: https://github.com/USGS-Astrogeology/plio
 
 build:
-  number: 1
+  number: {{ GIT_DESCRIBE_NUMBER }}
+  string: dev
 
 extra:
   channels:
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..7d1e0773b5925f0a694afe69f3d3d8b9c99ba8c8
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,3 @@
+--index-url https://pypi.python.org/simple/
+
+-e .
\ No newline at end of file
diff --git a/setup.py b/setup.py
index 040954e7c3d709e8f7d1074a299f187c3a9d8c81..7f1566aa3fb09bb467540131aecc39b9dea46f89 100644
--- a/setup.py
+++ b/setup.py
@@ -20,7 +20,7 @@ def setup_package():
 
     setup(
         name = "plio",
-        version = '0.1.4',
+        version = '1.0.0',
         author = "Jay Laura",
         author_email = "jlaura@usgs.gov",
         description = ("I/O API to support planetary data formats."),
@@ -35,8 +35,12 @@ def setup_package():
         zip_safe=False,
         scripts=['bin/socetnet2isis', 'bin/isisnet2socet'],
         install_requires=[
+            'libgdal',
             'gdal',
             'numpy',
+            'pyproj',
+            'jinja2',
+            'ncurses',
             'pvl',
             'protobuf',
             'h5py',