Commit 935bf92d authored by jlaura, committed by GitHub

Updates for ISD Generation (#28)

* Makes gdal an optional dependency

* Refactors all bae I/O into a single namespace (see the import sketch after this list)

* typos

* Fixes the gdal dependency test

* Adds tests for the JSON

* Tests fail without test data

* Updates for comments from @thareUSGS

* Adds ISIS3 V2 ControlNetwork Reader
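A minimal import sketch of the consolidated namespace, matching the updated test imports later in this diff:

# Previously split across modules (e.g. plio.io.io_gpf); now a single namespace
# covers the Socet Set / BAE formats.
from plio.io.io_bae import socetset_keywords_to_json, read_gpf, save_gpf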
parent 8f00a1f3
__version__ = "0.1.1"
__version__ = "0.1.2"
# Submodule imports
from . import io
......
File added
SEMI_MAJOR_AXIS 3.39619000000000e+006
ECCENTRICITY 1.08339143554195e-001
\ No newline at end of file
This diff is collapsed.
import json
import re
import numpy as np
import pandas as pd
def socetset_keywords_to_json(keywords, ell=None):
"""
Convert a SocetCet keywords.list file to JSON
Parameters
----------
keywords : str
Path to the socetset keywords.list file
Returns
-------
: str
The serialized JSON string.
"""
matcher = re.compile(r'\b(?!\d)\w+\b')
numeric_matcher = re.compile(r'\W-?(?:0|[1-9]\d*)(?:\.\d*)?(?:[eE][+\-]?\d+)?')
stream = {}
def parse(fi):
with open(fi, 'r') as f:
for l in f:
l = l.rstrip()
if not l:
continue
matches = matcher.findall(l)
if matches:
key = matches[0]
stream[key] = []
# Case where the kw are strings after the key
if len(matches) > 1:
stream[key] = matches[1:]
# Case where the kw are numeric types after the key
else:
nums = numeric_matcher.findall(l)
if len(nums) == 1:
stream[key] = float(nums[0])
else:
stream[key] += map(float, nums)
else:
# Case where the values are on a newline after the key
nums = numeric_matcher.findall(l)
stream[key] += map(float, nums)
parse(keywords)
if ell:
parse(ell)
return json.dumps(stream)
def read_gpf(input_data):
"""
Read a Socet Set ground point file (.gpf) into a pandas DataFrame
......
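A usage sketch for socetset_keywords_to_json, mirroring the new test at the bottom of this diff; it assumes the bundled socet_isd.lis and ellipsoid.ell example files and passes the ellipsoid explicitly via the optional ell argument:

import json

from plio.examples import get_path
from plio.io.io_bae import socetset_keywords_to_json

# Serialize the keywords.list (plus the optional ellipsoid file) to JSON,
# then parse it back into a dict for inspection.
js = json.loads(socetset_keywords_to_json(get_path('socet_isd.lis'),
                                          ell=get_path('ellipsoid.ell')))
assert 'RECTIFICATION_TERMS' in js
assert 'SEMI_MAJOR_AXIS' in js  # merged in from the ellipsoid file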
from time import gmtime, strftime
import pandas as pd
import pvl
from plio.io import ControlNetFileV0002_pb2 as cnf
from plio.utils.utils import xstr
from plio.utils.utils import xstr, find_in_dict
VERSION = 2
HEADERSTARTBYTE = 65536
DEFAULTUSERNAME = 'None'
......@@ -26,8 +26,24 @@ def write_filelist(lst, path="fromlist.lis"):
handle.write('\n')
return
class IsisControlNetwork(pd.DataFrame):
def to_isis(path, obj, serials, mode='wb', version=VERSION,
# normal properties
_metadata = ['header']
@property
def _constructor(self):
return IsisControlNetwork
def from_isis(path, remove_empty=True):
# Now get ready to work with the binary
with IsisStore(path, mode='rb') as store:
df = store.read()
return df
def to_isis(path, obj, serials, mode='wb', version=2,
headerstartbyte=HEADERSTARTBYTE,
networkid='None', targetname='None',
description='None', username=DEFAULTUSERNAME,
......@@ -111,10 +127,8 @@ def to_isis(path, obj, serials, mode='wb', version=VERSION,
buffer_header_size, points_bytes,
creation_date, modified_date)
store.write(header)
class IsisStore(object):
"""
Class to manage IO of an ISIS control network (version 2).
......@@ -136,6 +150,7 @@ class IsisStore(object):
9: str,
11: None,
14: None}
self.header_attrs = [(i.name, bt[i.type]) for i in cnf._CONTROLNETFILEHEADERV0002.fields]
self.point_attrs = [(i.name, bt[i.type]) for i in cnf._CONTROLPOINTFILEENTRYV0002.fields]
self.measure_attrs = [(i.name, bt[i.type]) for i in cnf._CONTROLPOINTFILEENTRYV0002_MEASURE.fields]
......@@ -147,11 +162,55 @@ class IsisStore(object):
self._open()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, traceback):
self.close()
def close(self):
if self._handle is not None:
self._handle.close()
self._handle = None
def _open(self):
if self._mode in ['wb', 'a']:
self._handle = open(self._path, self._mode)
else:
raise NotImplementedError
def read(self):
"""
Given an ISIS store, read the underlying ISIS3 compatible control network and
return an IsisControlNetwork dataframe.
"""
pvl_header = pvl.load(self._path)
header_start_byte = find_in_dict(pvl_header, 'HeaderStartByte')
header_bytes = find_in_dict(pvl_header, 'HeaderBytes')
point_start_byte = find_in_dict(pvl_header, 'PointsStartByte')
version = find_in_dict(pvl_header, 'Version')
if version == 2:
point_attrs = [i for i in cnf._CONTROLPOINTFILEENTRYV0002.fields_by_name if i != 'measures']
measure_attrs = [i for i in cnf._CONTROLPOINTFILEENTRYV0002_MEASURE.fields_by_name]
cols = point_attrs + measure_attrs
cp = cnf.ControlPointFileEntryV0002()
self._handle.seek(header_start_byte)
pbuf_header = cnf.ControlNetFileHeaderV0002()
pbuf_header.ParseFromString(self._handle.read(header_bytes))
self._handle.seek(point_start_byte)
cp = cnf.ControlPointFileEntryV0002()
pts = []
for s in pbuf_header.pointMessageSizes:
cp.ParseFromString(self._handle.read(s))
pt = [getattr(cp, i) for i in point_attrs if i != 'measures']
for measure in cp.measures:
meas = pt + [getattr(measure, j) for j in measure_attrs]
pts.append(meas)
df = IsisControlNetwork(pts, columns=cols)
df.header = pvl_header
return df
def write(self, data, offset=0):
"""
......@@ -358,14 +417,3 @@ class IsisStore(object):
])
return pvl.dumps(header, cls=encoder)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, traceback):
self.close()
def close(self):
if self._handle is not None:
self._handle.close()
self._handle = None
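A short read sketch for the new from_isis entry point, based on the example network used by the tests in this change:

from plio.examples import get_path
from plio.io import io_controlnetwork
from plio.utils.utils import find_in_dict

# One row per control measure; the PVL header rides along on df.header.
df = io_controlnetwork.from_isis(get_path('apollo_out.net'))
assert len(df) == find_in_dict(df.header, 'NumberOfMeasures')
assert len(df.groupby('id')) == find_in_dict(df.header, 'NumberOfPoints')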
......@@ -4,17 +4,22 @@ import os
import warnings
import affine
import gdal
import numpy as np
import osr
import pvl
try:
# Try the full GDAL stack
import gdal
from osgeo import ogr
import osr
gdal.UseExceptions()
has_gdal = True
except:
has_gdal = False
from plio.io import extract_metadata
from plio.geofuncs import geofuncs
from plio.utils.utils import find_in_dict
gdal.UseExceptions()
NP2GDAL_CONVERSION = {
"uint8": 1,
......@@ -153,6 +158,8 @@ class GeoDataset(object):
"""
self.file_name = file_name
if not has_gdal:
raise ImportError('No module named gdal.')
self.dataset = gdal.Open(file_name)
if self.dataset is None:
raise IOError('File not found :', file_name)
......@@ -535,7 +542,8 @@ def array_to_raster(array, file_name, projection=None,
A GDAL supported bittype, e.g. GDT_Int32
Default: GDT_Float64
"""
if not has_gdal:
raise ImportError('No module named gdal.')
driver = gdal.GetDriverByName(outformat)
try:
y, x, bands = array.shape
......@@ -611,6 +619,9 @@ def match_rasters(match_to, match_from, destination,
match_from__srs = match_from.dataset.GetProjection()
match_from__gt = match_from.geotransform
if not has_gdal:
raise ImportError('No module named gdal.')
dst = gdal.GetDriverByName('GTiff').Create(destination, width, height, 1,
gdalconst.GDT_Float64)
......
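Because GDAL is now optional, callers can probe plio.io.io_gdal.has_gdal before touching raster I/O; a minimal sketch, with a hypothetical file name:

from plio.io import io_gdal

if io_gdal.has_gdal:
    ds = io_gdal.GeoDataset('my_raster.tif')  # hypothetical path
else:
    # GeoDataset, array_to_raster, and match_rasters raise ImportError without GDAL.
    print('GDAL is unavailable; raster I/O is disabled.')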
import numpy as np
from osgeo import gdal
from .io_gdal import GeoDataset
def openm3(input_data):
def open(input_data):
if input_data.split('.')[-1] == 'hdr':
# GDAL wants the img, but many users aim at the .hdr
input_data = input_data.split('.')[0] + '.img'
ds = gdal.Open(input_data)
ref_array = ds.GetRasterBand(1).ReadAsArray()
metadata = ds.GetMetadata()
ds = GeoDataset(input_data)
ref_array = ds.read_array()
metadata = ds.metadata
wv_array = metadatatoband(metadata)
return wv_array, ref_array, ds
def metadatatoband(metadata):
wv2band = []
for k, v in metadata.items():
......
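A brief usage sketch for the refactored M3 opener above; the module path and file name are placeholders, since this diff does not show the file name:

from plio.io import io_m3  # placeholder module path for the opener shown above

# open() returns the wavelength array derived from the GeoDataset metadata,
# the reflectance array from the first band, and the GeoDataset itself.
wv_array, ref_array, ds = io_m3.open('m3_scene.img')  # placeholder file name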
import numpy as np
from osgeo import gdal
def openmi(input_data):
ds = gdal.Open(input_data)
band_pointers = []
nbands = ds.RasterCount
for b in xrange(1, nbands + 1):
band_pointers.append(ds.GetRasterBand(b))
ref_array = ds.GetRasterBand(1).ReadAsArray()
wv_array = None
return wv_array, ref_array[::3, ::3], ds
from .io_gdal import GeoDataset
def open(input_data):
ds = GeoDataset(input_data)
return ds
def getspectra(x, y, ds):
nbands = ds.RasterCount
......
import json
import numpy as np
import pandas as pd
from pandas.util.testing import assert_frame_equal
from plio.io.io_gpf import read_gpf
from plio.io.io_gpf import save_gpf
from plio.io.io_bae import socetset_keywords_to_json, read_gpf, save_gpf
from plio.examples import get_path
import pytest
......@@ -43,3 +43,14 @@ def test_write_gpf(gpf):
truth_arr = np.genfromtxt(gpf, skip_header=3)
test_arr = np.genfromtxt('out.gpf', skip_header=3)
np.testing.assert_array_almost_equal(truth_arr, test_arr)
def test_create_from_socet_lis():
socetlis = get_path('socet_isd.lis')
socetell = get_path('ellipsoid.ell')
js = json.loads(socetset_keywords_to_json(socetlis))
assert isinstance(js, dict) # This is essentially a JSON linter
# Manually validated
assert 'RECTIFICATION_TERMS' in js.keys()
assert 'SEMI_MAJOR_AXIS' in js.keys() # From ellipsoid file
assert 'NUMBER_OF_EPHEM' in js.keys()
assert len(js['EPHEM_PTS']) / 3 == js['NUMBER_OF_EPHEM']
\ No newline at end of file
......@@ -10,9 +10,23 @@ from plio.io import io_controlnetwork
from plio.io import ControlNetFileV0002_pb2 as cnf
from plio.utils.utils import find_in_dict
from plio.examples import get_path
import pytest
@pytest.fixture
def apollo_cnet():
return get_path('apollo_out.net')
sys.path.insert(0, os.path.abspath('..'))
def test_cnet_read(apollo_cnet):
df = io_controlnetwork.from_isis(apollo_cnet)
assert len(df) == find_in_dict(df.header, 'NumberOfMeasures')
assert isinstance(df, io_controlnetwork.IsisControlNetwork)
assert len(df.groupby('id')) == find_in_dict(df.header, 'NumberOfPoints')
class TestWriteIsisControlNetwork(unittest.TestCase):
@classmethod
......
......@@ -226,3 +226,12 @@ class TestWriter(unittest.TestCase):
os.remove('test.tif')
except:
pass
class TestWithoutGdal(unittest.TestCase):
def test_without_gdal(self):
io_gdal.has_gdal = False
with self.assertRaises(ImportError):
io_gdal.GeoDataset('foo')
def tearDown(self):
io_gdal.has_gdal = True
\ No newline at end of file