Commit 166ed66b authored by jay

Fixes for writing a valid CNet. Also THEMIS ISIS3 serial numbers.

parent 7967e2f8
import json
import warnings
import numpy as np

def intersection_to_pixels(inverse_affine, ul, ur, lr, ll):
@@ -27,6 +28,10 @@ def intersection_to_pixels(inverse_affine, ul, ur, lr, ll):
    miny = np.inf
    maxy = -np.inf

    if inverse_affine == None:
        warnings.warn('Inverse affine transformation not available.')
        return None

    for c in [ul, ur, lr, ll]:
        px, py = map(int, inverse_affine * (c[0], c[1]))
@@ -51,7 +56,6 @@ def compute_overlap(geodata_a, geodata_b):
    p1 = geodata_a.footprint
    p2 = geodata_b.footprint
    intersection = json.loads(p1.Intersection(p2).ExportToJson())['coordinates'][0]

    ul, ur, lr, ll = find_four_corners(intersection)
    a_intersection = intersection_to_pixels(geodata_a.inverse_affine, ul, ur, lr, ll)
@@ -120,7 +124,6 @@ def find_four_corners(coords, threshold=120):
    plio.geofuncs.geofuncs.find_corners
    """
    corners = find_corners(coords, threshold)
    corners.sort(key = lambda x:x[1])

    upper = corners[2:]
    lower = corners[:2]
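The new guard in intersection_to_pixels returns None when no inverse affine is available; otherwise the transform maps corner coordinates from world space into pixel space. A standalone sketch of that mapping with the affine package; the geotransform values are invented, not taken from this commit.

# Standalone sketch of the corner-to-pixel mapping above; the geotransform is invented.
from affine import Affine

forward = Affine(100.0, 0.0, 500000.0,    # 100 m pixels, upper-left at (500000, 4600000)
                 0.0, -100.0, 4600000.0)
inverse = ~forward                        # analogous to GeoDataset.inverse_affine in this commit

# Map a world coordinate to (column, row), as intersection_to_pixels does per corner.
px, py = map(int, inverse * (500250.0, 4599650.0))
print(px, py)                             # -> 2 3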
@@ -9,7 +9,6 @@ VERSION = 2
HEADERSTARTBYTE = 65536
DEFAULTUSERNAME = 'None'

def write_filelist(lst, path="fromlist.lis"):
    """
    Writes a filelist to a file so it can be used in ISIS3.
@@ -86,7 +85,6 @@ def to_isis(path, obj, serials, mode='wb', version=VERSION,
        Suffix to be added to the point id. If the suffix is '_bar', pointids
        will be in the form '1_bar, 2_bar, ..., n_bar'.
    """
    with IsisStore(path, mode) as store:
        if not creation_date:
            creation_date = strftime("%Y-%m-%d %H:%M:%S", gmtime())
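For orientation, a rough usage sketch of the writer exercised by this diff. The DataFrame column names (point_id, image_index, x, y) come from the IsisStore loop shown below; the module path and the serial strings are assumptions, not confirmed by the commit.

# Hedged usage sketch; column names come from the groupby/iterrows code in this
# commit, while the import path and serial strings are placeholders.
import pandas as pd
from plio.io import io_controlnetwork  # assumed module location

measures = pd.DataFrame({'point_id': [0, 0, 1, 1],
                         'image_index': [0, 1, 0, 1],
                         'x': [10.5, 11.2, 200.0, 201.3],
                         'y': [20.1, 19.8, 305.5, 306.0]})
serials = ['PLACEHOLDER_SERIAL_A', 'PLACEHOLDER_SERIAL_B']  # indexed by image_index

# Two points with two measures each; the store tallies npoints and nmeasures
# as it serializes and writes them into the header.
io_controlnetwork.to_isis('out.net', measures, serials)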
@@ -128,8 +126,8 @@ class IsisStore(object):
    """
    def __init__(self, path, mode=None, **kwargs):
        self.pointid = 0
        self.nmeasures = 0
        self.npoints = 0

        # Conversion from buffer types to Python types
        bt = {1: float,
@@ -196,9 +194,15 @@
        point_sizes = []
        point_messages = []
        for i, g in df.groupby('point_id'):
            # Get the point specification from the protobuf
            point_spec = cnf.ControlPointFileEntryV0002()
            point_spec.id = _set_pid(i)

            # Set the ID and then loop over all of the attributes that the
            # point has and check for corresponding columns in the group and
            # set with the correct type
            #point_spec.id = _set_pid(i)
            point_spec.id = _set_pid(i)
            for attr, attrtype in self.point_attrs:
                if attr in g.columns:
                    # As per protobuf docs for assigning to a repeated field.
@@ -217,23 +221,25 @@
            for node_id, m in g.iterrows():
                measure_spec = point_spec.Measure()
                measure_spec.serialnumber = serials[m.image_index]

                # For all of the attributes, set them if they are a dict-accessible attr of the obj.
                for attr, attrtype in self.measure_attrs:
                    if attr in g.columns:
                        setattr(measure_spec, attr, attrtype(m[attr]))
                measure_spec.serialnumber = serials[m.image_index]
                measure_spec.sample = m.x
                measure_spec.line = m.y
                measure_spec.type = 2
                measure_iterable.append(measure_spec)
                self.nmeasures += 1
            point_spec.measures.extend(measure_iterable)
            self.npoints += 1
            point_spec.measures.extend(measure_iterable)

            point_message = point_spec.SerializeToString()
            point_sizes.append(point_spec.ByteSize())
            point_messages.append(point_message)
            self.pointid += 1
        return point_messages, point_sizes
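The extend() call above follows the Python protobuf rule that repeated message fields cannot be assigned to directly. A self-contained illustration of that rule, using protobuf's bundled descriptor messages rather than the ISIS control network schema:

# Minimal illustration of the repeated-field pattern noted in the comment above;
# descriptor_pb2 is used only because it ships with protobuf and has a repeated field.
from google.protobuf import descriptor_pb2

fd = descriptor_pb2.FileDescriptorProto()
msg = descriptor_pb2.DescriptorProto(name='Example')

# fd.message_type = [msg]      # raises AttributeError: assignment not allowed to a repeated field
fd.message_type.extend([msg])  # same pattern as point_spec.measures.extend(measure_iterable)
print(len(fd.message_type))    # -> 1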
    def create_buffer_header(self, networkid, targetname,
@@ -342,7 +348,7 @@
            ('Created', creation_date),
            ('LastModified', modified_date),
            ('Description', description),
            ('NumberOfPoints', self.pointid),
            ('NumberOfPoints', self.npoints),
            ('NumberOfMeasures', self.nmeasures),
            ('Version', version)
        ])
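This header change is the heart of the "writing a valid CNet" fix: NumberOfPoints now comes from a counter incremented once per point and NumberOfMeasures from one incremented once per measure, rather than reusing the running point id. A simplified, pandas-only sketch of the counting logic with an invented DataFrame:

# Simplified sketch of the per-point / per-measure counters; only the
# groupby('point_id') structure mirrors the writer above, nothing else is plio code.
import pandas as pd

df = pd.DataFrame({'point_id': [0, 0, 1, 1, 1],
                   'image_index': [0, 1, 0, 1, 2]})

npoints = 0
nmeasures = 0
for point_id, group in df.groupby('point_id'):
    nmeasures += len(group)   # one measure per row in the group
    npoints += 1              # one point per group

print(npoints, nmeasures)     # -> 2 5, the values written as NumberOfPoints / NumberOfMeasures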
@@ -200,6 +200,9 @@ class GeoDataset(object):
    @property
    def inverse_affine(self):
        # If det(A) == 0, the transformation is degenerate
        if self.forward_affine.is_degenerate:
            return None
        self._ia = ~self.forward_affine
        return self._ia
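The guard above leans on the affine package's degeneracy check before inverting with ~. A quick standalone demonstration with an invented, collapsed transform:

# Standalone demo of the degeneracy check used by inverse_affine; the transform is invented.
from affine import Affine, TransformNotInvertibleError

collapsed = Affine(0.0, 0.0, 500000.0,   # zero scale terms, so det(A) == 0
                   0.0, 0.0, 4600000.0)
print(collapsed.is_degenerate)           # -> True

try:
    _ = ~collapsed                       # inverting a degenerate transform raises
except TransformNotInvertibleError:
    print('cannot invert; return None instead')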
@@ -475,10 +478,9 @@
        # Check that the read start is not outside of the image
        xstart, ystart, xcount, ycount = pixels
        xmax, ymax = map(int, self.xy_extent[1])

        # If the image is south up, flip the roi
        if self.north_up == False:
            ystart = ymax - ystart - ycount
            ystart = ymax - (ystart + ycount)

        if xstart < 0:
            xstart = 0
@@ -488,10 +490,11 @@
        if xstart + xcount > xmax:
            xcount = xmax - xstart
        if ystart + ycount > ymax:
        if ystart + ycount > ymax:
            ycount = ymax - ystart

        array = band.ReadAsArray(xstart, ystart, xcount, ycount).astype(dtype)
        #if self.north_up == False:
        #    array = np.flipud(array)
        return array

    def compute_overlap(self, geodata, **kwargs):
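The south-up fix above is easy to sanity-check with concrete (invented) numbers: mirroring a read window about the image height moves its start line to ymax - (ystart + ycount) while keeping its size.

# Worked example (invented numbers) of the flipped read window above.
def flip_window_south_up(ystart, ycount, ymax):
    # Mirror the window about the image height, keeping its size.
    return ymax - (ystart + ycount)

ymax = 100                        # image height in lines
ystart, ycount = 10, 20

flipped = flip_window_south_up(ystart, ycount, ymax)
print(flipped)                    # -> 70
print(flipped + ycount <= ymax)   # -> True, the flipped window still fits in the image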
@@ -38,7 +38,6 @@ def get_isis_translation(label):
    spacecraft_name = find_in_dict(label, 'SpacecraftName')
    for row in plio.data_session.query(StringToMission).filter(StringToMission.key==spacecraft_name):
        spacecraft_name = row.value.lower()

    # Try and pull an instrument identifier
    try:
        instrumentid = find_in_dict(label, 'InstrumentId').capitalize()
@@ -51,7 +50,6 @@
                                                 Translations.instrument==instrumentid):
        # Convert the JSON back to a PVL object
        translation = PVLModule(row.translation)

    return translation
@@ -75,6 +73,7 @@ def generate_serial_number(label):
    label = pvl.load(label, cls=SerialNumberDecoder)
    # Get the translation information
    translation = get_isis_translation(label)

    if not translation:
        warnings.warn('Unable to load an appropriate image translation.')
        return
@@ -91,11 +90,11 @@
            search_translation = {group['Translation'][1]:group['Translation'][0]}
            sub_group = find_nested_in_dict(label, search_position)
            serial_entry = sub_group[search_key]

            if serial_entry in search_translation.keys():
                serial_entry = search_translation[serial_entry]
            elif '*' in search_translation.keys() and search_translation['*'] != '*':
                serial_entry = search_translation['*']

            serial_number.append(serial_entry)
        except:
            pass
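The wildcard handling above works as follows: a '*' key maps any label value to one fixed output, unless that output is itself '*', in which case the label value passes through unchanged. A toy version with invented table values, not a real ISIS translation:

# Toy version of the serial-number translation lookup; the table values are invented.
def translate_entry(serial_entry, search_translation):
    if serial_entry in search_translation:
        return search_translation[serial_entry]
    elif '*' in search_translation and search_translation['*'] != '*':
        # Wildcard: every label value maps to one fixed output.
        return search_translation['*']
    return serial_entry                                               # passthrough

print(translate_entry('THEMIS_IR', {'THEMIS_IR': 'THEMIS_IR'}))       # -> 'THEMIS_IR'
print(translate_entry('2002-02-20T12:00:00', {'*': '*'}))             # unchanged, passthrough wildcard
print(translate_entry('ODYSSEY', {'*': 'ODY'}))                       # -> 'ODY'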