Commit 2d0053b0, authored 7 years ago by Tyler Thatcher, committed 7 years ago by GitHub
Merge pull request #2 from acpaquette/master
Final working notebook.
Parents: 10997c29 2777c524
Showing 1 changed file: notebooks/Socet2ISIS.ipynb, with 9 additions and 8 deletions.
@@ -2,7 +2,7 @@
  "cells": [
   {
    "cell_type": "code",
-   "execution_count": 1,
+   "execution_count": 4,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -16,7 +16,7 @@
     "import math\n",
     "import pyproj\n",
     "\n",
-    "sys.path.insert(0, \"/home/tthatcher/Desktop/Projects/Plio/plio\")\n",
+    "# sys.path.insert(0, \"/home/tthatcher/Desktop/Projects/Plio/plio\")\n",
     "\n",
     "from plio.examples import get_path\n",
     "from plio.io.io_bae import read_gpf, read_ipf\n",
@@ -26,7 +26,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 2,
+   "execution_count": 8,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -212,7 +212,7 @@
     "    apply_transformations(atf_dict, socet_df)\n",
     "    \n",
     "    # Define column remap for socet dataframe\n",
-    "    column_remap = {'l.': 'x', 's.': 'y',\n",
+    "    column_remap = {'l.': 'y', 's.': 'x',\n",
     "                    'res_l': 'LineResidual', 'res_s': 'SampleResidual', 'known': 'Type',\n",
     "                    'lat_Y_North': 'AprioriY', 'long_X_East': 'AprioriX', 'ht': 'AprioriZ',\n",
     "                    'sig0': 'AprioriLatitudeSigma', 'sig1': 'AprioriLongitudeSigma', 'sig2': 'AprioriRadiusSigma'}\n",
@@ -228,13 +228,15 @@
     "    serial_dict = dict()\n",
     "    \n",
     "    for image in images:\n",
-    "        serial_dict[image] = sn.generate_serial_number(os.path.join(path, image + extension))\n",
+    "        snum = sn.generate_serial_number(os.path.join(path, image + extension))\n",
+    "        snum = snum.replace('Mars_Reconnaissance_Orbiter', 'MRO')\n",
+    "        serial_dict[image] = snum\n",
     "    return serial_dict"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 3,
+   "execution_count": 10,
    "metadata": {
     "scrolled": false
    },
@@ -255,7 +257,6 @@
     "# Setup stuffs for the cub information namely the path and extension\n",
     "path = '/Volumes/Blueman/'\n",
     "extension = '.lev1.cub'\n",
-    "\n",
     "prj_file = get_path('CTX_Athabasca_Middle_step0.atf')\n",
     "\n",
     "socet_df = socet2isis(prj_file)\n",
@@ -265,7 +266,7 @@
     "serial_dict = serial_numbers(images, path, extension)\n",
     "\n",
     "# creates the control network\n",
-    "cnet = cn.to_isis('/Volumes/Blueman/cn.csv', socet_df, serial_dict)"
+    "cn.to_isis('/Volumes/Blueman/cn.net', socet_df, serial_dict)"
    ]
   },
   {
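The substantive fix in this diff is the column remap inside `socet2isis`: SOCET's `l.` (line) column now maps to ISIS `y` and `s.` (sample) to `x`, where the previous mapping had the two reversed. A minimal sketch of what the remap does, using a hypothetical one-row dataframe (values are illustrative only, not from the notebook):

```python
import pandas as pd

# Hypothetical SOCET-style measurements: 'l.' is line, 's.' is sample.
df = pd.DataFrame({'l.': [512.0], 's.': [1024.0]})

# Remap from this commit: line -> y, sample -> x
# (the old, incorrect mapping was 'l.' -> 'x', 's.' -> 'y').
print(df.rename(columns={'l.': 'y', 's.': 'x'}))
```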
%% Cell type:code id: tags:
```python
import os
import sys
from functools import singledispatch
import warnings

import pandas as pd
import numpy as np
import math
import pyproj

# sys.path.insert(0, "/home/tthatcher/Desktop/Projects/Plio/plio")

from plio.examples import get_path
from plio.io.io_bae import read_gpf, read_ipf
import plio.io.io_controlnetwork as cn
import plio.io.isis_serial_number as sn
```
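%% Cell type:markdown id: tags:

The `sys.path.insert` hack is now commented out, which implies the notebook expects an installed plio rather than the author's local checkout. As a quick sanity check (a minimal sketch; this cell is not part of the committed notebook), you can confirm which copy of plio Python actually imported:

%% Cell type:code id: tags:

```python
import plio

# Prints the file backing the imported package: a site-packages path for an
# installed plio, or the checkout directory if sys.path still points there.
print(plio.__file__)
```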
%% Cell type:code id: tags:
```python
# Reads a .atf file and outputs all of the
# .ipf, .gpf, .sup, .prj, and path to locate the
# .apf file (should be the same as all others)
def read_atf(atf_file):
    with open(atf_file) as f:
        files = []
        ipf = []
        sup = []
        files_dict = []

        # Grabs every PRJ, GPF, SUP, and IPF image from the ATF file
        for line in f:
            if line[-4:-1] == 'prj' or line[-4:-1] == 'gpf' or line[-4:-1] == 'sup' or line[-4:-1] == 'ipf' or line[-4:-1] == 'atf':
                files.append(line)

        files = np.array(files)

        # Creates appropriate arrays for certain files in the right format
        for file in files:
            file = file.strip()
            file = file.split(' ')

            # Grabs all the IPF files
            if file[1].endswith('.ipf'):
                ipf.append(file[1])

            # Grabs all the SUP files
            if file[1].endswith('.sup'):
                sup.append(file[1])

            files_dict.append(file)

        # Creates a dict out of file lists for GPF, PRJ, IPF, and ATF
        files_dict = (dict(files_dict))

        # Sets the value of IMAGE_IPF to all IPF images
        files_dict['IMAGE_IPF'] = ipf

        # Sets the value of IMAGE_SUP to all SUP images
        files_dict['IMAGE_SUP'] = sup

        # Sets the value of PATH to the path of the ATF file
        files_dict['PATH'] = os.path.dirname(os.path.abspath(atf_file))

        return files_dict

# converts columns l. and s. to isis
def line_sample_size(record, path):
    with open(os.path.join(path, record['ipf_file'] + '.sup')) as f:
        for i, line in enumerate(f):
            if i == 2:
                img_index = line.split('\\')
                img_index = img_index[-1].strip()
                img_index = img_index.split('.')[0]

            if i == 3:
                line_size = line.split(' ')
                line_size = line_size[-1].strip()
                assert int(line_size) > 0, "Line number {} from {} is a negative number: Invalid Data".format(line_size, record['ipf_file'])

            if i == 4:
                sample_size = line.split(' ')
                sample_size = sample_size[-1].strip()
                assert int(sample_size) > 0, "Sample number {} from {} is a negative number: Invalid Data".format(sample_size, record['ipf_file'])
                break

        line_size = int(line_size) / 2.0 + record['l.'] + 1
        sample_size = int(sample_size) / 2.0 + record['s.'] + 1
        return sample_size, line_size, img_index

# converts known to ISIS keywords
def known(record):
    if record['known'] == 0:
        return 'Free'
    elif record['known'] == 1 or record['known'] == 2 or record['known'] == 3:
        return 'Constrained'

# converts +/- 180 system to 0 - 360 system
def to_360(num):
    return num % 360

# ocentric to ographic latitudes
def oc2og(dlat, dMajorRadius, dMinorRadius):
    try:
        dlat = math.radians(dlat)
        dlat = math.atan(((dMajorRadius / dMinorRadius)**2) * (math.tan(dlat)))
        dlat = math.degrees(dlat)
    except:
        print("Error in oc2og conversion")
    return dlat

# ographic to ocentric latitudes
def og2oc(dlat, dMajorRadius, dMinorRadius):
    try:
        dlat = math.radians(dlat)
        dlat = math.atan((math.tan(dlat) / ((dMajorRadius / dMinorRadius)**2)))
        dlat = math.degrees(dlat)
    except:
        print("Error in og2oc conversion")
    return dlat

# gets eRadius and pRadius from a .prj file
def get_axis(file):
    with open(file) as f:
        from collections import defaultdict
        files = defaultdict(list)

        for line in f:
            ext = line.strip().split(' ')
            files[ext[0]].append(ext[-1])

        eRadius = float(files['A_EARTH'][0])
        pRadius = eRadius * (1 - float(files['E_EARTH'][0]))

        return eRadius, pRadius

# function to convert lat_Y_North to ISIS_lat
def lat_ISIS_coord(record, semi_major, semi_minor):
    ocentric_coord = og2oc(record['lat_Y_North'], semi_major, semi_minor)
    coord_360 = to_360(ocentric_coord)
    return coord_360

# function to convert long_X_East to ISIS_lon
def lon_ISIS_coord(record, semi_major, semi_minor):
    ocentric_coord = og2oc(record['long_X_East'], semi_major, semi_minor)
    coord_360 = to_360(ocentric_coord)
    return coord_360

def body_fix(record, semi_major, semi_minor):
    ecef = pyproj.Proj(proj='geocent', a=semi_major, b=semi_minor)
    lla = pyproj.Proj(proj='latlon', a=semi_major, b=semi_minor)
    lon, lat, height = pyproj.transform(lla, ecef, record['long_X_East'], record['lat_Y_North'], record['ht'])
    return lon, lat, height

# applies transformations to columns
def apply_transformations(atf_dict, df):
    prj_file = os.path.join(atf_dict['PATH'], atf_dict['PROJECT'].split('\\')[-1])

    eRadius, pRadius = get_axis(prj_file)

    df['s.'], df['l.'], df['image_index'] = (zip(*df.apply(line_sample_size, path=atf_dict['PATH'], axis=1)))
    df['known'] = df.apply(known, axis=1)
    df['lat_Y_North'] = df.apply(lat_ISIS_coord, semi_major=eRadius, semi_minor=pRadius, axis=1)
    df['long_X_East'] = df.apply(lon_ISIS_coord, semi_major=eRadius, semi_minor=pRadius, axis=1)
    df['long_X_East'], df['lat_Y_North'], df['ht'] = zip(*df.apply(body_fix, semi_major=eRadius, semi_minor=pRadius, axis=1))

def socet2isis(prj_file):
    # Read in and setup the atf dict of information
    atf_dict = read_atf(prj_file)

    # Get the gpf and ipf files using atf dict
    gpf_file = os.path.join(atf_dict['PATH'], atf_dict['GP_FILE'])
    ipf_list = [os.path.join(atf_dict['PATH'], i) for i in atf_dict['IMAGE_IPF']]

    # Read in the gpf file and ipf file(s) into separate dataframes
    gpf_df = read_gpf(gpf_file)
    ipf_df = read_ipf(ipf_list)

    # Check for differences between point ids, using each dataframe's
    # point ids as a reference
    gpf_pt_idx = pd.Index(pd.unique(gpf_df['point_id']))
    ipf_pt_idx = pd.Index(pd.unique(ipf_df['pt_id']))

    point_diff = ipf_pt_idx.difference(gpf_pt_idx)

    if len(point_diff) != 0:
        warnings.warn("The following points found in ipf files missing from gpf file: \n\n{}. \n\nContinuing, but these points will be missing from the control network".format(list(point_diff)))

    # Merge the two dataframes on their point id columns
    socet_df = ipf_df.merge(gpf_df, left_on='pt_id', right_on='point_id')

    # Apply the transformations
    apply_transformations(atf_dict, socet_df)

    # Define column remap for socet dataframe
    column_remap = {'l.': 'y', 's.': 'x',
                    'res_l': 'LineResidual', 'res_s': 'SampleResidual', 'known': 'Type',
                    'lat_Y_North': 'AprioriY', 'long_X_East': 'AprioriX', 'ht': 'AprioriZ',
                    'sig0': 'AprioriLatitudeSigma', 'sig1': 'AprioriLongitudeSigma', 'sig2': 'AprioriRadiusSigma'}

    # Rename the columns using the column remap above
    socet_df.rename(columns=column_remap, inplace=True)

    # Return the socet dataframe to be converted to a control net
    return socet_df

# creates a dict of serial numbers with the cub being the key
def serial_numbers(images, path, extension):
    serial_dict = dict()

    for image in images:
        snum = sn.generate_serial_number(os.path.join(path, image + extension))
        snum = snum.replace('Mars_Reconnaissance_Orbiter', 'MRO')
        serial_dict[image] = snum
    return serial_dict
```
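%% Cell type:markdown id: tags:

`oc2og` and `og2oc` above implement the standard planetocentric/planetographic latitude conversion, scaling the tangent of the latitude by `(a/b)**2`. A small round-trip check makes the relationship concrete; this is a sketch using approximate Mars radii picked for illustration, not the values `get_axis` reads from the project's .prj file:

%% Cell type:code id: tags:

```python
import math

# Approximate Mars equatorial/polar radii in meters (illustrative only).
a, b = 3396190.0, 3376200.0

lat_oc = 9.0  # planetocentric latitude in degrees, roughly the 09N band of these images
# Same formulas as oc2og/og2oc above, inlined:
lat_og = math.degrees(math.atan((a / b) ** 2 * math.tan(math.radians(lat_oc))))
back = math.degrees(math.atan(math.tan(math.radians(lat_og)) / (a / b) ** 2))

print(lat_og)  # ~9.105, slightly poleward of the ocentric value
print(back)    # recovers 9.0 up to floating point
```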
%% Cell type:code id: tags:
```python
# Set up the cub information, namely the path and extension
path = '/Volumes/Blueman/'
extension = '.lev1.cub'

prj_file = get_path('CTX_Athabasca_Middle_step0.atf')

socet_df = socet2isis(prj_file)

images = pd.unique(socet_df['ipf_file'])

serial_dict = serial_numbers(images, path, extension)

# creates the control network
cn.to_isis('/Volumes/Blueman/cn.net', socet_df, serial_dict)
```
%% Output
/Users/adampaquette/anaconda/envs/pysat/lib/python3.6/site-packages/ipykernel_launcher.py:173: UserWarning: The following points found in ipf files missing from gpf file:
['P03_002226_1895_XI_09N203W_15', 'P03_002226_1895_XI_09N203W_16', 'P03_002226_1895_XI_09N203W_17', 'P03_002226_1895_XI_09N203W_18', 'P03_002226_1895_XI_09N203W_19', 'P03_002226_1895_XI_09N203W_20', 'P03_002226_1895_XI_09N203W_21', 'P03_002226_1895_XI_09N203W_22', 'P03_002226_1895_XI_09N203W_24', 'P03_002226_1895_XI_09N203W_26', 'P03_002226_1895_XI_09N203W_30', 'P03_002226_1895_XI_09N203W_31', 'P03_002226_1895_XI_09N203W_32', 'P03_002226_1895_XI_09N203W_34', 'P03_002226_1895_XI_09N203W_36', 'P03_002226_1895_XI_09N203W_37', 'P03_002226_1895_XI_09N203W_44', 'P03_002226_1895_XI_09N203W_48', 'P03_002226_1895_XI_09N203W_49', 'P03_002226_1895_XI_09N203W_56', 'P03_002226_1895_XI_09N203W_57', 'P03_002226_1895_XI_09N203W_61', 'P03_002226_1895_XI_09N203W_62', 'P03_002226_1895_XI_09N203W_63', 'P03_002226_1895_XI_09N203W_65', 'P19_008344_1894_XN_09N203W_4', 'P20_008845_1894_XN_09N203W_15'].
Continuing, but these points will be missing from the control network
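%% Cell type:markdown id: tags:

The warning above is raised by the `point_diff` check inside `socet2isis`: points that appear in the .ipf files but not in the .gpf file are dropped by the inner merge. The check is easy to reproduce outside the function if you want the list yourself; a minimal sketch, assuming `gpf_df` and `ipf_df` have been read with `read_gpf` and `read_ipf` as in the cells above:

%% Cell type:code id: tags:

```python
import pandas as pd

# Same mismatch check socet2isis performs before merging.
gpf_pt_idx = pd.Index(pd.unique(gpf_df['point_id']))
ipf_pt_idx = pd.Index(pd.unique(ipf_df['pt_id']))

missing = ipf_pt_idx.difference(gpf_pt_idx)
print(len(missing), "points will be absent from the control network")
```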
%% Cell type:code id: tags:
```python
```