Commit 7ddb1eac authored by Tyler Thatcher, committed by GitHub

Merge pull request #43 from acpaquette/ipf

Transformations and bin script
parents 0a14df41 1683c7bd
import os
import numpy as np
# Reads a .atf file and outputs all of the
# .ipf, .gpf, .sup, and .prj files, plus the path to the
# .atf file (which should be the same for all of them)
def read_atf(atf_file):
with open(atf_file) as f:
files = []
ipf = []
sup = []
files_dict = []
# Grab every PRJ, GPF, SUP, IPF, and ATF filename referenced in the ATF file
for line in f:
if line[-4:-1] in ('prj', 'gpf', 'sup', 'ipf', 'atf'):
files.append(line)
files = np.array(files)
# Creates appropriate arrays for certain files in the right format
for file in files:
file = file.strip()
file = file.split(' ')
# Grabs all the IPF files
if file[1].endswith('.ipf'):
ipf.append(file[1])
# Grabs all the SUP files
if file[1].endswith('.sup'):
sup.append(file[1])
files_dict.append(file)
# Creates a dict out of file lists for GPF, PRJ, IPF, and ATF
files_dict = dict(files_dict)
# Sets the value of IMAGE_IPF to all IPF images
files_dict['IMAGE_IPF'] = ipf
# Sets the value of IMAGE_SUP to all SUP images
files_dict['IMAGE_SUP'] = sup
# Sets the value of PATH to the path of the ATF file
files_dict['PATH'] = os.path.dirname(os.path.abspath(atf_file))
return files_dict
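For orientation, here is a minimal sketch of how the dictionary returned by read_atf is consumed later in this change; the .atf path is a hypothetical placeholder, and GP_FILE, IMAGE_IPF, and PATH are the keys the rest of the code relies on.

``` python
import os
from plio.io.io_bae import read_atf

# Hypothetical project file; substitute a real SOCET Set .atf path.
atf_dict = read_atf('/data/CTX_Athabasca_Middle.atf')

# Keys used later in this change: the ground point file, every ipf file,
# and the directory the .atf file lives in.
gpf_file = os.path.join(atf_dict['PATH'], atf_dict['GP_FILE'])
ipf_files = [os.path.join(atf_dict['PATH'], i) for i in atf_dict['IMAGE_IPF']]
```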
#!/usr/bin/env python
import argparse
import os
import sys
import warnings
import csv
import numpy as np
from plio.examples import get_path
from plio.io.io_bae import read_atf, read_gpf, read_ipf
from plio.spatial.transformations import *
import plio.io.io_controlnetwork as cn
import pandas as pd
def parse_args():
parser = argparse.ArgumentParser()
# Add args here
parser.add_argument('at_file', help='Path to the .atf file for a project.')
parser.add_argument('cub_file_path', help='Path to cube files related to ipf files.')
parser.add_argument('cub_ipf_map', help='Path to map file for all ipfs and cubes.')
parser.add_argument('--outpath', help='Directory for the control network to be output to.',
required = False)
return parser.parse_args()
def main(args):
print('Getting some work done')
# Setup the at_file, path to cubes, and control network out path
at_file = args.at_file
cnet_out = os.path.split(os.path.splitext(at_file)[0])[1]
cub_path = args.cub_file_path
if(args.outpath):
outpath = args.outpath
else:
outpath = os.path.split(at_file)[0]
with open(args.cub_ipf_map) as cub_ipf_map:
reader = csv.reader(cub_ipf_map, delimiter = ',')
image_dict = dict([(row[0], row[1]) for row in reader])
# Read in and setup the atf dict of information
atf_dict = read_atf(at_file)
# Get the gpf and ipf files using atf dict
gpf_file = os.path.join(atf_dict['PATH'], atf_dict['GP_FILE'])
ipf_list = [os.path.join(atf_dict['PATH'], i) for i in atf_dict['IMAGE_IPF']]
# Read in the gpf file and ipf file(s) into separate dataframes
gpf_df = read_gpf(gpf_file)
ipf_df = read_ipf(ipf_list)
# Check for point ids that appear in the ipf files but are
# missing from the gpf file
gpf_pt_idx = pd.Index(pd.unique(gpf_df['point_id']))
ipf_pt_idx = pd.Index(pd.unique(ipf_df['pt_id']))
point_diff = ipf_pt_idx.difference(gpf_pt_idx)
if len(point_diff) != 0:
warnings.warn("The following points found in the ipf files are missing from the gpf file: " +
"\n\n{}\n\n".format("\n".join(point_diff)) +
"Continuing, but these points will be missing from the control " +
"network.", stacklevel=3)
# Merge the two dataframes on their point id columns
socet_df = ipf_df.merge(gpf_df, left_on='pt_id', right_on='point_id')
# Apply the transformations
apply_transformations(atf_dict, socet_df)
# Define column remap for socet dataframe
column_remap = {'l.': 'y', 's.': 'x',
'res_l': 'LineResidual', 'res_s': 'SampleResidual', 'known': 'Type',
'lat_Y_North': 'AprioriY', 'long_X_East': 'AprioriX', 'ht': 'AprioriZ',
'sig0': 'AprioriLatitudeSigma', 'sig1': 'AprioriLongitudeSigma',
'sig2': 'AprioriRadiusSigma'}
# Rename the columns using the column remap above
socet_df.rename(columns = column_remap, inplace=True)
images = pd.unique(socet_df['ipf_file'])
serial_dict = serial_numbers(image_dict, cub_path)
# creates the control network
cn.to_isis(os.path.join(outpath, cnet_out + '.net'), socet_df, serial_dict)
if __name__ == '__main__':
main(parse_args())
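For reference, a minimal sketch of driving the script's main() with an argparse.Namespace instead of the command line, assuming the definitions above are in scope; every path below is a hypothetical placeholder.

``` python
import argparse

# Hypothetical inputs; replace with a real .atf file, cub directory, and map file.
args = argparse.Namespace(at_file='/data/CTX_Athabasca_Middle.atf',
                          cub_file_path='/data/cubs/',
                          cub_ipf_map='/data/cub_ipf_map.csv',
                          outpath='/data/out/')
main(args)
```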
%% Cell type:code id: tags:
``` python
import os
import sys
from functools import singledispatch
import warnings
import pandas as pd
import numpy as np
import math
import pyproj
# sys.path.insert(0, "/home/tthatcher/Desktop/Projects/Plio/plio")
from plio.examples import get_path
from plio.io.io_bae import read_gpf, read_ipf
import plio.io.io_controlnetwork as cn
import plio.io.isis_serial_number as sn
```
%% Cell type:code id: tags:
``` python
# Reads a .atf file and outputs all of the
# .ipf, .gpf, .sup, and .prj files, plus the path to the
# .atf file (which should be the same for all of them)
def read_atf(atf_file):
with open(atf_file) as f:
files = []
ipf = []
sup = []
files_dict = []
# Grab every PRJ, GPF, SUP, IPF, and ATF filename referenced in the ATF file
for line in f:
if line[-4:-1] in ('prj', 'gpf', 'sup', 'ipf', 'atf'):
files.append(line)
files = np.array(files)
# Creates appropriate arrays for certain files in the right format
for file in files:
file = file.strip()
file = file.split(' ')
# Grabs all the IPF files
if file[1].endswith('.ipf'):
ipf.append(file[1])
# Grabs all the SUP files
if file[1].endswith('.sup'):
sup.append(file[1])
files_dict.append(file)
# Creates a dict out of file lists for GPF, PRJ, IPF, and ATF
files_dict = dict(files_dict)
# Sets the value of IMAGE_IPF to all IPF images
files_dict['IMAGE_IPF'] = ipf
# Sets the value of IMAGE_SUP to all SUP images
files_dict['IMAGE_SUP'] = sup
# Sets the value of PATH to the path of the ATF file
files_dict['PATH'] = os.path.dirname(os.path.abspath(atf_file))
return files_dict
# converts columns l. and s. to ISIS line and sample coordinates
def line_sample_size(record, path):
with open(os.path.join(path, record['ipf_file'] + '.sup')) as f:
for i, line in enumerate(f):
if i == 2:
img_index = line.split('\\')
img_index = img_index[-1].strip()
img_index = img_index.split('.')[0]
if i == 3:
line_size = line.split(' ')
line_size = line_size[-1].strip()
assert int(line_size) > 0, "Line number {} from {} is a negative number: Invalid Data".format(line_size, record['ipf_file'])
if i == 4:
sample_size = line.split(' ')
sample_size = sample_size[-1].strip()
assert int(sample_size) > 0, "Sample number {} from {} is a negative number: Invalid Data".format(sample_size, record['ipf_file'])
break
line_size = int(line_size)/2.0 + record['l.'] + 1
sample_size = int(sample_size)/2.0 + record['s.'] + 1
return sample_size, line_size, img_index
# converts known to ISIS keywords
def known(record):
if record['known'] == 0:
return 'Free'
elif record['known'] in (1, 2, 3):
return 'Constrained'
# converts +/- 180 system to 0 - 360 system
def to_360(num):
return num % 360
# ocentric to ographic latitudes
def oc2og(dlat, dMajorRadius, dMinorRadius):
try:
dlat = math.radians(dlat)
dlat = math.atan(((dMajorRadius / dMinorRadius)**2) * (math.tan(dlat)))
dlat = math.degrees(dlat)
except:
print ("Error in oc2og conversion")
return dlat
# ographic to ocentric latitudes
def og2oc(dlat, dMajorRadius, dMinorRadius):
try:
dlat = math.radians(dlat)
dlat = math.atan((math.tan(dlat) / ((dMajorRadius / dMinorRadius)**2)))
dlat = math.degrees(dlat)
except:
print ("Error in og2oc conversion")
return dlat
# gets eRadius and pRadius from a .prj file
def get_axis(file):
with open(file) as f:
from collections import defaultdict
files = defaultdict(list)
for line in f:
ext = line.strip().split(' ')
files[ext[0]].append(ext[-1])
eRadius = float(files['A_EARTH'][0])
pRadius = eRadius * (1 - float(files['E_EARTH'][0]))
return eRadius, pRadius
# function to convert lat_Y_North to ISIS_lat
def lat_ISIS_coord(record, semi_major, semi_minor):
ocentric_coord = og2oc(record['lat_Y_North'], semi_major, semi_minor)
coord_360 = to_360(ocentric_coord)
return coord_360
# function to convert long_X_East to ISIS_lon
def lon_ISIS_coord(record, semi_major, semi_minor):
ocentric_coord = og2oc(record['long_X_East'], semi_major, semi_minor)
coord_360 = to_360(ocentric_coord)
return coord_360
def body_fix(record, semi_major, semi_minor):
ecef = pyproj.Proj(proj='geocent', a=semi_major, b=semi_minor)
lla = pyproj.Proj(proj='latlon', a=semi_major, b=semi_minor)
lon, lat, height = pyproj.transform(lla, ecef, record['long_X_East'], record['lat_Y_North'], record['ht'])
return lon, lat, height
# applies transformations to columns
def apply_transformations(atf_dict, df):
prj_file = os.path.join(atf_dict['PATH'], atf_dict['PROJECT'].split('\\')[-1])
eRadius, pRadius = get_axis(prj_file)
df['s.'], df['l.'], df['image_index'] = (zip(*df.apply(line_sample_size, path = atf_dict['PATH'], axis=1)))
df['known'] = df.apply(known, axis=1)
df['lat_Y_North'] = df.apply(lat_ISIS_coord, semi_major = eRadius, semi_minor = pRadius, axis=1)
df['long_X_East'] = df.apply(lon_ISIS_coord, semi_major = eRadius, semi_minor = pRadius, axis=1)
df['long_X_East'], df['lat_Y_North'], df['ht'] = zip(*df.apply(body_fix, semi_major = eRadius, semi_minor = pRadius, axis = 1))
def socet2isis(prj_file):
# Read in and setup the atf dict of information
atf_dict = read_atf(prj_file)
# Get the gpf and ipf files using atf dict
gpf_file = os.path.join(atf_dict['PATH'], atf_dict['GP_FILE'])
ipf_list = [os.path.join(atf_dict['PATH'], i) for i in atf_dict['IMAGE_IPF']]
# Read in the gpf file and ipf file(s) into separate dataframes
gpf_df = read_gpf(gpf_file)
ipf_df = read_ipf(ipf_list)
# Check for point ids that appear in the ipf files but are
# missing from the gpf file
gpf_pt_idx = pd.Index(pd.unique(gpf_df['point_id']))
ipf_pt_idx = pd.Index(pd.unique(ipf_df['pt_id']))
point_diff = ipf_pt_idx.difference(gpf_pt_idx)
if len(point_diff) != 0:
warnings.warn("The following points found in ipf files missing from gpf file: \n\n{}. \
\n\nContinuing, but these points will be missing from the control network".format(list(point_diff)))
# Merge the two dataframes on their point id columns
socet_df = ipf_df.merge(gpf_df, left_on='pt_id', right_on='point_id')
# Apply the transformations
apply_transformations(atf_dict, socet_df)
# Define column remap for socet dataframe
column_remap = {'l.': 'y', 's.': 'x',
'res_l': 'LineResidual', 'res_s': 'SampleResidual', 'known': 'Type',
'lat_Y_North': 'AprioriY', 'long_X_East': 'AprioriX', 'ht': 'AprioriZ',
'sig0': 'AprioriLatitudeSigma', 'sig1': 'AprioriLongitudeSigma', 'sig2': 'AprioriRadiusSigma'}
# Rename the columns using the column remap above
socet_df.rename(columns = column_remap, inplace=True)
# Return the socet dataframe to be converted to a control net
return socet_df
# creates a dict of ISIS serial numbers keyed by image name
def serial_numbers(image_dict, path):
serial_dict = dict()
for key in image_dict:
snum = sn.generate_serial_number(os.path.join(path, image_dict[key]))
snum = snum.replace('Mars_Reconnaissance_Orbiter', 'MRO')
serial_dict[key] = snum
return serial_dict
```
%% Cell type:code id: tags:
``` python
# Set up the path to the cub files and the project .atf file
path = '/Volumes/Blueman/'
atf_file = get_path('CTX_Athabasca_Middle_step0.atf')
image_dict = {'P01_001540_1889_XI_08N204W' : 'P01_001540_1889_XI_08N204W.lev1.cub',
'P01_001606_1897_XI_09N203W' : 'P01_001606_1897_XI_09N203W.lev1.cub',
'P02_001804_1889_XI_08N204W' : 'P02_001804_1889_XI_08N204W.lev1.cub',
'P03_002226_1895_XI_09N203W' : 'P03_002226_1895_XI_09N203W.lev1.cub',
'P03_002371_1888_XI_08N204W' : 'P03_002371_1888_XI_08N204W.lev1.cub',
'P19_008344_1894_XN_09N203W' : 'P19_008344_1894_XN_09N203W.lev1.cub',
'P20_008845_1894_XN_09N203W' : 'P20_008845_1894_XN_09N203W.lev1.cub'}
socet_df = socet2isis(atf_file)
images = pd.unique(socet_df['ipf_file'])
serial_dict = serial_numbers(image_dict, path)
# creates the control network
cn.to_isis('/Volumes/Blueman/banana.net', socet_df, serial_dict)
```
%% Output
/Users/adampaquette/anaconda/envs/pysat/lib/python3.6/site-packages/ipykernel_launcher.py:173: UserWarning: The following points found in ipf files missing from gpf file:
['P03_002226_1895_XI_09N203W_15', 'P03_002226_1895_XI_09N203W_16', 'P03_002226_1895_XI_09N203W_17', 'P03_002226_1895_XI_09N203W_18', 'P03_002226_1895_XI_09N203W_19', 'P03_002226_1895_XI_09N203W_20', 'P03_002226_1895_XI_09N203W_21', 'P03_002226_1895_XI_09N203W_22', 'P03_002226_1895_XI_09N203W_24', 'P03_002226_1895_XI_09N203W_26', 'P03_002226_1895_XI_09N203W_30', 'P03_002226_1895_XI_09N203W_31', 'P03_002226_1895_XI_09N203W_32', 'P03_002226_1895_XI_09N203W_34', 'P03_002226_1895_XI_09N203W_36', 'P03_002226_1895_XI_09N203W_37', 'P03_002226_1895_XI_09N203W_44', 'P03_002226_1895_XI_09N203W_48', 'P03_002226_1895_XI_09N203W_49', 'P03_002226_1895_XI_09N203W_56', 'P03_002226_1895_XI_09N203W_57', 'P03_002226_1895_XI_09N203W_61', 'P03_002226_1895_XI_09N203W_62', 'P03_002226_1895_XI_09N203W_63', 'P03_002226_1895_XI_09N203W_65', 'P19_008344_1894_XN_09N203W_4', 'P20_008845_1894_XN_09N203W_15'].
Continuing, but these points will be missing from the control network
%% Cell type:code id: tags:
``` python
```
P01_001540_1889_XI_08N204W,P01_001540_1889_XI_08N204W.lev1.cub
P01_001606_1897_XI_09N203W,P01_001606_1897_XI_09N203W.lev1.cub
P02_001804_1889_XI_08N204W,P02_001804_1889_XI_08N204W.lev1.cub
P03_002226_1895_XI_09N203W,P03_002226_1895_XI_09N203W.lev1.cub
P03_002371_1888_XI_08N204W,P03_002371_1888_XI_08N204W.lev1.cub
P19_008344_1894_XN_09N203W,P19_008344_1894_XN_09N203W.lev1.cub
P20_008845_1894_XN_09N203W,P20_008845_1894_XN_09N203W.lev1.cub
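This map file is consumed exactly as in the bin script above: each row becomes an image-name to cub-file entry in a dictionary. A small sketch, with a placeholder path:

``` python
import csv

with open('/data/cub_ipf_map.csv') as cub_ipf_map:  # placeholder path
    reader = csv.reader(cub_ipf_map, delimiter=',')
    image_dict = dict([(row[0], row[1]) for row in reader])

# image_dict['P01_001540_1889_XI_08N204W'] == 'P01_001540_1889_XI_08N204W.lev1.cub'
```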
@@ -271,3 +271,63 @@ def save_gpf(df, output_file):
outGPF.close()
return
def read_atf(atf_file):
"""
Reads a .atf file and outputs all of the
.ipf, .gpf, .sup, and .prj files, plus the path to the
.atf file (which should be the same for all of them)
Parameters
----------
atf_file : str
Full path to a socet atf file
Returns
-------
files_dict : dict
Dict of files and data associated with a socet
project
"""
with open(atf_file) as f:
files = []
ipf = []
sup = []
files_dict = []
# Grab every PRJ, GPF, SUP, IPF, and ATF filename referenced in the ATF file
for line in f:
if line[-4:-1] in ('prj', 'gpf', 'sup', 'ipf', 'atf'):
files.append(line)
files = np.array(files)
# Creates appropriate arrays for certain files in the right format
for file in files:
file = file.strip()
file = file.split(' ')
# Grabs all the IPF files
if file[1].endswith('.ipf'):
ipf.append(file[1])
# Grabs all the SUP files
if file[1].endswith('.sup'):
sup.append(file[1])
files_dict.append(file)
# Creates a dict out of file lists for GPF, PRJ, IPF, and ATF
files_dict = dict(files_dict)
# Sets the value of IMAGE_IPF to all IPF images
files_dict['IMAGE_IPF'] = ipf
# Sets the value of IMAGE_SUP to all SUP images
files_dict['IMAGE_SUP'] = sup
# Sets the value of PATH to the path of the ATF file
files_dict['PATH'] = os.path.dirname(os.path.abspath(atf_file))
return files_dict
import os
import math
import pyproj
import plio.io.isis_serial_number as sn
def line_sample_size(record, path):
"""
Converts columns l. and s. to sample size, line size, and generates an
image index
Parameters
----------
record : object
Pandas series object
path : str
Path to the associated sup files for a socet project
Returns
-------
: list
A list of sample_size, line_size, and img_index
"""
with open(os.path.join(path, record['ipf_file'] + '.sup')) as f:
for i, line in enumerate(f):
if i == 2:
img_index = line.split('\\')
img_index = img_index[-1].strip()
img_index = img_index.split('.')[0]
if i == 3:
line_size = line.split(' ')
line_size = line_size[-1].strip()
assert int(line_size) > 0, "Line number {} from {} is a negative number: Invalid Data".format(line_size, record['ipf_file'])
if i == 4:
sample_size = line.split(' ')
sample_size = sample_size[-1].strip()
assert int(sample_size) > 0, "Sample number {} from {} is a negative number: Invalid Data".format(sample_size, record['ipf_file'])
break
line_size = int(line_size)/2.0 + record['l.'] + 1
sample_size = int(sample_size)/2.0 + record['s.'] + 1
return sample_size, line_size, img_index
def known(record):
"""
Converts the known field from a socet dataframe into the
isis point_type column
Parameters
----------
record : object
Pandas series object
Returns
-------
: str
String representation of a known field
"""
if record['known'] == 0:
return 'Free'
elif record['known'] in (1, 2, 3):
return 'Constrained'
def to_360(num):
"""
Transforms a given number into 0 - 360 space
Parameters
----------
num : int
A given integer
Returns
-------
: int
num modulo 360
"""
return num % 360
def oc2og(dlat, dMajorRadius, dMinorRadius):
"""
Ocentric to ographic latitudes
Parameters
----------
dlat : float
Latitude to convert
dMajorRadius : float
Radius from the center of the body to the equator
dMinorRadius : float
Radius from the pole to the center of mass
Returns
-------
dlat : float
Converted latitude into ographic space
"""
try:
dlat = math.radians(dlat)
dlat = math.atan(((dMajorRadius / dMinorRadius)**2) * (math.tan(dlat)))
dlat = math.degrees(dlat)
except:
print ("Error in oc2og conversion")
return dlat
def og2oc(dlat, dMajorRadius, dMinorRadius):
"""
Ographic to ocentric latitudes
Parameters
----------
dlat : float
Latitude to convert
dMajorRadius : float
Radius from the center of the body to the equator
dMinorRadius : float
Radius from the pole to the center of mass
Returns
-------
dlat : float
Converted latitude into ocentric space
"""
try:
dlat = math.radians(dlat)
dlat = math.atan((math.tan(dlat) / ((dMajorRadius / dMinorRadius)**2)))
dlat = math.degrees(dlat)
except:
print ("Error in og2oc conversion")
return dlat
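As a quick sanity check, a hedged example converting a planetographic latitude to planetocentric and back with og2oc/oc2og; the radii below are illustrative Mars-like values, not taken from a project .prj file.

``` python
# Illustrative equatorial and polar radii in meters (roughly Mars).
a, b = 3396190.0, 3376200.0

lat_og = 10.0                    # planetographic latitude, degrees
lat_oc = og2oc(lat_og, a, b)     # about 9.89 degrees planetocentric
assert abs(oc2og(lat_oc, a, b) - lat_og) < 1e-9  # round-trips to the original
```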
def get_axis(file):
"""
Gets eRadius and pRadius from a .prj file
Parameters
----------
file : str
Full path to a given socet project (.prj) file
Returns
-------
: list
A list of the eRadius and pRadius of the project file
"""
with open(file) as f:
from collections import defaultdict
files = defaultdict(list)
for line in f:
ext = line.strip().split(' ')
files[ext[0]].append(ext[-1])
eRadius = float(files['A_EARTH'][0])
pRadius = eRadius * (1 - float(files['E_EARTH'][0]))
return eRadius, pRadius
def lat_ISIS_coord(record, semi_major, semi_minor):
"""
Function to convert lat_Y_North to ISIS_lat
Parameters
----------
record : object
Pandas series object
semi_major : float
Radius from the center of the body to the equator
semi_minor : float
Radius from the pole to the center of mass
Returns
-------
coord_360 : float
Converted latitude into ocentric space, and mapped
into 0 to 360
"""
ocentric_coord = og2oc(record['lat_Y_North'], semi_major, semi_minor)
coord_360 = to_360(ocentric_coord)
return coord_360
def lon_ISIS_coord(record, semi_major, semi_minor):
"""
Function to convert long_X_East to ISIS_lon
Parameters
----------
record : object
Pandas series object
semi_major : float
Radius from the center of the body to the equator
semi_minor : float
Radius from the pole to the center of mass
Returns
-------
coord_360 : float
Converted longitude into ocentric space, and mapped
into 0 to 360
"""
ocentric_coord = og2oc(record['long_X_East'], semi_major, semi_minor)
coord_360 = to_360(ocentric_coord)
return coord_360
def body_fix(record, semi_major, semi_minor):
"""
Transforms latitude, longitude, and height of a socet point into
a body fixed point
Parameters
----------
record : object
Pandas series object
semi_major : float
Radius from the center of the body to the equator
semi_minor : float
Radius from the pole to the center of mass
Returns
-------
: list
Body-fixed x, y, z coordinates of the point
"""
ecef = pyproj.Proj(proj='geocent', a=semi_major, b=semi_minor)
lla = pyproj.Proj(proj='latlon', a=semi_major, b=semi_minor)
lon, lat, height = pyproj.transform(lla, ecef, record['long_X_East'], record['lat_Y_North'], record['ht'])
return lon, lat, height
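A short sketch of the same pyproj 1.x geocentric transform that body_fix wraps, using the same illustrative radii; note that the returned values are body-fixed x, y, z in meters, even though the calling code names them lon, lat, and height.

``` python
import pyproj

# Illustrative ellipsoid radii in meters (not read from a real .prj file).
semi_major, semi_minor = 3396190.0, 3376200.0

ecef = pyproj.Proj(proj='geocent', a=semi_major, b=semi_minor)
lla = pyproj.Proj(proj='latlon', a=semi_major, b=semi_minor)

# (lon, lat, height) in degrees/meters -> body-fixed (x, y, z) in meters.
x, y, z = pyproj.transform(lla, ecef, 155.5, 9.9, -2500.0)
```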
def apply_transformations(atf_dict, df):
"""
Takes an atf dictionary and a socet dataframe and applies the necessary
transformations to convert that dataframe into an ISIS-compatible
dataframe
Parameters
----------
atf_dict : dict
Dictionary containing information from an atf file
df : object
Pandas dataframe object
"""
prj_file = os.path.join(atf_dict['PATH'], atf_dict['PROJECT'].split('\\')[-1])
eRadius, pRadius = get_axis(prj_file)
df['s.'], df['l.'], df['image_index'] = (zip(*df.apply(line_sample_size, path = atf_dict['PATH'], axis=1)))
df['known'] = df.apply(known, axis=1)
df['lat_Y_North'] = df.apply(lat_ISIS_coord, semi_major = eRadius, semi_minor = pRadius, axis=1)
df['long_X_East'] = df.apply(lon_ISIS_coord, semi_major = eRadius, semi_minor = pRadius, axis=1)
df['long_X_East'], df['lat_Y_North'], df['ht'] = zip(*df.apply(body_fix, semi_major = eRadius, semi_minor = pRadius, axis = 1))
def serial_numbers(image_dict, path):
"""
Creates a dict of ISIS serial numbers keyed by image name
Parameters
----------
image_dict : dict
Dictionary mapping image names to cub file names
path : str
Path to the directory containing the cub files
Returns
-------
serial_dict : dict
Dictionary mapping each image name to its ISIS serial number
"""
serial_dict = dict()
for key in image_dict:
serial_dict[key] = sn.generate_serial_number(os.path.join(path, image_dict[key]))
return serial_dict
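A hedged usage sketch of serial_numbers; the image name and path are placeholders lifted from the notebook above, and it assumes the corresponding cub file exists on disk so an ISIS serial number can be generated.

``` python
# Placeholder mapping of image name -> cub file (mirrors the notebook cell above).
image_dict = {'P01_001540_1889_XI_08N204W': 'P01_001540_1889_XI_08N204W.lev1.cub'}
path = '/Volumes/Blueman/'

serial_dict = serial_numbers(image_dict, path)
# serial_dict maps the image name to its ISIS serial number string.
```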