diff --git a/src/scripts/model_maker.py b/src/scripts/model_maker.py
index 7776fdeb668f3dd2969d62f28b2577f297ad5c3e..4df085115d4654c5b8bec39cd9b52c90919c9955 100755
--- a/src/scripts/model_maker.py
+++ b/src/scripts/model_maker.py
@@ -24,6 +24,7 @@
 # The script requires python3.
 
 import math
+import multiprocessing
 import numpy as np
 import os
 import pdb
@@ -369,10 +370,21 @@ def load_model(model_file):
             max_rad = float(model['particle_settings']['max_rad'])
             # random_aggregate() checks internally whether application is INCLUSION
             #random_aggregate(sconf, gconf, rnd_seed, max_rad)
-            check = random_compact(sconf, gconf, rnd_seed, max_rad)
+            rnd_engine = "COMPACT"
+            try:
+                rnd_engine = model['system_settings']['rnd_engine']
+            except KeyError:
+                # use compact generator, if no specification is given
+                rnd_engine = "COMPACT"
+            if (rnd_engine == "COMPACT"):
+                check = random_compact(sconf, gconf, rnd_seed, max_rad)
+            elif (rnd_engine == "LOOSE"):
+                check = random_aggregate(sconf, gconf, rnd_seed, max_rad)
+            else:
+                print("ERROR: unrecognized random generator engine.")
+                return (None, None)
             if (check != 0):
-                print("INFO: stopping with exit code %d."%check)
-                exit(check)
+                print("WARNING: %d sphere(s) could not be placed."%check)
         else:
             if (len(model['geometry_settings']['x_coords']) != gconf['nsph']):
                 print("ERROR: coordinate vectors do not match the number of spheres!")
@@ -386,6 +398,36 @@ def load_model(model_file):
             if (max_rad == 0.0):
                 max_rad = 20.0 * max(sconf['ros'])
         write_obj(sconf, gconf, max_rad)
+        try:
+            max_gpu_ram = int(model['system_settings']['max_gpu_ram'])
+            if (max_gpu_ram > 0):
+                max_gpu_ram_bytes = max_gpu_ram * 1024 * 1024 * 1024
+                matrix_dim = 2 * gconf['nsph'] * gconf['li'] * (gconf['li'] + 2)
+                matrix_size_bytes = 16 * matrix_dim * matrix_dim
+                if (matrix_size_bytes < max_gpu_ram_bytes):
+                    max_gpu_processes = int(max_gpu_ram_bytes / matrix_size_bytes)
+                    print("INFO: system supports up to %d simultaneous processes on GPU."%max_gpu_processes)
+                else:
+                    print("WARNING: estimated matrix size is larger than available GPU memory!")
+            else:
+                print("INFO: no GPU RAM declared.")
+            max_host_ram = int(model['system_settings']['max_host_ram'])
+            if (max_host_ram > 0):
+                max_host_ram_bytes = max_host_ram * 1024 * 1024 * 1024
+                matrix_dim = 2 * gconf['nsph'] * gconf['li'] * (gconf['li'] + 2)
+                matrix_size_bytes = 16 * matrix_dim * matrix_dim
+                if (matrix_size_bytes < max_host_ram_bytes):
+                    max_host_processes = int(max_host_ram_bytes / matrix_size_bytes / 2)
+                    print("INFO: system supports up to %d simultaneous processes."%max_host_processes)
+                else:
+                    print("WARNING: estimated matrix size is larger than available host memory!")
+            else:
+                print("WARNING: no host RAM declared!")
+        except KeyError as ex:
+            print(ex)
+            print("WARNING: missing system description! Cannot estimate recommended execution.")
+        cpu_count = multiprocessing.cpu_count()
+        print("INFO: the number of detected CPUs is %d."%cpu_count)
     else: # model is None
         print("ERROR: could not parse " + model_file + "!")
     return (sconf, gconf)
@@ -554,7 +596,6 @@ def random_aggregate(scatterer, geometry, seed, max_rad, max_attempts=100):
         while (not is_placed):
             if (attempts > max_attempts):
                 result += 1
-                print("WARNING: could not place sphere %d in allowed radius!"%i)
                 break # while(not is_placed)
             vec_thetas[i] = math.pi * random.random()
             vec_phis[i] = 2.0 * math.pi * random.random()
@@ -662,16 +703,21 @@ def random_compact(scatterer, geometry, seed, max_rad):
     y_centers = np.arange(-1.0 * max_rad + radius, max_rad, math.sqrt(3.0) * radius)
     z_centers = np.arange(-1.0 * max_rad + radius, max_rad, math.sqrt(3.0) * radius)
     x_offset = radius
-    y_offset = radius / math.sqrt(3.0)
-    z_offset = 0.0
+    y_offset = radius
+    x_layer_offset = radius
+    y_layer_offset = radius / math.sqrt(3.0)
     tmp_spheres = []
     n_cells = len(x_centers) * len(y_centers) * len(z_centers)
     print("INFO: the cubic space would contain %d spheres."%n_cells)
-    n_max_spheres = int(max_rad * max_rad * max_rad / (radius * radius * radius) * 0.74)
+    n_max_spheres = int((max_rad / radius) * (max_rad / radius) * (max_rad / radius) * 0.74)
     print("INFO: the maximum radius allows for %d spheres."%n_max_spheres)
     for zi in range(len(z_centers)):
+        if (x_layer_offset == 0.0):
+            x_layer_offset = radius
+        else:
+            x_layer_offset = 0.0
         if (y_offset == 0.0):
-            y_offset = radius / math.sqrt(3.0)
+            y_offset = radius
         else:
             y_offset = 0.0
         for yi in range(len(y_centers)):
@@ -680,7 +726,7 @@ def random_compact(scatterer, geometry, seed, max_rad):
             else:
                 x_offset = 0.0
             for xi in range(len(x_centers)):
-                x = x_centers[xi] + x_offset
+                x = x_centers[xi] + x_offset + x_layer_offset
                 y = y_centers[yi] + y_offset
                 z = z_centers[zi]
                 extent = radius + math.sqrt(x * x + y * y + z * z)
@@ -694,10 +740,11 @@ def random_compact(scatterer, geometry, seed, max_rad):
     #tmp_spheres = [{'itype': 1, 'x': 0.0, 'y': 0.0, 'z': 0.0}]
     current_n = len(tmp_spheres)
     print("INFO: before erosion there are %d spheres in use."%current_n)
-    rho = 2.0 * max_rad
+    rho = 10.0 * max_rad
+    discard_rad = 100.0 * max_rad
     while (current_n > nsph):
-        theta = 2.0 * math.pi * random.random()
-        phi = math.pi * random.random()
+        theta = math.pi * random.random()
+        phi = 2.0 * math.pi * random.random()
         x0 = rho * math.sin(theta) * math.cos(phi)
         y0 = rho * math.sin(theta) * math.sin(phi)
         z0 = rho * math.cos(theta)
@@ -705,7 +752,7 @@ def random_compact(scatterer, geometry, seed, max_rad):
         minimum_distance = 1000.0 * max_rad
         for di in range(len(tmp_spheres)):
             x1 = tmp_spheres[di]['x']
-            if (x1 == max_rad):
+            if (x1 == discard_rad):
                 continue
             y1 = tmp_spheres[di]['y']
             z1 = tmp_spheres[di]['z']
@@ -717,7 +764,7 @@ def random_compact(scatterer, geometry, seed, max_rad):
             if (distance < minimum_distance):
                 closest_index = di
                 minimum_distance = distance
-        tmp_spheres[closest_index]['x'] = max_rad
+        tmp_spheres[closest_index]['x'] = discard_rad
         current_n -= 1
     vec_spheres = []
     sph_index = 0
@@ -900,17 +947,13 @@ def write_legacy_sconf(conf):
 # spherical unit, plus a single scene file) to allow for model visualization
 # with 3D software tools.
 #
-# The spherical units are saved as `sphere_XXXX.obj` files. The material
-# information is collected in a `model.mtl` file and the whole scene is written
-# to a `model.obj` file.
-#
 # \param scatterer: `dict` Scatterer configuration dictionary (gets modified)
 # \param geometry: `dict` Geometry configuration dictionary (gets modified)
 # \param max_rad: `float` Maximum allowed radial extension of the aggregate
 def write_obj(scatterer, geometry, max_rad):
     out_dir = scatterer['out_file'].absolute().parent
-    out_model_path = Path(out_dir, "model.obj")
-    out_material_path = Path(out_dir, "model.mtl")
+    out_model_path = Path(str(geometry['out_file']) + ".obj")
+    out_material_path = Path(str(geometry['out_file']) + ".mtl")
     color_strings = [
         "1.0 1.0 1.0\n", # white
         "1.0 0.0 0.0\n", # red
@@ -945,14 +988,15 @@ def write_obj(scatterer, geometry, max_rad):
         pl.add_mesh(mesh, color=None)
     pl.export_obj(str(Path(str(out_dir), "TMP_MODEL.obj")))
     tmp_model_file = open(str(Path(str(out_dir), "TMP_MODEL.obj")), "r")
-    out_model_file = open(str(Path(str(out_dir), "model.obj")), "w")
+    out_model_file = open(str(out_model_path), "w")
+    mtl_line = "mtllib {0:s}\n".format(out_material_path.name)
     sph_index = 0
     sph_type_index = 0
     old_sph_type_index = 0
     str_line = tmp_model_file.readline()
     while (str_line != ""):
         if (str_line.startswith("mtllib")):
-            str_line = "mtllib model.mtl\n"
+            str_line = mtl_line
         elif (str_line.startswith("g ")):
             sph_index += 1
             sph_type_index = scatterer['vec_types'][sph_index - 1]