Skip to content

Commit

Permalink
remove save_largest_edge arg from generate_database; it is always done now
Browse files Browse the repository at this point in the history
  • Loading branch information
smroid committed Feb 15, 2025
1 parent c893b33 commit 39144bd
Show file tree
Hide file tree
Showing 2 changed files with 6 additions and 19 deletions.
5 changes: 0 additions & 5 deletions tetra3/cli/generate_database.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,10 +60,6 @@ def main():
"edge ratios are each quantized: `pattern_bins = 0.25 / pattern_max_error` "
"Default 0.001, corresponding to pattern_bins=250. For a database with "
"limiting magnitude 7, this yields a reasonable pattern hash collision rate.")
parser.add_argument("--save-largest-edge", type=bool, default=True,
help="If True (default), the absolute size of each pattern is stored "
"(via its largest edge angle) in a separate array. This makes the database "
"larger but the solver faster.")
parser.add_argument("--multiscale-step", type=float, default=1.5,
help="Determines the largest ratio between subsequent FOVs that is allowed "
"when generating a multiscale database. If the ratio max_fov/min_fov "
Expand All @@ -87,7 +83,6 @@ def main():
verification_stars_per_fov=args.verification_stars_per_fov,
star_max_magnitude=args.star_max_magnitude,
pattern_max_error=args.pattern_max_error,
save_largest_edge=args.save_largest_edge,
multiscale_step=args.multiscale_step,
epoch_proper_motion=args.epoch_proper_motion,
)
Expand Down
20 changes: 6 additions & 14 deletions tetra3/tetra3.py
Original file line number Diff line number Diff line change
Expand Up @@ -845,7 +845,7 @@ def generate_database(self, max_fov, min_fov=None, save_as=None,
star_catalog='hip_main',
lattice_field_oversampling=100, patterns_per_lattice_field=50,
verification_stars_per_fov=150, star_max_magnitude=None,
pattern_max_error=.001, save_largest_edge=True,
pattern_max_error=.001,
multiscale_step=1.5, epoch_proper_motion='now',
pattern_stars_per_fov=None, simplify_pattern=None):
"""Create a database and optionally save it to file.
Expand Down Expand Up @@ -974,9 +974,6 @@ def generate_database(self, max_fov, min_fov=None, save_as=None,
pattern_bins = 0.25 / pattern_max_error
Default .001, corresponding to pattern_bins=250. For a database with limiting magnitude
7, this yields a reasonable pattern hash collision rate.
save_largest_edge (bool, optional): If True (default), the absolute size of each
pattern is stored (via its largest edge angle) in a separate array. This makes the
database larger but the solver faster.
multiscale_step (float, optional): Determines the largest ratio between subsequent FOVs
that is allowed when generating a multiscale database. Defaults to 1.5. If the ratio
max_fov/min_fov is less than sqrt(multiscale_step) a single scale database is built.
Expand All @@ -993,7 +990,6 @@ def generate_database(self, max_fov, min_fov=None, save_as=None,
+ str((max_fov, min_fov, save_as, star_catalog, lattice_field_oversampling,
patterns_per_lattice_field, verification_stars_per_fov,
star_max_magnitude, pattern_max_error,
save_largest_edge,
multiscale_step, epoch_proper_motion)))
if pattern_stars_per_fov is not None and pattern_stars_per_fov != lattice_field_oversampling:
self._logger.warning(
Expand Down Expand Up @@ -1026,7 +1022,6 @@ def generate_database(self, max_fov, min_fov=None, save_as=None,
star_max_magnitude = float(star_max_magnitude)
PATTERN_SIZE = 4
pattern_bins = round(1/4/pattern_max_error)
save_largest_edge = bool(save_largest_edge)
if epoch_proper_motion is None or str(epoch_proper_motion).lower() == 'none':
epoch_proper_motion = None
self._logger.debug('Proper motions will not be considered')
Expand Down Expand Up @@ -1262,9 +1257,8 @@ def logk(x, k):
self._logger.info('Catalog size %s and type %s.' %
(pattern_catalog.shape, pattern_catalog.dtype))

if save_largest_edge:
pattern_largest_edge = np.zeros(catalog_length, dtype=np.float16)
self._logger.info('Storing largest edges as type %s' % pattern_largest_edge.dtype)
pattern_largest_edge = np.zeros(catalog_length, dtype=np.float16)
self._logger.info('Storing largest edges as type %s' % pattern_largest_edge.dtype)

# Gather collision information.
pattern_hashes_seen = set()
Expand Down Expand Up @@ -1320,9 +1314,8 @@ def logk(x, k):
pattern = [pattern[i] for (_, i) in centroid_distances]

(index, collision) = _insert_at_index(pattern, hash_index, pattern_catalog)
if save_largest_edge:
# Store as milliradian to better use float16 range
pattern_largest_edge[index] = largest_angle*1000
# Store as milliradian to better use float16 range
pattern_largest_edge[index] = largest_angle*1000
if is_novel_index and collision:
table_collisions += 1

Expand All @@ -1336,8 +1329,7 @@ def logk(x, k):
self._star_kd_tree = vector_kd_tree
self._star_catalog_IDs = star_catID
self._pattern_catalog = pattern_catalog
if save_largest_edge:
self._pattern_largest_edge = pattern_largest_edge
self._pattern_largest_edge = pattern_largest_edge
self._db_props['pattern_mode'] = 'edge_ratio'
self._db_props['pattern_size'] = PATTERN_SIZE
self._db_props['pattern_bins'] = pattern_bins
Expand Down

0 comments on commit 39144bd

Please sign in to comment.