Skip to content

Commit

Permalink
Merge branch 'master' into feature_meep
Browse files Browse the repository at this point in the history
  • Loading branch information
HelgeGehring committed Jan 31, 2021
2 parents 70e7dde + 5b2ea6d commit 8f4355d
Show file tree
Hide file tree
Showing 2 changed files with 13 additions and 28 deletions.
4 changes: 2 additions & 2 deletions gdshelpers/export/gdsii_export.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ def _cell_to_gdsii_binary(cell, grid_steps_per_unit, max_points, max_line_points


def write_cell_to_gdsii_file(outfile, cell, unit=1e-6, grid_steps_per_unit=1000, max_points=4000, max_line_points=4000,
timestamp=None, parallel=False):
timestamp=None, parallel=False, max_workers=None):
name = 'gdshelpers_exported_library'
grid_step_unit = unit / grid_steps_per_unit
timestamp = datetime.datetime.now() if timestamp is None else timestamp
Expand Down Expand Up @@ -113,7 +113,7 @@ def add_cells_to_unique_list(start_cell):
# UNITS REAL_8 1/grid_steps_per_unit grid_step_unit
if parallel:
from concurrent.futures import ProcessPoolExecutor
with ProcessPoolExecutor() as pool:
with ProcessPoolExecutor(max_workers=max_workers) as pool:
num = len(cells)
for binary in pool.map(_cell_to_gdsii_binary, cells, (grid_steps_per_unit,) * num, (max_points,) * num,
(max_line_points,) * num, (timestamp,) * num):
Expand Down
37 changes: 11 additions & 26 deletions gdshelpers/geometry/chip.py
Original file line number Diff line number Diff line change
Expand Up @@ -292,7 +292,7 @@ def start_viewer(self):
import gdspy
gdspy.LayoutViewer(library=self.get_gdspy_lib(), depth=10)

def save(self, name=None, library=None, grid_steps_per_micron=1000, parallel=False):
def save(self, name=None, library=None, grid_steps_per_micron=1000, parallel=False, max_workers=None):
"""
        Exports the layout and creates a DLW-file, if DLW-features are used.
Expand All @@ -305,6 +305,8 @@ def save(self, name=None, library=None, grid_steps_per_micron=1000, parallel=Fal
:param parallel: Defines if parallelization is used (only supported in Python 3).
Standard value will be changed to True in a future version.
Deactivating can be useful for debugging reasons.
:param max_workers: If parallel is True, this can be used to limit the number of parallel processes.
This can be useful if you run into out-of-memory errors otherwise.
"""

if library is not None:
Expand All @@ -317,8 +319,8 @@ def save(self, name=None, library=None, grid_steps_per_micron=1000, parallel=Fal
elif name.endswith('.gds'):
name = name[:-4]
library = library or 'gdshelpers'
elif name.endswith('.oasis'):
name = name[:-6]
elif name.endswith('.oas'):
name = name[:-4]
library = library or 'fatamorgana'
elif name.endswith('.dxf'):
name = name[:-4]
Expand All @@ -331,15 +333,16 @@ def save(self, name=None, library=None, grid_steps_per_micron=1000, parallel=Fal
import shutil

with NamedTemporaryFile('wb', delete=False) as tmp:
write_cell_to_gdsii_file(tmp, self, grid_steps_per_unit=grid_steps_per_micron, parallel=parallel)
write_cell_to_gdsii_file(tmp, self, grid_steps_per_unit=grid_steps_per_micron, parallel=parallel,
max_workers=max_workers)
shutil.move(tmp.name, name + '.gds')

elif library == 'gdspy':
import gdspy

if parallel:
from concurrent.futures import ProcessPoolExecutor
with ProcessPoolExecutor() as pool:
with ProcessPoolExecutor(max_workers=max_workers) as pool:
self.get_gdspy_cell(pool)
else:
self.get_gdspy_cell()
Expand All @@ -348,7 +351,7 @@ def save(self, name=None, library=None, grid_steps_per_micron=1000, parallel=Fal
gdspy_cells = self.get_gdspy_lib().cell_dict.values()
if parallel:
from concurrent.futures import ProcessPoolExecutor
with ProcessPoolExecutor() as pool:
with ProcessPoolExecutor(max_workers=max_workers) as pool:
binary_cells = pool.map(gdspy.Cell.to_gds, gdspy_cells, [grid_steps_per_micron] * len(gdspy_cells))
else:
binary_cells = map(gdspy.Cell.to_gds, gdspy_cells, [grid_steps_per_micron] * len(gdspy_cells))
Expand All @@ -360,31 +363,13 @@ def save(self, name=None, library=None, grid_steps_per_micron=1000, parallel=Fal

if parallel:
from concurrent.futures import ProcessPoolExecutor
with ProcessPoolExecutor() as pool:
with ProcessPoolExecutor(max_workers=max_workers) as pool:
cells = self.get_oasis_cells(grid_steps_per_micron, pool)
else:
cells = self.get_oasis_cells(grid_steps_per_micron)

layout.cells = [cells[0]] + list(set(cells[1:]))

# noinspection PyUnresolvedReferences
def replace_names_by_ids(oasis_layout):
name_id = {}
for cell_id, cell in enumerate(oasis_layout.cells):
if cell.name.string in name_id:
raise RuntimeError(
'Each cell name should be unique, name "' + cell.name.string + '" is used multiple times')
name_id[cell.name.string] = cell_id
cell.name = cell_id
for cell in oasis_layout.cells:
for placement in cell.placements:
placement.name = name_id[placement.name.string]

oasis_layout.cellnames = {v: k for k, v in name_id.items()}

# improves performance for reading oasis file and workaround for fatamorgana-bug
replace_names_by_ids(layout)

with open(name + '.oas', 'wb') as f:
layout.write(f)
elif library == 'ezdxf':
Expand All @@ -407,7 +392,7 @@ def save_desc(self, filename: str):
"""
if not filename.endswith('.desc'):
filename += '.desc'
with open(filename + '.desc', 'w') as f:
with open(filename, 'w') as f:
json.dump(self.get_desc(), f, indent=True)

def get_reduced_layer(self, layer: int):
Expand Down

0 comments on commit 8f4355d

Please sign in to comment.