Commit

Remove get_writer function and initialize writer variable in calling function

Remove the `get_writer` function and initialize the writer variable directly in the calling function.

* Remove the `get_writer` function from `openfl-workspace/experimental/workflow/AggregatorBasedWorkflow/102_aggregator_validation/src/utils.py` and initialize the writer variable directly in the `write_metric` function.
* Remove the `get_writer` function from `openfl-workspace/experimental/workflow/AggregatorBasedWorkflow/104_keras_mnist/src/utils.py` and initialize the writer variable directly in the `write_metric` function.
* Remove the `get_writer` function from `openfl-tutorials/deprecated/native_api/Federated_Pytorch_MNIST_Tutorial.ipynb` and initialize the writer variable directly in the `write_metric` function.

Signed-off-by: Chaurasiya, Payal <[email protected]>
payalcha committed Dec 11, 2024
1 parent ea6a61b commit b95b63e
Showing 3 changed files with 3 additions and 30 deletions.
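For reference, a minimal sketch of the simplified helper described above (PyTorch variant; the log directory and tag format mirror the diffs below), with the writer created inline rather than through a cached global:

from torch.utils.tensorboard import SummaryWriter


def write_metric(node_name, task_name, metric_name, metric, round_number):
    """Write metric callback."""
    # The writer is now initialized directly in the calling function,
    # replacing the removed get_writer() global accessor.
    writer = SummaryWriter('./logs/cnn_mnist', flush_secs=5)
    writer.add_scalar(f'{node_name}/{task_name}/{metric_name}', metric, round_number)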
openfl-tutorials/deprecated/native_api/Federated_Pytorch_MNIST_Tutorial.ipynb
@@ -135,15 +135,8 @@
"source": [
"from torch.utils.tensorboard import SummaryWriter\n",
"\n",
"writer = None\n",
"\n",
"def get_writer():\n",
" global writer\n",
" if not writer:\n",
" writer = SummaryWriter('./logs/cnn_mnist', flush_secs=5)\n",
"\n",
"def write_metric(node_name, task_name, metric_name, metric, round_number):\n",
" get_writer()\n",
" writer = SummaryWriter('./logs/cnn_mnist', flush_secs=5)\n",
" writer.add_scalar(\"{}/{}/{}\".format(node_name, task_name, metric_name),\n",
" metric, round_number)"
]
Expand Down
openfl-workspace/experimental/workflow/AggregatorBasedWorkflow/102_aggregator_validation/src/utils.py
@@ -4,18 +4,7 @@
 from torch.utils.tensorboard import SummaryWriter


-writer = None
-
-
-def get_writer():
-    """Create global writer object."""
-    global writer
-    if not writer:
-        writer = SummaryWriter('./logs/cnn_mnist', flush_secs=5)
-    return writer
-
-
 def write_metric(node_name, task_name, metric_name, metric, round_number):
     """Write metric callback."""
-    writer = get_writer()
+    writer = SummaryWriter('./logs/cnn_mnist', flush_secs=5)
     writer.add_scalar(f'{node_name}/{task_name}/{metric_name}', metric, round_number)
openfl-workspace/experimental/workflow/AggregatorBasedWorkflow/104_keras_mnist/src/utils.py
@@ -3,16 +3,7 @@

 from tensorflow.summary import SummaryWriter

-writer = None
-
-def get_writer():
-    """Create global writer object."""
-    global writer
-    if not writer:
-        writer = SummaryWriter('./logs/cnn_mnist', flush_secs=5)
-    return writer
-
 def write_metric(node_name, task_name, metric_name, metric, round_number):
     """Write metric callback."""
-    writer = get_writer()
+    writer = SummaryWriter('./logs/cnn_mnist', flush_secs=5)
     writer.add_scalar(f'{node_name}/{task_name}/{metric_name}', metric, round_number)
