diff --git a/examples/xgboost-comprehensive/README.md b/examples/xgboost-comprehensive/README.md
index a31830b1d1f8..85610de61880 100644
--- a/examples/xgboost-comprehensive/README.md
+++ b/examples/xgboost-comprehensive/README.md
@@ -37,7 +37,6 @@
 Instead of aggregating multiple clients, there is only one single client participating in the training per round in the cyclic training scenario.
 The trained local XGBoost trees will be passed to the next client as an initialised model for next round's boosting.
 
-
 ## Project Setup
 
 Start by cloning the example project. We prepared a single-line command that you can copy into your shell which will checkout the example for you:
diff --git a/examples/xgboost-comprehensive/utils.py b/examples/xgboost-comprehensive/utils.py
index 05a5630dc771..0597c4486896 100644
--- a/examples/xgboost-comprehensive/utils.py
+++ b/examples/xgboost-comprehensive/utils.py
@@ -246,7 +246,7 @@ def sample(
         if min_num_clients is None:
             min_num_clients = num_clients
         self.wait_for(min_num_clients)
-
+        # Sample clients which meet the criterion
         available_cids = list(self.clients)
         if criterion is not None:
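
Note on the utils.py hunk: the added comment documents the criterion-based filtering step that follows it. The sketch below illustrates how such a filter can be wired up, assuming the client manager in utils.py mirrors Flower's `SimpleClientManager` and that `Criterion` is the `flwr.server.criterion.Criterion` interface with a `select(ClientProxy) -> bool` hook; `HalfOfClientsCriterion` and `filter_and_sample` are hypothetical names used only for illustration.

```python
# Minimal sketch of criterion-based client sampling (assumptions noted above).
import random
from typing import Dict, List, Optional

from flwr.server.client_proxy import ClientProxy
from flwr.server.criterion import Criterion


class HalfOfClientsCriterion(Criterion):
    """Hypothetical criterion: keep roughly half of the connected clients."""

    def select(self, client: ClientProxy) -> bool:
        # Deterministically keep clients based on a hash of their cid.
        return hash(client.cid) % 2 == 0


def filter_and_sample(
    clients: Dict[str, ClientProxy],
    num_clients: int,
    criterion: Optional[Criterion] = None,
) -> List[str]:
    """Filter the available cids with the criterion, then sample num_clients of them."""
    available_cids = list(clients)
    if criterion is not None:
        # Same filtering step as in the utils.py hunk above.
        available_cids = [
            cid for cid in available_cids if criterion.select(clients[cid])
        ]
    if num_clients > len(available_cids):
        # Not enough clients meet the criterion to sample from.
        return []
    return random.sample(available_cids, num_clients)
```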