print target class labels too in feature importance tables
borauyar committed May 31, 2024
1 parent 53dee25 commit d81fe29
Showing 4 changed files with 10 additions and 3 deletions.
2 changes: 1 addition & 1 deletion flexynesis/__main__.py
@@ -246,7 +246,7 @@ class AvailableModels(NamedTuple):

# evaluate predictions; (if any supervised learning happened)
if any([args.target_variables, args.surv_event_var, args.batch_variables]):
print("[INFO] Computing model evaluation metrics sdfadf")
print("[INFO] Computing model evaluation metrics")
metrics_df = flexynesis.evaluate_wrapper(model.predict(test_dataset), test_dataset,
surv_event_var=model.surv_event_var,
surv_time_var=model.surv_time_var)
2 changes: 2 additions & 0 deletions flexynesis/models/direct_pred.py
@@ -317,8 +317,10 @@ def compute_feature_importance(self, dataset, target_var, steps=5, batch_size =
features = dataset.features[layers[j]]
# Ensure tensors are already on CPU before converting to numpy
importances = imp[i][j][0].detach().numpy()
+ target_class_label = dataset.label_mappings[target_var].get(i) if target_var in dataset.label_mappings else ''
df_list.append(pd.DataFrame({'target_variable': target_var,
'target_class': i,
+ 'target_class_label': target_class_label,
'layer': layers[j],
'name': features,
'importance': importances}))
7 changes: 5 additions & 2 deletions flexynesis/models/supervised_vae.py
@@ -468,9 +468,12 @@ def compute_feature_importance(self, dataset, target_var, steps = 5, batch_size
for j in range(len(layers)):
features = self.dataset.features[layers[j]]
importances = imp[i][j][0].detach().numpy()
+ target_class_label = dataset.label_mappings[target_var].get(i) if target_var in dataset.label_mappings else ''
df_list.append(pd.DataFrame({'target_variable': target_var,
- 'target_class': i, 'layer': layers[j],
- 'name': features, 'importance': importances}))
+ 'target_class': i,
+ 'target_class_label': target_class_label,
+ 'layer': layers[j],
+ 'name': features, 'importance': importances}))
df_imp = pd.concat(df_list, ignore_index = True)

# save scores in model
2 changes: 2 additions & 0 deletions flexynesis/models/triplet_encoder.py
@@ -390,8 +390,10 @@ def compute_feature_importance(self, dataset, target_var, steps = 5, batch_size
features = dataset.features[layers[j]]
# Ensure tensors are already on CPU before converting to numpy
importances = imp[i][j][0].detach().numpy() # 0 => extract importances only for the anchor
+ target_class_label = dataset.label_mappings[target_var].get(i) if target_var in dataset.label_mappings else ''
df_list.append(pd.DataFrame({'target_variable': target_var,
'target_class': i,
+ 'target_class_label': target_class_label,
'layer': layers[j],
'name': features,
'importance': importances}))
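
For illustration, below is a minimal standalone sketch of the pattern these hunks add. It is not flexynesis code: the target variable "subtype", the class labels "Luminal"/"Basal", the gene names, the 'mutation' layer, and the importance values are all made-up stand-ins for the real dataset.label_mappings, dataset.features, and computed importances. It shows how the numeric target_class index is translated into a human-readable target_class_label column in the feature importance table.

# Minimal sketch, not flexynesis code: toy stand-ins illustrate the new column.
import pandas as pd

target_var = "subtype"                                    # hypothetical categorical target
label_mappings = {"subtype": {0: "Luminal", 1: "Basal"}}  # toy mapping of class index -> label
features = ["TP53", "BRCA1", "EGFR"]                      # hypothetical feature names
importances = [0.42, 0.17, 0.05]                          # placeholder importance scores

df_list = []
for i in (0, 1):  # one block per target class, mirroring compute_feature_importance
    # Same lookup the commit adds: map the class index to its label,
    # falling back to '' when the target has no mapping (e.g. numerical targets).
    target_class_label = label_mappings[target_var].get(i) if target_var in label_mappings else ''
    df_list.append(pd.DataFrame({'target_variable': target_var,
                                 'target_class': i,
                                 'target_class_label': target_class_label,
                                 'layer': 'mutation',     # hypothetical omics layer name
                                 'name': features,
                                 'importance': importances}))

df_imp = pd.concat(df_list, ignore_index=True)
print(df_imp[['target_class', 'target_class_label', 'name', 'importance']])

The fallback to an empty string keeps the target_class_label column present even for targets without a label mapping, so downstream code can rely on a fixed set of columns.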
