diff --git a/CHANGELOG.md b/CHANGELOG.md index e50f90b..df24b11 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,6 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). -## [0.1.0] - 2024-07-08 +## [0.1.0] - 2024-07-05 ### Added - NSDlib version 0.1.0 release diff --git a/README.md b/README.md index cd2bd53..8bad272 100644 --- a/README.md +++ b/README.md @@ -16,53 +16,175 @@ All custom implementations are provided under `nsdlib/algorithms` package. Each ## Implemented features: ### Node evaluation algorithms -- [Algebraic](https://www.centiserver.org/centrality/Algebraic_Centrality/) -- [Average Distance](https://www.centiserver.org/centrality/Average_Distance/) -- [Barycenter](https://www.centiserver.org/centrality/Barycenter_Centrality/) -- [Betweenness](https://www.centiserver.org/centrality/Shortest-Paths_Betweenness_Centrality/) -- [BottleNeck]( https://www.centiserver.org/centrality/BottleNeck/) -- [Centroid](https://www.centiserver.org/centrality/Centroid_value/) -- [Closeness](https://www.centiserver.org/centrality/Closeness_Centrality/) -- [ClusterRank](https://www.centiserver.org/centrality/ClusterRank/) -- [Communicability Betweenness](https://www.centiserver.org/centrality/Communicability_Betweenness_Centrality/) -- [Coreness](https://www.centiserver.org/centrality/Coreness_Centrality/) -- [Current Flow Betweenness](https://www.centiserver.org/centrality/Current-Flow_Betweenness_Centrality/) -- [Current Flow Closeness](https://www.centiserver.org/centrality/Current-Flow_Closeness_Centrality/) -- [Decay](https://www.centiserver.org/centrality/Decay_Centrality/) -- [Degree](https://www.centiserver.org/centrality/Degree_Centrality/) -- [Diffusion degree](https://www.centiserver.org/centrality/Diffusion_Degree/) -- [Eigenvector](https://www.centiserver.org/centrality/Eigenvector_Centrality/) -- [Entropy](https://www.centiserver.org/centrality/Entropy_Centrality/) -- [Geodestic k path](https://www.centiserver.org/centrality/Geodesic_K-Path_Centrality/) -- [Group Betweenness Centrality](https://www.centiserver.org/centrality/Group_Betweenness_Centrality/) -- [Group Closeness](https://networkx.org/documentation/stable/reference/algorithms/generated/networkx.algorithms.centrality.group_closeness_centrality.html) -- [Group Degree](https://networkx.org/documentation/stable/reference/algorithms/generated/networkx.algorithms.centrality.group_degree_centrality.html) -- [Harmonic](https://www.centiserver.org/centrality/Harmonic_Centrality/) -- [Heatmap](https://www.centiserver.org/centrality/Heatmap_Centrality/) -- [Katz](https://www.centiserver.org/centrality/Katz_Centrality/) -- [Hubbell](https://www.centiserver.org/centrality/Hubbell_Centrality/) -- [Laplacian](https://www.centiserver.org/centrality/Laplacian_Centrality/) -- [Leverage](https://www.centiserver.org/centrality/Leverage_Centrality/) -- [Lin](https://www.centiserver.org/centrality/Lin_Centrality/) -- [Load](https://www.centiserver.org/centrality/Load_Centrality/) -- [Mnc](https://www.centiserver.org/centrality/MNC_Maximum_Neighborhood_Component/) -- [Pagerank](https://www.centiserver.org/centrality/PageRank/) -- [Pdi](https://www.centiserver.org/centrality/Pairwise_Disconnectivity_Index/) -- [Percolation](https://www.centiserver.org/centrality/Percolation_Centrality/) -- [Radiality](https://www.centiserver.org/centrality/Radiality_Centrality/) -- 
[Rumor](https://www.centiserver.org/centrality/Rumor_Centrality/) -- [Second Order](https://www.centiserver.org/centrality/Second_Order_Centrality/) -- [Semi Local](https://www.centiserver.org/centrality/Semi_Local_Centrality/) -- [Subgraph](https://www.centiserver.org/centrality/Subgraph_Centrality/) -- [Topological](https://www.centiserver.org/centrality/Topological_Coefficient/) -- [Trophic Levels](https://networkx.org/documentation/stable/reference/algorithms/generated/networkx.algorithms.centrality.trophic_levels.html) +- [algebraic_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.algebraic_centrality.html) +- [average_distance_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.average_distance_centrality.html) +- [barycenter_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.barycenter_centrality.html) +- [betweenness_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.betweenness_centrality.html) +- [bottle_neck_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.bottle_neck_centrality.html) +- [centroid_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.centroid_centrality.html) +- [closeness_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.closeness_centrality.html) +- [cluster_rank_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.cluster_rank_centrality.html) +- [communicability_betweenness_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.communicability_betweenness_centrality.html) +- [coreness_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.coreness_centrality.html) +- [current_flow_betweenness_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.current_flow_betweenness_centrality.html) +- [current_flow_closeness_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.current_flow_closeness_centrality.html) +- [decay_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.decay_centrality.html) +- [degree_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.degree_centrality.html) +- [diffusion_degree_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.diffusion_degree_centrality.html) +- [eccentricity_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.eccentricity_centrality.html) +- [eigenvector_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.eigenvector_centrality.html) +- [entropy_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.entropy_centrality.html) +- [geodestic_k_path_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.geodestic_k_path_centrality.html) +- [group_betweenness_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.group_betweenness_centrality.html) +- [group_closeness_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.group_closeness_centrality.html) +- [group_degree_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.group_degree_centrality.html) +- [harmonic_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.harmonic_centrality.html) +- 
[heatmap_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.heatmap_centrality.html)
+- [hubbell_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.hubbell_centrality.html)
+- [katz_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.katz_centrality.html)
+- [laplacian_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.laplacian_centrality.html)
+- [leverage_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.leverage_centrality.html)
+- [lin_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.lin_centrality.html)
+- [load_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.load_centrality.html)
+- [mnc_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.mnc_centrality.html)
+- [pagerank_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.pagerank_centrality.html)
+- [pdi_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.pdi_centrality.html)
+- [percolation_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.percolation_centrality.html)
+- [radiality_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.radiality_centrality.html)
+- [rumor_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.rumor_centrality.html)
+- [second_order_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.second_order_centrality.html)
+- [semi_local_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.semi_local_centrality.html)
+- [subgraph_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.subgraph_centrality.html)
+- [topological_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.topological_centrality.html)
+- [trophic_levels_centrality](https://netcenlib.readthedocs.io/en/latest/source/netcenlib.algorithms.trophic_levels_centrality.html)
 ### Outbreak detection algorithms
-- test
+- [CPM_Bipartite](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.CPM_Bipartite.html)
+- [agdl](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.agdl.html)
+- [angel](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.angel.html)
+- [aslpaw](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.aslpaw.html)
+- [async_fluid](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.async_fluid.html)
+- [bayan](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.bayan.html)
+- [belief](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.belief.html)
+- [bimlpa](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.bimlpa.html)
+- [coach](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.coach.html)
+- [condor](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.condor.html)
+- [conga](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.conga.html)
+- [congo](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.congo.html)
+- [core_expansion](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.core_expansion.html)
+- [cpm](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.cpm.html)
+- [dcs](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.dcs.html)
+- [demon](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.demon.html)
+- [der](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.der.html)
+- [dpclus](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.dpclus.html)
+- [ebgc](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.ebgc.html)
+- [ego_networks](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.ego_networks.html)
+- [eigenvector](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.eigenvector.html)
+- [em](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.em.html)
+- [endntm](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.endntm.html)
+- [eva](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.eva.html)
+- [frc_fgsn](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.frc_fgsn.html)
+- [ga](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.ga.html)
+- [gdmp2](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.gdmp2.html)
+- [girvan_newman](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.girvan_newman.html)
+- [graph_entropy](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.graph_entropy.html)
+- [greedy_modularity](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.greedy_modularity.html)
+- 
[head_tail](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.head_tail.html) +- [hierarchical_link_community](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.hierarchical_link_community.html) +- [ilouvain](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.ilouvain.html) +- [infomap](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.infomap.html) +- [infomap_bipartite](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.infomap_bipartite.html) +- [ipca](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.ipca.html) +- [kclique](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.kclique.html) +- [kcut](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.kcut.html) +- [label_propagation](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.label_propagation.html) +- [lais2](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.lais2.html) +- [leiden](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.leiden.html) +- [lemon](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.lemon.html) +- [lfm](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.lfm.html) +- [louvain](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.louvain.html) +- [lpam](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.lpam.html) +- [lpanni](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.lpanni.html) +- [lswl](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.lswl.html) +- [lswl_plus](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.lswl_plus.html) +- [markov_clustering](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.markov_clustering.html) +- [mcode](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.mcode.html) +- [mod_m](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.mod_m.html) +- [mod_r](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.mod_r.html) +- [multicom](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.multicom.html) +- [node_perception](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.node_perception.html) +- [overlapping_seed_set_expansion](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.overlapping_seed_set_expansion.html) +- [paris](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.paris.html) +- [percomvc](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.percomvc.html) +- [principled_clustering](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.principled_clustering.html) +- [pycombo](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.pycombo.html) +- [r_spectral_clustering](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.r_spectral_clustering.html) +- [rb_pots](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.rb_pots.html) +- [rber_pots](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.rber_pots.html) +- 
[ricci_community](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.ricci_community.html)
+- [sbm_dl](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.sbm_dl.html)
+- [sbm_dl_nested](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.sbm_dl_nested.html)
+- [scan](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.scan.html)
+- [siblinarity_antichain](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.siblinarity_antichain.html)
+- [significance_communities](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.significance_communities.html)
+- [slpa](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.slpa.html)
+- [spectral](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.spectral.html)
+- [spinglass](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.spinglass.html)
+- [surprise_communities](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.surprise_communities.html)
+- [threshold_clustering](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.threshold_clustering.html)
+- [tiles](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.tiles.html)
+- [umstmo](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.umstmo.html)
+- [wCommunity](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.wCommunity.html)
+- [walkscan](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.walkscan.html)
+- [walktrap](https://cdlib.readthedocs.io/en/latest/reference/generated/cdlib.algorithms.walktrap.html)
 ### Graph reconstruction algorithms
 - SbRP
+### Ensemble methods
+This package provides an implementation for easily combining multiple source detection methods into one ensemble method. Use `EnsembleSourceDetector` with config objects as arguments to create an ensemble method; see the ensemble example under "How to use" below.
+
 ## How to use
 Library can be installed using pip:
@@ -99,21 +221,111 @@ print(evaluation)
 ```
-- by importing and using specific method:
+For performing ensemble source detection, use the `EnsembleSourceDetector` class and configure it with an `EnsembleSourceDetectionConfig` object. This approach allows for seamless source detection and result evaluation.
+
+```python
+
+import networkx as nx
+
+from nsdlib.common.models import SourceDetectionConfig, \
+    EnsembleSourceDetectionConfig
+from nsdlib.source_detection import SourceDetector, EnsembleSourceDetector
+from nsdlib.taxonomies import NodeEvaluationAlgorithm, EnsembleVotingType
+
+G = nx.karate_club_graph()
+
+config_netsleuth = SourceDetectionConfig(
+    node_evaluation_algorithm=NodeEvaluationAlgorithm.NETSLEUTH,
+)
+
+config_degree = SourceDetectionConfig(
+    node_evaluation_algorithm=NodeEvaluationAlgorithm.CENTRALITY_DEGREE,
+)
+
+ensemble_config = EnsembleSourceDetectionConfig(
+    detection_configs=[config_netsleuth, config_degree],
+    voting_type=EnsembleVotingType.HARD,
+    classifier_weights=[0.5, 0.5],
+)
+
+source_detector = EnsembleSourceDetector(ensemble_config)
+
+result, evaluation = source_detector.detect_sources_and_evaluate(G=G,
+    IG=G, real_sources=[0,33])
+print(evaluation)
+
+
+```
+
+- by importing and using a specific method; each method name has a prefix that indicates its purpose:
+
+```python
+import networkx as nx
+
+import nsdlib as nsd
+
+G = nx.karate_club_graph()
+IG = G.copy()
+IG.remove_nodes_from([10,15,20,33])
+real_sources = [0,8]
+
+EIG = nsd.reconstruction_sbrp(G, IG)
+
+outbreaks = nsd.outbreaks_leiden(EIG)
+
+detected_sources = []
+for outbreak in outbreaks.communities:
+    outbreak_G = G.subgraph(outbreak)
+    nodes_evaluation = nsd.evaluation_jordan_center(outbreak_G)
+    outbreak_detected_source = max(nodes_evaluation, key=nodes_evaluation.get)
+    print(f"Outbreak: {outbreak}, Detected Source: {outbreak_detected_source}")
+    detected_sources.append(outbreak_detected_source)
+
+evaluation = nsd.compute_source_detection_evaluation(
+    G=EIG,
+    real_sources=real_sources,
+    detected_sources=detected_sources,
+)
+print(evaluation)
+
+```
+
+This method allows you to directly specify the process of source detection, making it easy to modify the standard logic.
+- by using the appropriate enum and the corresponding generic function:
 ```python
-from typing import Any
+
 import networkx as nx
-from networkx import Graph
-from nsdlib.source_detection import compute_centrality
-from nsdlib.taxonomies import Centrality
+import nsdlib as nsd
+from nsdlib import PropagationReconstructionAlgorithm, NodeEvaluationAlgorithm, OutbreaksDetectionAlgorithm

-g: Graph = nx.karate_club_graph()
-centrality_centroid: dict[Any, float] = compute_centrality(g, Centrality.CENTROID)
+G = nx.karate_club_graph()
+IG = G.copy()
+IG.remove_nodes_from([10,15,20,33])
+real_sources = [0,8]
+
+EIG = nsd.reconstruct_propagation(G, IG, PropagationReconstructionAlgorithm.SBRP)
+
+outbreaks = nsd.identify_outbreaks(EIG, OutbreaksDetectionAlgorithm.LEIDEN)
+outbreaks_G = nsd.create_subgraphs_based_on_outbreaks(EIG, outbreaks)
+detected_sources = []
+for outbreak in outbreaks_G:
+    nodes_evaluation = nsd.evaluate_nodes(outbreak, NodeEvaluationAlgorithm.CENTRALITY_AVERAGE_DISTANCE)
+    outbreak_detected_source = max(nodes_evaluation, key=nodes_evaluation.get)
+    print(f"Outbreak: {outbreak}, Detected Source: {outbreak_detected_source}")
+    detected_sources.append(outbreak_detected_source)
+
+evaluation = nsd.compute_source_detection_evaluation(
+    G=EIG,
+    real_sources=real_sources,
+    detected_sources=detected_sources,
+)
+print(evaluation)
 ```
-This method allows you not to directly specify centrality, making it easy to compute different centralises in a loop.
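+
+The enum-based functions also make it easy to compare several algorithms in a loop; the snippet below is a minimal illustrative sketch that reuses only the enums and functions shown above (the particular algorithms chosen are arbitrary):
+
+```python
+import networkx as nx
+
+import nsdlib as nsd
+from nsdlib import NodeEvaluationAlgorithm
+
+G = nx.karate_club_graph()
+
+# Score nodes with a few evaluation algorithms and report the top node for each.
+for algorithm in [
+    NodeEvaluationAlgorithm.CENTRALITY_DEGREE,
+    NodeEvaluationAlgorithm.CENTRALITY_AVERAGE_DISTANCE,
+    NodeEvaluationAlgorithm.NETSLEUTH,
+]:
+    scores = nsd.evaluate_nodes(G, algorithm)
+    top_node = max(scores, key=scores.get)
+    print(f"{algorithm}: top-scored node {top_node}")
+```
+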
+This approach is more flexible: it allows several techniques to be computed at once, and iterating over multiple methods makes it easy to analyze a selected set of techniques.
+
 For more examples and details, please refer to the [official documentation](https://nsdlib.readthedocs.io/en/latest/index.html).
diff --git a/docs/_static/logo.png b/docs/_static/logo.png
new file mode 100644
index 0000000..98258f5
Binary files /dev/null and b/docs/_static/logo.png differ
diff --git a/docs/conf.py b/docs/conf.py
index 631efa1..77750b9 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -17,7 +17,6 @@

 # -- Project information -----------------------------------------------------
 now = datetime.datetime.now()
-
 project = "NSDLib"
 author = "Damian Frąszczak, Edyta Frąszczak"
 copyright = f"{now.year}, {author}"
diff --git a/docs/files/CODE_OF_CONDUCT.md b/docs/files/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000..55b979c
--- /dev/null
+++ b/docs/files/CODE_OF_CONDUCT.md
@@ -0,0 +1,43 @@
+# Code of Conduct for the Network Source Detection Library Project
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to make participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment include:
+
+- Using welcoming and inclusive language
+- Being respectful of differing viewpoints and experiences
+- Gracefully accepting constructive criticism
+- Focusing on what is best for the community
+- Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+- The use of sexualized language or imagery and unwelcome sexual attention or advances
+- Trolling, insulting/derogatory comments, and personal or political attacks
+- Public or private harassment
+- Publishing others' private information, such as a physical or electronic address, without explicit permission
+- Other conduct which could reasonably be considered inappropriate in a professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned with this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at [INSERT EMAIL ADDRESS]. 
All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality regarding the reporter of an incident.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant](https://www.contributor-covenant.org), version 2.0, available at https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
diff --git a/docs/files/CONTRIBUTING.md b/docs/files/CONTRIBUTING.md
new file mode 100644
index 0000000..6992825
--- /dev/null
+++ b/docs/files/CONTRIBUTING.md
@@ -0,0 +1,82 @@
+# Contributing to NSDLib
+
+We warmly welcome contributions to NSDLib! This document provides guidelines for contributing to this project. By participating in this project, you agree to abide by its terms.
+
+## Table of Contents
+- [How to Contribute](#how-to-contribute)
+- [Local Development Setup](#local-development-setup)
+- [Releasing a New Version](#releasing-a-new-version)
+- [Pre-commit Hooks](#pre-commit-hooks)
+
+## How to Contribute
+
+### Reporting Bugs and Requesting Features
+
+- **Bug Reports**: Please use the [Bug Report Template](.github/ISSUE_TEMPLATE/bug_report.md) to report any bugs. Provide as much detail as possible to help us understand and fix the issue.
+- **Feature Requests**: For proposing new features or enhancements, use the [Feature Request Template](.github/ISSUE_TEMPLATE/feature_request.md). Describe the feature, its benefits, and possible implementation if you have one in mind.
+
+### Coding Style
+
+- **PEP 8**: All Python code must adhere to the [PEP 8 style guide](https://www.python.org/dev/peps/pep-0008/), except where explicitly mentioned.
+- **Comments and Docstrings**: Use comments and docstrings to explain the purpose of complex code blocks. Follow the [PEP 257](https://www.python.org/dev/peps/pep-0257/) docstring conventions.
+
+### Implementation Requirements
+
+- **Source Detection Method Implementation**:
+  - Each new method must be implemented in a separate file within the `nsdlib/algorithms` directory, in the package that matches its intended purpose, e.g. a reconstruction algorithm should be placed in the `reconstruction` package.
+  - The file name should match the method's name.
+  - Each file must contain a single function, named after the new method.
+  - Each algorithm function must be exposed in the `nsdlib/algorithms` package to be accessible for external use.
+  - Add an entry for the new algorithm in the appropriate taxonomy class, e.g. for a reconstruction algorithm a new entry should be added to the `PropagationReconstructionAlgorithm` enum to ensure it's recognized and accessible through a standardized interface.
+
+- **Testing**:
+  - Contributions must include tests covering the new functionality. We require at least 80% test coverage for changes.
+  - Use the `pytest` framework for writing tests.
+
+- **Documentation**:
+  - Update the project documentation to reflect the addition of the new method or any other significant changes.
+  - Ensure that examples, usage guides, and API documentation are clear and updated.
+
+### Making Changes
+
+1. **Create an Issue**: For every change, whether a bug fix or a feature implementation, please open a new issue. 
This helps us keep track of what's being worked on and discuss potential changes before the development work starts.
+2. **Follow the Style Guide and Implementation Requirements**: Adhere to the [Coding Style](#coding-style) and [Implementation Requirements](#implementation-requirements).
+3. **Use Pre-commit Hooks**: This project uses pre-commit hooks to ensure code style and quality. Run `pre-commit install` after cloning the repository to set up the hooks locally. For more, check [Pre-commit Hooks](#pre-commit-hooks).
+4. **Submit a Pull Request**: Once you're ready, submit a pull request linked to the issue you've created. Describe your changes clearly in the PR description.
+
+
+## Local development setup
+
+By default, venv is used to work on the project. After creating a venv, install the requirements:
+
+```bash
+pip install -r requirements.txt
+pip install -r requirements.dev.txt
+```
+and you are ready to go.
+
+## Releasing a new version
+
+- Merge your PR into **`main`**
+- Update the changelog in CHANGELOG.md
+- Change the version in src/nsdlib/version.py
+- Commit. `git commit -m 'Release version x.y.z'`
+- Tag the commit. `git tag -a x.y.z -m 'Release version x.y.z'`
+- Push (do not forget --tags). `git push origin master --tags`
+- A release will be created automatically by GitHub Actions
+
+
+## Pre-commit Hooks
+
+This project supports [**pre-commit**](https://pre-commit.com/). To use it, please install it
+with `pip install pre-commit`, then run `pre-commit install` and you are ready to go.
+A bunch of checks will be executed before each commit and files will be formatted correctly.
+
+Pre-commit works on staged files while committing. To run it without committing, run `pre-commit run`. Changes have to be staged.
+
+To run pre-commit hooks on all changes in the branch:
+
+1. Sync the branch with main
+1. Run `git diff --name-only --diff-filter=MA origin/master | xargs pre-commit run --files`
+
+For branches that are not based on `master`, you might replace `origin/master` with `origin/{your_branch}`
diff --git a/docs/files/INTRODUCTION.md b/docs/files/INTRODUCTION.md
new file mode 100644
index 0000000..8eb80d8
--- /dev/null
+++ b/docs/files/INTRODUCTION.md
@@ -0,0 +1,11 @@
+# NSDlib
+
+NSDlib (Network source detection library) is a comprehensive library designed for detecting sources of propagation in networks. This library offers a variety of algorithms that help researchers and developers analyze and identify the origins of information (epidemic etc.) spread within networks.
+
+## Overview
+
+NSDLib is a complex library designed for easy integration into existing projects. It aims to be a comprehensive repository
+of source detection methods, outbreak detection techniques, and propagation graph reconstruction tools. Researchers worldwide are encouraged to contribute and utilize this library,
+facilitating the development of new techniques to combat misinformation and improve propagation analysis.
+Each year, new techniques are introduced through scientific papers, often with only pseudo-code descriptions, making it
+difficult for researchers to evaluate and compare them with existing methods. NSDlib tries to bridge this gap and encourages researchers to contribute their implementations here.
diff --git a/docs/files/QUICK_START.md b/docs/files/QUICK_START.md
new file mode 100644
index 0000000..4ddde30
--- /dev/null
+++ b/docs/files/QUICK_START.md
@@ -0,0 +1,114 @@
+# Quick Start Guide for NSDLib
+
+NSDlib (Network source detection library) is a comprehensive library designed for detecting sources of propagation in networks. This library offers a variety of algorithms that help researchers and developers analyze and identify the origins of information (epidemic etc.) spread within networks.
+
+## Installation
+
+Install NSDLib using pip:
+
+```bash
+pip install nsdlib
+```
+
+## Basic Usage
+NSDLib supports source detection in three ways: through the `SourceDetector` class, through direct method calls, and through enums combined with generic functions.
+
+### `SourceDetector` class
+Utilize the `SourceDetector` class and configure it with a `SourceDetectionConfig` object. This approach allows for seamless source detection and result evaluation.
+
+```python
+import networkx as nx
+
+from nsdlib.common.models import SourceDetectionConfig
+from nsdlib.source_detection import SourceDetector
+from nsdlib.taxonomies import NodeEvaluationAlgorithm
+
+
+G = nx.karate_club_graph()
+
+config = SourceDetectionConfig(
+    node_evaluation_algorithm=NodeEvaluationAlgorithm.NETSLEUTH,
+)
+
+source_detector = SourceDetector(config)
+
+result, evaluation = source_detector.detect_sources_and_evaluate(G=G,
+    IG=G, real_sources=[0,33])
+print(evaluation)
+
+
+```
+### Direct method calls
+Import and use a specific method directly; each method name has a prefix that indicates its purpose:
+
+```python
+import networkx as nx
+
+import nsdlib as nsd
+
+G = nx.karate_club_graph()
+IG = G.copy()
+IG.remove_nodes_from([10,15,20,33])
+real_sources = [0,8]
+
+EIG = nsd.reconstruction_sbrp(G, IG)
+
+outbreaks = nsd.outbreaks_leiden(EIG)
+
+detected_sources = []
+for outbreak in outbreaks.communities:
+    outbreak_G = G.subgraph(outbreak)
+    nodes_evaluation = nsd.evaluation_jordan_center(outbreak_G)
+    outbreak_detected_source = max(nodes_evaluation, key=nodes_evaluation.get)
+    print(f"Outbreak: {outbreak}, Detected Source: {outbreak_detected_source}")
+    detected_sources.append(outbreak_detected_source)
+
+evaluation = nsd.compute_source_detection_evaluation(
+    G=EIG,
+    real_sources=real_sources,
+    detected_sources=detected_sources,
+)
+print(evaluation)
+
+```
+
+This method allows you to directly specify the process of source detection, making it easy to modify the standard logic.
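+
+Because each step is an ordinary function call, the standard logic is easy to adjust. For example, the sketch below keeps the two best-scoring candidates per outbreak instead of a single node; it is only an illustration, and the value of `top_k` is an arbitrary choice:
+
+```python
+import networkx as nx
+
+import nsdlib as nsd
+
+G = nx.karate_club_graph()
+
+# Illustrative tweak: keep the top_k best-scoring candidates per outbreak
+# instead of a single argmax.
+top_k = 2
+
+outbreaks = nsd.outbreaks_leiden(G)
+detected_sources = []
+for outbreak in outbreaks.communities:
+    outbreak_G = G.subgraph(outbreak)
+    nodes_evaluation = nsd.evaluation_jordan_center(outbreak_G)
+    best_nodes = sorted(nodes_evaluation, key=nodes_evaluation.get, reverse=True)[:top_k]
+    detected_sources.extend(best_nodes)
+
+print(detected_sources)
+```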
+
+### Enum usage
+Use the appropriate enum together with the corresponding generic function for each step:
+
+```python
+
+import networkx as nx
+
+import nsdlib as nsd
+from nsdlib import PropagationReconstructionAlgorithm, NodeEvaluationAlgorithm, OutbreaksDetectionAlgorithm
+
+G = nx.karate_club_graph()
+IG = G.copy()
+IG.remove_nodes_from([10,15,20,33])
+real_sources = [0,8]
+
+EIG = nsd.reconstruct_propagation(G, IG, PropagationReconstructionAlgorithm.SBRP)
+
+outbreaks = nsd.identify_outbreaks(EIG, OutbreaksDetectionAlgorithm.LEIDEN)
+outbreaks_G = nsd.create_subgraphs_based_on_outbreaks(EIG, outbreaks)
+detected_sources = []
+for outbreak in outbreaks_G:
+    nodes_evaluation = nsd.evaluate_nodes(outbreak, NodeEvaluationAlgorithm.CENTRALITY_AVERAGE_DISTANCE)
+    outbreak_detected_source = max(nodes_evaluation, key=nodes_evaluation.get)
+    print(f"Outbreak: {outbreak}, Detected Source: {outbreak_detected_source}")
+    detected_sources.append(outbreak_detected_source)
+
+evaluation = nsd.compute_source_detection_evaluation(
+    G=EIG,
+    real_sources=real_sources,
+    detected_sources=detected_sources,
+)
+print(evaluation)
+```
+
+This approach is more flexible: it allows several techniques to be computed at once, and iterating over multiple methods makes it easy to analyze a selected set of techniques.
+
+
+If you would like to test ``NSDLib`` functionalities without installing it on your machine, consider using the preconfigured [Jupyter notebook](nsdlib.ipynb).
diff --git a/docs/files/nsdlib.ipynb b/docs/files/nsdlib.ipynb
new file mode 100644
index 0000000..d5792b7
--- /dev/null
+++ b/docs/files/nsdlib.ipynb
@@ -0,0 +1,253 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "id": "8201518347de614b",
+   "metadata": {
+    "collapsed": false
+   },
+   "source": [
+    "# Introduction to NSDLib: Network Source Detection Library\n",
+    "NSDLib (Network source detection library) is a comprehensive library for detecting sources of propagation in networks. The library is designed to work with the Python NetworkX library.\n",
+    "\n",
+    "The goal of NSDLib is to offer a comprehensive repository of source detection methods, outbreak detection techniques, and propagation graph reconstruction tools. Each year, new techniques are introduced through scientific papers, often with only pseudo-code descriptions, making it difficult for researchers to evaluate and compare them with existing methods. NSDLib strives to bridge this gap and encourages researchers to contribute their implementations."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "d8258e3ea1ca90fc",
+   "metadata": {
+    "collapsed": false
+   },
+   "source": [
+    "## Table of Contents\n",
+    "\n",
+    "### 1. [Installation](#Installation)\n",
+    "### 2. [Usage](#Usage)\n",
+    "### 3. [Conclusion](#Conclusion)\n"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "58bcba1df3d0e49c",
+   "metadata": {
+    "collapsed": false
+   },
+   "source": [
+    "## Installation\n",
+    "To install NSDLib, you can use pip:\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "id": "b381088ebcc58325",
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": "!pip install nsdlib",
+   "execution_count": null
+  },
+  {
+   "cell_type": "markdown",
+   "id": "8715072a02f0319f",
+   "metadata": {
+    "collapsed": false
+   },
+   "source": [
+    "## Usage\n",
+    "To use NSDLib, you need to import the library and create a NetworkX graph. 
Then, you can use the library to compute node evaluation, identify propagation outbreaks and reconstruction propagation graphs. Here is an example of how to use NSDLib:\n" + ] + }, + { + "cell_type": "code", + "id": "61b29b2237c37c42", + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "import networkx as nx\n", + "import nsdlib as nsd\n", + "from nsdlib.common.models import SourceDetectionConfig\n", + "from nsdlib.source_detection import SourceDetector\n", + "from nsdlib.taxonomies import NodeEvaluationAlgorithm\n" + ], + "execution_count": null + }, + { + "cell_type": "markdown", + "id": "c95770f04bb13de1", + "metadata": { + "collapsed": false + }, + "source": [ + "## Creating Graphs\n", + "We'll use a built-in NetworkX graph for demonstration purposes, to have propagation graph we will remove some nodes.\n" + ] + }, + { + "cell_type": "code", + "id": "c4b54fc0cad454ca", + "metadata": { + "ExecuteTime": { + "start_time": "2024-02-22T10:24:27.617857700Z" + }, + "collapsed": false + }, + "outputs": [], + "source": [ + "G = nx.karate_club_graph()\n", + "\n", + "IG = G.copy()\n", + "IG.remove_nodes_from([10,15,20,33])" + ], + "execution_count": null + }, + { + "cell_type": "markdown", + "id": "c9cda74e41bf6f7e", + "metadata": { + "collapsed": false + }, + "source": [ + "## Reconstructing Propagation Graph\n", + "\n", + "Let's try to reconstruct propagation graph, as some nodes could be not observed. In our example we removed some of them.\n" + ] + }, + { + "cell_type": "code", + "id": "c6aed6b640d14f6c", + "metadata": { + "ExecuteTime": { + "start_time": "2024-02-22T10:24:27.617857700Z" + }, + "collapsed": false + }, + "outputs": [], + "source": "EIG = nsd.reconstruction_sbrp(G, IG)\n", + "execution_count": null + }, + { + "cell_type": "markdown", + "id": "54dfcb3b5db9fa55", + "metadata": { + "collapsed": false + }, + "source": [ + "## Finding outbreaks\n", + "\n", + "It is common that propagation starts from multiple sources. For most cases we try to divide network into smaller groups called outbreaks that should contain at least one source node. We should do it on reconstructed propagation graph.\n" + ] + }, + { + "cell_type": "code", + "id": "94ab78fa7659e69f", + "metadata": { + "ExecuteTime": { + "start_time": "2024-02-22T10:24:27.619004100Z" + }, + "collapsed": false + }, + "outputs": [], + "source": "outbreaks = nsd.outbreaks_leiden(EIG)", + "execution_count": null + }, + { + "metadata": {}, + "cell_type": "markdown", + "source": [ + "## Doing source detection\n", + "\n", + "When we have outbreaks we can try to detect sources of propagation. We can use different algorithms for this task. In this example we will use degree centrality to evaluate nodes and then take one with highest score as source." + ], + "id": "57d2c24f8914db60" + }, + { + "metadata": {}, + "cell_type": "code", + "outputs": [], + "execution_count": null, + "source": [ + "detected_sources = []\n", + "for outbreak in outbreaks.communities:\n", + " outbreak_G = G.subgraph(outbreak)\n", + " nodes_evaluation = nsd.evaluation_degree_centrality(outbreak_G)\n", + " outbreak_detected_source = max(nodes_evaluation, key=nodes_evaluation.get)\n", + " print(f\"Outbreak: {outbreak}, Detected Source: {outbreak_detected_source}\")\n", + " detected_sources.append(outbreak_detected_source)\n", + " " + ], + "id": "49daba73bd249bff" + }, + { + "metadata": {}, + "cell_type": "markdown", + "source": [ + "## Source detection evaluation\n", + "\n", + "When we have found nodes its time to evaluate our classification. 
To do this we need to know real sources of propagation. In real cases we won't have this data but for research it is popular to use simulations or have some data from real cases." + ], + "id": "4458466b4c59f541" + }, + { + "metadata": {}, + "cell_type": "code", + "outputs": [], + "execution_count": null, + "source": [ + "real_sources = [0,8]\n", + "\n", + "evaluation = nsd.compute_source_detection_evaluation(\n", + " G=EIG,\n", + " real_sources=real_sources,\n", + " detected_sources=detected_sources,\n", + ")\n", + "print(evaluation)" + ], + "id": "ba9c37e8c45dde2a" + }, + { + "cell_type": "markdown", + "id": "92c70d79aff2b146", + "metadata": { + "collapsed": false + }, + "source": [ + "## Conclusion\n", + "\n", + "In this notebook, we introduced basic usage of NSDLib for performing source detection process in given network. For more detailed examples and advanced features, refer to the NSDLib documentation and the [Jupyter Notebook](netcenlib.ipynb) available in the repository.\n" + ] + }, + { + "cell_type": "markdown", + "id": "b69ead6658dc6ca", + "metadata": { + "collapsed": false + }, + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000..78b3804 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,31 @@ +.. NSDLib documentation master file, created by + sphinx-quickstart on Tue Feb 20 15:27:13 2024. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to NSDlib's documentation! +===================================== + +``NSDLib`` (Network source detection library) is a comprehensive library designed for detecting sources of propagation in networks. This library offers a variety of algorithms that help researchers and developers analyze and identify the origins of information (epidemic etc.) spread within networks. + + +.. toctree:: + :maxdepth: 2 + :caption: Contents: + + Introduction + Quick start + Contributing + Code of conduct + Reference + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` + +.. _`Source`: https://github.com/damianfraszczak/nsdlib +.. _`Distribution`: https://pypi.org/project/nsdlib/ diff --git a/docs/source/modules.rst b/docs/source/modules.rst new file mode 100644 index 0000000..4ec26b0 --- /dev/null +++ b/docs/source/modules.rst @@ -0,0 +1,7 @@ +src +=== + +.. toctree:: + :maxdepth: 4 + + nsdlib diff --git a/docs/source/nsdlib.algorithms.algorithms_utils.rst b/docs/source/nsdlib.algorithms.algorithms_utils.rst new file mode 100644 index 0000000..a030dbe --- /dev/null +++ b/docs/source/nsdlib.algorithms.algorithms_utils.rst @@ -0,0 +1,7 @@ +nsdlib.algorithms.algorithms\_utils module +========================================== + +.. 
automodule:: nsdlib.algorithms.algorithms_utils + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/nsdlib.algorithms.evaluation.dynamic_age.rst b/docs/source/nsdlib.algorithms.evaluation.dynamic_age.rst new file mode 100644 index 0000000..64a1c9d --- /dev/null +++ b/docs/source/nsdlib.algorithms.evaluation.dynamic_age.rst @@ -0,0 +1,7 @@ +nsdlib.algorithms.evaluation.dynamic\_age module +================================================ + +.. automodule:: nsdlib.algorithms.evaluation.dynamic_age + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/nsdlib.algorithms.evaluation.jordan_center.rst b/docs/source/nsdlib.algorithms.evaluation.jordan_center.rst new file mode 100644 index 0000000..0486d08 --- /dev/null +++ b/docs/source/nsdlib.algorithms.evaluation.jordan_center.rst @@ -0,0 +1,7 @@ +nsdlib.algorithms.evaluation.jordan\_center module +================================================== + +.. automodule:: nsdlib.algorithms.evaluation.jordan_center + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/nsdlib.algorithms.evaluation.net_sleuth.rst b/docs/source/nsdlib.algorithms.evaluation.net_sleuth.rst new file mode 100644 index 0000000..f46233f --- /dev/null +++ b/docs/source/nsdlib.algorithms.evaluation.net_sleuth.rst @@ -0,0 +1,7 @@ +nsdlib.algorithms.evaluation.net\_sleuth module +=============================================== + +.. automodule:: nsdlib.algorithms.evaluation.net_sleuth + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/nsdlib.algorithms.evaluation.rst b/docs/source/nsdlib.algorithms.evaluation.rst new file mode 100644 index 0000000..2eb341a --- /dev/null +++ b/docs/source/nsdlib.algorithms.evaluation.rst @@ -0,0 +1,20 @@ +nsdlib.algorithms.evaluation package +==================================== + +Submodules +---------- + +.. toctree:: + :maxdepth: 4 + + nsdlib.algorithms.evaluation.dynamic_age + nsdlib.algorithms.evaluation.jordan_center + nsdlib.algorithms.evaluation.net_sleuth + +Module contents +--------------- + +.. automodule:: nsdlib.algorithms.evaluation + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/nsdlib.algorithms.outbreaks.rst b/docs/source/nsdlib.algorithms.outbreaks.rst new file mode 100644 index 0000000..f9647ed --- /dev/null +++ b/docs/source/nsdlib.algorithms.outbreaks.rst @@ -0,0 +1,10 @@ +nsdlib.algorithms.outbreaks package +=================================== + +Module contents +--------------- + +.. automodule:: nsdlib.algorithms.outbreaks + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/nsdlib.algorithms.reconstruction.rst b/docs/source/nsdlib.algorithms.reconstruction.rst new file mode 100644 index 0000000..26fb5ad --- /dev/null +++ b/docs/source/nsdlib.algorithms.reconstruction.rst @@ -0,0 +1,19 @@ +nsdlib.algorithms.reconstruction package +======================================== + +Submodules +---------- + +.. toctree:: + :maxdepth: 4 + + nsdlib.algorithms.reconstruction.sbrp + nsdlib.algorithms.reconstruction.utils + +Module contents +--------------- + +.. 
automodule:: nsdlib.algorithms.reconstruction
+   :members:
+   :undoc-members:
+   :show-inheritance:
diff --git a/docs/source/nsdlib.algorithms.reconstruction.sbrp.rst b/docs/source/nsdlib.algorithms.reconstruction.sbrp.rst
new file mode 100644
index 0000000..1a8c9a8
--- /dev/null
+++ b/docs/source/nsdlib.algorithms.reconstruction.sbrp.rst
@@ -0,0 +1,7 @@
+nsdlib.algorithms.reconstruction.sbrp module
+============================================
+
+.. automodule:: nsdlib.algorithms.reconstruction.sbrp
+   :members:
+   :undoc-members:
+   :show-inheritance:
diff --git a/docs/source/nsdlib.algorithms.reconstruction.utils.rst b/docs/source/nsdlib.algorithms.reconstruction.utils.rst
new file mode 100644
index 0000000..a91acf4
--- /dev/null
+++ b/docs/source/nsdlib.algorithms.reconstruction.utils.rst
@@ -0,0 +1,7 @@
+nsdlib.algorithms.reconstruction.utils module
+=============================================
+
+.. automodule:: nsdlib.algorithms.reconstruction.utils
+   :members:
+   :undoc-members:
+   :show-inheritance:
diff --git a/docs/source/nsdlib.algorithms.rst b/docs/source/nsdlib.algorithms.rst
new file mode 100644
index 0000000..d20af90
--- /dev/null
+++ b/docs/source/nsdlib.algorithms.rst
@@ -0,0 +1,28 @@
+nsdlib.algorithms package
+=========================
+
+Subpackages
+-----------
+
+.. toctree::
+   :maxdepth: 4
+
+   nsdlib.algorithms.evaluation
+   nsdlib.algorithms.outbreaks
+   nsdlib.algorithms.reconstruction
+
+Submodules
+----------
+
+.. toctree::
+   :maxdepth: 4
+
+   nsdlib.algorithms.algorithms_utils
+
+Module contents
+---------------
+
+.. automodule:: nsdlib.algorithms
+   :members:
+   :undoc-members:
+   :show-inheritance:
diff --git a/docs/source/nsdlib.common.exceptions.rst b/docs/source/nsdlib.common.exceptions.rst
new file mode 100644
index 0000000..32affc5
--- /dev/null
+++ b/docs/source/nsdlib.common.exceptions.rst
@@ -0,0 +1,7 @@
+nsdlib.common.exceptions module
+===============================
+
+.. automodule:: nsdlib.common.exceptions
+   :members:
+   :undoc-members:
+   :show-inheritance:
diff --git a/docs/source/nsdlib.common.models.rst b/docs/source/nsdlib.common.models.rst
new file mode 100644
index 0000000..6f5f949
--- /dev/null
+++ b/docs/source/nsdlib.common.models.rst
@@ -0,0 +1,7 @@
+nsdlib.common.models module
+===========================
+
+.. automodule:: nsdlib.common.models
+   :members:
+   :undoc-members:
+   :show-inheritance:
diff --git a/docs/source/nsdlib.common.nx_utils.rst b/docs/source/nsdlib.common.nx_utils.rst
new file mode 100644
index 0000000..ccfe9e9
--- /dev/null
+++ b/docs/source/nsdlib.common.nx_utils.rst
@@ -0,0 +1,7 @@
+nsdlib.common.nx\_utils module
+==============================
+
+.. automodule:: nsdlib.common.nx_utils
+   :members:
+   :undoc-members:
+   :show-inheritance:
diff --git a/docs/source/nsdlib.common.rst b/docs/source/nsdlib.common.rst
new file mode 100644
index 0000000..795b5cc
--- /dev/null
+++ b/docs/source/nsdlib.common.rst
@@ -0,0 +1,20 @@
+nsdlib.common package
+=====================
+
+Submodules
+----------
+
+.. toctree::
+   :maxdepth: 4
+
+   nsdlib.common.exceptions
+   nsdlib.common.models
+   nsdlib.common.nx_utils
+
+Module contents
+---------------
+
+.. automodule:: nsdlib.common
+   :members:
+   :undoc-members:
+   :show-inheritance:
diff --git a/docs/source/nsdlib.commons.rst b/docs/source/nsdlib.commons.rst
new file mode 100644
index 0000000..35f3ce2
--- /dev/null
+++ b/docs/source/nsdlib.commons.rst
@@ -0,0 +1,7 @@
+nsdlib.commons module
+=====================
+
+.. automodule:: nsdlib.commons
+   :members:
+   :undoc-members:
+   :show-inheritance:
diff --git a/docs/source/nsdlib.rst b/docs/source/nsdlib.rst
new file mode 100644
index 0000000..288b81f
--- /dev/null
+++ b/docs/source/nsdlib.rst
@@ -0,0 +1,31 @@
+nsdlib package
+==============
+
+Subpackages
+-----------
+
+.. toctree::
+   :maxdepth: 4
+
+   nsdlib.algorithms
+   nsdlib.common
+
+Submodules
+----------
+
+.. toctree::
+   :maxdepth: 4
+
+   nsdlib.commons
+   nsdlib.setup
+   nsdlib.source_detection
+   nsdlib.taxonomies
+   nsdlib.version
+
+Module contents
+---------------
+
+.. automodule:: nsdlib
+   :members:
+   :undoc-members:
+   :show-inheritance:
diff --git a/docs/source/nsdlib.setup.rst b/docs/source/nsdlib.setup.rst
new file mode 100644
index 0000000..c25ec9a
--- /dev/null
+++ b/docs/source/nsdlib.setup.rst
@@ -0,0 +1,7 @@
+nsdlib.setup module
+===================
+
+.. automodule:: nsdlib.setup
+   :members:
+   :undoc-members:
+   :show-inheritance:
diff --git a/docs/source/nsdlib.source_detection.rst b/docs/source/nsdlib.source_detection.rst
new file mode 100644
index 0000000..4f4fe68
--- /dev/null
+++ b/docs/source/nsdlib.source_detection.rst
@@ -0,0 +1,7 @@
+nsdlib.source\_detection module
+===============================
+
+.. automodule:: nsdlib.source_detection
+   :members:
+   :undoc-members:
+   :show-inheritance:
diff --git a/docs/source/nsdlib.taxonomies.rst b/docs/source/nsdlib.taxonomies.rst
new file mode 100644
index 0000000..66b4060
--- /dev/null
+++ b/docs/source/nsdlib.taxonomies.rst
@@ -0,0 +1,7 @@
+nsdlib.taxonomies module
+========================
+
+.. automodule:: nsdlib.taxonomies
+   :members:
+   :undoc-members:
+   :show-inheritance:
diff --git a/docs/source/nsdlib.version.rst b/docs/source/nsdlib.version.rst
new file mode 100644
index 0000000..8e99728
--- /dev/null
+++ b/docs/source/nsdlib.version.rst
@@ -0,0 +1,7 @@
+nsdlib.version module
+=====================
+
+.. automodule:: nsdlib.version
+   :members:
+   :undoc-members:
+   :show-inheritance:
diff --git a/src/aa.py b/src/aa.py
deleted file mode 100644
index b860a01..0000000
--- a/src/aa.py
+++ /dev/null
@@ -1,18 +0,0 @@
-import networkx as nx
-
-from nsdlib.common.models import SourceDetectionConfig
-from nsdlib.source_detection import SourceDetector
-from nsdlib.taxonomies import NodeEvaluationAlgorithm
-
-G = nx.karate_club_graph()
-
-config = SourceDetectionConfig(
-    node_evaluation_algorithm=NodeEvaluationAlgorithm.NETSLEUTH,
-)
-
-source_detector = SourceDetector(config)
-
-result, evaluation = source_detector.detect_sources_and_evaluate(
-    G=G, IG=G, real_sources=[0, 33]
-)
-print(result.global_scores)
diff --git a/src/nsdlib/__init__.py b/src/nsdlib/__init__.py
index 35465b3..feeb407 100644
--- a/src/nsdlib/__init__.py
+++ b/src/nsdlib/__init__.py
@@ -1,3 +1,12 @@
 # flake8: noqa
 from nsdlib.algorithms import *
+from nsdlib.common.models import NODE_TYPE, SourceDetectionEvaluation
+from nsdlib.common.nx_utils import *
+from nsdlib.source_detection import EnsembleSourceDetector, SourceDetector
+from nsdlib.taxonomies import (
+    EnsembleVotingType,
+    NodeEvaluationAlgorithm,
+    OutbreaksDetectionAlgorithm,
+    PropagationReconstructionAlgorithm,
+)
diff --git a/src/nsdlib/algorithms/__init__.py b/src/nsdlib/algorithms/__init__.py
index 2f30626..ee87307 100644
--- a/src/nsdlib/algorithms/__init__.py
+++ b/src/nsdlib/algorithms/__init__.py
@@ -1,91 +1,138 @@
 # flake8: noqa
-from nsdlib.algorithms.outbreaks_detection import (
-    CPM_Bipartite as outbreaks_detection_CPM_Bipartite,
-    agdl as outbreaks_detection_agdl,
-    angel as outbreaks_detection_angel,
-    aslpaw as outbreaks_detection_aslpaw,
-    async_fluid as outbreaks_detection_async_fluid,
-    attribute_clustering as outbreaks_detection_attribute_clustering,
-    bayan as outbreaks_detection_bayan,
-    belief as outbreaks_detection_belief,
-    bimlpa as outbreaks_detection_bimlpa,
-    bipartite_clustering as outbreaks_detection_bipartite_clustering,
-    coach as outbreaks_detection_coach,
-    condor as outbreaks_detection_condor,
-    conga as outbreaks_detection_conga,
-    congo as outbreaks_detection_congo,
-    core_expansion as outbreaks_detection_core_expansion,
-    cpm as outbreaks_detection_cpm,
-    crisp_partition as outbreaks_detection_crisp_partition,
-    dcs as outbreaks_detection_dcs,
-    demon as outbreaks_detection_demon,
-    der as outbreaks_detection_der,
-    dpclus as outbreaks_detection_dpclus,
-    ebgc as outbreaks_detection_ebgc,
-    edge_clustering as outbreaks_detection_edge_clustering,
-    ego_networks as outbreaks_detection_ego_networks,
-    eigenvector as outbreaks_detection_eigenvector,
-    em as outbreaks_detection_em,
-    endntm as outbreaks_detection_endntm,
-    eva as outbreaks_detection_eva,
-    frc_fgsn as outbreaks_detection_frc_fgsn,
-    ga as outbreaks_detection_ga,
-    gdmp2 as outbreaks_detection_gdmp2,
-    girvan_newman as outbreaks_detection_girvan_newman,
-    graph_entropy as outbreaks_detection_graph_entropy,
-    greedy_modularity as outbreaks_detection_greedy_modularity,
-    head_tail as outbreaks_detection_head_tail,
-    hierarchical_link_community as outbreaks_detection_hierarchical_link_community,
-    ilouvain as outbreaks_detection_ilouvain,
-    infomap as outbreaks_detection_infomap,
-    infomap_bipartite as outbreaks_detection_infomap_bipartite,
-    internal as outbreaks_detection_internal,
-    internal_dcd as outbreaks_detection_internal_dcd,
-    ipca as outbreaks_detection_ipca,
-    kclique as outbreaks_detection_kclique,
-    kcut as outbreaks_detection_kcut,
-    label_propagation as outbreaks_detection_label_propagation,
-    lais2 as outbreaks_detection_lais2,
-    leiden as outbreaks_detection_leiden,
-    lemon as outbreaks_detection_lemon,
-    lfm as outbreaks_detection_lfm,
-    louvain as outbreaks_detection_louvain,
-    lpam as outbreaks_detection_lpam,
-    lpanni as outbreaks_detection_lpanni,
-    lswl as outbreaks_detection_lswl,
-    lswl_plus as outbreaks_detection_lswl_plus,
-    markov_clustering as outbreaks_detection_markov_clustering,
-    mcode as outbreaks_detection_mcode,
-    mod_m as outbreaks_detection_mod_m,
-    mod_r as outbreaks_detection_mod_r,
-    multicom as outbreaks_detection_multicom,
-    node_perception as outbreaks_detection_node_perception,
-    overlapping_partition as outbreaks_detection_overlapping_partition,
-    overlapping_seed_set_expansion as outbreaks_detection_overlapping_seed_set_expansion,
-    paris as outbreaks_detection_paris,
-    percomvc as outbreaks_detection_percomvc,
-    principled_clustering as outbreaks_detection_principled_clustering,
-    pycombo as outbreaks_detection_pycombo,
-    r_spectral_clustering as outbreaks_detection_r_spectral_clustering,
-    rb_pots as outbreaks_detection_rb_pots,
-    rber_pots as outbreaks_detection_rber_pots,
-    ricci_community as outbreaks_detection_ricci_community,
-    sbm_dl as outbreaks_detection_sbm_dl,
-    sbm_dl_nested as outbreaks_detection_sbm_dl_nested,
-    scan as outbreaks_detection_scan,
-    siblinarity_antichain as outbreaks_detection_siblinarity_antichain,
-    significance_communities as outbreaks_detection_significance_communities,
-    slpa as outbreaks_detection_slpa,
-    spectral as outbreaks_detection_spectral,
-    spinglass as outbreaks_detection_spinglass,
-    surprise_communities as outbreaks_detection_surprise_communities,
-    temporal_partition as outbreaks_detection_temporal_partition,
-    threshold_clustering as outbreaks_detection_threshold_clustering,
-    tiles as outbreaks_detection_tiles,
-    umstmo as outbreaks_detection_umstmo,
-    walkscan as outbreaks_detection_walkscan,
-    walktrap as outbreaks_detection_walktrap,
-    wCommunity as outbreaks_detection_wCommunity,
+from nsdlib.algorithms.algorithms_utils import *
+from nsdlib.algorithms.evaluation import (
+    algebraic_centrality as evaluation_algebraic_centrality,
+    average_distance_centrality as evaluation_average_distance_centrality,
+    barycenter_centrality as evaluation_barycenter_centrality,
+    betweenness_centrality as evaluation_betweenness_centrality,
+    bottle_neck_centrality as evaluation_bottle_neck_centrality,
+    centroid_centrality as evaluation_centroid_centrality,
+    closeness_centrality as evaluation_closeness_centrality,
+    cluster_rank_centrality as evaluation_cluster_rank_centrality,
+    communicability_betweenness_centrality as evaluation_communicability_betweenness_centrality,
+    coreness_centrality as evaluation_coreness_centrality,
+    current_flow_betweenness_centrality as evaluation_current_flow_betweenness_centrality,
+    current_flow_closeness_centrality as evaluation_current_flow_closeness_centrality,
+    decay_centrality as evaluation_decay_centrality,
+    degree_centrality as evaluation_degree_centrality,
+    diffusion_degree_centrality as evaluation_diffusion_degree_centrality,
+    dynamic_age as evaluation_dynamic_age,
+    eccentricity_centrality as evaluation_eccentricity_centrality,
+    eigenvector_centrality as evaluation_eigenvector_centrality,
+    entropy_centrality as evaluation_entropy_centrality,
+    geodestic_k_path_centrality as evaluation_geodestic_k_path_centrality,
+    group_betweenness_centrality as evaluation_group_betweenness_centrality,
+    group_closeness_centrality as evaluation_group_closeness_centrality,
+    group_degree_centrality as evaluation_group_degree_centrality,
+    harmonic_centrality as evaluation_harmonic_centrality,
+    heatmap_centrality as evaluation_heatmap_centrality,
+    hubbell_centrality as evaluation_hubbell_centrality,
+    jordan_center as evaluation_jordan_center,
+    katz_centrality as evaluation_katz_centrality,
+    laplacian_centrality as evaluation_laplacian_centrality,
+    leverage_centrality as evaluation_leverage_centrality,
+    lin_centrality as evaluation_lin_centrality,
+    load_centrality as evaluation_load_centrality,
+    mnc_centrality as evaluation_mnc_centrality,
+    net_sleuth as evaluation_net_sleuth,
+    pagerank_centrality as evaluation_pagerank_centrality,
+    pdi_centrality as evaluation_pdi_centrality,
+    percolation_centrality as evaluation_percolation_centrality,
+    radiality_centrality as evaluation_radiality_centrality,
+    rumor_centrality as evaluation_rumor_centrality,
+    second_order_centrality as evaluation_second_order_centrality,
+    semi_local_centrality as evaluation_semi_local_centrality,
+    subgraph_centrality as evaluation_subgraph_centrality,
+    topological_centrality as evaluation_topological_centrality,
+    trophic_levels_centrality as evaluation_trophic_levels_centrality,
+)
+from nsdlib.algorithms.outbreaks import (
+    CPM_Bipartite as outbreaks_CPM_Bipartite,
+    agdl as outbreaks_agdl,
+    angel as outbreaks_angel,
+    aslpaw as outbreaks_aslpaw,
+    async_fluid as outbreaks_async_fluid,
+    attribute_clustering as outbreaks_attribute_clustering,
+    bayan as outbreaks_bayan,
+    belief as outbreaks_belief,
+    bimlpa as outbreaks_bimlpa,
+    bipartite_clustering as outbreaks_bipartite_clustering,
+    coach as outbreaks_coach,
+    condor as outbreaks_condor,
+    conga as outbreaks_conga,
+    congo as outbreaks_congo,
+    core_expansion as outbreaks_core_expansion,
+    cpm as outbreaks_cpm,
+    crisp_partition as outbreaks_crisp_partition,
+    dcs as outbreaks_dcs,
+    demon as outbreaks_demon,
+    der as outbreaks_der,
+    dpclus as outbreaks_dpclus,
+    ebgc as outbreaks_ebgc,
+    edge_clustering as outbreaks_edge_clustering,
+    ego_networks as outbreaks_ego_networks,
+    eigenvector as outbreaks_eigenvector,
+    em as outbreaks_em,
+    endntm as outbreaks_endntm,
+    eva as outbreaks_eva,
+    frc_fgsn as outbreaks_frc_fgsn,
+    ga as outbreaks_ga,
+    gdmp2 as outbreaks_gdmp2,
+    girvan_newman as outbreaks_girvan_newman,
+    graph_entropy as outbreaks_graph_entropy,
+    greedy_modularity as outbreaks_greedy_modularity,
+    head_tail as outbreaks_head_tail,
+    hierarchical_link_community as outbreaks_hierarchical_link_community,
+    ilouvain as outbreaks_ilouvain,
+    infomap as outbreaks_infomap,
+    infomap_bipartite as outbreaks_infomap_bipartite,
+    internal as outbreaks_internal,
+    internal_dcd as outbreaks_internal_dcd,
+    ipca as outbreaks_ipca,
+    kclique as outbreaks_kclique,
+    kcut as outbreaks_kcut,
+    label_propagation as outbreaks_label_propagation,
+    lais2 as outbreaks_lais2,
+    leiden as outbreaks_leiden,
+    lemon as outbreaks_lemon,
+    lfm as outbreaks_lfm,
+    louvain as outbreaks_louvain,
+    lpam as outbreaks_lpam,
+    lpanni as outbreaks_lpanni,
+    lswl as outbreaks_lswl,
+    lswl_plus as outbreaks_lswl_plus,
+    markov_clustering as outbreaks_markov_clustering,
+    mcode as outbreaks_mcode,
+    mod_m as outbreaks_mod_m,
+    mod_r as outbreaks_mod_r,
+    multicom as outbreaks_multicom,
+    node_perception as outbreaks_node_perception,
+    overlapping_partition as outbreaks_overlapping_partition,
+    overlapping_seed_set_expansion as outbreaks_overlapping_seed_set_expansion,
+    paris as outbreaks_paris,
+    percomvc as outbreaks_percomvc,
+    principled_clustering as outbreaks_principled_clustering,
+    pycombo as outbreaks_pycombo,
+    r_spectral_clustering as outbreaks_r_spectral_clustering,
+    rb_pots as outbreaks_rb_pots,
+    rber_pots as outbreaks_rber_pots,
+    ricci_community as outbreaks_ricci_community,
+    sbm_dl as outbreaks_sbm_dl,
+    sbm_dl_nested as outbreaks_sbm_dl_nested,
+    scan as outbreaks_scan,
+    siblinarity_antichain as outbreaks_siblinarity_antichain,
+    significance_communities as outbreaks_significance_communities,
+    slpa as outbreaks_slpa,
+    spectral as outbreaks_spectral,
+    spinglass as outbreaks_spinglass,
+    surprise_communities as outbreaks_surprise_communities,
+    temporal_partition as outbreaks_temporal_partition,
+    threshold_clustering as outbreaks_threshold_clustering,
+    tiles as outbreaks_tiles,
+    umstmo as outbreaks_umstmo,
+    walkscan as outbreaks_walkscan,
+    walktrap as outbreaks_walktrap,
+    wCommunity as outbreaks_wCommunity,
 )
 from nsdlib.algorithms.reconstruction import sbrp as reconstruction_sbrp
diff --git a/src/nsdlib/algorithms/algorithms_utils.py b/src/nsdlib/algorithms/algorithms_utils.py
index 3c20aca..30c9a89 100644
--- a/src/nsdlib/algorithms/algorithms_utils.py
+++ b/src/nsdlib/algorithms/algorithms_utils.py
@@ -1,15 +1,12 @@
 from functools import lru_cache
 from typing import Dict, List, Set, Union

+from cdlib import NodeClustering
 from netcenlib.common import nx_cached
 from netcenlib.common.nx_cached import MAX_SIZE
 from networkx import Graph

-from nsdlib.algorithms import (
-    node_evaluation,
-    outbreaks_detection,
-    reconstruction,
-)
+from nsdlib.algorithms import evaluation, outbreaks, reconstruction
 from nsdlib.common.models import NODE_TYPE, SourceDetectionEvaluation
 from nsdlib.taxonomies import (
     NodeEvaluationAlgorithm,
@@ -18,13 +15,18 @@
 )


+def node_clustering_into_communities(result: NodeClustering) -> Dict[NODE_TYPE, list]:
+    """Convert the node clustering result into a dictionary."""
+    return {index: community for index, community in enumerate(result.communities)}
+
+
 def identify_outbreaks(
     network: Graph, outbreaks_alg: OutbreaksDetectionAlgorithm, *args, **kwargs
-) -> Dict[int, list]:
+) -> Dict[NODE_TYPE, list]:
     """Identify outbreaks in a given network."""
     function_name = f"{outbreaks_alg.value.lower()}"
-    result = getattr(outbreaks_detection, function_name)(network, *args, **kwargs)
-    return {index: community for index, community in enumerate(result.communities)}
+    result = getattr(outbreaks, function_name)(network, *args, **kwargs)
+    return node_clustering_into_communities(result)


 def evaluate_nodes(
@@ -32,7 +34,7 @@
 ):
     """Evaluate nodes in a given network."""
     function_name = f"{evaluation_alg.value.lower()}"
-    return getattr(node_evaluation, function_name)(network, *args, **kwargs)
+    return getattr(evaluation, function_name)(network, *args, **kwargs)


 def reconstruct_propagation(
diff --git a/src/nsdlib/algorithms/evaluation/__init__.py b/src/nsdlib/algorithms/evaluation/__init__.py
new file mode 100644
index 0000000..29578ea
--- /dev/null
+++ b/src/nsdlib/algorithms/evaluation/__init__.py
@@ -0,0 +1,7 @@
+# flake8: noqa
+
+from netcenlib.algorithms import *
+
+from nsdlib.algorithms.evaluation.dynamic_age import dynamic_age
+from nsdlib.algorithms.evaluation.jordan_center import jordan_center
+from nsdlib.algorithms.evaluation.net_sleuth import net_sleuth
diff --git a/src/nsdlib/algorithms/node_evaluation/dynamic_age.py b/src/nsdlib/algorithms/evaluation/dynamic_age.py
similarity index 100%
rename from src/nsdlib/algorithms/node_evaluation/dynamic_age.py
rename to src/nsdlib/algorithms/evaluation/dynamic_age.py
diff --git a/src/nsdlib/algorithms/node_evaluation/jordan_center.py b/src/nsdlib/algorithms/evaluation/jordan_center.py
similarity index 100%
rename from src/nsdlib/algorithms/node_evaluation/jordan_center.py
rename to src/nsdlib/algorithms/evaluation/jordan_center.py
diff --git a/src/nsdlib/algorithms/node_evaluation/net_sleuth.py b/src/nsdlib/algorithms/evaluation/net_sleuth.py
similarity index 100%
rename from src/nsdlib/algorithms/node_evaluation/net_sleuth.py
rename to src/nsdlib/algorithms/evaluation/net_sleuth.py
diff --git a/src/nsdlib/algorithms/node_evaluation/__init__.py b/src/nsdlib/algorithms/node_evaluation/__init__.py
deleted file mode 100644
index caa935b..0000000
--- a/src/nsdlib/algorithms/node_evaluation/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# flake8: noqa
-
-from netcenlib.algorithms import *
-
-from nsdlib.algorithms.node_evaluation.dynamic_age import dynamic_age
-from nsdlib.algorithms.node_evaluation.jordan_center import jordan_center
-from nsdlib.algorithms.node_evaluation.net_sleuth import net_sleuth
diff --git a/src/nsdlib/algorithms/outbreaks_detection/__init__.py b/src/nsdlib/algorithms/outbreaks/__init__.py
similarity index 100%
rename from src/nsdlib/algorithms/outbreaks_detection/__init__.py
rename to src/nsdlib/algorithms/outbreaks/__init__.py