Skip to content

Commit

Permalink
Rename `cpu_count` and `total_cores` variables to `n_jobs`
Browse files Browse the repository at this point in the history
  • Loading branch information
Schefflera-Arboricola committed Aug 24, 2024
1 parent 6051193 commit fc89e35
Show file tree
Hide file tree
Showing 11 changed files with 38 additions and 38 deletions.
4 changes: 2 additions & 2 deletions nx_parallel/algorithms/approximation/connectivity.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,9 +60,9 @@ def _process_pair_chunk(pairs_chunk):
]

pairs = list(iter_func(nbunch, 2))
total_cores = nxp.get_n_jobs()
n_jobs = nxp.get_n_jobs()
if get_chunks == "chunks":
num_in_chunk = max(min(len(pairs) // total_cores, 10), 1)
num_in_chunk = max(min(len(pairs) // n_jobs, 10), 1)
pairs_chunks = nxp.chunks(pairs, num_in_chunk)
else:
pairs_chunks = get_chunks(pairs)
Expand Down
4 changes: 2 additions & 2 deletions nx_parallel/algorithms/bipartite/redundancy.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,9 +32,9 @@ def node_redundancy(G, nodes=None, get_chunks="chunks"):
"Cannot compute redundancy coefficient for a node"
" that has fewer than two neighbors."
)
total_cores = nxp.get_n_jobs()
n_jobs = nxp.get_n_jobs()
if get_chunks == "chunks":
num_in_chunk = max(len(nodes) // total_cores, 1)
num_in_chunk = max(len(nodes) // n_jobs, 1)
node_chunks = nxp.chunks(nodes, num_in_chunk)
else:
node_chunks = get_chunks(nodes)
Expand Down
8 changes: 4 additions & 4 deletions nx_parallel/algorithms/centrality/betweenness.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,10 +46,10 @@ def betweenness_centrality(
else:
nodes = seed.sample(list(G.nodes), k)

total_cores = nxp.get_n_jobs()
n_jobs = nxp.get_n_jobs()

if get_chunks == "chunks":
node_chunks = nxp.create_iterables(G, "node", total_cores, nodes)
node_chunks = nxp.create_iterables(G, "node", n_jobs, nodes)
else:
node_chunks = get_chunks(nodes)

Expand Down Expand Up @@ -116,10 +116,10 @@ def edge_betweenness_centrality(
else:
nodes = seed.sample(list(G.nodes), k)

total_cores = nxp.get_n_jobs()
n_jobs = nxp.get_n_jobs()

if get_chunks == "chunks":
node_chunks = nxp.create_iterables(G, "node", total_cores, nodes)
node_chunks = nxp.create_iterables(G, "node", n_jobs, nodes)
else:
node_chunks = get_chunks(nodes)

Expand Down
4 changes: 2 additions & 2 deletions nx_parallel/algorithms/connectivity/connectivity.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,9 +62,9 @@ def _process_pair_chunk(pairs_chunk):
]

pairs = list(iter_func(nbunch, 2))
total_cores = nxp.get_n_jobs()
n_jobs = nxp.get_n_jobs()
if get_chunks == "chunks":
num_in_chunk = max(min(len(pairs) // total_cores, 10), 1)
num_in_chunk = max(min(len(pairs) // n_jobs, 10), 1)
pairs_chunks = nxp.chunks(pairs, num_in_chunk)
else:
pairs_chunks = get_chunks(pairs)
Expand Down
4 changes: 2 additions & 2 deletions nx_parallel/algorithms/efficiency_measures.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,10 +29,10 @@ def _local_efficiency_node_subset(G, chunk):
if hasattr(G, "graph_object"):
G = G.graph_object

total_cores = nxp.get_n_jobs()
n_jobs = nxp.get_n_jobs()

if get_chunks == "chunks":
num_in_chunk = max(len(G.nodes) // total_cores, 1)
num_in_chunk = max(len(G.nodes) // n_jobs, 1)
node_chunks = list(nxp.chunks(G.nodes, num_in_chunk))
else:
node_chunks = get_chunks(G.nodes)
Expand Down
4 changes: 2 additions & 2 deletions nx_parallel/algorithms/isolate.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,11 +23,11 @@ def number_of_isolates(G, get_chunks="chunks"):
if hasattr(G, "graph_object"):
G = G.graph_object

cpu_count = nxp.get_n_jobs()
n_jobs = nxp.get_n_jobs()

isolates_list = list(nx.isolates(G))
if get_chunks == "chunks":
num_in_chunk = max(len(isolates_list) // cpu_count, 1)
num_in_chunk = max(len(isolates_list) // n_jobs, 1)
isolate_chunks = nxp.chunks(isolates_list, num_in_chunk)
else:
isolate_chunks = get_chunks(isolates_list)
Expand Down
4 changes: 2 additions & 2 deletions nx_parallel/algorithms/shortest_paths/generic.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,10 +41,10 @@ def _process_node_chunk(node_chunk):
G = G.graph_object

nodes = G.nodes
total_cores = nxp.get_n_jobs()
n_jobs = nxp.get_n_jobs()

if get_chunks == "chunks":
num_in_chunk = max(len(nodes) // total_cores, 1)
num_in_chunk = max(len(nodes) // n_jobs, 1)
node_chunks = nxp.chunks(nodes, num_in_chunk)
else:
node_chunks = get_chunks(nodes)
Expand Down
8 changes: 4 additions & 4 deletions nx_parallel/algorithms/shortest_paths/unweighted.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,10 +42,10 @@ def _process_node_chunk(node_chunk):
G = G.graph_object

nodes = G.nodes
total_cores = nxp.get_n_jobs()
n_jobs = nxp.get_n_jobs()

if get_chunks == "chunks":
num_in_chunk = max(len(nodes) // total_cores, 1)
num_in_chunk = max(len(nodes) // n_jobs, 1)
node_chunks = nxp.chunks(nodes, num_in_chunk)
else:
node_chunks = get_chunks(nodes)
Expand Down Expand Up @@ -86,10 +86,10 @@ def _process_node_chunk(node_chunk):
G = G.graph_object

nodes = G.nodes
total_cores = nxp.get_n_jobs()
n_jobs = nxp.get_n_jobs()

if get_chunks == "chunks":
num_in_chunk = max(len(nodes) // total_cores, 1)
num_in_chunk = max(len(nodes) // n_jobs, 1)
node_chunks = nxp.chunks(nodes, num_in_chunk)
else:
node_chunks = get_chunks(nodes)
Expand Down
24 changes: 12 additions & 12 deletions nx_parallel/algorithms/shortest_paths/weighted.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,10 +52,10 @@ def _process_node_chunk(node_chunk):
G = G.graph_object

nodes = G.nodes
total_cores = nxp.get_n_jobs()
n_jobs = nxp.get_n_jobs()

if get_chunks == "chunks":
num_in_chunk = max(len(nodes) // total_cores, 1)
num_in_chunk = max(len(nodes) // n_jobs, 1)
node_chunks = nxp.chunks(nodes, num_in_chunk)
else:
node_chunks = get_chunks(nodes)
Expand Down Expand Up @@ -103,10 +103,10 @@ def _process_node_chunk(node_chunk):
G = G.graph_object

nodes = G.nodes
total_cores = nxp.get_n_jobs()
n_jobs = nxp.get_n_jobs()

if get_chunks == "chunks":
num_in_chunk = max(len(nodes) // total_cores, 1)
num_in_chunk = max(len(nodes) // n_jobs, 1)
node_chunks = nxp.chunks(nodes, num_in_chunk)
else:
node_chunks = get_chunks(nodes)
Expand Down Expand Up @@ -147,10 +147,10 @@ def _process_node_chunk(node_chunk):
G = G.graph_object

nodes = G.nodes
total_cores = nxp.get_n_jobs()
n_jobs = nxp.get_n_jobs()

if get_chunks == "chunks":
num_in_chunk = max(len(nodes) // total_cores, 1)
num_in_chunk = max(len(nodes) // n_jobs, 1)
node_chunks = nxp.chunks(nodes, num_in_chunk)
else:
node_chunks = get_chunks(nodes)
Expand Down Expand Up @@ -191,10 +191,10 @@ def _process_node_chunk(node_chunk):
G = G.graph_object

nodes = G.nodes
total_cores = nxp.get_n_jobs()
n_jobs = nxp.get_n_jobs()

if get_chunks == "chunks":
num_in_chunk = max(len(nodes) // total_cores, 1)
num_in_chunk = max(len(nodes) // n_jobs, 1)
node_chunks = nxp.chunks(nodes, num_in_chunk)
else:
node_chunks = get_chunks(nodes)
Expand Down Expand Up @@ -235,10 +235,10 @@ def _process_node_chunk(node_chunk):
G = G.graph_object

nodes = G.nodes
total_cores = nxp.get_n_jobs()
n_jobs = nxp.get_n_jobs()

if get_chunks == "chunks":
num_in_chunk = max(len(nodes) // total_cores, 1)
num_in_chunk = max(len(nodes) // n_jobs, 1)
node_chunks = nxp.chunks(nodes, num_in_chunk)
else:
node_chunks = get_chunks(nodes)
Expand Down Expand Up @@ -290,9 +290,9 @@ def dist_path(v):
def _johnson_subset(chunk):
return {node: dist_path(node) for node in chunk}

total_cores = nxp.get_n_jobs()
n_jobs = nxp.get_n_jobs()
if get_chunks == "chunks":
num_in_chunk = max(len(G.nodes) // total_cores, 1)
num_in_chunk = max(len(G.nodes) // n_jobs, 1)
node_chunks = nxp.chunks(G.nodes, num_in_chunk)
else:
node_chunks = get_chunks(G.nodes)
Expand Down
8 changes: 4 additions & 4 deletions nx_parallel/algorithms/tournament.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,10 +42,10 @@ def is_closed(G, nodes):
if hasattr(G, "graph_object"):
G = G.graph_object

cpu_count = nxp.get_n_jobs()
n_jobs = nxp.get_n_jobs()

if get_chunks == "chunks":
num_in_chunk = max(len(G) // cpu_count, 1)
num_in_chunk = max(len(G) // n_jobs, 1)
node_chunks = nxp.chunks(G, num_in_chunk)
else:
node_chunks = get_chunks(G)
Expand Down Expand Up @@ -80,10 +80,10 @@ def tournament_is_strongly_connected(G, get_chunks="chunks"):
def is_reachable_subset(G, chunk):
return all(nx.tournament.is_reachable(G, u, v) for v in chunk for u in G)

cpu_count = nxp.get_n_jobs()
n_jobs = nxp.get_n_jobs()

if get_chunks == "chunks":
num_in_chunk = max(min(len(G) // cpu_count, 10), 1)
num_in_chunk = max(min(len(G) // n_jobs, 10), 1)
node_chunks = nxp.chunks(G, num_in_chunk)
else:
node_chunks = get_chunks(G)
Expand Down
4 changes: 2 additions & 2 deletions nx_parallel/algorithms/vitality.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,10 +36,10 @@ def closeness_vitality_chunk_subset(chunk):
after = nx.wiener_index(G.subgraph(set(G) - {node}), weight=weight)
return wiener_index - after

total_cores = nxp.get_n_jobs()
n_jobs = nxp.get_n_jobs()

if get_chunks == "chunks":
num_in_chunk = max(len(G) // total_cores, 1)
num_in_chunk = max(len(G) // n_jobs, 1)
node_chunks = nxp.chunks(G.nodes, num_in_chunk)
else:
node_chunks = get_chunks(G.nodes)
Expand Down

0 comments on commit fc89e35

Please sign in to comment.