-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathutils.py
169 lines (124 loc) · 5.43 KB
/
utils.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
import networkx as nx
import numpy as np
import pickle
#import pickle5 as pickle
import os.path as osp
import os
from tqdm import tqdm_notebook as tqdm
from load_temporal_graph import load_df_rssi_filter, build_graphs, get_array_of_contacts
def temporal_clustered_network_generation(n_time_steps, nb_clusters, cluster_size, p_intra, p_inter):
    """Build a temporal graph as a list of independently drawn SBM snapshots.

    Each of the ``n_time_steps`` snapshots is a fresh stochastic-block-model
    graph with ``nb_clusters`` equally sized clusters (see
    ``clustered_network_generation``); snapshots are mutually independent.

    Parameters
    ----------
    n_time_steps: int
        number of snapshots to generate
    nb_clusters: int
        number of clusters per snapshot
    cluster_size: int
        number of nodes in every cluster
    p_intra: float
        edge probability inside a cluster
    p_inter: float
        edge probability between clusters

    Returns
    ----------
    snapshots: list
        the generated snapshots, in temporal order
    """
    snapshots = []
    for _ in tqdm(range(n_time_steps)):
        snapshots.append(clustered_network_generation(nb_clusters, cluster_size, p_intra, p_inter))
    return snapshots
def clustered_network_generation(nb_clusters, cluster_size, p_intra, p_inter):
    """Draw one stochastic-block-model graph with equally sized clusters.

    Parameters
    ----------
    nb_clusters: int
        number of clusters (blocks)
    cluster_size: int
        number of nodes in every cluster
    p_intra: float
        edge probability between nodes of the same cluster
    p_inter: float
        edge probability between nodes of different clusters

    Returns
    ----------
    G: networkx.Graph
        the sampled graph
    """
    # Edge-probability matrix: entry (i, j) is the probability of an edge
    # between cluster i and cluster j -- p_inter off-diagonal, p_intra on it.
    probabilities = np.full((nb_clusters, nb_clusters), p_inter, dtype=float)
    np.fill_diagonal(probabilities, p_intra)
    return nx.stochastic_block_model([cluster_size] * nb_clusters, probabilities)
def temporal_clustered_network_generation_diff_sizes(n_time_steps, sizes, p_intra, p_inter):
    """Build a temporal graph of SBM snapshots with per-cluster sizes.

    Like ``temporal_clustered_network_generation`` but the clusters may have
    different sizes, given explicitly by ``sizes``.

    Parameters
    ----------
    n_time_steps: int
        number of snapshots to generate
    sizes: list
        number of nodes of each cluster
    p_intra: float
        edge probability inside a cluster
    p_inter: float
        edge probability between clusters

    Returns
    ----------
    snapshots: list
        the generated snapshots, in temporal order
    """
    return [clustered_network_generation_diff_sizes(sizes, p_intra, p_inter)
            for _ in tqdm(range(n_time_steps))]
def clustered_network_generation_diff_sizes(sizes, p_intra, p_inter):
    """Draw one stochastic-block-model graph with per-cluster sizes.

    Parameters
    ----------
    sizes: list
        number of nodes of each cluster
    p_intra: float
        edge probability between nodes of the same cluster
    p_inter: float
        edge probability between nodes of different clusters

    Returns
    ----------
    G: networkx.Graph
        the sampled graph
    """
    n_blocks = len(sizes)
    # Probability matrix: p_inter everywhere, then overwrite the diagonal
    # with the intra-cluster probability.
    probabilities = np.full((n_blocks, n_blocks), p_inter, dtype=float)
    np.fill_diagonal(probabilities, p_intra)
    return nx.stochastic_block_model(sizes, probabilities)
def temporal_graph_load(name):
    """Load the snapshots of a temporal graph saved under ``Graphs/<name>/``.

    Reads the snapshot count from ``num_snapshots.txt`` and unpickles
    ``graph_0.pkl`` ... ``graph_<n-1>.pkl`` in order.

    NOTE(review): this function is redefined verbatim later in this file;
    the later definition is the one actually in effect at import time.
    SECURITY: ``pickle.load`` executes arbitrary code — only load files
    written by ``temporal_graph_save`` from a trusted location.

    Parameters
    ----------
    name: str
        directory name of the saved temporal graph under ``Graphs/``

    Returns
    ----------
    graphs: list
        the unpickled snapshots, in temporal order
    """
    path = 'Graphs/' + name + '/'
    # temporal_graph_save historically wrote the count with a float format,
    # so parse via float before converting to int.
    # Renamed the file handle: the original called it `input`, shadowing the builtin.
    with open(path + 'num_snapshots.txt', 'r') as fh:
        n_graphs = int(float(fh.read()))
    graphs = []
    for idx in range(n_graphs):
        with open(path + 'graph_%d.pkl' % idx, 'rb') as fh:
            graphs.append(pickle.load(fh))
    return graphs
def temporal_graph_len(name):
    """Return the number of snapshots of the temporal graph ``Graphs/<name>/``.

    The count is stored in ``num_snapshots.txt`` at save time, so it is read
    directly. The original implementation unpickled every single snapshot
    just to take ``len()`` of the resulting list — pure wasted I/O.

    Parameters
    ----------
    name: str
        directory name of the saved temporal graph under ``Graphs/``

    Returns
    ----------
    graph_len: int
        number of snapshots
    """
    path = 'Graphs/' + name + '/'
    # Written with a float format by older saves; parse via float first.
    with open(path + 'num_snapshots.txt', 'r') as fh:
        return int(float(fh.read()))
def temporal_graph_save(graphs, name):
    """Persist the snapshots of a temporal graph under ``Graphs/<name>/``.

    Writes the snapshot count to ``num_snapshots.txt`` and pickles each
    snapshot to ``graph_<idx>.pkl``.

    NOTE(review): this function is redefined verbatim later in this file;
    the later definition is the one actually in effect at import time.

    Parameters
    ----------
    graphs: list
        snapshots of a temporal graph, in temporal order
    name: str
        directory name to save under ``Graphs/``
    """
    path = 'Graphs/' + name + '/'
    # exist_ok avoids the exists()/makedirs() race of the original code.
    os.makedirs(path, exist_ok=True)
    # Write the count as an integer ('%d'); the original '%f' produced e.g.
    # "3.000000". temporal_graph_load parses via int(float(...)), so it
    # reads both formats.
    with open(path + 'num_snapshots.txt', 'w') as out:
        out.write('%d' % len(graphs))
    for idx, graph in enumerate(graphs):
        with open(path + 'graph_%d.pkl' % idx, 'wb') as out:
            pickle.dump(graph, out, pickle.HIGHEST_PROTOCOL)
def get_individuals_from_graphs(graphs):
    """
    Get the individuals who are present in the list of graphs.
    The function returns all the unique individuals appearing in any of the
    snapshots of the temporal graph.
    Parameters
    ----------
    graphs: list
        snapshots of a temporal graph
    Returns
    ----------
    nodes_list: numpy.ndarray
        sorted unique individuals in the list of graphs
    """
    # Flatten every snapshot's node set, then deduplicate (np.unique sorts).
    all_nodes = [node for snapshot in graphs for node in snapshot.nodes()]
    return np.unique(all_nodes)
def get_DTU_graph_rssi_filter(temporal_gap, rssi_filter, n_individuals=None, n_row=None):
    """Build or load the DTU Bluetooth-contact temporal graph.

    Thin wrapper around ``get_graph_from_csv_rssi_filter`` with the DTU
    dataset name and CSV path filled in.

    NOTE(review): the CSV path below is hard-coded and machine-specific —
    confirm it is valid on the deployment machine.

    Parameters
    ----------
    temporal_gap: float
        temporal resolution of the snapshots
    rssi_filter: int
        RSSI threshold used to filter contacts
    n_individuals: int, optional
        limit on the number of individuals to load
    n_row: int, optional
        limit on the number of CSV rows to load

    Returns
    ----------
    graphs: list
        snapshots of the DTU temporal graph
    """
    dtu_csv = '../../../covid_isolation_project/new/Dataset/bt_symmetric.csv'
    return get_graph_from_csv_rssi_filter('DTU', dtu_csv, temporal_gap, rssi_filter, n_individuals, n_row)
def temporal_graph_save(graphs, name):
    """Persist the snapshots of a temporal graph under ``Graphs/<name>/``.

    Writes the snapshot count to ``num_snapshots.txt`` and pickles each
    snapshot to ``graph_<idx>.pkl``.

    NOTE(review): this is a duplicate of an identical definition earlier in
    the file; being the later one, it is the definition in effect. The
    duplicate should eventually be removed.

    Parameters
    ----------
    graphs: list
        snapshots of a temporal graph, in temporal order
    name: str
        directory name to save under ``Graphs/``
    """
    path = 'Graphs/' + name + '/'
    # exist_ok avoids the exists()/makedirs() race of the original code.
    os.makedirs(path, exist_ok=True)
    # Write the count as an integer ('%d'); the original '%f' produced e.g.
    # "3.000000". temporal_graph_load parses via int(float(...)), so it
    # reads both formats.
    with open(path + 'num_snapshots.txt', 'w') as out:
        out.write('%d' % len(graphs))
    for idx, graph in enumerate(graphs):
        with open(path + 'graph_%d.pkl' % idx, 'wb') as out:
            pickle.dump(graph, out, pickle.HIGHEST_PROTOCOL)
def temporal_graph_load(name):
    """Load the snapshots of a temporal graph saved under ``Graphs/<name>/``.

    Reads the snapshot count from ``num_snapshots.txt`` and unpickles
    ``graph_0.pkl`` ... ``graph_<n-1>.pkl`` in order.

    NOTE(review): this is a duplicate of an identical definition earlier in
    the file; being the later one, it is the definition in effect. The
    duplicate should eventually be removed.
    SECURITY: ``pickle.load`` executes arbitrary code — only load files
    written by ``temporal_graph_save`` from a trusted location.

    Parameters
    ----------
    name: str
        directory name of the saved temporal graph under ``Graphs/``

    Returns
    ----------
    graphs: list
        the unpickled snapshots, in temporal order
    """
    path = 'Graphs/' + name + '/'
    # temporal_graph_save historically wrote the count with a float format,
    # so parse via float before converting to int.
    # Renamed the file handle: the original called it `input`, shadowing the builtin.
    with open(path + 'num_snapshots.txt', 'r') as fh:
        n_graphs = int(float(fh.read()))
    graphs = []
    for idx in range(n_graphs):
        with open(path + 'graph_%d.pkl' % idx, 'rb') as fh:
            graphs.append(pickle.load(fh))
    return graphs
def get_graph_from_csv_rssi_filter(name, csv_file, temporal_gap, rssi_filter, n_individuals=None, n_row=None):
    """Return the temporal graph for a contact CSV, caching it on disk.

    The cache key encodes the temporal gap and the RSSI filter; if a graph
    with that key already exists under ``Graphs/`` it is loaded, otherwise
    it is built from the CSV and saved for next time.

    Parameters
    ----------
    name: str
        base name of the dataset (becomes part of the cache key)
    csv_file: str
        path to the contact CSV file
    temporal_gap: float
        temporal resolution of the snapshots
    rssi_filter: int
        RSSI threshold used to filter contacts
    n_individuals: int, optional
        limit on the number of individuals to load
    n_row: int, optional
        limit on the number of CSV rows to load

    Returns
    ----------
    graphs: list
        snapshots of the temporal graph
    """
    name += '_temporal_gap_%.0f_rssi_filter_%d' % (temporal_gap, rssi_filter)
    # Cache hit: reload the previously saved snapshots and return early.
    if os.path.exists('Graphs/' + name + '/'):
        print('Graph already computed: load from memory')
        return temporal_graph_load(name)
    # Cache miss: build from the raw CSV, then persist for future calls.
    print('Graph not already computed: build from data')
    df = load_df_rssi_filter(csv_file, rssi_filter, n_individuals=n_individuals, n_row=n_row)
    contacts = get_array_of_contacts(df, temporal_gap, column_name='# timestamp')
    graphs = build_graphs(contacts, temporal_gap)
    temporal_graph_save(graphs, name)
    return graphs