diff --git a/core/Embedding/examples/line_wiki.py b/core/Embedding/examples/line_wiki.py
index 1cccd67f21..078d2b2a84 100644
--- a/core/Embedding/examples/line_wiki.py
+++ b/core/Embedding/examples/line_wiki.py
@@ -49,30 +49,30 @@ def plot_embeddings(embeddings,):
     nx.draw(G, node_size=10, font_size=10, font_color="blue", font_weight="bold")
     plt.savefig('wiki_line.png')
 
-    model = LINE(G, embedding_size=128, order='all')
+    model = LINE(G, embedding_size=1048, order='all')
     model.train(batch_size=1024, epochs=10, verbose=2)
     embeddings = model.get_embeddings()
 
     evaluate_embeddings(embeddings)
     plot_embeddings(embeddings)
 
-    import pandas as pd
+    # import pandas as pd
 
-    df = pd.DataFrame()
-    df['source'] = [str(i) for i in [0, 1, 2, 3, 4, 4, 6, 7, 7, 9]]
-    df['target'] = [str(i) for i in [1, 4, 4, 4, 6, 7, 5, 8, 9, 8]]
+    # df = pd.DataFrame()
+    # df['source'] = [str(i) for i in [0, 1, 2, 3, 4, 4, 6, 7, 7, 9]]
+    # df['target'] = [str(i) for i in [1, 4, 4, 4, 6, 7, 5, 8, 9, 8]]
 
-    G = nx.from_pandas_edgelist(df, create_using=nx.Graph())
+    # G = nx.from_pandas_edgelist(df, create_using=nx.Graph())
 
-    model = LINE(G, embedding_size=2, order='all')
-    model.train(batch_size=1024, epochs=2000, verbose=2)
+    # model = LINE(G, embedding_size=2, order='all')
+    # model.train(batch_size=1024, epochs=2000, verbose=2)
 
-    embeddings = model.get_embeddings()
-    # print(embeddings)
-    x, y = [], []
-    print(sorted(embeddings.items(), key=lambda x: x[0]))
-    for k, i in embeddings.items():
-        x.append(i[0])
-        y.append(i[1])
-    plt.scatter(x, y)
-    plt.show()
+    # embeddings = model.get_embeddings()
+    # # print(embeddings)
+    # x, y = [], []
+    # print(sorted(embeddings.items(), key=lambda x: x[0]))
+    # for k, i in embeddings.items():
+    #     x.append(i[0])
+    #     y.append(i[1])
+    # plt.scatter(x, y)
+    # plt.show()
diff --git a/core/Embedding/examples/sdne_wiki.py b/core/Embedding/examples/sdne_wiki.py
index 8285d41e3c..8aecbfeb0b 100644
--- a/core/Embedding/examples/sdne_wiki.py
+++ b/core/Embedding/examples/sdne_wiki.py
@@ -4,7 +4,7 @@
 sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
 
 from ge.classify import read_node_label, Classifier
-from ge import SDNE
+from ge.models.sdne_tf import SDNE
 from sklearn.linear_model import LogisticRegression
 
 import matplotlib.pyplot as plt
@@ -45,9 +45,9 @@ def plot_embeddings(embeddings,):
 if __name__ == "__main__":
-    # G = nx.read_edgelist('../data/wiki/Wiki_edgelist.txt',
-    #                      create_using=nx.DiGraph(), nodetype=None, data=[('weight', int)])
-    #
+    G = nx.read_edgelist('../data/wiki/Wiki_edgelist.txt',
+                         create_using=nx.DiGraph(), nodetype=None, data=[('weight', int)])
+
     # model = SDNE(G, hidden_size=[256, 64],)
     # model.train(batch_size=3000, epochs=40, verbose=2)
     # embeddings = model.get_embeddings()
     # evaluate_embeddings(embeddings)
     # plot_embeddings(embeddings)
@@ -55,20 +55,20 @@ def plot_embeddings(embeddings,):
-    import pandas as pd
+    # import pandas as pd
 
-    df = pd.DataFrame()
-    df['source'] = [str(i) for i in [0, 1, 2, 3, 4, 4, 6, 7, 7, 9]]
-    df['target'] = [str(i) for i in [1, 4, 4, 4, 6, 7, 5, 8, 9, 8]]
+    # df = pd.DataFrame()
+    # df['source'] = [str(i) for i in [0, 1, 2, 3, 4, 4, 6, 7, 7, 9]]
+    # df['target'] = [str(i) for i in [1, 4, 4, 4, 6, 7, 5, 8, 9, 8]]
 
-    G = nx.from_pandas_edgelist(df, create_using=nx.Graph())
+    # G = nx.from_pandas_edgelist(df, create_using=nx.Graph())
 
     # Set Pytorch environment
     model = SDNE(G, hidden_size=[4,2],)
-    # model.train(batch_size=3000, epochs=40, verbose=2)
+    model.train(batch_size=3000, epochs=40, verbose=2)
     embeddings = model.get_embeddings()
     # print(embeddings)
diff --git a/core/Embedding/ge/models/sdne_tf.py b/core/Embedding/ge/models/sdne_tf.py
new file mode 100644
index 0000000000..a15338eb5c
--- /dev/null
+++ b/core/Embedding/ge/models/sdne_tf.py
@@ -0,0 +1,140 @@
+"""TensorFlow implementation of SDNE (Wang et al., "Structural Deep Network
+Embedding", KDD 2016)."""
+import time
+
+import numpy as np
+import scipy.sparse as sp
+import tensorflow as tf
+from tensorflow.keras import backend as K
+from tensorflow.keras.callbacks import History
+from tensorflow.keras.layers import Dense, Input
+from tensorflow.keras.models import Model
+from tensorflow.keras.regularizers import l1_l2
+
+from ..utils import preprocess_nxgraph
+
+
+def l_2nd(beta):
+    """Second-order loss: reconstruction error over adjacency rows, with the
+    non-zero entries up-weighted by beta."""
+    def loss_2nd(y_true, y_pred):
+        b_ = tf.where(tf.equal(y_true, 0), 1.0, beta)
+        x = K.square((y_true - y_pred) * b_)
+        t = K.sum(x, axis=-1)
+        return K.mean(t)
+
+    return loss_2nd
+
+
+def l_1st(alpha):
+    """First-order loss: 2 * alpha * tr(Y^T L Y) / batch_size, a Laplacian
+    smoothness penalty that pulls embeddings of connected nodes together."""
+    def loss_1st(y_true, y_pred):
+        L = y_true
+        Y = y_pred
+        batch_size = tf.cast(K.shape(L)[0], dtype=tf.float32)
+        return alpha * 2 * tf.linalg.trace(tf.matmul(tf.matmul(Y, L, transpose_a=True), Y)) / batch_size
+
+    return loss_1st
+
+
+def create_model(node_size, hidden_size=[256, 128], l1=1e-5, l2=1e-4):
+    A = Input(shape=(node_size,))
+    L = Input(shape=(None,))
+
+    # Encoder: the innermost layer is the embedding Y (named '1st').
+    fc = A
+    for i in range(len(hidden_size)):
+        if i == len(hidden_size) - 1:
+            fc = Dense(hidden_size[i], activation='relu',
+                       kernel_regularizer=l1_l2(l1, l2), name='1st')(fc)
+        else:
+            fc = Dense(hidden_size[i], activation='relu',
+                       kernel_regularizer=l1_l2(l1, l2))(fc)
+    Y = fc
+
+    # Decoder: mirror of the encoder, reconstructing the adjacency row (named '2nd').
+    for i in reversed(range(len(hidden_size) - 1)):
+        fc = Dense(hidden_size[i], activation='relu',
+                   kernel_regularizer=l1_l2(l1, l2))(fc)
+    A_ = Dense(node_size, activation='relu', name='2nd')(fc)
+
+    model = Model(inputs=[A, L], outputs=[A_, Y])
+    emb = Model(inputs=A, outputs=Y)
+    return model, emb
+
+
+class SDNE(object):
+    def __init__(self, graph, hidden_size=[32, 16], alpha=1e-6, beta=5., nu1=1e-5, nu2=1e-4):
+        self.graph = graph
+        self.idx2node, self.node2idx = preprocess_nxgraph(self.graph)
+        self.node_size = self.graph.number_of_nodes()
+        self.hidden_size = hidden_size
+        self.alpha = alpha  # weight of the first-order (Laplacian) loss
+        self.beta = beta    # reconstruction penalty on non-zero adjacency entries
+        self.nu1 = nu1      # l1 regularization strength
+        self.nu2 = nu2      # l2 regularization strength
+
+        self.A, self.L = self._create_A_L(self.graph, self.node2idx)
+        self.reset_model()
+        self.inputs = [self.A, self.L]
+        self._embeddings = {}
+
+    def reset_model(self, opt='adam'):
+        self.model, self.emb_model = create_model(self.node_size, hidden_size=self.hidden_size,
+                                                  l1=self.nu1, l2=self.nu2)
+        self.model.compile(opt, [l_2nd(self.beta), l_1st(self.alpha)])
+        self.get_embeddings()
+
+    def train(self, batch_size=1024, epochs=1, initial_epoch=0, verbose=1):
+        if batch_size >= self.node_size:
+            # Full-batch training: the whole A and L fit into a single batch.
+            if batch_size > self.node_size:
+                print('batch_size({0}) > node_size({1}), set batch_size = {1}'.format(
+                    batch_size, self.node_size))
+                batch_size = self.node_size
+            return self.model.fit([self.A.todense(), self.L.todense()],
+                                  [self.A.todense(), self.L.todense()],
+                                  batch_size=batch_size, epochs=epochs,
+                                  initial_epoch=initial_epoch, verbose=verbose, shuffle=False)
+        else:
+            # Mini-batch training: slice A by rows and L by the matching
+            # row/column block, accumulating per-batch losses manually.
+            steps_per_epoch = (self.node_size - 1) // batch_size + 1
+            hist = History()
+            hist.on_train_begin()
+            logs = {}
+            for epoch in range(initial_epoch, epochs):
+                start_time = time.time()
+                losses = np.zeros(3)
+                for i in range(steps_per_epoch):
+                    index = np.arange(
+                        i * batch_size, min((i + 1) * batch_size, self.node_size))
+                    A_train = self.A[index, :].todense()
+                    L_mat_train = self.L[index][:, index].todense()
+                    inp = [A_train, L_mat_train]
+                    batch_losses = self.model.train_on_batch(inp, inp)
+                    losses += batch_losses
+                losses = losses / steps_per_epoch
+
+                logs['loss'] = losses[0]
+                logs['2nd_loss'] = losses[1]
+                logs['1st_loss'] = losses[2]
+                epoch_time = int(time.time() - start_time)
+                hist.on_epoch_end(epoch, logs)
+                if verbose > 0:
+                    print('Epoch {0}/{1}'.format(epoch + 1, epochs))
+                    print('{0}s - loss: {1: .4f} - 2nd_loss: {2: .4f} - 1st_loss: {3: .4f}'.format(
+                        epoch_time, losses[0], losses[1], losses[2]))
+            return hist
+
+    def evaluate(self):
+        return self.model.evaluate(x=self.inputs, y=self.inputs, batch_size=self.node_size)
+
+    def get_embeddings(self):
+        self._embeddings = {}
+        embeddings = self.emb_model.predict(self.A.todense(), batch_size=self.node_size)
+        look_back = self.idx2node
+        for i, embedding in enumerate(embeddings):
+            self._embeddings[look_back[i]] = embedding
+        return self._embeddings
+
+    def _create_A_L(self, graph, node2idx):
+        # Build the adjacency matrix A and the Laplacian L = D - (A + A^T)
+        # that serves as the target of the first-order loss.
+        node_size = graph.number_of_nodes()
+        A_data = []
+        A_row_index = []
+        A_col_index = []
+
+        for edge in graph.edges():
+            v1, v2 = edge
+            edge_weight = graph[v1][v2].get('weight', 1)
+
+            A_data.append(edge_weight)
+            A_row_index.append(node2idx[v1])
+            A_col_index.append(node2idx[v2])
+
+        A = sp.csr_matrix((A_data, (A_row_index, A_col_index)), shape=(node_size, node_size))
+        # Symmetrized copy used for the Laplacian.
+        A_ = sp.csr_matrix((A_data + A_data, (A_row_index + A_col_index, A_col_index + A_row_index)),
+                           shape=(node_size, node_size))
+
+        D = sp.diags(A_.sum(axis=1).flatten().tolist()[0])
+        L = D - A_
+        return A, L
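+
+
+if __name__ == "__main__":
+    # Minimal smoke test: an illustrative sketch added for documentation, not
+    # part of the original module. It embeds the 10-edge toy graph from the
+    # example scripts; hidden_size, epochs, and batch_size are arbitrary demo
+    # values. Run as a module (from core/Embedding/) so the relative import
+    # resolves: python -m ge.models.sdne_tf
+    import networkx as nx
+
+    edges = [(0, 1), (1, 4), (2, 4), (3, 4), (4, 6), (4, 7), (6, 5), (7, 8), (7, 9), (9, 8)]
+    G = nx.Graph()
+    G.add_edges_from((str(u), str(v)) for u, v in edges)
+
+    model = SDNE(G, hidden_size=[4, 2])
+    model.train(batch_size=1024, epochs=40, verbose=0)  # full-batch path: 1024 > 10 nodes
+    print(model.get_embeddings())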
diff --git a/results/arxiv_2023_acc_struc2vec.csv b/results/arxiv_2023_acc_struc2vec.csv
index 51bcc956ec..dd7c301636 100644
--- a/results/arxiv_2023_acc_struc2vec.csv
+++ b/results/arxiv_2023_acc_struc2vec.csv
@@ -29,4 +29,14 @@ arxiv_2023_7crof3co_struc2vec,0.8647822765469825,15,11,5,1,0.0104,0.0219,0.0385,
 arxiv_2023_un7h7o9f_struc2vec,0.8679653679653679,16,11,7,6,0.0924,0.1243,0.2131,0.5821,0.1322,0.0924,0.1243,0.2131,0.5821,0.9526,0.9476
 arxiv_2023_yput780x_struc2vec,0.8295136236312707,15,9,2,3,0.0158,0.026,0.1245,0.4286,0.0485,0.0158,0.026,0.1245,0.4286,0.9179,0.9107
 arxiv_2023_k78yhk8w_struc2vec,0.9064171122994652,18,8,2,7,0.0148,0.0267,0.0601,0.3443,0.0345,0.0148,0.0267,0.0601,0.3443,0.9516,0.9268
-Best,0.9064171122994652,30,20,32,9,0.0924,0.1243,0.2131,0.6249,0.1322,0.0924,0.1243,0.2131,0.6249,0.9592,0.9476
+arxiv_2023_673phci2_struc2vec,0.8773873185637892,20,10,1,10,0.0046,0.0377,0.1286,0.6636,0.0543,0.0046,0.0377,0.1286,0.6636,0.9514,0.9474
+arxiv_2023_j2zwxdh6_struc2vec,0.8417366946778712,21,4,2,6,0.0155,0.0568,0.1581,0.5279,0.0626,0.0155,0.0568,0.1581,0.5279,0.9151,0.9199
+arxiv_2023_1eudhktc_struc2vec,0.8330786860198625,21,6,4,5,0.0053,0.0138,0.1059,0.5139,0.0414,0.0053,0.0138,0.1059,0.5139,0.8801,0.8973
+arxiv_2023_qf0zs17s_struc2vec,0.8980137509549274,18,8,2,6,0.0211,0.0293,0.0794,0.5034,0.046,0.0211,0.0293,0.0794,0.5034,0.9578,0.9411
+arxiv_2023_8s9gudnz_struc2vec,0.8348612172141584,16,5,4,6,0.0015,0.0097,0.0469,0.4242,0.0254,0.0015,0.0097,0.0469,0.4242,0.8833,0.8905
+arxiv_2023_sp3hte6d_struc2vec,0.8750954927425516,20,4,1,9,0.0043,0.0328,0.1579,0.6053,0.0525,0.0043,0.0328,0.1579,0.6053,0.9486,0.9436
+arxiv_2023_posojkaw_struc2vec,0.8854087089381207,16,10,3,7,0.0173,0.055,0.1953,0.6155,0.0719,0.0173,0.055,0.1953,0.6155,0.9631,0.9552
+arxiv_2023_qw3qucvo_struc2vec,0.8949579831932774,20,7,2,5,0.0043,0.0219,0.0476,0.301,0.0254,0.0043,0.0219,0.0476,0.301,0.9473,0.9192
+arxiv_2023_czg53cqf_struc2vec,0.8497580850522027,19,9,4,7,0.0025,0.0076,0.043,0.454,0.0266,0.0025,0.0076,0.043,0.454,0.9362,0.9199
+arxiv_2023_dnvz0x3o_struc2vec,0.8689839572192514,19,7,3,7,0.0138,0.029,0.107,0.4533,0.0479,0.0138,0.029,0.107,0.4533,0.9301,0.9222
+Best,0.9064171122994652,30,20,32,10,0.0924,0.1243,0.2131,0.6636,0.1322,0.0924,0.1243,0.2131,0.6636,0.9631,0.9552
diff --git a/results/cora_acc_struc2vec.csv b/results/cora_acc_struc2vec.csv
index 965898dedc..574103d3e7 100644
--- a/results/cora_acc_struc2vec.csv
+++ b/results/cora_acc_struc2vec.csv
@@ -19,4 +19,44 @@ cora_0xp1iqbr_struc2vec,0.6349809885931559,30,40,20,11,0.0114,0.019,0.1027,0.577
 cora_7o1bqmib_struc2vec,0.6007604562737643,30,30,18,9,0.0,0.019,0.1521,0.6236,0.0493,0.0,0.019,0.1521,0.6236,0.6315,0.6482
 cora_0jkwewb4_struc2vec,0.6311787072243346,10,40,32,13,0.0114,0.038,0.1863,0.635,0.0639,0.0114,0.038,0.1863,0.635,0.6727,0.6853
 cora_kug8zglv_struc2vec,0.5893536121673004,10,20,18,13,0.0,0.019,0.1217,0.5475,0.0386,0.0,0.019,0.1217,0.5475,0.6228,0.6239
-Best,0.6501901140684411,30,50,32,13,0.0342,0.1027,0.1863,0.6806,0.0833,0.0342,0.1027,0.1863,0.6806,0.6939,0.6853
+cora_3ggovvtb_struc2vec,0.6254752851711026,10,60,22,13,0.0646,0.1103,0.2662,0.6008,0.1273,0.0646,0.1103,0.2662,0.6008,0.6711,0.7186
+cora_a1xxdfuf_struc2vec,0.6121673003802282,10,70,26,12,0.0076,0.0608,0.1331,0.5779,0.0576,0.0076,0.0608,0.1331,0.5779,0.636,0.651
+cora_6qqk43m3_struc2vec,0.6254752851711026,30,70,20,12,0.0266,0.0456,0.1483,0.6122,0.0686,0.0266,0.0456,0.1483,0.6122,0.6613,0.6791
+cora_t2mti2j6_struc2vec,0.6178707224334601,30,40,22,9,0.0114,0.0114,0.0989,0.6198,0.0439,0.0114,0.0114,0.0989,0.6198,0.6354,0.6337
+cora_3ijqlzn7_struc2vec,0.6159695817490495,10,60,28,11,0.0,0.0646,0.1369,0.6578,0.0559,0.0,0.0646,0.1369,0.6578,0.6649,0.6748
+cora_om3u65gy_struc2vec,0.6083650190114068,30,70,26,9,0.0,0.0418,0.1445,0.6274,0.0441,0.0,0.0418,0.1445,0.6274,0.6498,0.649
+cora_7jvfbwue_struc2vec,0.6330798479087453,20,50,24,11,0.0,0.0456,0.1103,0.654,0.0518,0.0,0.0456,0.1103,0.654,0.6682,0.676
+cora_fk6qr8sm_struc2vec,0.6692015209125475,20,40,26,9,0.0,0.038,0.1597,0.6502,0.0509,0.0,0.038,0.1597,0.6502,0.6831,0.6856
+cora_tol6pnu5_struc2vec,0.6615969581749049,20,50,28,10,0.0,0.0228,0.1901,0.6084,0.0531,0.0,0.0228,0.1901,0.6084,0.6509,0.6807
+cora_dn1latf7_struc2vec,0.6406844106463878,20,40,22,11,0.0,0.038,0.1749,0.5856,0.0564,0.0,0.038,0.1749,0.5856,0.645,0.6719
+cora_oye9km32_struc2vec,0.6349809885931559,30,70,26,12,0.0076,0.057,0.1141,0.6274,0.0579,0.0076,0.057,0.1141,0.6274,0.6718,0.6816
+cora_h6auyehp_struc2vec,0.6387832699619772,10,50,28,12,0.038,0.057,0.2243,0.6008,0.0942,0.038,0.057,0.2243,0.6008,0.6481,0.6953
+cora_l793w0zy_struc2vec,0.6673003802281369,20,70,20,11,0.0266,0.0456,0.1103,0.654,0.0646,0.0266,0.0456,0.1103,0.654,0.6818,0.6866
+cora_5blqxrxt_struc2vec,0.6178707224334601,10,50,22,9,0.0,0.0608,0.1635,0.6122,0.0628,0.0,0.0608,0.1635,0.6122,0.662,0.675
+cora_brz5zv0p_struc2vec,0.6216730038022814,30,60,26,11,0.0114,0.0798,0.2053,0.6502,0.0733,0.0114,0.0798,0.2053,0.6502,0.6651,0.6917
+cora_vocjh75d_struc2vec,0.6140684410646388,30,60,26,11,0.0,0.019,0.1749,0.6046,0.0477,0.0,0.019,0.1749,0.6046,0.6353,0.6556
+cora_zjy0xm2u_struc2vec,0.6634980988593155,10,70,28,12,0.0076,0.0114,0.1977,0.6274,0.0511,0.0076,0.0114,0.1977,0.6274,0.6533,0.6772
+cora_f5ef2y1p_struc2vec,0.6330798479087453,10,70,20,10,0.0076,0.0228,0.1331,0.635,0.0558,0.0076,0.0228,0.1331,0.635,0.6636,0.6771
+cora_5b2087bf_struc2vec,0.629277566539924,20,60,24,9,0.0,0.0342,0.1065,0.5817,0.0424,0.0,0.0342,0.1065,0.5817,0.6299,0.6419
+cora_87fy7ctx_struc2vec,0.6520912547528517,20,50,24,11,0.0,0.057,0.1597,0.6084,0.0608,0.0,0.057,0.1597,0.6084,0.6538,0.679
+cora_eexpp17j_struc2vec,0.6368821292775665,30,40,24,11,0.0,0.0266,0.1445,0.6388,0.0443,0.0,0.0266,0.1445,0.6388,0.6583,0.6645
+cora_c2rf9a8t_struc2vec,0.6406844106463878,20,60,24,11,0.0,0.038,0.2053,0.616,0.0586,0.0,0.038,0.2053,0.616,0.6606,0.6819
+cora_1i0mxn50_struc2vec,0.5874524714828897,30,60,20,12,0.0,0.0114,0.1749,0.5133,0.0452,0.0,0.0114,0.1749,0.5133,0.5797,0.616
+cora_9nzt5gpd_struc2vec,0.6673003802281369,10,50,24,10,0.0494,0.0875,0.2281,0.7224,0.1103,0.0494,0.0875,0.2281,0.7224,0.7207,0.7434
+cora_dwrfm99m_struc2vec,0.6311787072243346,20,40,22,11,0.0152,0.0684,0.1217,0.597,0.0625,0.0152,0.0684,0.1217,0.597,0.6432,0.6688
+cora_8mwrn672_struc2vec,0.5931558935361216,10,70,28,9,0.0152,0.0342,0.1179,0.5475,0.057,0.0152,0.0342,0.1179,0.5475,0.6055,0.6356
+cora_539lo4dk_struc2vec,0.623574144486692,10,40,24,11,0.0114,0.038,0.1597,0.5894,0.0628,0.0114,0.038,0.1597,0.5894,0.6313,0.6659
+cora_bi3p005x_struc2vec,0.6102661596958175,10,70,22,10,0.0076,0.019,0.1179,0.6046,0.047,0.0076,0.019,0.1179,0.6046,0.6373,0.645
+cora_hgwxcl3m_struc2vec,0.6178707224334601,30,70,26,13,0.0,0.0646,0.1825,0.5894,0.0632,0.0,0.0646,0.1825,0.5894,0.6419,0.6693
+cora_7qs6bg2k_struc2vec,0.6083650190114068,30,60,22,12,0.0,0.0076,0.1483,0.5361,0.0396,0.0,0.0076,0.1483,0.5361,0.6042,0.6262
+cora_2j45awrr_struc2vec,0.6577946768060836,10,40,26,12,0.0418,0.1369,0.2319,0.6996,0.1097,0.0418,0.1369,0.2319,0.6996,0.7009,0.7269
+cora_24j0llwy_struc2vec,0.6653992395437263,10,40,22,12,0.0,0.0266,0.2091,0.6388,0.0596,0.0,0.0266,0.2091,0.6388,0.6861,0.6981
+cora_fdxv35ce_struc2vec,0.623574144486692,20,70,22,12,0.0,0.0228,0.1065,0.5627,0.043,0.0,0.0228,0.1065,0.5627,0.6405,0.6521
+cora_g71fuijn_struc2vec,0.6273764258555133,20,40,28,13,0.0076,0.0989,0.1939,0.6084,0.0722,0.0076,0.0989,0.1939,0.6084,0.6742,0.6899
+cora_ay22f547_struc2vec,0.6825095057034221,10,60,26,12,0.0266,0.0456,0.2053,0.673,0.0806,0.0266,0.0456,0.2053,0.673,0.71,0.7262
+cora_dz1d36j0_struc2vec,0.6387832699619772,20,60,22,11,0.0,0.0228,0.1369,0.7034,0.048,0.0,0.0228,0.1369,0.7034,0.6968,0.6839
+cora_7ttcm3nm_struc2vec,0.6197718631178707,30,50,26,11,0.0,0.0266,0.1293,0.6046,0.0469,0.0,0.0266,0.1293,0.6046,0.6277,0.6452
+cora_0i7hsidj_struc2vec,0.6216730038022814,30,70,26,9,0.0,0.0532,0.1369,0.6122,0.0552,0.0,0.0532,0.1369,0.6122,0.6533,0.6678
+cora_vxbrjuwm_struc2vec,0.6273764258555133,20,50,24,13,0.0,0.0266,0.2167,0.597,0.0534,0.0,0.0266,0.2167,0.597,0.6321,0.6648
+cora_jqmbam1x_struc2vec,0.6577946768060836,20,50,28,9,0.0,0.0646,0.1749,0.654,0.0686,0.0,0.0646,0.1749,0.654,0.6928,0.7019
+Best,0.6825095057034221,30,70,32,13,0.0646,0.1369,0.2662,0.7224,0.1273,0.0646,0.1369,0.2662,0.7224,0.7207,0.7434
diff --git a/results/pubmed_acc_struc2vec.csv b/results/pubmed_acc_struc2vec.csv
index 6f0fa8fa52..e8cf78a94c 100644
--- a/results/pubmed_acc_struc2vec.csv
+++ b/results/pubmed_acc_struc2vec.csv
@@ -16,4 +16,18 @@ pubmed_yoj0f40u_struc2vec,0.7626353790613718,30,22,20,9,0.0135,0.023,0.0695,0.34
 pubmed_mgwyzvjo_struc2vec,0.7089350180505415,15,22,20,7,0.0135,0.0203,0.0483,0.3005,0.0308,0.0135,0.0203,0.0483,0.3005,0.7877,0.7852
 pubmed_7oxau1lw_struc2vec,0.7655685920577617,25,22,14,9,0.0239,0.0366,0.1169,0.3867,0.051,0.0239,0.0366,0.1169,0.3867,0.8348,0.8363
 pubmed_3mcjwy93_struc2vec,0.7481949458483754,15,24,16,8,0.0113,0.0162,0.0496,0.31,0.0283,0.0113,0.0162,0.0496,0.31,0.815,0.8064
-Best,0.7716606498194946,30,24,32,9,0.0343,0.0433,0.1169,0.4296,0.056,0.0343,0.0433,0.1169,0.4296,0.8399,0.8363
+pubmed_0x19uxzx_struc2vec,0.7281137184115524,30,20,16,7,0.0307,0.0672,0.0889,0.3317,0.0562,0.0307,0.0672,0.0889,0.3317,0.7788,0.8004
+pubmed_baziv88m_struc2vec,0.7258574007220217,30,24,14,9,0.0018,0.0366,0.0605,0.3321,0.0314,0.0018,0.0366,0.0605,0.3321,0.7968,0.8004
+pubmed_v9nae47m_struc2vec,0.7560920577617328,20,16,16,10,0.0158,0.0465,0.0713,0.3899,0.041,0.0158,0.0465,0.0713,0.3899,0.8076,0.8191
+pubmed_f1vzgoyr_struc2vec,0.7581227436823105,15,20,18,9,0.0185,0.028,0.0821,0.3615,0.0413,0.0185,0.028,0.0821,0.3615,0.8382,0.8304
+pubmed_dkd5naey_struc2vec,0.7290162454873647,15,22,18,9,0.0104,0.0438,0.0862,0.3249,0.0394,0.0104,0.0438,0.0862,0.3249,0.778,0.7943
+pubmed_ozv5zdii_struc2vec,0.7175090252707581,22,14,17,9,0.0032,0.0203,0.0812,0.3145,0.0282,0.0032,0.0203,0.0812,0.3145,0.7852,0.7935
+pubmed_pbpdx4sc_struc2vec,0.7116425992779783,30,18,14,9,0.0262,0.0442,0.0889,0.3732,0.0494,0.0262,0.0442,0.0889,0.3732,0.7735,0.7948
+pubmed_zpet9nnz_struc2vec,0.7441335740072202,15,22,18,7,0.0149,0.023,0.0438,0.343,0.0301,0.0149,0.023,0.0438,0.343,0.7978,0.8001
+pubmed_2otd4dav_struc2vec,0.756768953068592,19,16,14,10,0.0063,0.0122,0.0835,0.3421,0.031,0.0063,0.0122,0.0835,0.3421,0.8435,0.8305
+pubmed_042igi88_struc2vec,0.7709837545126353,15,20,20,9,0.0122,0.0149,0.0672,0.3497,0.0323,0.0122,0.0149,0.0672,0.3497,0.8555,0.8358
+pubmed_87b772v1_struc2vec,0.7154783393501805,15,20,18,7,0.0167,0.0221,0.0618,0.2888,0.033,0.0167,0.0221,0.0618,0.2888,0.7957,0.788
+pubmed_r8xk90e0_struc2vec,0.7369133574007221,18,20,15,6,0.0108,0.0203,0.0708,0.3917,0.0343,0.0108,0.0203,0.0708,0.3917,0.78,0.8012
+pubmed_4vn6ub8j_struc2vec,0.7360108303249098,15,22,18,7,0.0158,0.0275,0.0754,0.2929,0.0358,0.0158,0.0275,0.0754,0.2929,0.8199,0.8086
+pubmed_ef5s5knt_struc2vec,0.7423285198555957,22,18,15,7,0.0027,0.0266,0.0519,0.3452,0.0263,0.0027,0.0266,0.0519,0.3452,0.7951,0.8007
+Best,0.7716606498194946,30,24,32,10,0.0343,0.0672,0.1169,0.4296,0.0562,0.0343,0.0672,0.1169,0.4296,0.8555,0.8363