# exp2.py  (Neurips2024_16722)
import numpy as np
from analysis.analysis import error
from graph.graph import Random
from Optimizers import DOPTIMIZER as dopt
from utilities import utilities as ut
from Problems.logistic_regression import LR_L2
from Optimizers import COPTIMIZER as copt
import os
from utilities.plot_utils import plot_exp2

seed = np.random.randint(12345)
seed = 8075
np.random.seed(seed)
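# The random draw above is overridden by the fixed seed 8075 so that the run is
# reproducible; drop the second assignment to use a freshly sampled seed instead.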

#### create asynchronous setup
num_nodes = 20
mean_comp = 1/np.array([5,10,15,20,25,30,35,40,45,50,55,60,65,70,75,80,85,90,95,100])

T_active_exp2, Tv_nodes_exp2, node_comp_time_exp2 = \
    ut.create_computation_time(num_nodes, max_iter=int(12000), comp_time_dist='exp', mean_comp=mean_comp,
                               min_comp=None, max_comp=None, variance_comp=None, make_integer=True)
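# Each node draws exponentially distributed computation times with mean mean_comp[i],
# so node speeds are heterogeneous (roughly a 20x spread between the fastest and the
# slowest node). T_active_exp2 and Tv_nodes_exp2 presumably hold the global and
# per-node activation times built from these draws, rounded to integers (make_integer=True).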

#### Logistic regression problem ==> p: model dimension, L: smoothness constant, N: total number of training samples, b: average number of local samples, l: regularization factor
lr_0 = LR_L2(num_nodes, limited_labels=False, balanced=True, train=1000, regularization=True, lamda=None)
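# lr_0 is an L2-regularized logistic-regression objective split across the 20 nodes
# with balanced local datasets; lamda=None presumably falls back to the class's default
# regularization weight, and lr_0.p / lr_0.L expose the model dimension and smoothness
# constant used below.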

#### Create gossip matrices
zero_row_sum, zero_column_sum, row_stochastic, col_stochastic, N_out, neighbors = \
    Random(num_nodes, prob=0.6, Laplacian_dividing_factor=2).directed()
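# A random directed graph with edge probability 0.6 is generated. The zero-row-sum and
# zero-column-sum matrices are the weights consumed by Asy-DAGP, while N_out (out-degree
# counts) and the neighbor lists are what Asy-SPA and APPG use below; the row- and
# column-stochastic matrices are returned but not used later in this script.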

#### some parameters of the algorithms
depoch = 12500
rho = 0.1
alpha = 0.7
gamma = 0.5
eta = 1.0
expScale = 1/50

step_asy_dagp = 1.5/lr_0.L
step_asy_spa = 1/lr_0.L/12
step_center = 1/lr_0.L/2
step_size_appg = 1.5/lr_0.L
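# depoch is the number of decentralized iterations; rho, alpha, gamma and eta are
# Asy-DAGP hyperparameters, and expScale is the scale of the exponentially distributed
# communication delays. All step sizes are set relative to 1/lr_0.L, the inverse
# smoothness constant of the logistic loss; the exact factors (1.5, 1/12, 1/2) appear
# to be tuned per algorithm.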

## find the optimal solution of the logistic regression problem
theta_c0 = np.random.normal(0, 1, lr_0.p)
cepoch = 60000
_, theta_opt, F_opt = copt.CGD(lr_0, step_center, cepoch, theta_c0)
error_lr_0 = error(lr_0, theta_opt, F_opt)
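# Centralized gradient descent (CGD) is run for 60000 epochs to obtain a high-accuracy
# reference solution theta_opt and optimal value F_opt; error_lr_0 then measures the
# optimality gap of the decentralized iterates against this baseline.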

#### Run the optimization algorithms and compute the performance metrics
x_asy_dagp, _, _, _, _, _ = \
    dopt.Asy_DAGP(T_active_exp2, Tv_nodes_exp2, lr_0, zero_row_sum, zero_column_sum, step_asy_dagp, depoch, num_nodes, lr_0.p, rho, alpha, gamma, eta, neighbors,
                  cons=False, delay_type='exp', min_delay=None, max_delay=None, expScale_delay=expScale, drop_msg=False, drop_prob=0.)
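# Asy-DAGP runs over the asynchronous activation schedule with exponentially distributed
# message delays (scale expScale) and no message drops; only the iterate trajectory
# x_asy_dagp is kept, the remaining returned values are discarded.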

x_asyspa, _, _, _ = \
    dopt.Asy_SPA(T_active_exp2, Tv_nodes_exp2, lr_0, step_asy_spa, depoch, num_nodes, lr_0.p, N_out, neighbors, delay_type='exp', min_delay=None, max_delay=None,
                 expScale_delay=expScale, decreasing_step_size=False, correct_step_size=True, drop_msg=False, drop_prob=0.)
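# Asy-SPA uses the same exponential delay model but a constant step size
# (decreasing_step_size=False) with its step-size correction enabled.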

x_appg, _, _, _ = \
    dopt.APPG(T_active_exp2, Tv_nodes_exp2, lr_0, step_size_appg, depoch, num_nodes, lr_0.p, N_out, neighbors,
              delay_type='exp', min_delay=None, max_delay=None, expScale_delay=expScale, drop_msg=False, drop_prob=0.)
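# APPG is run under the same activation schedule and delay distribution, so the three
# trajectories below are directly comparable.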

res_F_asy_dagp = error_lr_0.cost_gap_path(np.sum(x_asy_dagp, axis=1)/num_nodes)
res_F_asyspa = error_lr_0.cost_gap_path(np.sum(x_asyspa, axis=1)/num_nodes)
res_F_appg = error_lr_0.cost_gap_path(np.sum(x_appg, axis=1)/num_nodes)
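# The per-node iterates are averaged over the node axis before evaluating the cost gap,
# i.e. the metric tracks F(x_bar_k) - F_opt for the network-average iterate x_bar_k.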

#### save data and plot results
plot_exp2(T_active_exp2, res_F_asy_dagp, res_F_asyspa, res_F_appg,
          current_dir=os.path.dirname(os.path.abspath(__file__)), save_results_folder='exp2')
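# plot_exp2 is expected to write the figure (and possibly the data) into the 'exp2'
# folder next to this script. If the raw trajectories are needed separately, a minimal
# sketch, assuming plot_exp2 does not already save them, would be:
# np.save(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'exp2', 'res_F_exp2.npy'),
#         np.vstack([res_F_asy_dagp, res_F_asyspa, res_F_appg]))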