# exp1.py
import os

import numpy as np

from Problems.synthetic_cosh import synthetic
from analysis.analysis import error
from graph.graph import Random
from Optimizers import DOPTIMIZER as dopt
from utilities import utilities as ut
from utilities.plot_utils import plot_exp1
# Fix the seed for reproducibility. The random draw on the next line is immediately
# overridden and has no effect on the results.
seed = np.random.randint(12345)
seed = 45
np.random.seed(seed)
#### Create the asynchronous setup
num_nodes = 10                     # number of nodes in the network
dim = 5                            # dimension of each node's decision variable
comp_time_dist = 'random_uniform'  # per-node computation times drawn uniformly at random

# Per-node lower and upper bounds on the computation times.
mincomp = np.array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1])
maxcomp = np.array([5, 10, 15, 20, 25, 30, 35, 40, 45, 50])
T_active_exp1, Tv_nodes_exp1, node_comp_time_exp1 = \
    ut.create_computation_time(num_nodes, max_iter=int(1e5), comp_time_dist=comp_time_dist,
                               mean_comp=None, min_comp=mincomp, max_comp=maxcomp,
                               variance_comp=None, make_integer=True)
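# Judging from how these outputs are used later in this script, T_active_exp1 and
# Tv_nodes_exp1 encode the asynchronous activation schedule consumed by Asy_DAGP, while
# node_comp_time_exp1 holds the sampled per-node computation times that plot_exp1 visualizes.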
#### Algorithm parameters
learning_rate = 0.01    # step size
max_iter_syn = 60000    # iterations for the synchronous DAGP run
max_iter_asyn = 60000   # iterations for the asynchronous Asy_DAGP run

rho = 0.01              # DAGP parameter
alpha = 0.1             # DAGP parameter
gamma = 0.5             # Asy_DAGP parameter
eta = 1.0               # Asy_DAGP parameter
expScale = 1/10.        # scale of the exponentially distributed message delays
theta_0 = np.random.randn(num_nodes, dim)   # random initial iterates, shared by all runs
#### Problem setup: parameters of the synthetic functions and constraints
prd = synthetic(seed, num_nodes, dim)
error_prd = error(prd, np.zeros(num_nodes), 0)   # helper for evaluating the cost along the iterate path
#### Create gossip matrices over a random directed graph
zero_row_sum, zero_col_sum, row_stochastic, col_stochastic, N_out, neighbors = \
    Random(num_nodes, prob=0.8, Laplacian_dividing_factor=2).directed()
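# Of the six outputs above, only zero_row_sum, zero_col_sum, and neighbors are used in this
# experiment; row_stochastic, col_stochastic, and N_out are unpacked but not consumed here.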
#### Run the optimization algorithms and compute the performance metrics
x_dagp, _, _, _ = \
    dopt.DAGP(prd, zero_row_sum, zero_col_sum, learning_rate, max_iter_syn,
              theta_0, rho, alpha, cons=True)

x_asy_dagp, _, _, _, _, Delay_mat_dagp = \
    dopt.Asy_DAGP(T_active_exp1, Tv_nodes_exp1, prd, zero_row_sum, zero_col_sum, learning_rate,
                  max_iter_asyn, num_nodes, dim, rho, alpha, gamma, eta, neighbors,
                  cons=True, delay_type='exp', min_delay=None, max_delay=None,
                  expScale_delay=expScale, drop_msg=False, drop_prob=0.)
f_DAGP = error_prd.cost_path(np.sum(x_dagp, axis=1) / num_nodes)
f_asy_dagp = error_prd.cost_path(np.sum(x_asy_dagp, axis=1) / num_nodes)
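# np.sum(x, axis=1) / num_nodes above is the network-average iterate; assuming the node index
# is axis 1, it equals np.mean(x, axis=1). A minimal sanity check under that assumption:
#   assert np.allclose(np.sum(x_dagp, axis=1) / num_nodes, np.mean(x_dagp, axis=1))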
##### Part 2: experiments for the throttled setup
np.random.seed(seed)   # re-seed so part 2 shares the same random draws as part 1

# Same setup as above, but a few nodes are throttled: their computation-time bounds are larger.
mincomp_2 = np.array([1, 2, 1, 1, 1, 1, 2, 2, 1, 1])
maxcomp_2 = np.array([5, 20, 15, 20, 50, 60, 35, 40, 45, 50])
T_active_exp1_2, Tv_nodes_exp1_2, node_comp_time_exp1_2 = \
    ut.create_computation_time(num_nodes, max_iter=int(1e5), comp_time_dist=comp_time_dist,
                               mean_comp=None, min_comp=mincomp_2, max_comp=maxcomp_2,
                               variance_comp=None, make_integer=True)
x_dagp_2, _, _, _ = \
    dopt.DAGP(prd, zero_row_sum, zero_col_sum, learning_rate, max_iter_syn,
              theta_0, rho, alpha, cons=True)

x_asy_dagp_2, _, _, _, _, Delay_mat_dagp_2 = \
    dopt.Asy_DAGP(T_active_exp1_2, Tv_nodes_exp1_2, prd, zero_row_sum, zero_col_sum, learning_rate,
                  max_iter_asyn, num_nodes, dim, rho, alpha, gamma, eta, neighbors,
                  cons=True, delay_type='exp', min_delay=None, max_delay=None,
                  expScale_delay=expScale, drop_msg=False, drop_prob=0.)
f_DAGP_2 = error_prd.cost_path(np.sum(x_dagp_2, axis=1) / num_nodes)
f_asy_dagp_2 = error_prd.cost_path(np.sum(x_asy_dagp_2, axis=1) / num_nodes)
#### Save data and plot results
plot_exp1(f_DAGP, f_DAGP_2, f_asy_dagp, f_asy_dagp_2, max_iter_syn, node_comp_time_exp1,
          node_comp_time_exp1_2, neighbors, Delay_mat_dagp, Delay_mat_dagp_2, T_active_exp1,
          T_active_exp1_2, current_dir=os.path.dirname(os.path.abspath(__file__)),
          save_results_folder='exp1')
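# With current_dir pointing at this script's directory and save_results_folder='exp1',
# plot_exp1 presumably writes its figures and saved data into an 'exp1' folder next to this file.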