#### imports and functions
import os
import numpy as np

from Problems.synthetic_cosh import synthetic
from analysis.analysis import error
from Optimizers import DOPTIMIZER as dopt
from utilities import utilities as ut
from graph.graph_pg_extra import generate_graph_and_matrices
from utilities.plot_utils import plot_exp3

seed = np.random.randint(12345)
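# NOTE (assumption): the network size and variable dimension are not shown in this section.
# The placeholder values below only make the snippet self-contained; num_nodes = 10 matches the
# 10-entry mincomp/maxcomp arrays used in the asynchronous setup, while dim is arbitrary.
num_nodes = 10
dim = 5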
#### create asynchronous setup
comp_time_dist = 'random_uniform'

mincomp = np.array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1])
maxcomp = np.array([5, 10, 15, 20, 25, 30, 35, 40, 45, 50])

T_active_exp3, Tv_nodes_exp3, node_comp_time_exp3 = \
    ut.create_computation_time(num_nodes, max_iter=int(1e5), comp_time_dist=comp_time_dist, mean_comp=None,
                               min_comp=mincomp, max_comp=maxcomp, variance_comp=None, make_integer=True)
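# The computation-time model returns activation schedules for the whole network (T_active_exp3)
# and for each node (Tv_nodes_exp3); both are fed to the asynchronous solvers and to plot_exp3 below.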
#### some parameters of the algorithms
relax_param = 0.8 * np.ones(num_nodes)
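# NOTE (assumption): the remaining hyper-parameters used below are not shown in this section;
# the assignments here are illustrative placeholders only, not the values used in the paper.
learning_rate = 0.1      # step size shared by DAGP, Asy-DAGP and Asy_pg_extra (placeholder)
depoch = int(1e4)        # number of iterations per run (placeholder)
rho, alpha = 0.1, 0.1    # DAGP parameters (placeholder)
gamma, eta = 1.0, 1.0    # additional Asy-DAGP parameters (placeholder)
expScale = 1.0           # scale of the exponential message delays (placeholder)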
#### Problem setup: parameters of the synthetic functions and constraints.
prd = synthetic(seed, num_nodes, dim)
error_prd = error(prd, np.zeros(num_nodes), 0)
#### Create gossip matrices
L, C, W, D, V, neighbors_list, edges_connected, edge_indices, num_edges, H, neighbors, zero_row_sum, zero_col_sum = \
    generate_graph_and_matrices(num_nodes, 0.8, plot=False)
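# Only part of the returned tuple is used below: zero_row_sum and zero_col_sum (gossip matrices named
# for their zero row/column sums) parameterize the DAGP-based solvers, while W, V and edge_indices
# parameterize the asynchronous PG-EXTRA run.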
#### find the optimal solution by running the DAGP algorithm
x_dagp, z_dagp, h_dagp, g_dagp = dopt.DAGP(prd, zero_row_sum, zero_col_sum, learning_rate, 2*depoch,
                                           np.random.randn(num_nodes, dim), rho, alpha, cons=True)
f_dagp = error_prd.cost_path(np.sum(x_dagp, axis=1)/num_nodes)
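# NOTE (assumption): f_opt, the reference optimal value used for the optimality gaps below, is not
# defined in this section. Assuming cost_path returns the cost along the averaged iterates, the final
# DAGP cost is used here as a stand-in.
f_opt = f_dagp[-1]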
#### Run the optimization algorithms and compute the performance metrics
x_asy_dagp, _, _, _, _, _ = \
    dopt.Asy_DAGP(T_active_exp3, Tv_nodes_exp3, prd, zero_row_sum, zero_col_sum, learning_rate, depoch,
                  num_nodes, dim, rho, alpha, gamma, eta, neighbors,
                  cons=True, delay_type='exp', min_delay=None, max_delay=None, expScale_delay=expScale,
                  drop_msg=False, drop_prob=0.)

# x_pgex is needed for the optimality gap below; the original call discarded the outputs of
# Asy_pg_extra, so the iterates are assumed here to be its first return value.
x_pgex, *_ = \
    dopt.Asy_pg_extra(relax_param, T_active_exp3, Tv_nodes_exp3, prd, W, V, edge_indices, learning_rate,
                      depoch, num_nodes, num_edges, dim, neighbors,
                      cons=True, delay_type='exp', min_delay=None, max_delay=None, expScale_delay=expScale,
                      drop_msg=False, drop_prob=0.)

f_asy_pgex = abs(error_prd.cost_path(np.sum(x_pgex, axis=1)/num_nodes) - f_opt)
f_asy_dagp = abs(error_prd.cost_path(np.sum(x_asy_dagp, axis=1)/num_nodes) - f_opt)
#### save data and plot results: the optimality gap is plotted versus the iteration count. In the paper, it is plotted versus the number of communications.
plot_exp3(T_active_exp3, f_asy_dagp, f_asy_pgex, current_dir=os.path.dirname(os.path.abspath(__file__)),
          save_results_folder='exp3', plot_iter=depoch)