import numpy as np


class synthetic():  ### minimize sum_i log(cosh(c_i^T x - e_i)) s.t. Ax - b <= 0
    def __init__(self, seed, num_nodes, dim):
        np.random.seed(seed)                       # seed the RNG so the random instance is reproducible
        self.A = np.random.randn(num_nodes, dim)   # constraint matrix, one row (constraint) per node
        self.e = np.random.randn(num_nodes)        # offsets inside the log-cosh objective
        self.C = np.random.randn(num_nodes, dim)   # data vectors, one row per node
        # right-hand side chosen so that a random point strictly satisfies Ax - b <= 0
        self.b = np.matmul(self.A, np.random.randn(dim)) + 1 * np.random.rand(num_nodes)
        self.n = num_nodes                         # number of nodes / local objectives
        self.p = self.A.shape[1]                   # dimension of each local copy
        self.dim = self.A.shape[1]

    def F_val(self, theta):
        # global objective at a common point theta; the natural log is used so the
        # value is consistent with the sinh/cosh (tanh) factors in the gradients below
        return np.sum(np.log(np.cosh(np.matmul(self.C, theta) - self.e)))

    def localgrad(self, theta, idx):
        # gradient of node idx's local objective log(cosh(<c_idx, theta_idx> - e_idx)),
        # evaluated at that node's own copy theta[idx]
        grad = (1 / np.cosh(np.inner(self.C[idx], theta[idx]) - self.e[idx])) * \
               np.sinh(np.inner(self.C[idx], theta[idx]) - self.e[idx]) * self.C[idx]
        return grad

    def gd_grad(self, theta, dim):
        # full (centralized) gradient of the objective at a single common point theta
        grad = np.zeros(dim)
        for i in range(self.n):
            grad += (1 / np.cosh(np.inner(self.C[i], theta) - self.e[i])) * \
                    np.sinh(np.inner(self.C[i], theta) - self.e[i]) * self.C[i]
        return grad

    def networkgrad(self, theta):
        # stack of local gradients, one row per node's copy of the decision variable
        grad = np.zeros((self.n, self.p))
        for i in range(self.n):
            grad[i] = self.localgrad(theta, i)
        return grad

    def grad(self, theta):
        # dispatch assumed from the surrounding methods: a 2-D theta holds one copy
        # per node (network gradient), a 1-D theta is a single common point
        if theta.ndim == 2:
            return self.networkgrad(theta)
        return self.gd_grad(theta, self.dim)

    def local_projection(self, idx, theta):
        # project node idx's copy onto the half-space {x : <a_idx, x> <= b_idx};
        # the feasibility check is assumed, since the closed-form step below only
        # applies when the constraint is violated
        tmp = np.inner(self.A[idx], theta[idx]) - self.b[idx]
        if tmp <= 0:
            return theta[idx]
        return theta[idx] - tmp * (self.A[idx] / (np.linalg.norm(self.A[idx])) ** 2)

    def network_projection(self, theta):
        # apply each node's local projection to its own row of theta
        proj = np.zeros((self.n, self.p))
        for i in range(self.n):
            proj[i] = self.local_projection(i, theta)
        return proj
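

# Minimal usage sketch (not from the source): problem size, step size, and iteration
# count below are illustrative assumptions. It runs projected gradient steps on the
# per-node copies and reports the objective at the averaged iterate.
if __name__ == "__main__":
    prob = synthetic(seed=0, num_nodes=5, dim=3)
    theta = np.zeros((prob.n, prob.p))   # one local copy of the decision variable per node
    step = 0.1                           # assumed step size
    for _ in range(100):
        theta = prob.network_projection(theta - step * prob.networkgrad(theta))
    print("objective at averaged iterate:", prob.F_val(theta.mean(axis=0)))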