Update README.md
[Neurips2024_16722.git] / Problems / synthetic_cosh.py
blobea5e4d02b12c3f38ab6a014e22ae734a7c4f5bb2
import numpy as np


class synthetic():
    """Distributed synthetic problem over n nodes.

    Minimize   sum_i log(cosh(c_i^T x - e_i))
    subject to a_i^T x - b_i <= 0   for each node i,
    where row i of C/A and entry i of e/b belong to node i.
    """

    def __init__(self, seed, num_nodes, dim):
        """Draw random problem data.

        Args:
            seed: RNG seed, for reproducible problem instances.
            num_nodes: number of agents/nodes n.
            dim: dimension d of the decision variable.
        """
        np.random.seed(seed)
        self.n = num_nodes
        self.d = dim
        self.A = np.random.randn(num_nodes, dim)   # constraint normals a_i
        self.e = np.random.randn(num_nodes)        # objective offsets e_i
        self.C = np.random.randn(num_nodes, dim)   # objective directions c_i
        # b = A x0 + U[0,1) for a random x0, so the feasible region is nonempty
        # (x0 strictly satisfies every constraint).
        self.b = np.matmul(self.A, np.random.randn(dim)) + 1 * (np.random.rand(num_nodes))
        self.N = 1
        self.X_train = None
        self.Y_train = None
        self.p = self.A.shape[1]
        self.dim = self.A.shape[1]

    def F_val(self, theta):
        """Global objective sum_i log(cosh(c_i^T theta - e_i)) at a shared theta.

        NOTE(fix): uses the natural log to match the tanh-based gradients
        below; the original np.log10 was inconsistent with localgrad/gd_grad
        by a factor of ln(10).
        """
        return np.sum(np.log(np.cosh(np.matmul(self.C, theta) - self.e)))

    def localgrad(self, theta, idx):
        """Gradient of node idx's local objective at its own iterate theta[idx].

        Uses d/dz log(cosh(z)) = tanh(z).  np.tanh is numerically stable,
        unlike sinh(z)/cosh(z), which overflows for large |z|.
        """
        z = np.inner(self.C[idx], theta[idx]) - self.e[idx]
        return np.tanh(z) * self.C[idx]

    def gd_grad(self, theta, dim):
        """Full (centralized) gradient of F_val at a single shared point theta."""
        grad = np.zeros(dim)
        for i in range(self.n):
            grad += np.tanh(np.inner(self.C[i], theta) - self.e[i]) * self.C[i]
        return grad

    def networkgrad(self, theta):
        """Stack the local gradients: row i is localgrad(theta, i)."""
        grad = np.zeros((self.n, self.p))
        for i in range(self.n):
            grad[i] = self.localgrad(theta, i)
        return grad

    def grad(self, theta):
        # Not used for this problem; kept so the class matches the common
        # problem interface.
        pass

    def local_projection(self, idx, theta):
        """Project theta[idx] onto the halfspace {x : a_idx^T x <= b_idx}."""
        tmp = np.inner(self.A[idx], theta[idx]) - self.b[idx]
        if tmp < 0:
            return theta[idx]  # already feasible: no change
        # Euclidean projection onto the boundary hyperplane a^T x = b.
        return theta[idx] - tmp * ((self.A[idx]) / (np.linalg.norm(self.A[idx])) ** 2)

    def network_projection(self, theta):
        """Apply local_projection row-wise over all n nodes."""
        proj = np.zeros((self.n, self.p))
        for i in range(self.n):
            proj[i] = self.local_projection(i, theta)
        return proj