-
Notifications
You must be signed in to change notification settings - Fork 0
/
Random.py
732 lines (637 loc) · 29.1 KB
/
Random.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
import argparse
import logging
import os
import pickle
import random
from collections import Counter
import cvxpy as cp
import networkx as nx
import numpy as np
from cvxpy.atoms.elementwise.minimum import minimum
from cvxpy.atoms.elementwise.power import power
import topologies
# questions:
# Do we need an upper bound of 1 on any variables?
# Can we incorporate capacity constraints?
# How does x_{v(i,j)} contribute to z/capacity constraints/costs, etc.?
def minhalf(a, b):
    """Return min(a, b) + 0.5 * [a - b]_+, which equals 0.5 * (a + min(a, b))."""
    smaller = minimum(a, b)
    return (a + smaller) / 2
class CacheNetwork:
    """A model of a network of caches serving (possibly coded) content.

    Given a directed graph, per-edge and per-node cost functions (cvxpy
    atoms), and per-target demand, this class builds a convex program whose
    variables are flows (z, rho), usable-cache bounds (xi), and decoding
    rates (mu), subject to decodability and demand constraints.
    """

    def __init__(self, G, we, wv, dem, cat, hyperE=None, c=None):
        """ Instantiate a new network. Inputs are:
            - G : a networkx graph
            - we : dictionary containing edge weight/cost functions; must be constructed from cvxpy atoms
            - wv : dictionary containing node storage weight/cost functions; must be constructed from cvxpy atoms
            - dem : dictionary containing demand; d[t][i] contains demand for item i in target node t
            - cat : content catalog
            - hyperE : two dimensional list containing pairs of hyperedges
            - c : dictionary containing storage capacity constraints (optional)"""
        logging.debug("Initializing object...")
        self.G = G
        self.we = we
        self.wv = wv
        self.demand = dem
        self.catalog = sorted(cat)
        self.targets = sorted(dem.keys())
        # Fixed: mutable default arguments ([]/{}) would be shared across
        # instances; use None sentinels and build fresh containers instead.
        self.c = {} if c is None else c
        self.hyperE = [] if hyperE is None else hyperE

    def is_target(self, t, i):
        """Detect whether node t is a target for item i."""
        return t in self.demand and i in self.demand[t]

    def random_x(self):
        """Build a heuristic cache placement: each node v splits its capacity
        c[v] across items proportionally to the items' maximum demand over
        all targets. Pair (coded) entries x[v][(i, j)] start at 0."""
        dem_max = {}
        for i in self.catalog:
            dem_max[i] = 0
        for t in self.targets:
            for i in self.catalog:
                # Fixed: use .get so a target that does not demand item i
                # contributes 0 instead of raising KeyError.
                dem_max[i] = max(dem_max[i], self.demand[t].get(i, 0))
        dem_sum = sum(dem_max.values())
        x = {}
        for v in self.G.nodes():
            x[v] = {}
            for i in self.catalog:
                x[v][i] = self.c[v] * dem_max[i] / dem_sum
                for j in self.catalog:
                    if j > i:
                        x[v][(i, j)] = 0
        return x

    def cvx_init(self, x):
        """Construct the cvxpy problem instance around a fixed placement x."""
        self.vars = {}  # used to access variables outside cvx program
        # cache variables
        logging.debug("Loading cache variables...")
        self.vars['x'] = x
        # xi book-keeping variables: how much of x[v] target t actually uses
        logging.debug("Creating xi book-keeping variables...")
        xi = {}
        for t in self.demand:
            xi[t] = {}
            for v in self.G.nodes():
                xi[t][v] = {}
                for i in self.catalog:  # not self.demand[t], because of cross coding you may want to use traffic not demanded by t
                    xi[t][v][i] = cp.Variable()
                    for j in self.catalog:
                        if j > i:
                            xi[t][v][(i, j)] = cp.Variable()
        self.vars['xi'] = xi
        # z flow variables: aggregate flow on each edge, per item / item pair
        logging.debug("Creating z flow variables...")
        z = {}
        for e in self.G.edges():
            z[e] = {}
            for i in self.catalog:
                z[e][i] = cp.Variable()
                for j in self.catalog:
                    if j > i:
                        z[e][(i, j)] = cp.Variable()
        self.vars['z'] = z
        # rho book-keeping variables: per-target share of each edge flow
        logging.debug("Creating rho book-keeping variables...")
        rho = {}
        for t in self.demand:
            rho[t] = {}
            for e in self.G.edges():
                rho[t][e] = {}
                for i in self.catalog:
                    rho[t][e][i] = cp.Variable()
                    for j in self.catalog:
                        if j > i:
                            rho[t][e][(i, j)] = cp.Variable()
        self.vars['rho'] = rho
        # mu decoding capability variables: rate of item i decodable at v for t
        logging.debug("Creating mu decoding capability variables...")
        mu = {}
        for t in self.demand:
            mu[t] = {}
            for v in self.G.nodes():
                mu[t][v] = {}
                for i in self.catalog:
                    mu[t][v][i] = cp.Variable()
        self.vars['mu'] = mu
        # objective: edge costs on total edge flow, plus (when storage is
        # unconstrained) node storage costs on total cached mass
        logging.debug("Creating objective...")
        obj = 0
        for e in self.we:
            ze = 0
            for ii in z[e]:
                ze += z[e][ii]
            obj += self.we[e](ze)
        capacities = list(self.c.values())
        # NOTE(review): only the *last* capacity entry is inspected — this
        # assumes capacities are uniformly finite or uniformly Inf; confirm.
        if capacities[-1] == float('Inf'):  # if no constraint
            for v in self.wv:
                xv = 0
                for ii in x[v]:
                    xv += x[v][ii]
                obj += self.wv[v](xv)
        # constraints
        logging.debug("Creating constraints...")
        constr = []
        logging.debug("Creating xi variable non-negativity constraints...")
        for t in xi:
            for v in xi[t]:
                for ii in xi[t][v]:
                    constr.append(xi[t][v][ii] >= 0)
        logging.debug("Creating rho variable non-negativity constraints...")
        for t in rho:
            for e in rho[t]:
                for ii in rho[t][e]:
                    constr.append(rho[t][e][ii] >= 0)
        logging.debug("Creating z variable non-negativity constraints...")
        for e in z:
            for ii in z[e]:
                constr.append(z[e][ii] >= 0)
        logging.debug("Creating mu variable non-negativity constraints...")
        for t in mu:
            for v in mu[t]:
                for ii in mu[t][v]:
                    constr.append(mu[t][v][ii] >= 0)
        # hyperedges for z flow: all edges in a hyperedge carry identical flow
        logging.debug("Creating hyperedges for z flow variables...")
        for hyperedge in self.hyperE:
            for pos in range(len(hyperedge) - 1):
                for ii in z[hyperedge[pos]]:
                    constr.append(z[hyperedge[pos]][ii] == z[hyperedge[pos + 1]][ii])
        # book-keeping constraints: per-target shares bounded by totals
        logging.debug("Creating z book-keeping constraints...")
        for t in rho:
            for e in rho[t]:
                for ii in rho[t][e]:
                    constr.append(rho[t][e][ii] <= z[e][ii])
        logging.debug("Creating x book-keeping constraints...")
        for t in xi:
            for v in xi[t]:
                for ii in xi[t][v]:
                    constr.append(xi[t][v][ii] <= x[v][ii])
        # flow constraints
        logging.debug("Creating rho flow constraints...")
        # Decodability rate constraints
        logging.debug("Creating in flows...")
        in_flow = {}
        out_flow = {}
        for t in self.targets:
            in_flow[t] = {}
            out_flow[t] = {}
            for v in self.G.nodes():
                in_edges = self.G.in_edges(v)
                out_edges = self.G.out_edges(v)
                in_flow[t][v] = {}
                out_flow[t][v] = {}
                for i in self.catalog:
                    # in-flow of item i at v = usable cache + incoming rho
                    in_flow[t][v][i] = xi[t][v][i]
                    for e in in_edges:
                        in_flow[t][v][i] += rho[t][e][i]
                    for j in self.catalog:
                        if j > i:
                            in_flow[t][v][(i, j)] = xi[t][v][(i, j)]
                            for e in in_edges:
                                in_flow[t][v][(i, j)] += rho[t][e][(i, j)]
                    out_flow[t][v][i] = 0
                    for e in out_edges:
                        out_flow[t][v][i] += rho[t][e][i]
                    for j in self.catalog:
                        if j > i:
                            out_flow[t][v][(i, j)] = 0
                            for e in out_edges:
                                out_flow[t][v][(i, j)] += rho[t][e][(i, j)]
        self.vars['in_flow'] = in_flow
        self.vars['out_flow'] = out_flow
        logging.debug("Creating decodability constraints...")
        for t in self.targets:
            for v in self.G.nodes():
                for i in self.catalog:
                    # decodable rate of i = plain in-flow plus what can be
                    # extracted from coded pairs containing i
                    dec_rate = in_flow[t][v][i]
                    for j in self.catalog:
                        if j < i:
                            dec_rate += minimum(in_flow[t][v][(j, i)], mu[t][v][j])
                        if j > i:
                            dec_rate += minhalf(in_flow[t][v][(i, j)], in_flow[t][v][j])
                    constr.append(dec_rate >= mu[t][v][i])
        # Outgoing flow is bounded by decodability
        logging.debug("Creating unitary outgoing flow constraints...")
        for t in self.targets:
            for v in self.G.nodes():
                for i in self.catalog:
                    constr.append(mu[t][v][i] >= out_flow[t][v][i])
        logging.debug("Creating pair outgoing flow constraints...")
        for t in self.targets:
            for v in self.G.nodes():
                for i in self.catalog:
                    for j in self.catalog:
                        if j > i:
                            constr.append(2 * minimum(mu[t][v][i], mu[t][v][j]) >= out_flow[t][v][(i, j)])
        # Demand should be met
        logging.debug("Creating demand constraints...")
        for t in self.targets:
            for i in self.demand[t]:  # demand met should be restricted to i's in demand[t]
                constr.append(mu[t][t][i] >= self.demand[t][i])
        self.problem = cp.Problem(cp.Minimize(obj), constr)
        logging.debug("Problem is DCP: " + str(self.problem.is_dcp()))

    def solve(self):
        """Solve cvxpy instance (requires the MOSEK solver to be installed)."""
        logging.debug("Running cvxpy solver...")
        self.problem.solve(solver=cp.MOSEK, verbose=True)

    def test_feasibility(self, tol=1e-5):
        """Confirm that the solution is feasible, up to tolerance tol."""
        x = self.vars['x']
        xi = self.vars['xi']
        z = self.vars['z']
        rho = self.vars['rho']
        mu = self.vars['mu']
        in_flow = self.vars['in_flow']
        out_flow = self.vars['out_flow']
        logging.debug("Asserting xi variable non-negativity...")
        for t in xi:
            for v in xi[t]:
                for ii in xi[t][v]:
                    assert (xi[t][v][ii].value >= -tol), "Target %s cache %s item %s has negative xi value: %f" % (
                        t, v, ii, xi[t][v][ii].value)
        logging.debug("Asserting rho variable non-negativity")
        for t in rho:
            for e in rho[t]:
                for ii in rho[t][e]:
                    assert (rho[t][e][ii].value >= -tol), "Target %s edge %s item %s has negative rho value %f" % (
                        t, e, ii, rho[t][e][ii].value)
        logging.debug('Asserting flow variable non-negativity...')
        for e in z:
            for ii in z[e]:
                # Fixed: message previously read z[v][ii] — 'v' is not the
                # loop variable here and would raise NameError on failure.
                assert (z[e][ii].value >= -tol), "Edge %s, item %s has negative z value: %f" % (e, ii, z[e][ii].value)
        logging.debug("Asserting mu variable non-negativity")
        for t in mu:
            for v in mu[t]:
                for ii in mu[t][v]:
                    assert (mu[t][v][ii].value >= -tol), "Target %s cache %s item %s has negative mu value %f" % (
                        t, v, ii, mu[t][v][ii].value)
        logging.debug("Asserting hyperedges for z flow variables...")
        for hyperedge in self.hyperE:
            for pos in range(len(hyperedge) - 1):
                for ii in z[hyperedge[pos]]:
                    assert (abs(z[hyperedge[pos]][ii].value - z[hyperedge[pos + 1]][
                        ii].value) <= tol), "Edge %s and edge %s item %s do not have the same z with %f and %f" % (
                        hyperedge[pos], hyperedge[pos + 1], ii, z[hyperedge[pos]][ii].value,
                        z[hyperedge[pos + 1]][ii].value)
        logging.debug("Asserting z book-keeping constraints...")
        for t in rho:
            for e in rho[t]:
                for ii in rho[t][e]:
                    assert (z[e][ii].value - rho[t][e][
                        ii].value >= -tol), "Target %s edge %s item %s not satisfy z book-keeping with %f" % (
                        t, e, ii, z[e][ii].value - rho[t][e][ii].value)
        logging.debug("Asserting x book-keeping constraints...")
        for t in xi:
            for v in xi[t]:
                for ii in xi[t][v]:
                    # Fixed: message previously printed z/rho values from
                    # stale loop variables and said "z book-keeping".
                    assert (x[v][ii] - xi[t][v][
                        ii].value >= -tol), "Target %s cache %s item %s not satisfy x book-keeping with %f" % (
                        t, v, ii, x[v][ii] - xi[t][v][ii].value)
        logging.debug("Asserting decodability constraints...")
        for t in self.targets:
            for v in self.G.nodes():
                for i in self.catalog:
                    dec_rate = in_flow[t][v][i]
                    for j in self.catalog:
                        if j < i:
                            dec_rate += minimum(in_flow[t][v][(j, i)], mu[t][v][j])
                        if j > i:
                            dec_rate += minhalf(in_flow[t][v][(i, j)], in_flow[t][v][j])
                    assert (dec_rate.value - mu[t][v][
                        i].value >= -tol), "Target %s cache %s item %s not satisfy decodability constraints with %f" % (
                        t, v, i, dec_rate.value - mu[t][v][i].value)
        logging.debug("Asserting unitary outgoing flow constraints...")
        for t in self.targets:
            for v in self.G.nodes():
                for i in self.catalog:
                    if type(out_flow[t][v][i]) is int:  # there is no out edges
                        assert (mu[t][v][
                            i].value >= -tol), "Target %s cache %s item %s not satisfy unitary outgoing flow constraints with %f" % (
                            t, v, i, mu[t][v][i].value)
                    else:
                        assert (mu[t][v][i].value - out_flow[t][v][
                            i].value >= -tol), "Target %s cache %s item %s not satisfy unitary outgoing flow constraints with %f" % (
                            t, v, i, mu[t][v][i].value - out_flow[t][v][i].value)
        logging.debug("Asserting pair outgoing flow constraints...")
        for t in self.targets:
            for v in self.G.nodes():
                for i in self.catalog:
                    for j in self.catalog:
                        if j > i:
                            if type(out_flow[t][v][(i, j)]) is int:  # there is no out edges
                                assert (2 * minimum(mu[t][v][i], mu[t][v][
                                    j]).value >= -tol), "Target %s cache %s item (%s, %s) not satisfy pair outgoing flow constraints with %f" % (
                                    t, v, i, j, 2 * minimum(mu[t][v][i], mu[t][v][j]).value)
                            else:
                                assert (2 * minimum(mu[t][v][i], mu[t][v][j]).value - out_flow[t][v][(i,
                                    j)].value >= -tol), "Target %s cache %s item (%s, %s) not satisfy pair outgoing flow constraints with %f" % (
                                    t, v, i, j, 2 * minimum(mu[t][v][i], mu[t][v][j]).value - out_flow[t][v][(i, j)].value)
        logging.debug("Asserting demand constraints...")
        for t in self.targets:
            for i in self.demand[t]:  # demand met should be restricted to i's in demand[t]
                # Fixed: demand entries are plain floats; the old message
                # called .value on them (AttributeError on failure) and
                # mislabeled this as a "decodability" constraint.
                assert (mu[t][t][i].value - self.demand[t][
                    i] >= -tol), "Target %s cache %s item %s not satisfy demand constraints with %f" % (
                    t, t, i, mu[t][t][i].value - self.demand[t][i])

    def solution(self):
        """Extract solved values. NOTE: mutates self.vars in place, replacing
        z entries (and demanded mu entries) with their numeric values.
        Returns (x, z, mu, objE, objV): placement, flows, decoding rates,
        and edge / node cost totals."""
        x = self.vars['x']
        z = self.vars['z']
        mu = self.vars['mu']
        for e in z:
            for ii in z[e]:
                z[e][ii] = z[e][ii].value
        for t in self.targets:
            for i in self.demand[t]:
                mu[t][t][i] = mu[t][t][i].value
        objE = 0
        for e in self.we:
            ze = 0
            for ii in z[e]:
                ze += z[e][ii]
            objE += self.we[e](ze).value
        objV = 0
        for v in self.wv:
            xv = 0
            for ii in x[v]:
                xv += x[v][ii]
            objV += self.wv[v](xv).value
        return x, z, mu, objE, objV
def main():
    """Parse CLI arguments, build the topology, generate demand, then solve
    the coded-cache placement problem and pickle the solution to disk."""
    parser = argparse.ArgumentParser(description='Simulate a Network of Coded Caches',
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('--debug_level', default='DEBUG', type=str, help='Debug Level',
                        choices=['INFO', 'DEBUG', 'WARNING', 'ERROR'])
    parser.add_argument('--graph_type', type=str, help='Graph type', choices=['Maddah-Ali', 'tree',
                                                                              'erdos_renyi', 'balanced_tree',
                                                                              'hypercube', "cicular_ladder", "cycle",
                                                                              "grid_2d",
                                                                              'lollipop', 'expander', 'star',
                                                                              'barabasi_albert', 'watts_strogatz',
                                                                              'regular', 'powerlaw_tree', 'small_world',
                                                                              'geant', 'abilene', 'dtelekom',
                                                                              'servicenetwork'])
    parser.add_argument('--graph_size', default=100, type=int, help='Network size')
    parser.add_argument('--graph_degree', default=3, type=int,
                        help='Degree. Used by balanced_tree, regular, barabasi_albert, watts_strogatz')
    # Fixed: a probability must be parsed as float (was type=int, which
    # rejects values like 0.1 passed on the command line).
    parser.add_argument('--graph_p', default=0.10, type=float, help='Probability, used in erdos_renyi, watts_strogatz')
    parser.add_argument('--target_nodes', default=5, type=int, help='Number of nodes generating demands')
    parser.add_argument('--catalog_size', default=4, type=int, help='Catalog size')
    parser.add_argument('--random_seed', default=10, type=int, help='Random seed')
    parser.add_argument('--zipf_parameter', default=1.2, type=float,
                        help='parameter of Zipf, used in demand distribution')
    parser.add_argument('--capacity', default=float('Inf'), type=float, help='capacity of each node')
    parser.add_argument('--difference', default=0.0, type=float, help='difference of two items used in exploring')
    parser.add_argument('--penalty', default=1.0, type=float, help='penalty of edge')
    parser.add_argument('--penalty_mul', default=2.0, type=float, help='increase penalty for tree')
    parser.add_argument('--capacity_mul', default=2.0, type=float, help='decrease capacity for tree')
    parser.add_argument('--method', type=str, help='Methods to solve problem', choices=['ma', 'cc'])
    args = parser.parse_args()

    # Fixed: map the level name with getattr instead of eval'ing a string.
    args.debug_level = getattr(logging, args.debug_level)
    np.random.seed(args.random_seed)
    random.seed(args.random_seed)
    logging.basicConfig(level=args.debug_level)

    def graphGenerator():
        """Build the raw topology selected by --graph_type (argparse choices
        guarantee one branch matches)."""
        if args.graph_type == 'Maddah-Ali':
            return nx.balanced_tree(2, 1)
        if args.graph_type == 'tree':
            temp = nx.balanced_tree(3, 2)
            temp.add_node(-1)
            temp.add_edge(-1, 0)
            return temp
        if args.graph_type == "erdos_renyi":
            return nx.erdos_renyi_graph(args.graph_size, args.graph_p)
        if args.graph_type == "balanced_tree":
            ndim = int(np.ceil(np.log(args.graph_size) / np.log(args.graph_degree)))
            return nx.balanced_tree(args.graph_degree, ndim)
        if args.graph_type == "cicular_ladder":
            ndim = int(np.ceil(args.graph_size * 0.5))
            return nx.circular_ladder_graph(ndim)
        if args.graph_type == "cycle":
            return nx.cycle_graph(args.graph_size)
        if args.graph_type == 'grid_2d':
            ndim = int(np.ceil(np.sqrt(args.graph_size)))
            return nx.grid_2d_graph(ndim, ndim)
        if args.graph_type == 'lollipop':
            ndim = int(np.ceil(args.graph_size * 0.5))
            return nx.lollipop_graph(ndim, ndim)
        if args.graph_type == 'expander':
            ndim = int(np.ceil(np.sqrt(args.graph_size)))
            return nx.margulis_gabber_galil_graph(ndim)
        if args.graph_type == "hypercube":
            ndim = int(np.ceil(np.log(args.graph_size) / np.log(2.0)))
            return nx.hypercube_graph(ndim)
        if args.graph_type == "star":
            ndim = args.graph_size - 1
            return nx.star_graph(ndim)
        if args.graph_type == 'barabasi_albert':
            return nx.barabasi_albert_graph(args.graph_size, args.graph_degree)
        if args.graph_type == 'watts_strogatz':
            return nx.connected_watts_strogatz_graph(args.graph_size, args.graph_degree, args.graph_p)
        if args.graph_type == 'regular':
            return nx.random_regular_graph(args.graph_degree, args.graph_size)
        if args.graph_type == 'powerlaw_tree':
            return nx.random_powerlaw_tree(args.graph_size)
        if args.graph_type == 'small_world':
            ndim = int(np.ceil(np.sqrt(args.graph_size)))
            return nx.navigable_small_world_graph(ndim)
        if args.graph_type == 'geant':
            return topologies.GEANT()
        if args.graph_type == 'dtelekom':
            return topologies.Dtelekom()
        if args.graph_type == 'abilene':
            return topologies.Abilene()
        if args.graph_type == 'servicenetwork':
            return topologies.ServiceNetwork()

    def hyperedges():
        """Return groups of edges forced to carry identical z flow
        (broadcast links); empty for generic topologies."""
        if args.graph_type == 'Maddah-Ali':
            return [[(0, 1), (0, 2)]]
        if args.graph_type == 'tree':
            hyperedges = []
            hyperedge_node = [2, 3, 4]
            for node_1 in hyperedge_node:
                hyperedge = []
                for node_2 in G.successors(node_1):
                    hyperedge.append((node_1, node_2))
                hyperedges.append(hyperedge)
            return hyperedges
        else:
            return []

    def targetGenerator():
        """Pick the demand-generating nodes for the chosen topology."""
        if args.graph_type == 'Maddah-Ali':
            return [1, 2]
        if args.graph_type == 'tree':
            return [5, 6, 7, 8, 9, 10, 11, 12, 13]
        else:
            return [list(G.nodes())[i] for i in random.sample(range(graph_size), args.target_nodes)]

    logging.info('Generating graph...')
    temp_graph = graphGenerator()
    logging.debug('nodes: ' + str(temp_graph.nodes))  # list
    logging.debug('edges: ' + str(temp_graph.edges))  # list of node pair
    G = nx.DiGraph()  # generate a DiGraph
    temp_nodes = list(temp_graph.nodes)
    temp_nodes.sort()
    # Relabel nodes to consecutive integers so all topologies look uniform.
    number_map = dict(zip(temp_nodes, range(len(temp_graph.nodes()))))
    G.add_nodes_from(number_map.values())  # add node from temp_graph to G
    for (x, y) in temp_graph.edges():  # add edge from temp_graph to G
        xx = number_map[x]
        yy = number_map[y]
        G.add_edge(min(xx, yy), max(xx, yy))
        # Non-tree topologies are made bidirectional.
        if args.graph_type != 'tree' and args.graph_type != 'Maddah-Ali':
            G.add_edge(max(xx, yy), min(xx, yy))
    graph_size = G.number_of_nodes()
    logging.info('...done. Created graph with %d nodes and %d edges' % (G.number_of_nodes(), G.number_of_edges()))
    logging.debug('G is:' + str(G.nodes) + str(G.edges))
    # pos = nx.shell_layout(G)
    # nx.draw_networkx(G, pos)
    # plt.draw()
    # plt.show()

    we = {}
    wv = {}
    if args.graph_type != 'tree' and args.graph_type != 'Maddah-Ali':
        # Derive per-node cost exponents from (inverse) closeness centrality.
        closeness = nx.closeness_centrality(G)
        keys, values = list(closeness.keys()), list(closeness.values())
        values = np.power(values, -1)
        values = (values - np.min(values)) * 3 + 1  # make more sparse, larger multiplier-> more sparse 5
        distance = dict(zip(keys, values))
        keys, values = list(closeness.keys()), list(closeness.values())
        values = (values - np.min(values)) * 20 + 1  # make more sparse, larger multiplier-> more sparse
        closeness = dict(zip(keys, values))
    if args.graph_type == 'tree':
        z_layer = [[(0, 1)], [(1, 2), (1, 3), (1, 4)],
                   [(2, 5), (2, 6), (2, 7), (3, 8), (3, 9), (3, 10), (4, 11), (4, 12), (4, 13)]]
        pen = args.penalty
        for layer in z_layer:
            for e in layer:
                # Fixed: bind the current penalty as a default argument;
                # a plain closure over `pen` would make every lambda use
                # the final (largest) value after the loop ends.
                we[e] = lambda x, p=pen: power(x, p)
            pen *= args.penalty_mul
    elif args.graph_type == 'Maddah-Ali':
        for e in G.edges():
            we[e] = lambda x: power(x, args.penalty)
    else:
        for (u, v) in G.edges():
            average_distance = (distance[u] + distance[v]) / 2
            # Fixed: same late-binding bug — bind the per-edge exponent now.
            we[(u, v)] = lambda x, p=average_distance: power(x, p)
    if args.graph_type == 'Maddah-Ali' or args.graph_type == 'tree':
        for v in G.nodes():
            wv[v] = lambda x: power(x, 1)
    else:
        for v in G.nodes():
            # Fixed: bind distance[v] per node; the original closed over the
            # loop variable v, so every node got the last node's exponent.
            wv[v] = lambda x, p=distance[v]: power(x, p)

    capacity = {}
    if args.graph_type == 'tree' or args.graph_type == 'Maddah-Ali':
        if args.graph_type == 'tree':
            x_layer = [[0], [1], [2, 3, 4], [5, 6, 7, 8, 9, 10, 11, 12, 13]]
            cap = args.capacity
            for layer in x_layer:
                for v in layer:
                    capacity[v] = cap
                cap /= args.capacity_mul
        elif args.graph_type == 'Maddah-Ali':
            for v in G.nodes():
                capacity[v] = args.capacity
            capacity[0] = args.catalog_size
    else:
        for v in G.nodes():
            capacity[v] = closeness[v]

    targets = targetGenerator()
    catalog = range(args.catalog_size)
    dem = {}
    if args.graph_type == 'Maddah-Ali':
        if args.method == 'cc':
            dem[1] = {}
            dem[2] = {}
            dem[1][0] = (1 + args.difference) / 2
            dem[1][1] = dem[1][0]
            dem[1][2] = (1 - args.difference) / 2
            dem[1][3] = dem[1][2]
            dem[2][0] = (1 - args.difference) / 2
            dem[2][1] = dem[2][0]
            dem[2][2] = (1 + args.difference) / 2
            dem[2][3] = dem[2][2]
        elif args.method == 'ma':
            '''MAN method'''
            for t in targets:
                dem[t] = {}
                for i in catalog:
                    dem[t][i] = (1 + args.difference) / 2
    else:
        if args.method == 'cc':
            if args.graph_type == 'tree':
                scale = 100
            else:
                scale = 2000 / graph_size  # 1000 / graph_size
            for t in targets:
                dem[t] = {}
                sample = np.random.zipf(args.zipf_parameter, 1000)
                demend = Counter(sample)
                for i in catalog:
                    dem[t][i] = demend[args.catalog_size - i] / scale
                    # dem[t][i] = demend[i+1] / scale
                if args.difference:
                    scale -= 5
            print(dem)
        elif args.method == 'ma':
            '''MAN method'''
            if args.graph_type == 'tree':
                scale = 100
            else:
                scale = 2000 / graph_size  # 1000 / graph_size
            dist = {}
            for i in catalog:
                dist[i] = 0.0
            for t in targets:
                sample = np.random.zipf(args.zipf_parameter, 1000)
                stat = Counter(sample)
                for i in catalog:
                    if stat[args.catalog_size - i] / scale > dist[i]:
                        dist[i] = stat[args.catalog_size - i] / scale
                    # if stat[i+1]/scale > dist[i]:
                    #     dist[i] = stat[i+1]/scale
                if args.difference:
                    scale -= 5
            for t in targets:
                # Fixed: copy per target; the original aliased the same dict
                # object for every target, so mutating one mutated all.
                dem[t] = dict(dist)
            print(dem)

    hyperE = hyperedges()
    CN = CacheNetwork(G, we, wv, dem, catalog, hyperE, capacity)
    x = CN.random_x()
    CN.cvx_init(x)
    CN.solve()
    print("Status:", CN.problem.solution.status)
    print("Optimal Value:", CN.problem.solution.opt_val)
    print('solve_time', CN.problem.solution.attr['solve_time'])
    CN.test_feasibility(1e-2)
    x, z, mu, objE, objV = CN.solution()

    # Renamed from `dir` to avoid shadowing the builtin.
    out_dir = 'random_' + args.method + '_100_50/'
    if not os.path.exists(out_dir):
        os.mkdir(out_dir)
    output = out_dir + args.graph_type
    with open(output, 'wb') as f:
        pickle.dump([x, z, objE, objV], f)
    logging.info('Saved in ' + output)


if __name__ == "__main__":
    main()
# G = nx.DiGraph([(0, 1), (1, 2), (2, 3)])
# we = {}
# wv = {}
# for e in G.edges():
# we[e] = square
# for v in G.nodes():
# wv[v] = square
#
# dem = {}
# targets = [0,2]
# catalog = ['a','b','c','d']
# for t in targets:
# dem[t] = {}
# for i in catalog:
# dem[t][i] = np.random.rand()
#
#
# CN = CacheNetwork(G,we,wv,dem,catalog)
# CN.cvx_init()
# CN.solve()
# print("Status:",CN.problem.solution.status)
# print("Optimal Value:",CN.problem.solution.opt_val)
# print('solve_time',CN.problem.solution.attr['solve_time'])
# print('num_iters:',CN.problem.solution.attr['num_iters'])
#
# CN.test_feasibility(1e-2)