encoder.py
from core import *
from distributions import *
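# Note (added for clarity, an assumption based on this file alone): the wildcard
# imports above are expected to provide the names used below, i.e. time, np (numpy),
# choices (random.choices), Symbol, log, VERBOSE, PACKET_SIZE and generate_indexes
# from core, plus ideal_distribution and robust_distribution from distributions.
# Exactly which module exports which name is not visible from this file.
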
def get_degrees_from(distribution_name, N, k):
    """ Returns k random degrees drawn from the given probability distribution.
    The degree distribution should follow the ideal or robust soliton distribution,
    and the degree of the first drop is forced to 1 so that decoding can start.
    """
    if distribution_name == "ideal":
        probabilities = ideal_distribution(N)
    elif distribution_name == "robust":
        probabilities = robust_distribution(N)
    else:
        probabilities = None  # random.choices falls back to a uniform selection

    population = list(range(0, N + 1))
    return [1] + choices(population, probabilities, k=k - 1)
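
# Illustrative sketch only: the real ideal_distribution/robust_distribution live in
# distributions.py and may differ. This hypothetical helper shows the shape of an
# ideal soliton PMF over degrees 0..N that get_degrees_from could consume, where
# p(1) = 1/N and p(d) = 1/(d*(d-1)) for d >= 2 (degree 0 gets probability 0).
def _example_ideal_soliton_pmf(N):
    probabilities = [0, 1 / N]  # degree 0 is never drawn, degree 1 has weight 1/N
    probabilities += [1 / (d * (d - 1)) for d in range(2, N + 1)]
    return probabilities
# Example sampling, analogous to get_degrees_from:
#   choices(list(range(0, N + 1)), _example_ideal_soliton_pmf(N), k=10)
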
def encode(blocks, drops_quantity):
    """ Iterative encoding - encodes new symbols and yields them one by one.
    Encoding one symbol works as follows:
    1. Randomly choose a degree according to the degree distribution, save it into "deg".
       Note: below we prefer to randomly choose all the degrees at once for our symbols.
    2. Choose uniformly at random 'deg' distinct input blocks.
       These blocks are also called "neighbors" in graph theory.
    3. Compute the output symbol as the combination of the neighbors,
       i.e. XOR the chosen blocks together to produce the symbol.
    """
    blocks_n = len(blocks)
    assert blocks_n <= drops_quantity, "Because the neighbors of a symbol are distinct, at least as many symbols as source blocks must be dropped"

    print("Generating graph...")
    start_time = time.time()

    # Generate the random degrees for all symbols up front; the neighbor selection is seeded with the symbol id
    random_degrees = get_degrees_from("robust", blocks_n, k=drops_quantity)

    print("Ready for encoding.", flush=True)

    for i in range(drops_quantity):
        # Get the random selection of neighbors, generated beforehand (for performance)
        selection_indexes, deg = generate_indexes(i, random_degrees[i], blocks_n)

        # XOR the selected blocks together to build the drop (a single block is taken as-is)
        drop = blocks[selection_indexes[0]]
        for n in range(1, deg):
            drop = np.bitwise_xor(drop, blocks[selection_indexes[n]])
            # drop = drop ^ blocks[selection_indexes[n]]  # same performance in practice

        # Create the symbol, then log the process
        symbol = Symbol(index=i, degree=deg, data=drop)

        if VERBOSE:
            symbol.log(blocks_n)

        log("Encoding", i, drops_quantity, start_time)

        yield symbol

    print("\n----- Correctly dropped {} symbols (packet size={})".format(drops_quantity, PACKET_SIZE))