Skip to content

Commit 8f69934

Browse files
committed
realistic exp
1 parent 031f9e8 commit 8f69934

File tree

3 files changed

+62
-56
lines changed

3 files changed

+62
-56
lines changed

config/config.go

+8-8
Original file line numberDiff line numberDiff line change
@@ -19,23 +19,23 @@ type NodeData struct {
1919

2020
// Must be changed according to the coding method
2121
const (
22-
ExpectedChunks = 10
22+
ExpectedChunks = 1
2323
)
2424

2525
// Reed-Solomon parameters
2626
const (
27-
DataShards = 10
28-
ParityShards = 2
27+
DataShards = 30
28+
ParityShards = 5
2929
)
3030

3131
// Luby-Transform parameters
3232
const (
33-
LTSourceBlocks = 10
33+
LTSourceBlocks = 1
3434
RandomSeed = 42
3535
)
3636

3737
var (
38-
c = 2.0
38+
c = 1.0
3939
LTEncodedBlockCount = int(c*math.Sqrt(float64(LTSourceBlocks))) + LTSourceBlocks
4040
)
4141

@@ -48,7 +48,7 @@ var (
4848
NodeID string
4949
CodingMethod string
5050
Mode string
51-
Nodes = 11
51+
Nodes = 2
5252
BazantineNodes = 3
5353
ReceivedChunks = sync.Map{}
5454
SentChunks = sync.Map{}
@@ -60,12 +60,12 @@ var (
6060
// Must be changed according to the coding method
6161
// if LT then it should be LTEncodedBlockCount
6262
// if RS then it should be DataShards + ParityShards
63-
ChunksRecByNode = make([][]byte, DataShards+ParityShards)
63+
ChunksRecByNode = make([][]byte, LTEncodedBlockCount)
6464
ReadyCounter = 0
6565
StartTime time.Time
6666
OriginalLength = 18876679
6767
)
6868

6969
var (
70-
K = 10
70+
K = 1
7171
)

experimentbash.sh

+3-2
Original file line numberDiff line numberDiff line change
@@ -8,11 +8,11 @@ rm -rf output
88
mkdir output
99

1010
# Number of regular nodes to start
11-
NUM_NODES=10
11+
NUM_NODES=2
1212
PORT_BASE=4007
1313
BOOTSTRAP_PORT_BASE=4001
1414
BOOTSTRAP_NODES=1
15-
CODING_METHOD="RS"
15+
CODING_METHOD="LT"
1616
MODE="download"
1717
IP_BASE="127.0.0."
1818

@@ -90,6 +90,7 @@ ip="${IP_BASE}7"
9090
tmux new-window -t nodes -n $session
9191
tmux send-keys -t nodes:$session "cd '$(pwd)' && cpulimit -l 50 -- go run ./cmd/main.go -node=Node1 -port=4006 -ip=$ip -mode=$MODE -coding=$CODING_METHOD" C-m
9292

93+
sleep 2
9394
# Add network latency to Node 1
9495
# add_network_latency lo0 "50ms"
9596

handlers/stream_handler.go

+51-46
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@ var log = logrus.New()
2626

2727
var flag = true
2828

29-
const subChunkSize = 512 * 1024 // 256 KB
29+
const subChunkSize = 512 * 1024 // 512 KB
3030
const maxRetries = 5
3131

3232
// Helper function to write sub-chunks to the stream
@@ -310,56 +310,59 @@ func StoreReceivedChunk(nodeID string, chunkIndex int, chunk []byte, h host.Host
310310
nodeData.Received[chunkIndex] = chunk
311311
fmt.Printf("Node %s received chunk %d\n", nodeID, chunkIndex)
312312
fmt.Println("Length of received chunks:", len(nodeData.Received))
313-
314-
if config.Counter == config.ExpectedChunks {
315-
log.WithFields(logrus.Fields{"nodeID": nodeID}).Info("Node complete received data")
316-
317-
var _ string
318-
var err error
319-
log.WithField("codingMethod", config.CodingMethod).Info("Node decoding data")
320-
droplets := make([][]byte, 0, config.ExpectedChunks)
321-
if config.CodingMethod == "LT" {
322-
for _, droplet := range config.ChunksRecByNode {
323-
if len(droplet) > 0 {
324-
droplets = append(droplets, droplet)
313+
if config.K != 1 {
314+
if config.Counter == config.ExpectedChunks {
315+
log.WithFields(logrus.Fields{"nodeID": nodeID}).Info("Node complete received data")
316+
317+
var _ string
318+
var err error
319+
log.WithField("codingMethod", config.CodingMethod).Info("Node decoding data")
320+
droplets := make([][]byte, 0, config.ExpectedChunks)
321+
if config.CodingMethod == "LT" {
322+
for _, droplet := range config.ChunksRecByNode {
323+
if len(droplet) > 0 {
324+
droplets = append(droplets, droplet)
325+
}
325326
}
327+
_, err = lt.LTDecode(droplets)
328+
} else if config.CodingMethod == "RS" {
329+
_, err = rs.RSDecode(config.ChunksRecByNode)
326330
}
327-
_, err = lt.LTDecode(droplets)
328-
} else if config.CodingMethod == "RS" {
329-
_, err = rs.RSDecode(config.ChunksRecByNode)
330-
}
331331

332-
if (err != nil) && (config.CodingMethod == "LT") {
333-
log.WithFields(logrus.Fields{"nodeID": nodeID, "Error": err, "length of valid chunks:": len(droplets)}).Error("Node failed to decode data")
334-
flag = false
335-
return
336-
} else if (err != nil) && (config.CodingMethod == "RS") {
337-
log.WithFields(logrus.Fields{"nodeID": nodeID, "Error": err, "length of valid chunks:": len(config.ChunksRecByNode)}).Error("Node failed to decode data")
338-
// flag = false
339-
return
340-
}
332+
if (err != nil) && (config.CodingMethod == "LT") {
333+
log.WithFields(logrus.Fields{"nodeID": nodeID, "Error": err, "length of valid chunks:": len(droplets)}).Error("Node failed to decode data")
334+
flag = false
335+
return
336+
} else if (err != nil) && (config.CodingMethod == "RS") {
337+
log.WithFields(logrus.Fields{"nodeID": nodeID, "Error": err, "length of valid chunks:": len(config.ChunksRecByNode)}).Error("Node failed to decode data")
338+
// flag = false
339+
return
340+
}
341341

342-
log.WithFields(logrus.Fields{"nodeID": nodeID}).Info("Node reconstructed data")
343-
344-
// outputFilePath := fmt.Sprintf("output/%s_out.txt", config.NodeID)
345-
// if err := os.WriteFile(outputFilePath, []byte(decodedData), 0644); err != nil {
346-
// log.WithFields(logrus.Fields{"nodeID": nodeID, "Error": err}).Error("Node failed to write reconstructed data to file")
347-
// return
348-
// }
349-
if config.Mode == "upload" {
350-
for _, peerInfo := range config.ConnectedPeers {
351-
if peerInfo.ID.String() != nodeID {
352-
readyKey := fmt.Sprintf("%s-ready", peerInfo.ID.String())
353-
if _, ok := config.SentChunks.Load(readyKey); !ok {
354-
SendReady(context.Background(), h, peerInfo, nodeID)
355-
config.SentChunks.Store(readyKey, struct{}{})
342+
log.WithFields(logrus.Fields{"nodeID": nodeID}).Info("Node reconstructed data")
343+
344+
// outputFilePath := fmt.Sprintf("output/%s_out.txt", config.NodeID)
345+
// if err := os.WriteFile(outputFilePath, []byte(decodedData), 0644); err != nil {
346+
// log.WithFields(logrus.Fields{"nodeID": nodeID, "Error": err}).Error("Node failed to write reconstructed data to file")
347+
// return
348+
// }
349+
if config.Mode == "upload" {
350+
for _, peerInfo := range config.ConnectedPeers {
351+
if peerInfo.ID.String() != nodeID {
352+
readyKey := fmt.Sprintf("%s-ready", peerInfo.ID.String())
353+
if _, ok := config.SentChunks.Load(readyKey); !ok {
354+
SendReady(context.Background(), h, peerInfo, nodeID)
355+
config.SentChunks.Store(readyKey, struct{}{})
356+
}
356357
}
357358
}
359+
time.Sleep(10 * time.Second)
360+
} else if config.Mode == "download" {
361+
logrus.WithField("Total time", time.Since(config.StartTime)).Info("Total time")
358362
}
359-
time.Sleep(10 * time.Second)
360-
} else if config.Mode == "download" {
361-
logrus.WithField("Total time", time.Since(config.StartTime)).Info("Total time")
362363
}
364+
} else {
365+
logrus.WithField("Total time", time.Since(config.StartTime)).Info("Total time")
363366
}
364367
} else {
365368
return
@@ -384,7 +387,11 @@ func HandleDownloadStream(s network.Stream, h host.Host, wg *sync.WaitGroup) {
384387

385388
originalFilePath := "eth_transactions.json"
386389
originalData, _ := os.ReadFile(originalFilePath)
387-
390+
peerInfo := peer.AddrInfo{ID: s.Conn().RemotePeer()}
391+
if config.K == 1 {
392+
SendChunk(context.Background(), h, peerInfo, 0, originalData)
393+
return
394+
}
388395
var chunks [][]byte
389396

390397
if config.CodingMethod == "RS" {
@@ -397,8 +404,6 @@ func HandleDownloadStream(s network.Stream, h host.Host, wg *sync.WaitGroup) {
397404
panic("Invalid coding method")
398405
}
399406

400-
peerInfo := peer.AddrInfo{ID: s.Conn().RemotePeer()}
401-
402407
SendChunk(context.Background(), h, peerInfo, int(chunkIndex), chunks[chunkIndex])
403408
}
404409

0 commit comments

Comments
 (0)