package fetcher

import (
	"fmt"
	"strings"
	"sync"

	"github.com/umutphp/awesome-cli/internal/package/node"
	"github.com/umutphp/awesome-cli/internal/package/parser"
)

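// DOWNLOAD_CONCURRENCY is the number of worker goroutines that fetch
// awesome lists in parallel.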
const DOWNLOAD_CONCURRENCY = 4

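// Progress is a snapshot of the crawl: Found is the total number of lists
// discovered (the root plus every child URL), Crawled counts successful
// fetches, and Errors counts failed ones.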
type Progress struct {
	Found   uint64
	Crawled uint64
	Errors  uint64
}

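// FetchAllRepos fetches the root awesome list, then crawls every child list
// with DOWNLOAD_CONCURRENCY workers, sending a Progress snapshot on update
// after each fetch and closing update when the crawl is done. Because the
// sends on update block, the caller should drain the channel concurrently;
// a minimal sketch of such a caller (not part of this package) is:
//
//	update := make(chan Progress)
//	go func() {
//		for p := range update {
//			fmt.Printf("%d/%d crawled, %d errors\n", p.Crawled, p.Found, p.Errors)
//		}
//	}()
//	final, err := FetchAllRepos(update)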
func FetchAllRepos(update chan Progress) (Progress, error) {
	fetched, err := FetchAwsomeRootRepo()
	if err != nil {
		return Progress{1, 0, 1}, err
	}

	root := parser.ParseIndex(fetched)
	urls := getAllChildrenURLs(root.GetChildren())

	progress := Progress{
		Found:   uint64(1 + len(urls)),
		Crawled: 1,
		Errors:  0,
	}
	update <- progress

	var wg sync.WaitGroup
	queue := make(chan string)
	errors := make(chan error)

	// Register the workers before they start so wg.Wait cannot return early,
	// and honour the configured concurrency instead of a hard-coded count.
	wg.Add(DOWNLOAD_CONCURRENCY)
	for i := 0; i < DOWNLOAD_CONCURRENCY; i++ {
		go fetchWorker(queue, errors, &wg)
	}

	// A single collector goroutine owns progress and allErrors while the crawl
	// runs; done signals that it has finished draining the errors channel.
	allErrors := MultiError{[]error{}}
	done := make(chan struct{})
	go func() {
		for err := range errors {
			if err == nil {
				progress.Crawled++
			} else {
				progress.Errors++
				allErrors.Errors = append(allErrors.Errors, err)
			}
			update <- progress
		}
		close(update)
		close(done)
	}()

	for _, url := range urls {
		queue <- url
	}

	close(queue)
	wg.Wait()
	close(errors)

	// Wait for the collector goroutine to finish before reading its results,
	// so the return below cannot race with the final updates above.
	<-done

	if len(allErrors.Errors) == 0 {
		return progress, nil
	}
	return progress, allErrors
}

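// getAllChildrenURLs walks the parsed index tree depth-first and collects
// every non-empty child URL.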
func getAllChildrenURLs(children []node.Node) []string {
	urls := []string{}
	for _, child := range children {
		if child.GetURL() != "" {
			urls = append(urls, child.GetURL())
		}
		urls = append(urls, getAllChildrenURLs(child.GetChildren())...)
	}
	return urls
}

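// MultiError aggregates the individual fetch failures from a crawl into a
// single error value.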
type MultiError struct {
	Errors []error
}

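// Error joins the messages of the wrapped errors, one per line.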
func (e MultiError) Error() string {
	errStrings := []string{}
	for _, err := range e.Errors {
		errStrings = append(errStrings, err.Error())
	}
	return strings.Join(errStrings, "\n ")
}

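// FetchError records which URL failed to download along with the underlying
// error, which remains available via Unwrap.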
type FetchError struct {
	URL     string
	Wrapped error
}

func (e FetchError) Unwrap() error {
	return e.Wrapped
}

func (e FetchError) Error() string {
	return fmt.Sprintf("failed to fetch %q: %v", e.URL, e.Wrapped)
}

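// fetchWorker drains the queue, fetching each URL and reporting the outcome
// on the errors channel (nil on success, a FetchError on failure).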
func fetchWorker(queue chan string, errors chan error, wg *sync.WaitGroup) {
	// The caller adds to the WaitGroup before starting this worker; calling
	// wg.Add here would race with wg.Wait in FetchAllRepos.
	defer wg.Done()
	for url := range queue {
		if _, err := FetchAwsomeRepo(url); err != nil {
			errors <- FetchError{url, err}
		} else {
			errors <- nil
		}
	}
}