Skip to content

Commit

Permalink
solved loong#1 task using ticker to ensure that the crawler waits one…
Browse files Browse the repository at this point in the history
… second between each page fetch
  • Loading branch information
moamlrh committed Jul 19, 2024
1 parent 5199508 commit 1ce095a
Showing 1 changed file with 7 additions and 3 deletions.
10 changes: 7 additions & 3 deletions 0-limit-crawler/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,12 @@ package main
import (
"fmt"
"sync"
"time"
)

// Crawl uses `fetcher` from the `mockfetcher.go` file to imitate a
// real crawler. It crawls until the maximum depth has been reached.
func Crawl(url string, depth int, wg *sync.WaitGroup) {
func Crawl(url string, depth int, wg *sync.WaitGroup, ticker *time.Ticker) {
defer wg.Done()

if depth <= 0 {
Expand All @@ -35,15 +36,18 @@ func Crawl(url string, depth int, wg *sync.WaitGroup) {
for _, u := range urls {
// Do not remove the `go` keyword, as Crawl() must be
// called concurrently
go Crawl(u, depth-1, wg)
<-ticker.C
go Crawl(u, depth-1, wg, ticker)
}
return
}

// main wires up the shared rate limiter and starts the crawl from the
// root URL, waiting for every spawned crawler goroutine to finish.
func main() {
	var wg sync.WaitGroup

	// One tick per second caps the overall page-fetch rate across all
	// crawler goroutines; Stop releases the ticker's resources on exit.
	limiter := time.NewTicker(1 * time.Second)
	defer limiter.Stop()

	wg.Add(1)
	Crawl("http://golang.org/", 4, &wg, limiter)
	wg.Wait()
}

0 comments on commit 1ce095a

Please sign in to comment.