A Tour of Go: Web Crawler Solution
package main

import (
	"fmt"
	"sync"
)

// Crawler records which URLs have been crawled, guarding the
// set with a mutex so it is safe for concurrent use.
type Crawler struct {
	crawled map[string]bool
	mux     sync.Mutex
}

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

// New returns a Crawler with an initialized visited-URL set.
func New() *Crawler {
	return &Crawler{
		crawled: make(map[string]bool),
	}
}

// visit marks url as crawled and reports whether it had
// already been visited.
func (c *Crawler) visit(url string) bool {
	c.mux.Lock()
	defer c.mux.Unlock()
	if c.crawled[url] {
		return true
	}
	c.crawled[url] = true
	return false
}

// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func (c *Crawler) Crawl(url string, depth int, fetcher Fetcher) {
	var wg sync.WaitGroup
	if c.visit(url) || depth <= 0 {
		return
	}
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("found: %s %q\n", url, body)
	for _, u := range urls {
		wg.Add(1)
		go func(u string) {
			defer wg.Done()
			c.Crawl(u, depth-1, fetcher)
		}(u)
	}
	wg.Wait()
}

func main() {
	crawler := New()
	crawler.Crawl("http://golang.org/", 4, fetcher)
}

// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}
Calling c.visit before checking depth marks a URL as visited even when the depth budget is already exhausted, so the page is never fetched on that path. If another goroutine later reaches the same URL along a shorter path, with depth remaining, visit reports it as already crawled and the page is skipped entirely.
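One way to address this, sketched below, is to return on exhausted depth before recording the URL, so a URL first seen with no remaining depth can still be crawled when a later goroutine reaches it with depth to spare. This is a minimal rework of the Crawl method above along the lines of that suggestion, not code from the gist itself:

// Crawl uses fetcher to recursively crawl pages starting with url,
// to a maximum of depth. Checking depth before visit ensures a URL
// first reached with no remaining depth is not marked as crawled.
func (c *Crawler) Crawl(url string, depth int, fetcher Fetcher) {
	if depth <= 0 {
		return
	}
	if c.visit(url) {
		return
	}
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("found: %s %q\n", url, body)
	var wg sync.WaitGroup
	for _, u := range urls {
		wg.Add(1)
		go func(u string) {
			defer wg.Done()
			c.Crawl(u, depth-1, fetcher)
		}(u)
	}
	wg.Wait()
}

Note that even with this ordering, a page first fetched with depth 1 is marked as crawled before its links can be followed, so a later, deeper path to it still stops there; a complete fix would record the remaining depth per URL and re-crawl whenever the URL is reached with a larger depth allowance.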