YAMAGUCHI::weblog

I came to this world of the gods out of admiration for the biting Jizo. I'm Koppe, a boy like Madonna.

A Tour of Go Exercise #69-70

Introduction

I'm a know-nothing from the Python world. I've worked through the A Tour of Go exercises all the way to the end.

Here's how they turned out.

#69 Equivalent Binary Trees
package main

import (
	"code.google.com/p/go-tour/tree"
	"fmt"
)

/*
type Tree struct {
	Left  *Tree
	Value int
	Right *Tree
}
*/

// Walk walks the tree t sending all values
// from the tree to the channel ch.
func Walk(t *tree.Tree, ch chan int) {
	// Declare walk first so the closure can call itself recursively.
	var walk func(t *tree.Tree, ch chan int)
	walk = func(t *tree.Tree, ch chan int) {
		// in-order traversal: left subtree, this node's value, right subtree
		if t.Left != nil {
			walk(t.Left, ch)
		}
		ch <- t.Value
		if t.Right != nil {
			walk(t.Right, ch)
		}
	}
	walk(t, ch)
	// Close the channel so that range loops over it can terminate.
	close(ch)
}

// Same determines whether the trees
// t1 and t2 contain the same values.
func Same(t1, t2 *tree.Tree) bool {
	c1 := make(chan int)
	c2 := make(chan int)
	go Walk(t1, c1)
	go Walk(t2, c2)
	for i := range c1 {
		if i != <-c2 {
			return false
		}
	}
	return true
}

func main() {
	c := make(chan int)
	go Walk(tree.New(1), c)
	for i := range c {
		fmt.Println(i)
	}
	
	fmt.Println(Same(tree.New(1), tree.New(1)))
	fmt.Println(Same(tree.New(1), tree.New(2)))
}
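
By the way, if Same() returns false partway through, the Walk goroutine still sending on the other unbuffered channel stays blocked and never exits. That doesn't matter in this toy program, but a variant that drains both channels avoids the leak. Here's a quick sketch that drops into the same file (SameDrained is a name I made up; it relies on both trees holding the same number of values, which tree.New's ten-value trees always do):

// SameDrained keeps reading both channels to the end so that both
// Walk goroutines can finish even after a mismatch is found.
// It assumes the two trees hold equally many values, as trees
// built by tree.New always do.
func SameDrained(t1, t2 *tree.Tree) bool {
	c1, c2 := make(chan int), make(chan int)
	go Walk(t1, c1)
	go Walk(t2, c2)
	same := true
	for v := range c1 {
		if v != <-c2 {
			same = false // remember the mismatch, but keep draining
		}
	}
	return same
}
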
#70 Web Crawler

I got stuck on this one in all sorts of ways. I wanted to solve it cleanly, but once I wrote it to satisfy the conditions, Crawl() was no longer recursive. I wonder if there's a better answer. (A sketch of a recursive variant is appended after the code below.)

package main

import (
	"fmt"
)

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

// fetchResults carries one worker's findings back to Crawl's loop.
type fetchResults struct {
	depth int
	urls  []string
}

// Crawl uses fetcher to crawl pages starting
// with url, to a maximum of depth.
func Crawl(url string, depth int, fetcher Fetcher) {
	// Fetches URLs in parallel and never fetches the same URL twice:
	// worker goroutines report their findings over a channel, and the
	// single loop below decides what still needs crawling.
	results := make(chan fetchResults)
	numWorkers := 1
	visited := map[string]bool{url: true}

	// Each worker fetches one page and reports what it found.
	worker := func(url string, depth int) {
		body, urls, err := fetcher.Fetch(url)
		if err != nil {
			fmt.Println(err)
		} else {
			fmt.Printf("found: %s %q\n", url, body)
		}
		results <- fetchResults{depth, urls}
	}

	go worker(url, 0)

	// numWorkers counts in-flight fetches; each result may spawn new
	// workers for URLs that haven't been visited yet.
	for numWorkers > 0 {
		result := <-results
		numWorkers--
		if result.depth > depth {
			// too deep: this page was already fetched, but don't
			// follow its links any further
			continue
		}

		for _, url := range result.urls {
			if !visited[url] {
				visited[url] = true
				numWorkers++
				go worker(url, result.depth+1)
			} else {
				fmt.Printf("visited: %s\n", url)
			}
		}
	}
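	// When this loop exits, numWorkers is 0: every fetch that was
	// started has reported back, so Crawl returns only after all of
	// its worker goroutines have finished.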
}

func main() {
	Crawl("http://golang.org/", 4, fetcher)
}


// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}
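
As an afterthought, here's one way Crawl() could stay recursive: share a mutex-guarded visited set and a sync.WaitGroup so the caller can wait for every goroutine. This is just my own sketch, not an official answer; the names safeVisited and crawlRec are made up, and it would need "sync" added to the imports.

// safeVisited is a set of URLs guarded by a mutex so that
// concurrent goroutines can share it safely.
type safeVisited struct {
	mu sync.Mutex
	m  map[string]bool
}

// visit marks url as seen and reports whether it was new.
func (v *safeVisited) visit(url string) bool {
	v.mu.Lock()
	defer v.mu.Unlock()
	if v.m[url] {
		return false
	}
	v.m[url] = true
	return true
}

// crawlRec keeps the recursive shape of the exercise skeleton:
// each call fetches one page and recurses into its links in new
// goroutines, tied together by the WaitGroup.
func crawlRec(url string, depth int, fetcher Fetcher, visited *safeVisited, wg *sync.WaitGroup) {
	defer wg.Done()
	if depth <= 0 || !visited.visit(url) {
		return
	}
	body, urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("found: %s %q\n", url, body)
	for _, u := range urls {
		wg.Add(1)
		go crawlRec(u, depth-1, fetcher, visited, wg)
	}
}

main() would then set up the shared state and block on the WaitGroup:

func main() {
	visited := &safeVisited{m: make(map[string]bool)}
	var wg sync.WaitGroup
	wg.Add(1)
	crawlRec("http://golang.org/", 4, fetcher, visited, &wg)
	wg.Wait()
}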