1. The go test command is a driver for test code that is organized according to certain conventions: test files end in _test.go and test functions are named TestXxx (a test sketch following this convention appears after the program at the end of these notes).
2. Race detector: building or running with the -race flag (go run -race, go test -race, go build -race) instruments the program with a runtime monitor of accesses to shared variables; a report containing WARNING: DATA RACE means a data race was detected (a minimal example follows right after this list).
3. Ideally, redundant work should be avoided; this is called duplicate suppression: when several goroutines ask for the same key, only the first actually does the work and the others wait for its result.
4. Design of a concurrent, duplicate-suppressing, non-blocking cache:
1. Concurrent: each lookup runs in its own goroutine, started with go func(){}().
2. Concurrency-safe: the cache map is protected by a sync.Mutex.
3. Non-blocking: Get locks the mutex only long enough to look up or insert an entry pointer, then unlocks immediately before making the HTTP request, so one slow HTTP request does not block access to the cache.
4. No duplicate work: each entry carries a channel; the first goroutine for a key performs the request, stores the result, and closes the channel, while other goroutines for the same key block on that channel and then read the stored result.
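For point 2, a minimal sketch (a standalone toy program, not part of the cache code below) of the kind of program the race detector flags, because two goroutines write the same variable without synchronization:

package main

import "fmt"

func main() {
	x := 0
	done := make(chan struct{})
	go func() {
		x = 1 // unsynchronized write from a second goroutine
		close(done)
	}()
	x = 2 // concurrent write from the main goroutine
	<-done
	// Running this with `go run -race` prints a WARNING: DATA RACE
	// report for the two writes to x.
	fmt.Println(x)
}

The full program below puts the four design points together: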
package main

import (
	"fmt"
	"io/ioutil"
	"log"
	"net/http"
	"sync"
	"time"

	"golang.org/x/net/html"
)

// Memo is the cache type: it memoizes the results of calling f.
type Memo struct {
	f     Func
	mu    sync.Mutex // guards cache
	cache map[string]*entry
}

type Func func(key string) (interface{}, error)

type result struct {
	value interface{}
	err   error
}

type entry struct {
	res   result
	ready chan struct{} // closed when res is ready
}

func main() {
	//res, _ := httpGetBody("http://www.baidu.com")
	//fmt.Println(string(res.([]byte))) // type assertion
	// Initialize the memo.
	m := New(httpGetBody)
	urls, _ := Extract("http://www.baidu.com")
	var n sync.WaitGroup
	for _, url := range urls {
		n.Add(1)
		go func(url string) {
			fmt.Println(url)
			start := time.Now()
			value, err := m.Get(url)
			if err != nil {
				log.Print(err)
			}
			if value != nil {
				fmt.Printf("%s, %s, %d bytes\n",
					url, time.Since(start), len(value.([]byte)))
			}
			n.Done()
		}(url)
	}
	n.Wait()
}

// New initializes a Memo that memoizes f.
func New(f Func) *Memo {
	return &Memo{f: f, cache: make(map[string]*entry)}
}

// Get fetches the value for key and stores it in the cache;
// if the key is already cached, the cached result is returned directly.
func (memo *Memo) Get(key string) (interface{}, error) {
	memo.mu.Lock()
	e := memo.cache[key]
	if e == nil {
		// First request for this key: this goroutine computes the
		// value and broadcasts readiness by closing the channel.
		e = &entry{ready: make(chan struct{})}
		memo.cache[key] = e
		memo.mu.Unlock()

		// The most expensive part, the function call itself, runs
		// without holding the lock, which improves performance.
		e.res.value, e.res.err = memo.f(key)

		close(e.ready) // broadcast: result is ready
	} else {
		// Repeat request for this key: wait for the result.
		memo.mu.Unlock()

		<-e.ready
	}
	return e.res.value, e.res.err
}

// httpGetBody fetches the body of an HTTP GET request to url.
func httpGetBody(url string) (interface{}, error) {
	resp, err := http.Get(url)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	return ioutil.ReadAll(resp.Body)
}

// Extract performs an HTTP GET on url, parses the response as HTML,
// and returns the links found in the document.
func Extract(url string) ([]string, error) {
	resp, err := http.Get(url)
	if err != nil {
		return nil, err
	}
	if resp.StatusCode != http.StatusOK {
		resp.Body.Close()
		return nil, fmt.Errorf("getting %s: %s", url, resp.Status)
	}
	doc, err := html.Parse(resp.Body)
	resp.Body.Close()
	if err != nil {
		return nil, fmt.Errorf("parsing %s as HTML: %v", url, err)
	}
	var links []string
	visitNode := func(n *html.Node) {
		if n.Type == html.ElementNode && n.Data == "a" {
			for _, a := range n.Attr {
				if a.Key != "href" {
					continue
				}
				link, err := resp.Request.URL.Parse(a.Val)
				if err != nil {
					continue // ignore bad URLs
				}
				links = append(links, link.String())
			}
		}
	}
	forEachNode(doc, visitNode, nil)
	return links, nil
}

// forEachNode calls pre(x) and post(x) for each node x in the tree rooted at n.
func forEachNode(n *html.Node, pre, post func(n *html.Node)) {
	if pre != nil {
		pre(n)
	}
	for c := n.FirstChild; c != nil; c = c.NextSibling {
		forEachNode(c, pre, post)
	}
	if post != nil {
		post(n)
	}
}
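For points 1 to 3, a hedged test sketch (the file name memo_test.go and the goroutine count of 20 are arbitrary choices for illustration): it requests the same key from many goroutines, counts how often the memoized function actually runs to confirm duplicate suppression, and is meant to be run with go test -race so the race detector can check the cache.

// memo_test.go (hypothetical file name): run with `go test -race`.
package main

import (
	"sync"
	"sync/atomic"
	"testing"
)

// TestGetSuppressesDuplicates follows the go test convention
// (file ends in _test.go, function name starts with Test).
func TestGetSuppressesDuplicates(t *testing.T) {
	var calls int64
	m := New(func(key string) (interface{}, error) {
		atomic.AddInt64(&calls, 1) // count real invocations
		return key + "-value", nil
	})

	var wg sync.WaitGroup
	for i := 0; i < 20; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			if _, err := m.Get("same-key"); err != nil {
				t.Error(err)
			}
		}()
	}
	wg.Wait()

	// Duplicate suppression: the function should have run exactly once.
	if got := atomic.LoadInt64(&calls); got != 1 {
		t.Errorf("memoized function ran %d times, want 1", got)
	}
}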