I have a Golang application that stores URLs (supplied as a query parameter) in a database. The URLs are stored with the following handler:
func AddArticle(db *sql.DB) http.HandlerFunc {
    return func(w http.ResponseWriter, r *http.Request) {
        queryParam := r.FormValue("url")
        insertedId := dao.SaveArticle(db, queryParam)
        glog.Infof("add hostname %s, id: %d", getHostnameFromUrl(queryParam), insertedId)
        // start routine which scrapes url
        go dao.ScrapeArticle(db, insertedId)
        // finally output confirmation page
        renderObject := map[string]string{"hostname": getHostnameFromUrl(queryParam)}
        render.DisplayPage(w, r, renderObject, "confirmation.html")
    }
}
Now this works fine, but for some reason, when a lot of new URLs are saved the SQLite database tells me it is locked (a deadlock). So I figured I would use channels, simply to prevent those locks. Unfortunately, I am not sure how to get started with that.
Basically, what I want is:

- a url gets added
- the scrape job for that url is put on a channel
- a single worker picks the jobs off the channel and scrapes them one at a time, so the database is no longer written to concurrently

I don't know how to begin and need some help. For example, where do I define the channel?
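To make that concrete, something like the sketch below is what I have in mind (the names scrapeQueue and startScrapeWorker are made up by me and the buffer size is a guess; none of this exists yet):

    // a channel of article ids waiting to be scraped; defined once,
    // e.g. at package level or created in main() and handed to whoever needs it
    var scrapeQueue = make(chan int64, 100)

    // a single worker goroutine drains the queue, so dao.ScrapeArticle
    // (and its database update) never runs for two ids at the same time
    func startScrapeWorker(db *sql.DB) {
        go func() {
            for id := range scrapeQueue {
                dao.ScrapeArticle(db, id)
            }
        }()
    }

Is that roughly the right structure, or does the channel belong somewhere else entirely?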
My scraper function looks like this:
func ScrapeArticle(db *sql.DB, id int64) {
    // time function duration
    start := time.Now()
    // storedArticle contains information stored in db which needs to be updated through scraping
    storedArticle := getArticleById(db, id)
    // init goquery
    doc, err := goquery.NewDocument(storedArticle.Url.String)
    glog.Info("scraping article with url --> ", storedArticle.Url.String)
    if err != nil {
        glog.Errorf("error while scraping article with id %d --> %v", storedArticle.ID.Int64, err)
        return
    }
    // start scraping page title
    doc.Find("head").Each(func(i int, s *goquery.Selection) {
        pageTitle := s.Find("title").Text()
        storedArticle.Name = sql.NullString{String: strings.TrimSpace(pageTitle), Valid: true}
    })
    // now get meta description field
    doc.Find("meta").Each(func(i int, s *goquery.Selection) {
        if name, _ := s.Attr("name"); strings.EqualFold(name, "description") {
            description, _ := s.Attr("content")
            storedArticle.Description = sql.NullString{String: strings.TrimSpace(description), Valid: true}
        }
    })
    // if unable to scrape title, then use url
    if len(storedArticle.Name.String) == 0 {
        storedArticle.Name.String = storedArticle.Url.String
    }
    // if unable to scrape description, then use default text
    if len(storedArticle.Description.String) == 0 {
        storedArticle.Description.String = noDescription
    }
    // debugging info
    glog.Infof("scraped title --> %s (length: %d)", storedArticle.Name.String, len(storedArticle.Name.String))
    glog.Infof("scraped description --> %s (length: %d)", storedArticle.Description.String, len(storedArticle.Description.String))
    // after successful scraping, add page title (and more?) to article in db
    updateArticle(db, storedArticle)
    elapsed := time.Since(start)
    glog.Infof("scraping article %d completed in %s", storedArticle.ID.Int64, elapsed.String())
}
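If a channel-based worker is the right direction, I imagine the handler would then just send the id onto the channel instead of starting a goroutine per request, roughly like this (again only a sketch; here I pass the channel in as a parameter rather than using a package-level variable, and ScrapeArticle itself stays untouched):

    func AddArticle(db *sql.DB, scrapeQueue chan<- int64) http.HandlerFunc {
        return func(w http.ResponseWriter, r *http.Request) {
            queryParam := r.FormValue("url")
            insertedId := dao.SaveArticle(db, queryParam)
            glog.Infof("add hostname %s, id: %d", getHostnameFromUrl(queryParam), insertedId)
            // enqueue the scrape job instead of: go dao.ScrapeArticle(db, insertedId)
            scrapeQueue <- insertedId
            // finally output confirmation page
            renderObject := map[string]string{"hostname": getHostnameFromUrl(queryParam)}
            render.DisplayPage(w, r, renderObject, "confirmation.html")
        }
    }

Would a single worker reading from the channel actually be enough to stop the database lock errors, or am I missing something?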