
Commit

Implement search http endpoint.
robertabcd committed Apr 23, 2018
1 parent 51379f3 commit 8add7f3
Showing 4 changed files with 188 additions and 5 deletions.
75 changes: 75 additions & 0 deletions cached_ops.go
@@ -4,6 +4,7 @@ import (
	"errors"
	"fmt"
	"log"
	"net/url"
	"strconv"

	"github.com/ptt/pttweb/article"
@@ -84,6 +85,80 @@ func generateBbsIndex(key cache.Key) (cache.Cacheable, error) {
	return bbsindex, nil
}

type BbsSearchRequest struct {
	Brd   pttbbs.Board
	Page  int
	Query string
	Preds []pttbbs.SearchPredicate
}

func (r *BbsSearchRequest) String() string {
	return fmt.Sprintf("pttweb:bbssearch/%v/%v/%v", r.Brd.BrdName, r.Page, r.Query)
}

func generateBbsSearch(key cache.Key) (cache.Cacheable, error) {
	r := key.(*BbsSearchRequest)
	page := r.Page
	if page == 0 {
		page = 1
	}
	offset := -EntryPerPage * page

	bbsindex := &BbsIndex{
		Board:   r.Brd,
		Query:   r.Query,
		IsValid: true,
	}

	// Search articles
	articles, totalPosts, err := pttSearch.Search(r.Brd.Ref(), r.Preds, offset, EntryPerPage)
	if err != nil {
		return nil, err
	}

	// Handle paging
	paging := NewPaging(EntryPerPage, totalPosts)
	if lastPage := paging.LastPageNo(); page > lastPage {
		articles = nil
		bbsindex.IsValid = false
	} else if page == lastPage {
		// We may get extra entries for the last page.
		n := totalPosts % EntryPerPage
		if n < len(articles) {
			articles = articles[:n]
		}
	}

	// Show the page in reverse order.
	for i, j := 0, len(articles)-1; i < j; i, j = i+1, j-1 {
		articles[i], articles[j] = articles[j], articles[i]
	}
	bbsindex.Articles = articles

	// Page links, in newest first order.
	pageLink := func(n int) string {
		u, err := router.Get("bbssearch").URLPath("brdname", r.Brd.BrdName)
		if err != nil {
			return ""
		}
		q := url.Values{}
		q.Set("q", r.Query)
		q.Set("page", strconv.Itoa(n))
		u.RawQuery = q.Encode()
		return u.String()
	}
	bbsindex.FirstPage = pageLink(paging.LastPageNo())
	bbsindex.LastPage = pageLink(1)
	if page > 1 {
		bbsindex.NextPage = pageLink(page - 1)
	}
	if page < paging.LastPageNo() {
		bbsindex.PrevPage = pageLink(page + 1)
	}

	return bbsindex, nil
}

type BoardAtomFeedRequest struct {
	Brd pttbbs.Board
}
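A note on the paging above: page 1 asks the backend for a negative offset of -EntryPerPage (the newest EntryPerPage posts, assuming negative offsets count from the end as they do for the regular board index), and the last page may return extra entries that are trimmed to totalPosts % EntryPerPage. Below is a minimal, self-contained sketch of that arithmetic with hypothetical numbers (EntryPerPage = 20, 47 matching posts); it is an illustration, not code from this commit. The corresponding cache keys have the form pttweb:bbssearch/<BrdName>/<Page>/<Query>, per String() above.

package main

import "fmt"

const entryPerPage = 20 // hypothetical stand-in for pttweb's EntryPerPage

func main() {
	totalPosts := 47 // hypothetical number of matching posts
	lastPage := (totalPosts + entryPerPage - 1) / entryPerPage // 3

	for page := 1; page <= lastPage; page++ {
		// Page 1 is the newest slice; offsets count back from the end.
		offset := -entryPerPage * page
		fmt.Printf("page %d -> offset %d\n", page, offset)
	}

	// On the last page the backend may hand back more rows than remain,
	// so generateBbsSearch keeps only totalPosts % entryPerPage of them (here 7).
	fmt.Println("entries kept on last page:", totalPosts%entryPerPage)
}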
1 change: 1 addition & 0 deletions config.go
@@ -5,6 +5,7 @@ import "errors"
type PttwebConfig struct {
	Bind              []string
	BoarddAddress     string
	SearchAddress     string
	MandAddress       string
	MemcachedAddress  string
	TemplateDirectory string
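A minimal sketch of how the new SearchAddress field is meant to be used: it points search traffic at a separate boardd-compatible backend, and leaving it empty falls back to the regular boardd connection, mirroring the logic added to main() in pttweb.go below. The addresses and the local mirror of the struct are hypothetical, for illustration only.

package main

import "fmt"

// Local mirror of the two relevant PttwebConfig fields (illustration only).
type PttwebConfig struct {
	BoarddAddress string
	SearchAddress string
}

func main() {
	cfg := PttwebConfig{
		BoarddAddress: "127.0.0.1:1234", // hypothetical addresses
		SearchAddress: "127.0.0.1:1235",
	}

	// Same fallback main() applies: with SearchAddress unset, search
	// queries go over the regular boardd connection.
	addr := cfg.SearchAddress
	if addr == "" {
		addr = cfg.BoarddAddress
	}
	fmt.Println("search requests served by", addr)
}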
1 change: 1 addition & 0 deletions page/pages.go
@@ -63,6 +63,7 @@ func (Classlist) TemplateName() string { return TnameClasslist }

type BbsIndex struct {
	Board pttbbs.Board
	Query string

	FirstPage string
	PrevPage  string
116 changes: 111 additions & 5 deletions pttweb.go
@@ -36,9 +36,11 @@ import (
)

const (
-	ArticleCacheTimeout = time.Minute * 10
-	BbsIndexCacheTimeout = time.Minute * 5
-	BbsIndexLastPageCacheTimeout = time.Minute * 1
	ArticleCacheTimeout           = time.Minute * 10
	BbsIndexCacheTimeout          = time.Minute * 5
	BbsIndexLastPageCacheTimeout  = time.Minute * 1
	BbsSearchCacheTimeout         = time.Minute * 10
	BbsSearchLastPageCacheTimeout = time.Minute * 3
)

var (
@@ -47,6 +49,7 @@ var (
)

var ptt pttbbs.Pttbbs
var pttSearch pttbbs.Pttbbs
var mand manpb.ManServiceClient
var router *mux.Router
var cacheMgr *cache.CacheManager
@@ -87,6 +90,15 @@ func main() {
		log.Fatal("cannot connect to boardd:", config.BoarddAddress, err)
	}

	if config.SearchAddress != "" {
		pttSearch, err = pttbbs.NewGrpcRemotePtt(config.SearchAddress)
		if err != nil {
			log.Fatal("cannot connect to boardd:", config.SearchAddress, err)
		}
	} else {
		pttSearch = ptt
	}

	// Init mand connection
	if conn, err := grpc.Dial(config.MandAddress, grpc.WithInsecure(), grpc.WithBackoffMaxDelay(time.Second*5)); err != nil {
		log.Fatal("cannot connect to mand:", config.MandAddress, err)
@@ -192,6 +204,9 @@ func createRouter() *mux.Router {
	r.Path(ReplaceVars(`/bbs/{brdname}/index{page}.html`)).
		Handler(ErrorWrapper(handleBbs)).
		Name("bbsindex_page")
	r.Path(ReplaceVars(`/bbs/{brdname}/search`)).
		Handler(ErrorWrapper(handleBbsSearch)).
		Name("bbssearch")

	// Feed
	r.Path(ReplaceVars(`/atom/{brdname}.xml`)).
@@ -439,8 +454,6 @@ func handleBbs(c *Context, w http.ResponseWriter) error {
		timeout = BbsIndexCacheTimeout
	}

-	var err error
-
	brd, err := getBoardByName(c, brdname)
	if err != nil {
		return err
@@ -462,6 +475,99 @@ func handleBbs(c *Context, w http.ResponseWriter) error {
	return page.ExecutePage(w, (*page.BbsIndex)(bbsindex))
}

func parseKeyValueTerm(term string) (pttbbs.SearchPredicate, bool) {
	kv := strings.SplitN(term, ":", 2)
	if len(kv) != 2 {
		return nil, false
	}
	k, v := strings.ToLower(kv[0]), kv[1]
	switch k {
	case "author":
		return pttbbs.WithAuthor(v), true
	case "recommend":
		n, err := strconv.Atoi(v)
		if err != nil {
			return nil, false
		}
		return pttbbs.WithRecommend(n), true
	}
	return nil, false
}

func parseQuery(query string) ([]pttbbs.SearchPredicate, error) {
	segs := strings.Split(query, " ")
	var titleSegs []string
	var preds []pttbbs.SearchPredicate
	for _, s := range segs {
		if p, ok := parseKeyValueTerm(s); ok {
			preds = append(preds, p)
		} else {
			titleSegs = append(titleSegs, s)
		}
	}
	title := strings.TrimSpace(strings.Join(titleSegs, " "))
	if title != "" {
		// Put title first.
		preds = append([]pttbbs.SearchPredicate{
			pttbbs.WithTitle(title),
		}, preds...)
	}
	return preds, nil
}

func handleBbsSearch(c *Context, w http.ResponseWriter) error {
	vars := mux.Vars(c.R)
	brdname := vars["brdname"]

	if c.R.ParseForm() != nil {
		w.WriteHeader(http.StatusBadRequest)
		return nil
	}
	form := c.R.Form
	query := strings.TrimSpace(form.Get("q"))

	pageNo := 1
	// Note: TODO move timeout into the generating function.
	timeout := BbsSearchLastPageCacheTimeout

	if pageStr := form.Get("page"); pageStr != "" {
		pg, err := strconv.Atoi(pageStr)
		if err != nil || pg <= 0 {
			w.WriteHeader(http.StatusBadRequest)
			return nil
		}
		pageNo = pg
		timeout = BbsSearchCacheTimeout
	}

	preds, err := parseQuery(query)
	if err != nil {
		return err
	}

	brd, err := getBoardByName(c, brdname)
	if err != nil {
		return err
	}

	obj, err := cacheMgr.Get(&BbsSearchRequest{
		Brd:   *brd,
		Page:  pageNo,
		Query: query,
		Preds: preds,
	}, ZeroBbsIndex, timeout, generateBbsSearch)
	if err != nil {
		return err
	}
	bbsindex := obj.(*BbsIndex)

	if !bbsindex.IsValid {
		return NewNotFoundError(fmt.Errorf("not a valid cache.BbsIndex: %v/%v", brd.BrdName, pageNo))
	}

	return page.ExecutePage(w, (*page.BbsIndex)(bbsindex))
}

func handleBoardAtomFeed(c *Context, w http.ResponseWriter) error {
	vars := mux.Vars(c.R)
	brdname := vars["brdname"]
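To illustrate how handleBbsSearch interprets its input: a request such as GET /bbs/<brdname>/search?q=keyword+author:someone+recommend:10&page=2 is split on spaces by parseQuery; key:value terms become author/recommend predicates, and the remaining words are joined back into a title predicate. The sketch below mimics that classification with a standalone helper so it can run without the pttbbs package; the query string and helper names are hypothetical.

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// classifyTerm mirrors parseKeyValueTerm: it reports whether a term is an
// author: or recommend: predicate rather than part of the title search.
func classifyTerm(term string) (string, bool) {
	kv := strings.SplitN(term, ":", 2)
	if len(kv) != 2 {
		return "", false
	}
	switch strings.ToLower(kv[0]) {
	case "author":
		return "author=" + kv[1], true
	case "recommend":
		if _, err := strconv.Atoi(kv[1]); err != nil {
			return "", false
		}
		return "recommend=" + kv[1], true
	}
	return "", false
}

func main() {
	query := "keyword author:someone recommend:10" // hypothetical query string
	var preds, titleSegs []string
	for _, s := range strings.Split(query, " ") {
		if p, ok := classifyTerm(s); ok {
			preds = append(preds, p)
		} else {
			titleSegs = append(titleSegs, s)
		}
	}
	fmt.Println("predicates:", preds)                          // [author=someone recommend=10]
	fmt.Println("title search:", strings.Join(titleSegs, " ")) // keyword
}

As with the board index, results are cached per BbsSearchRequest: requests without an explicit page parameter use the shorter BbsSearchLastPageCacheTimeout, while explicit page numbers use BbsSearchCacheTimeout.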
