warehost/cmd/warehost-web/handler.go

package main

import (
	"fmt"
	"net/http"
	"os"
	"text/template"

	//"github.com/microcosm-cc/bluemonday"
	"github.com/russross/blackfriday"

	liblog "dev.sum7.eu/sum7/warehost/lib/log"
	web "dev.sum7.eu/sum7/warehost/modul/web"
)
// ProxyHost is the request header that carries the original host name
// when this server runs behind a reverse proxy.
const ProxyHost = "X-Real-Host"
// getWebsite looks up the website that serves the given host name via
// the web_domain table.
func getWebsite(host string) *web.Website {
	website := &web.Website{}
	dbconnection.Model(website).Joins("JOIN web_domain ON web_domain.website = web_website.id").Where("web_domain.name = ?", host).First(website)
	return website
}
// handlerstatic serves static assets from the website's "static"
// directory below the configured webroot.
func handlerstatic(w http.ResponseWriter, r *http.Request) {
	host := r.Header.Get(ProxyHost)
	if len(host) <= 1 {
		host = r.Host
	}
	website := getWebsite(host)

	path := fmt.Sprintf("%s/%d/%s/%s", config.Webroot, website.ID, "static", r.URL.Path)
	if f, err := os.Stat(path); err == nil && !f.IsDir() {
		http.ServeFile(w, r, path)
		return
	}
	http.NotFound(w, r)
}
// handlerfiles serves uploaded files from the website's "files"
// directory below the configured webroot.
func handlerfiles(w http.ResponseWriter, r *http.Request) {
	host := r.Header.Get(ProxyHost)
	if len(host) <= 1 {
		host = r.Host
	}
	website := getWebsite(host)

	path := fmt.Sprintf("%s/%d/%s/%s", config.Webroot, website.ID, "files", r.URL.Path)
	if f, err := os.Stat(path); err == nil && !f.IsDir() {
		http.ServeFile(w, r, path)
		return
	}
	http.NotFound(w, r)
}
// handlerfunc renders a website page: it resolves the requested menu
// entry, loads the page content, converts it from Markdown to HTML and
// runs the result through the website's templates.
func handlerfunc(w http.ResponseWriter, r *http.Request) {
	host := r.Header.Get(ProxyHost)
	if len(host) <= 1 {
		host = r.Host
	}
	url := r.URL.Path[1:]

	logger := liblog.NewModulLog(host).GetLog(r, url)

	website := getWebsite(host)
	logger = logger.WithField("hID", website.ID)

	var menus []*web.Menu
	dbconnection.Where("website = ?", website.ID).Preload("Menu").Order("position").Find(&menus)

	// The first entry without a short URL acts as the default page; an
	// entry whose short URL matches the request overrides it.
	var menu *web.Menu
	for _, item := range menus {
		if item.Shorturl == "" && menu == nil {
			menu = item
		}
		if item.Shorturl == url {
			menu = item
		}
	}

	menus = web.BuildMenuTree(menus)
	page := &web.Page{WebsiteID: website.ID, MenuID: menu.ID}
	dbconnection.Where(page).Find(page)
	page.Menu = menu

	// Render the stored Markdown to HTML; sanitizing via bluemonday is
	// currently disabled.
	unsafe := blackfriday.MarkdownCommon([]byte(page.Content))
	//page.Content = string(bluemonday.UGCPolicy().SanitizeBytes(unsafe))
	page.Content = string(unsafe)

	i := TemplateInfo{
		Website: website,
		Host:    host,
		URL:     url,
		Page:    page,
		Menu:    menus,
	}
	t, err := template.ParseGlob(fmt.Sprintf("%s/%d/%s/%s", config.Webroot, website.ID, "tmpl", "*.tmpl"))
	logger.Info("done")
	if err != nil {
		w.Write([]byte(fmt.Sprintf("<h1>Error on rendering Template</h1>\n%s", err)))
	} else {
		t.Execute(w, i)
	}
}
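
// exampleRoutes is a hypothetical sketch of how the handlers above could
// be registered. The actual wiring lives in another file of this package
// (not shown here); the route prefixes and the listen address below are
// assumptions, not part of the original source.
func exampleRoutes() {
	// Static assets and uploaded files are served from per-website
	// directories by handlerstatic and handlerfiles.
	http.Handle("/static/", http.StripPrefix("/static/", http.HandlerFunc(handlerstatic)))
	http.Handle("/files/", http.StripPrefix("/files/", http.HandlerFunc(handlerfiles)))
	// Every other path is rendered as a Markdown page through handlerfunc.
	http.HandleFunc("/", handlerfunc)
	// http.ListenAndServe(":8080", nil) // listen address is an assumption
}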