Major refactor
Moved all code into separate endpoints folder. Various bugfixes.
commit a144c25ab8
Makefile (10 lines changed)
@@ -11,10 +11,13 @@ domain=test.monster
 lua_files=$(shell find src/lua -type f)
 src_files=$(shell find src -type f) $(shell find conf -type f)
 sql_files=$(shell find src/sql -type f)
+test_files=$(shell find spec -type f)
+built_tests=$(test_files:%=$(chroot_dir)%)
 built_files=$(lua_files:src/lua/%.lua=$(chroot_dir)%.lua)
 page_files=$(shell find src/pages -type f)
 built_pages=$(page_files:src/pages/%.etlua=$(chroot_dir)pages/%.etlua)
 built_sql=$(sql_files:src/sql/%.sql=$(chroot_dir)sql/%.sql)
+built=$(built_files) $(built_sql) $(built_pages) $(built_tests)
 
 all: $(chroot_dir) smr.so $(built_files) $(built_pages) $(built_sql)
 	echo $(built_files)
@@ -31,6 +34,7 @@ $(chroot_dir): apk-tools-static-$(version).apk
 	mkdir -p $(chroot_dir)/pages
 	mkdir -p $(chroot_dir)/sql
 	mkdir -p $(chroot_dir)/data
+	mkdir -p $(chroot_dir)/endpoints
 	#cd $(chroot_dir) && tar -xvzf ../apk-tools-static-*.apk
 	#cd $(chroot_dir) && sudo ./sbin/apk.static -X $(mirror)latest-stable/main -U --allow-untrusted --root $(chroot_dir) --no-cache --initdb add alpine-base
 	#ln -s /dev/urandom $(chroot_dir)/dev/random #Prevent an attacker with access to the chroot from exhausting our entropy pool and causing a dos
@@ -69,5 +73,11 @@ $(built_pages): $(chroot_dir)pages/%.etlua : src/pages/%.etlua
 $(built_sql): $(chroot_dir)sql/%.sql : src/sql/%.sql
 	cp $^ $@
 
+$(built_tests) : $(chroot_dir)% : %
+	cp $^ $@
+
 smr.so : $(src_files)
 	kodev build
+
+test : $(built)
+	cd kore_chroot && busted
@@ -0,0 +1,28 @@
--[[
	Test the home page
]]




describe("smr",function()
	describe("site home page",function()
		it("detours configure",function()
			local s = {}
			local c = false
			function configure(...)
				local args = {...}
				if args[1] == s then
					c = true
				end
			end
			local oldconfigure = configure
			local index_get = require("index_get")
			configure(s)
			assert(c)
		end)
	end)
	describe("author home page",function()

	end)
end)
@@ -0,0 +1,106 @@

local pages = {
	index = {
		route = "/",
		name = "home",
		methods = {
			GET={}
		}
	},
	paste = {
		route = "/_paste",
		name = "post_story",
		methods = {
			GET={},
			POST={}
		}
	},
	edit = {
		route = "/_edit",
		name = "edit",
		methods = {
			GET={},
			POST={},
		}
	},
	--TODO:bio
	login = {
		route = "/_login",
		name = "login",
		methods = {
			GET={},
			POST={},
		}
	},
	claim = {
		route = "/_claim",
		name = "claim",
		methods = {
			GET = {},
			POST = {}
		}
	},
	download = {
		route = "/_download",
		name = "download",
		methods = {
			GET = {},
		}
	},
	preview = {
		route = "/_preview",
		name = "preview",
		methods = {
			POST = {},
		}
	},
	search = {
		route = "/_search",
		name = "search",
		methods = {
			GET = {},
		}
	}

}

local request_stub_m = {
}
function http_response(req,errcode,str)
	s = true
end
function http_request_get_host(reqstub)
	return "localhost:8888"
end
function http_request_populate_post(reqstub)
	reqstub.post_populated = true
end

describe("smr",function()
	for name, obj in pairs(pages) do
		describe("endpoint " .. name,function()
			for method,parameters in pairs(obj.methods) do
				describe("method " .. method,function()
					local fname = string.format("%s_%s",name,string.lower(method))
					it("should be named appropriately",function()
						local f = assert(io.open(fname .. ".lua","r"))
					end)
					it("should run without errors",function()
						require(fname)
					end)
					it("should return a function",function()
						local pagefunc = assert(require(fname))
						assert(type(pagefunc) == "function")
					end)
					it("calls http_response()",function()
						local pagefunc = require(fname)
						local s = false
						local reqstub = {}
						pagefunc(reqstub)
					end)

				end)
			end
		end)
	end
end)
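The endpoints exercised by this spec also call Kore HTTP helpers that are not stubbed above, such as http_request_populate_qs, http_request_get_path and http_argument_get_string (all used by the endpoint files in this commit). A minimal sketch of additional stubs in the same global-function style; the stub bodies are assumptions for illustration, not part of the change:

--Hypothetical extra stubs, following the same pattern as the ones above
function http_request_populate_qs(reqstub)
	reqstub.qs_populated = true
end

function http_request_get_path(reqstub)
	return reqstub.path or "/"
end

function http_argument_get_string(reqstub,name)
	return reqstub.args and reqstub.args[name]
end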
@@ -0,0 +1,94 @@
local sql = require("lsqlite3")

local queries = require("queries")
local util = require("util")

local ret = {}

local stmnt_cache, stmnt_insert_cache, stmnt_dirty_cache

local oldconfigure = configure
function configure(...)
	local cache = util.sqlassert(sql.open_memory())
	--A cache table to store rendered pages that do not need to be
	--rerendered. In theory this could OOM the program eventually and start
	--swapping to disk. TODO: fixme
	assert(cache:exec([[
		CREATE TABLE IF NOT EXISTS cache (
			path TEXT PRIMARY KEY,
			data BLOB,
			updated INTEGER,
			dirty INTEGER
		);
	]]))
	stmnt_cache = assert(cache:prepare([[
		SELECT data
		FROM cache
		WHERE
			path = :path AND
			((dirty = 0) OR (strftime('%s','now') - updated) < 20)
		;
	]]))
	stmnt_insert_cache = assert(cache:prepare([[
		INSERT OR REPLACE INTO cache (
			path, data, updated, dirty
		) VALUES (
			:path, :data, strftime('%s','now'), 0
		);
	]]))
	stmnt_dirty_cache = assert(cache:prepare([[
		UPDATE OR IGNORE cache
		SET dirty = 1
		WHERE path = :path;
	]]))
	return oldconfigure(...)
end

--Render a page, with caching. If you need to dirty a cache, call dirty_cache()
function ret.render(pagename,callback)
	print("Running render...")
	stmnt_cache:bind_names{path=pagename}
	local err = util.do_sql(stmnt_cache)
	if err == sql.DONE then
		stmnt_cache:reset()
		--page is not cached
	elseif err == sql.ROW then
		print("Cache hit:" .. pagename)
		local data = stmnt_cache:get_values()
		stmnt_cache:reset()
		return data[1]
	else --sql.ERROR or sql.MISUSE
		error("Failed to check cache for page " .. pagename)
	end
	--We didn't have the page cached, render it
	print("Cache miss, running function")
	local text = callback()
	print("Saving data...")
	--And save the data back into the cache
	stmnt_insert_cache:bind_names{
		path=pagename,
		data=text,
	}
	err = util.do_sql(stmnt_insert_cache)
	if err == sql.ERROR or err == sql.MISUSE then
		error("Failed to update cache for page " .. pagename)
	end
	stmnt_insert_cache:reset()
	print("returning text from cache.render:",text)
	return text
end

function ret.dirty(url)
	print("Dirtying cache:",url)
	stmnt_dirty_cache:bind_names{
		path = url
	}
	local err = util.do_sql(stmnt_dirty_cache)
	stmnt_dirty_cache:reset()
end

function ret.close()

end

return ret
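For reference, the endpoints below use this module in a consistent pattern: render a page through the cache on reads, and dirty the affected paths after a write. A minimal usage sketch; the path and the callback body are placeholders, not a real page from this commit:

local cache = require("cache")
local config = require("config")

--Read path: return the cached copy if fresh, otherwise run the callback and store its result
local text = cache.render(string.format("%s/_example",config.domain),function()
	return "<html>rendered page body</html>" --placeholder for a pages.* template call
end)

--Write path: mark the cached entry stale so the next render re-runs the callback
cache.dirty(string.format("%s/_example",config.domain))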
@@ -0,0 +1,5 @@

return {
	domain = "test.monster:8888",
	production = false,
}
@@ -40,8 +40,10 @@ function configure(...)
 	--Store a cookie for logged in users. Logged in users can edit
 	--their own posts, and edit their biographies.
 	assert(db.conn:exec(queries.create_table_session))
 	return oldconfigure(...)
 end
 
+configure()
+
 function db.close()
 	db.conn:close()
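Every module in this commit wraps the global configure the same way, so requiring a module adds its own setup (usually preparing statements) to a chain that ends at whatever configure existed before. A stand-alone sketch of the idiom; the table and the argument are illustrative, not project code:

--Illustration of the configure-chaining idiom used by the modules below
local calls = {}

function configure(...) --stand-in for the base configure provided elsewhere
	table.insert(calls,"base")
	return true
end

--What each module effectively does when it is required:
local oldconfigure = configure
function configure(...)
	table.insert(calls,"module setup") --a real module prepares its statements here
	return oldconfigure(...)          --then defers to the previous link in the chain
end

configure("db.sqlite3")
assert(calls[1] == "module setup" and calls[2] == "base")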
@@ -0,0 +1,16 @@
local cache = require("cache")
local config = require("config")
local pages = require("pages")


local function claim_get(req)
	--Get the page to claim a name
	local cachestr = string.format("%s/_claim",config.domain)
	local text = cache.render(cachestr,function()
		print("cache miss, rendering claim page")
		return pages.claim{err=""}
	end)
	http_response(req,200,text)
end

return claim_get
@@ -0,0 +1,74 @@
local sql = require("lsqlite3")

local pages = require("pages")
local db = require("db")
local queries = require("queries")
local util = require("util")
local sessionlib = require("session")
local config = require("config")

local stmnt_author_create

local oldconfigure = configure
function configure(...)

	stmnt_author_create = util.sqlassert(db.conn:prepare(queries.insert_author))
	return oldconfigure(...)
end

local function claim_post(req)
	--Actually claim a name
	http_request_populate_post(req)
	local name = assert(http_argument_get_string(req,"user"))
	local text
	--What in the world, Kore should be rejecting names that
	--are not lower case & no symbols, but some still get through somehow.
	if not name:match("^[a-z0-9]*$") then
		print("Bad username:",name)
		text = pages.claim{
			err = "Usernames must match ^[a-z0-9]{1,30}$"
		}
		http_response(req,200,text)
		return
	end
	local rngf = assert(io.open("/dev/urandom","rb"))
	local passlength = string.byte(rngf:read(1)) + 64
	local salt = rngf:read(64)
	local password = rngf:read(passlength)
	rngf:close()
	local hash = sha3(salt .. password)
	stmnt_author_create:bind_names{
		name = name,
	}
	stmnt_author_create:bind_blob(2,salt)
	stmnt_author_create:bind_blob(3,hash)
	local err = util.do_sql(stmnt_author_create)
	if err == sql.DONE then
		print("success")
		--We successfully made the new author
		local id = stmnt_author_create:last_insert_rowid()
		stmnt_author_create:reset()
		--Give them a file back
		http_response_header(req,"Content-Type","application/octet-stream")
		http_response_header(req,"Content-Disposition","attachment; filename=\"" .. name .. "." .. config.domain .. ".passfile\"")
		local session = sessionlib.start(id)
		text = password
		print("session started, about to send password:",text)
		http_response(req,200,text)
		return
	elseif err == sql.CONSTRAINT then
		--If the creation failed, they probably just tried
		--to use a name that was already taken
		text = pages.claim {
			err = "Failed to claim. That name may already be taken."
		}
	elseif err == sql.ERROR or err == sql.MISUSE then
		--This is bad though
		text = pages.claim {
			err = "Failed to claim"
		}
	end
	stmnt_author_create:reset()
	http_response(req,200,text)
end

return claim_post
@@ -0,0 +1,45 @@
local sql = require("lsqlite3")
local zlib = require("zlib")

local db = require("db")
local queries = require("queries")
local util = require("util")
local pages = require("pages")

local stmnt_download
local oldconfigure = configure
function configure(...)
	stmnt_download = assert(db.conn:prepare(queries.select_download))
	return oldconfigure(...)
end

local function download_get(req)
	local host = http_request_get_host(req)
	local path = http_request_get_path(req)
	print("host:",host,"path:",path)
	http_request_populate_qs(req)
	local story = assert(http_argument_get_string(req,"story"))
	local story_id = util.decode_id(story)
	print("Downloading", story_id)
	stmnt_download:bind_names{
		postid = story_id
	}
	local err = util.do_sql(stmnt_download)
	if err == sql.DONE then
		--No rows, story not found
		http_response(req,404,pages.nostory{path=story})
		stmnt_download:reset()
		return
	end
	assert(err == sql.ROW, "after doing download sql, result was not a row, was:" .. tostring(err))
	local txt_compressed, title = unpack(stmnt_download:get_values())
	local text = zlib.decompress(txt_compressed)
	stmnt_download:reset()
	http_response_header(req,"Content-Type","application/octet-stream")
	local nicetitle = title:gsub("%W","_")
	http_response_header(req,"Content-Disposition","attachment; filename=\"" .. nicetitle .. ".txt\"")
	http_response(req,200,text)

end

return download_get
@@ -0,0 +1,68 @@
local zlib = require("zlib")
local sql = require("lsqlite3")

local db = require("db")
local queries = require("queries")
local util = require("util")
local pages = require("pages")
local tags = require("tags")
local session = require("session")
local config = require("config")

local stmnt_edit
local oldconfigure = configure
function configure(...)
	stmnt_edit = assert(db.conn:prepare(queries.select_edit))
	return oldconfigure(...)
end

local function edit_get(req)
	local host = http_request_get_host(req)
	local path = http_request_get_path(req)
	local author, authorid = session.get(req)

	http_request_populate_qs(req)
	local story = assert(http_argument_get_string(req,"story"))
	local story_id = util.decode_id(story)
	local ret

	print("we want to edit story:",story)
	--Check that the logged in user is the owner of the story
	--sql-side. If we're not the owner, we'll get 0 rows back.
	stmnt_edit:bind_names{
		postid = story_id,
		authorid = authorid
	}
	local err = util.do_sql(stmnt_edit)
	if err == sql.DONE then
		--No rows, we're probably not the owner (it might
		--also be because there's no such story)
		ret = pages.cantedit{
			path = story,
		}
		stmnt_edit:reset()
		http_response(req,200,ret)
		return
	end
	assert(err == sql.ROW)
	local data = stmnt_edit:get_values()
	local txt_compressed, markup, isanon, title = unpack(data)
	local text = zlib.decompress(txt_compressed)
	local tags = tags.get(story_id)
	local tags_txt = table.concat(tags,";")
	stmnt_edit:reset()
	ret = pages.edit{
		title = title,
		text = text,
		markup = markup,
		user = author,
		isanon = isanon == 1,
		domain = config.domain,
		story = story_id,
		err = "",
		tags = tags_txt
	}
	http_response(req,200,ret)
end

return edit_get
@@ -0,0 +1,89 @@
local sql = require("lsqlite3")
local zlib = require("zlib")

local db = require("db")
local queries = require("queries")
local pages = require("pages")
local parsers = require("parsers")
local util = require("util")
local tagslib = require("tags")
local cache = require("cache")
local config = require("config")
local session = require("session")

local stmnt_author_of, stmnt_update_raw, stmnt_update

local oldconfigure = configure
function configure(...)
	stmnt_author_of = assert(db.conn:prepare(queries.select_author_of_post))
	stmnt_update_raw = assert(db.conn:prepare(queries.update_raw))
	stmnt_update = assert(db.conn:prepare(queries.update_post))
	return oldconfigure(...)
end

local function edit_post(req)
	local host = http_request_get_host(req)
	local path = http_request_get_path(req)
	local author, author_id = session.get(req)

	http_request_populate_post(req)
	local storyid = tonumber(assert(http_argument_get_string(req,"story")))
	local title = assert(http_argument_get_string(req,"title"))
	local text = assert(http_argument_get_string(req,"text"))
	local pasteas = assert(http_argument_get_string(req,"pasteas"))
	local markup = assert(http_argument_get_string(req,"markup"))
	local tags_str = http_argument_get_string(req,"tags")
	stmnt_author_of:bind_names{
		id = storyid
	}
	local err = util.do_sql(stmnt_author_of)
	if err ~= sql.ROW then
		stmnt_author_of:reset()
		error("No author found for story:" .. storyid)
	end
	local data = stmnt_author_of:get_values()
	stmnt_author_of:reset()
	local realauthor = data[1]
	assert(realauthor == author_id) --Make sure the author of the story is the currently logged in user
	local parsed = parsers[markup](text)
	local compr_raw = zlib.compress(text)
	local compr = zlib.compress(parsed)
	local tags = {}
	if tags_str then
		tags = util.parse_tags(tags_str)
	end
	assert(stmnt_update_raw:bind_blob(1,compr_raw) == sql.OK)
	assert(stmnt_update_raw:bind(2,markup) == sql.OK)
	assert(stmnt_update_raw:bind(3,storyid) == sql.OK)
	assert(util.do_sql(stmnt_update_raw) == sql.DONE, "Failed to update raw")
	stmnt_update_raw:reset()
	assert(stmnt_update:bind(1,title) == sql.OK)
	assert(stmnt_update:bind_blob(2,compr) == sql.OK)
	assert(stmnt_update:bind(3,pasteas == "anonymous" and 1 or 0) == sql.OK)
	assert(stmnt_update:bind(4,storyid) == sql.OK)
	assert(util.do_sql(stmnt_update) == sql.DONE, "Failed to update text")
	stmnt_update:reset()
	tagslib.set(storyid,tags)
	--[[
	assert(stmnt_drop_tags:bind_names{postid = storyid} == sql.OK)
	do_sql(stmnt_drop_tags)
	stmnt_drop_tags:reset()
	for _,tag in pairs(tags) do
		print("Looking at tag",tag)
		assert(stmnt_ins_tag:bind(1,storyid) == sql.OK)
		assert(stmnt_ins_tag:bind(2,tag) == sql.OK)
		err = do_sql(stmnt_ins_tag)
		stmnt_ins_tag:reset()
	end
	]]
	local id_enc = util.encode_id(storyid)
	local loc = string.format("https://%s/%s",config.domain,id_enc)
	cache.dirty(string.format("%s/%s",config.domain,id_enc)) -- The place where this post is read
	cache.dirty(string.format("%s",config.domain)) -- The site index (e.g. if the author changed the paste from theirs to "Anonymous", the cache should reflect that).
	cache.dirty(string.format("%s.%s",author,config.domain)) -- The author's index, same reasoning as above.
	http_response_header(req,"Location",loc)
	http_response(req,303,"")
	return
end

return edit_post
@@ -0,0 +1,115 @@
local sql = require("lsqlite3")

local cache = require("cache")
local queries = require("queries")
local db = require("db")
local util = require("util")
local config = require("config")
local pages = require("pages")
local libtags = require("tags")

local stmnt_index, stmnt_author, stmnt_author_bio

local oldconfigure = configure
function configure(...)
	stmnt_index = assert(db.conn:prepare(queries.select_site_index))
	--TODO: actually let authors edit their bio
	stmnt_author_bio = assert(db.conn:prepare([[
		SELECT authors.biography FROM authors WHERE authors.name = :author;
	]]))
	stmnt_author = assert(db.conn:prepare(queries.select_author_index))
	return oldconfigure(...)
end

local function get_site_home(req)
	print("Cache miss, rendering index")
	stmnt_index:bind_names{}
	local err = util.do_sql(stmnt_index)
	local latest = {}
	--err may be sql.ROW or sql.DONE if we don't have any stories yet
	while err == sql.ROW do
		local data = stmnt_index:get_values()
		local storytags = libtags.get(data[1])
		table.insert(latest,{
			url = util.encode_id(data[1]),
			title = data[2],
			isanon = data[3] == 1,
			posted = os.date("%B %d %Y",tonumber(data[4])),
			author = data[5],
			tags = storytags,
		})
		err = stmnt_index:step()
	end
	stmnt_index:reset()
	return pages.index{
		domain = config.domain,
		stories = latest
	}
end
local function get_author_home(req)
	local host = http_request_get_host(req)
	local subdomain = host:match("([^%.]+)")
	stmnt_author_bio:bind_names{author=subdomain}
	local err = util.do_sql(stmnt_author_bio)
	if err == sql.DONE then
		print("No such author")
		stmnt_author_bio:reset()
		return pages.noauthor{
			author = subdomain
		}
	end
	print("err:",err)
	assert(err == sql.ROW,"failed to get author:" .. subdomain .. " error:" .. tostring(err))
	local data = stmnt_author_bio:get_values()
	local bio = data[1]
	stmnt_author_bio:reset()
	print("Getting author's stories")
	stmnt_author:bind_names{author=subdomain}
	err = util.do_sql(stmnt_author)
	print("err:",err)
	local stories = {}
	while err == sql.ROW do
		local data = stmnt_author:get_values()
		local id, title, time = unpack(data)
		local tags = libtags.get(id)
		table.insert(stories,{
			url = util.encode_id(id),
			title = title,
			posted = os.date("%B %d %Y",tonumber(time)),
			tags = tags,
		})
		err = stmnt_author:step()
	end
	stmnt_author:reset()
	return pages.author_index{
		domain=config.domain,
		author=subdomain,
		stories=stories,
		bio=bio
	}

end

local function index_get(req)
	local method = http_method_text(req)
	local host = http_request_get_host(req)
	local path = http_request_get_path(req)
	--Default home page
	local subdomain = host:match("([^%.]+)")
	local text
	if host == config.domain then
		local cachepath = string.format("%s",config.domain)
		text = cache.render(cachepath, function()
			return get_site_home(req)
		end)
	else --author home page
		local cachepath = string.format("%s.%s",subdomain,config.domain)
		text = cache.render(cachepath, function()
			return get_author_home(req)
		end)
	end
	assert(text)
	http_response(req,200,text)
end

return index_get
@@ -0,0 +1,17 @@
local config = require("config")
local cache = require("cache")
local config = require("config")
local pages = require("pages")


local function login_get(req)
	--Just give them the login page
	local ret = cache.render(string.format("%s/_login",config.domain),function()
		return pages.login{
			err = "",
		}
	end)
	http_response(req,200,ret)
end

return login_get
@@ -0,0 +1,61 @@
local sql = require("lsqlite3")

local db = require("db")
local util = require("util")
local session = require("session")
local config = require("config")
local pages = require("pages")

local stmnt_author_acct

local oldconfigure = configure
function configure(...)
	--Get the data we need to check if someone can log in
	stmnt_author_acct = assert(db.conn:prepare([[
		SELECT id, salt, passhash FROM authors WHERE name = :name;
	]]))

	return oldconfigure(...)
end

local function login_post(req)
	--Try to log in
	http_populate_multipart_form(req)
	local name = assert(http_argument_get_string(req,"user"))
	local pass = assert(http_file_get(req,"pass"))
	stmnt_author_acct:bind_names{
		name = name
	}
	local text
	local err = util.do_sql(stmnt_author_acct)
	if err == sql.ROW then
		local id, salt, passhash = unpack(stmnt_author_acct:get_values())
		stmnt_author_acct:reset()
		local todigest = salt .. pass
		local hash = sha3(todigest)
		if hash == passhash then
			local mysession = session.start(id)
			http_response_cookie(req,"session",mysession,"/",0,0)
			local loc = string.format("https://%s.%s",name,config.domain)
			http_response_header(req,"Location",loc)
			http_response(req,303,"")
			return
		else
			text = pages.login{
				err = "Incorrect username or password"
			}
		end
	elseif err == sql.DONE then --Allows user enumeration, do we want this?
		--Probably not a problem since all passwords are forced to be "good"
		stmnt_author_acct:reset()
		text = pages.login{
			err = "Failed to find user:" .. name
		}
	else
		stmnt_author_acct:reset()
		error("Other sql error during login")
	end
	http_response(req,200,text)
end

return login_post
@@ -0,0 +1,96 @@
local config = require("config")
local session = require("session")
local pages = require("pages")
local cache = require("cache")

local function paste_get(req)
	--Get the paste page
	local host = http_request_get_host(req)
	local text
	local author,_ = session.get(req)
	if host == config.domain and author then
		http_response_header(req,"Location",string.format("https://%s.%s/_paste",author,config.domain))
		http_response(req,303,"")
		return
	elseif host == config.domain and author == nil then
		text = cache.render(string.format("%s/_paste",host),function()
			print("Cache missing, rendering post page")
			return pages.paste{
				domain = config.domain,
				err = "",
			}
		end)
		http_response(req,200,text)
	elseif host ~= config.domain and author then
		text = pages.author_paste{
			domain = config.domain,
			user = author,
			err = "",
			text="",
		}
	elseif host ~= config.domain and author == nil then
		http_response_header(req,"Location",string.format("https://%s/_paste",config.domain))
		http_response(req,303,"")
	else
		error(string.format(
			"Unable to find a good case for paste:%s,%s,%s",
			host,
			config.domain,
			author
		))
	end
	assert(text)
	http_response(req,200,text)
	--[=[
	if host == config.domain then
		local author,_ = get_session(req)
		if author then
			http_response_header(req,"Location",string.format("https://%s.%s/_paste",author,domain))
			http_response(req,303,"")
			return
		else
			--For an anonymous user
			ret = cache.render(string.format("%s/_paste",host),function()
				print("Cache missing, rendering post page")
				return pages.paste{
					domain = domain,
					err = "",
				}
			end)
		end

	else
		--Or for someone that's logged in
		print("Looks like a logged in user wants to paste!")
		local subdomain = host:match("([^%.]+)")
		local author,_ = session.get(req)
		print("subdomain:",subdomain,"author:",author)
		--If they try to paste as an author, but are on the
		--wrong subdomain, or are not logged in, redirect them
		--to the right place. Their own subdomain for authors
		--or the anonymous paste page for not logged in users.
		if author == nil then
			http_response_header(req,"Location","https://"..domain.."/_paste")
			http_response(req,303,"")
			return
		end
		if author ~= subdomain then
			http_response_header(req,"Location",string.format("https://%s.%s/_paste",author,domain))
			http_response(req,303,"")
			return
		end
		assert(author == subdomain,"someone wants to paste as someone else")
		--We're where we want to be, serve up this user's
		--paste page. No cache, because how often is a user
		--going to paste?
		ret = pages.author_paste{
			domain = domain,
			user = author,
			text = "",
			err = "",
		}
	end
	]=]
end

return paste_get
@@ -0,0 +1,194 @@
local sql = require("lsqlite3")
local zlib = require("zlib")

local util = require("util")
local parsers = require("parsers")
local config = require("config")
local queries = require("queries")
local db = require("db")
local cache = require("cache")
local tags = require("tags")
local session = require("session")
local pages = require("pages")

local stmnt_raw,stmnt_paste

local oldconfigure = configure
function configure(...)
	stmnt_paste = assert(db.conn:prepare(queries.insert_post))
	stmnt_raw = assert(db.conn:prepare(queries.insert_raw))
	return oldconfigure(...)
end

local function anon_paste(req,ps)
	--Public paste
	--[[
	This doesn't actually do much for IPv4 addresses,
	since there are only 32 bits of address. Someone who
	got a copy of the database could
	just generate all 2^32 hashes and look up who posted
	what. Use IPv6, Tor or I2P where possible. (but then I
	guess it's harder to ban spammers... hmm..)
	]]
	--local ip = http_request_get_ip(req)
	--local iphash = sha3(ip)
	--Don't store this information for now, until I come up
	--with a more elegant solution.

	util.sqlbind(stmnt_paste,"bind_blob",1,ps.text)
	--assert(stmnt_paste:bind_blob(1,text) == sql.OK)
	util.sqlbind(stmnt_paste,"bind",2,ps.title)
	--assert(stmnt_paste:bind(2,esctitle) == sql.OK)
	util.sqlbind(stmnt_paste,"bind",3,-1)
	--assert(stmnt_paste:bind(3,-1) == sql.OK)
	util.sqlbind(stmnt_paste,"bind",4,true)
	--assert(stmnt_paste:bind(4,true) == sql.OK)
	util.sqlbind(stmnt_paste,"bind_blob",5,"")
	--assert(stmnt_paste:bind_blob(5,"") == sql.OK)
	err = util.do_sql(stmnt_paste)
	stmnt_paste:reset()
	if err == sql.DONE then
		local rowid = stmnt_paste:last_insert_rowid()
		assert(stmnt_raw:bind(1,rowid) == sql.OK)
		assert(stmnt_raw:bind_blob(2,ps.raw) == sql.OK)
		assert(stmnt_raw:bind(3,ps.markup) == sql.OK)
		err = util.do_sql(stmnt_raw)
		stmnt_raw:reset()
		if err ~= sql.DONE then
			print("Failed to save raw text, but paste still went through")
		end
		tags.set(rowid,ps.tags)
		--[[
		for _,tag in pairs(ps.tags) do
			print("tag 1:",stmnt_ins_tag:bind(1,rowid))
			print("Looking at tag",tag)
			print("tag 2:",stmnt_ins_tag:bind(2,tag))
			err = util.do_sql(stmnt_ins_tag)
			stmnt_ins_tag:reset()
		end
		]]
		local url = util.encode_id(rowid)
		local loc = string.format("https://%s/%s",config.domain,url)
		http_response_header(req,"Location",loc)
		http_response(req,303,"")
		cache.dirty(string.format("%s/%s",config.domain,url))
		cache.dirty(string.format("%s",config.domain))
		return
	elseif err == sql.ERROR or err == sql.MISUSE then
		ret = "Failed to paste: " .. tostring(err)
	else
		error("Error pasting:" .. tostring(err))
	end
	stmnt_paste:reset()
end
local function author_paste(req,ps)
	--Author paste
	local author, authorid = session.get(req)
	if author == nil then
		ret = pages.author_paste{
			domain = domain,
			author = subdomain,
			err = "You are not logged in, you must be logged in to post as " .. subdomain .. ".",
			text = text
		}
	end
	local asanon = assert(http_argument_get_string(req,"pasteas"))
	--No need to check if the author is posting to the
	--"right" subdomain, just post it to the one they have
	--the session key for.
	assert(stmnt_paste:bind_blob(1,ps.text) == sql.OK)
	assert(stmnt_paste:bind(2,ps.title) == sql.OK)
	assert(stmnt_paste:bind(3,authorid) == sql.OK)
	if asanon == "anonymous" then
		assert(stmnt_paste:bind(4,true) == sql.OK)
	else
		assert(stmnt_paste:bind(4,false) == sql.OK)
	end
	assert(stmnt_paste:bind_blob(5,"") == sql.OK)
	err = util.do_sql(stmnt_paste)
	stmnt_paste:reset()
	if err == sql.DONE then
		local rowid = stmnt_paste:last_insert_rowid()
		assert(stmnt_raw:bind(1,rowid) == sql.OK)
		assert(stmnt_raw:bind_blob(2,ps.raw) == sql.OK)
		assert(stmnt_raw:bind(3,ps.markup) == sql.OK)
		err = util.do_sql(stmnt_raw)
		stmnt_raw:reset()
		if err ~= sql.DONE then
			print("Failed to save raw text, but paste still went through")
		end
		tags.set(rowid,ps.tags)
		--[[
		for _,tag in pairs(ps.tags) do
			print("tag 1:",stmnt_ins_tag:bind(1,rowid))
			print("Looking at tag",tag)
			print("tag 2:",stmnt_ins_tag:bind(2,tag))
			err = do_sql(stmnt_ins_tag)
			stmnt_ins_tag:reset()
		end
		]]
		local url = util.encode_id(rowid)
		local loc
		if asanon == "anonymous" then
			loc = string.format("https://%s/%s",config.domain,url)
		else
			loc = string.format("https://%s.%s/%s",author,config.domain,url)
		end
		http_response_header(req,"Location",loc)
		http_response(req,303,"")
		cache.dirty(string.format("%s.%s",author,config.domain))
		cache.dirty(string.format("%s/%s",config.domain,url))
		cache.dirty(string.format("%s",config.domain))
		return
	elseif err == sql.ERROR or err == sql.MISUSE then
		ret = "Failed to paste: " .. tostring(err)
	else
		error("Error pasting:" .. tostring(err))
	end
	stmnt_paste:reset()

end
local function decodeentities(capture)
	local n = tonumber(capture,16)
	local c = string.char(n)
	if escapes[c] then
		return escapes[c]
	else
		return c
	end
end
local function paste_post(req)
	local host = http_request_get_host(req)
	local path = http_request_get_path(req)

	local ps = {}
	--We're creating a new paste
	http_request_populate_post(req)
	local title = assert(http_argument_get_string(req,"title"))
	local text = assert(http_argument_get_string(req,"text"))
	ps.markup = assert(http_argument_get_string(req,"markup"))
	local tag_str = http_argument_get_string(req,"tags")
	ps.tags = {}
	if tag_str then
		ps.tags = util.parse_tags(tag_str)
	end
	local pasteas
	ps.raw = zlib.compress(text)
	text = string.gsub(text,"%%(%x%x)",decodeentities)
	text = parsers[ps.markup](text)
	assert(text,"Failed to parse text")
	text = zlib.compress(text)
	assert(text,"Failed to compress text")
	ps.text = text
	local esctitle = string.gsub(title,"%%(%x%x)",decodeentities)
	--Always sanitize the title with the plain parser. no markup
	--in the title.
	ps.title = parsers.plain(title)
	if host == config.domain then
		anon_paste(req,ps)
	else
		author_paste(req,ps)
	end
end
--assert(ret)
--http_response(req,200,ret)
return paste_post
@@ -0,0 +1,33 @@
local parsers = require("parsers")
local tags = require("tags")
local util = require("util")
local pages = require("pages")
local config = require("config")

local function preview_post(req)
	print("We want to preview a paste!")
	local host = http_request_get_host(req)
	local path = http_request_get_path(req)
	http_request_populate_post(req)
	local title = assert(http_argument_get_string(req,"title"))
	local text = assert(http_argument_get_string(req,"text"))
	local markup = assert(http_argument_get_string(req,"markup"))
	local tag_str = http_argument_get_string(req,"tags")
	local tags = {}
	if tag_str then
		tags = util.parse_tags(tag_str)
	end
	print("title:",title,"text:",text,"markup:",markup)
	local parsed = parsers[markup](text)
	local ret = pages.read{
		domain = config.domain,
		title = title,
		author = "preview",
		idp = "preview",
		text = parsed,
		tags = tags,
	}
	http_response(req,200,ret)
end

return preview_post
@@ -0,0 +1,173 @@
local sql = require("lsqlite3")
local zlib = require("zlib")

local session = require("session")
local tags = require("tags")
local db = require("db")
local queries = require("queries")
local util = require("util")
local cache = require("cache")
local pages = require("pages")
local config = require("config")

local stmnt_read, stmnt_update_views, stmnt_comments

local oldconfigure = configure
function configure(...)
	stmnt_read = assert(db.conn:prepare(queries.select_post))
	stmnt_update_views = assert(db.conn:prepare(queries.update_views))
	stmnt_comments = assert(db.conn:prepare(queries.select_comments))
	return oldconfigure(...)
end


--[[
Increases a story's hit counter by 1
]]
local function add_view(storyid)
	stmnt_update_views:bind_names{
		id = storyid
	}
	local err = util.do_sql(stmnt_update_views)
	assert(err == sql.DONE, "Failed to update view counter:"..tostring(err))
	stmnt_update_views:reset()
end

--[[
Populates ps with story settings, returns true if story was found,
or nil if it wasn't
]]
local function populate_ps_story(req,ps)
	--Make sure our story exists
	stmnt_read:bind_names{
		id = ps.storyid
	}
	local err = util.do_sql(stmnt_read)
	if err == sql.DONE then
		--We got no story
		stmnt_read:reset()
		print("No story by this name",ps.storyid)
		return false
	end
	--If we've made it here, we have a story. Populate our settings
	--with title, text, etc.
	assert(err == sql.ROW)
	local title, storytext, tauthor, isanon, authorname, views = unpack(
		stmnt_read:get_values()
	)
	ps.title = title
	ps.text = zlib.decompress(storytext)
	ps.tauthor = tauthor
	ps.isanon = isanon == 1
	ps.author = authorname
	ps.views = views
	stmnt_read:reset()
	--Tags
	ps.tags = tags.get(ps.storyid)
	return true
end

--[[
Get the comments for a story
]]
local function get_comments(req,ps)
	stmnt_comments:bind_names{
		id = ps.storyid
	}
	err = util.do_sql(stmnt_comments)
	local comments = {}
	while err ~= sql.DONE do
		local com_author, com_isanon, com_text = unpack(stmnt_comments:get_values())
		table.insert(comments,{
			author = com_author,
			isanon = com_isanon == 1, --int to boolean
			text = com_text
		})
		err = stmnt_comments:step()
	end
	stmnt_comments:reset()
	return comments
end

--[[
The author is viewing their own story, give them an edit button
]]
local function read_get_author(req,storyid,author,authorid,comments)

end

--[[
An author is viewing a story, allow them to post comments as themselves
]]
local function read_get_loggedin(req,ps)
	if ps.tauthor == ps.authorid then
		--The story exists and we're logged in as the
		--owner, display the edit button
		return read_get_author(req,ps)
	end
	return pages.read(ps)
end


local function read_get(req)
	--Pages settings
	local ps = {
		domain = config.domain,
		host = http_request_get_host(req),
		path = http_request_get_path(req),
		method = http_method_text(req),
	}
	print("reading", ps.path)

	--Get our story id
	assert(string.len(ps.path) > 0,"Tried to read 0-length story id")
	ps.idp = string.sub(ps.path,2)--remove leading "/"
	ps.storyid = util.decode_id(ps.idp)
	add_view(ps.storyid)

	--If we're logged in, set author and authorid
	local author, authorid = session.get(req)
	if author and authorid then
		ps.loggedauthor = author
		ps.iam = author
		ps.loggedauthorid = authorid
	end

	--If we need to show comments
	http_request_populate_qs(req)
	ps.show_comments = http_argument_get_string(req,"comments")
	if ps.show_comments then
		ps.comments = get_comments(req,ps)
	end

	local text
	--normal story display
	if (not ps.loggedauthor) then
		print("not author")
		local cachestr = string.format("%s%s%s",
			ps.host,
			ps.path,
			ps.show_comments and "?comments=1" or ""
		)
		text = cache.render(cachestr,function()
			if not populate_ps_story(req,ps) then
				return pages.nostory(ps)
			end
			local output = pages.read(ps)
			assert(output,"failed to read page:" .. cachestr)
			return output
		end)
	else --we are logged in, don't cache
		print("is author")
		if not populate_ps_story(req,ps) then
			return pages.nostory(ps)
		end
		print("tauthor was", ps.tauthor, "while author was:",ps.author)
		ps.owner = (ps.loggedauthorid == ps.tauthor)
		text = pages.read(ps)
	end
	assert(text)
	http_response(req,200,text)
	return
end

return read_get
@@ -0,0 +1,53 @@
local sql = require("lsqlite3")

local cache = require("cache")
local session = require("session")
local util = require("util")
local db = require("db")
local queries = require("queries")
local config = require("config")

local stmnt_comment_insert

local oldconfigure = configure
function configure(...)
	stmnt_comment_insert = assert(db.conn:prepare(queries.insert_comment))
	return oldconfigure(...)
end

local function read_post(req)
	local host = http_request_get_host(req)
	local path = http_request_get_path(req)
	--We're posting a comment
	http_request_populate_post(req)
	http_populate_cookies(req)
	local author, authorid = session.get(req)
	local comment_text = assert(http_argument_get_string(req,"text"))
	local pasteas = assert(http_argument_get_string(req,"postas"))
	local idp = string.sub(path,2)--remove leading "/"
	local id = util.decode_id(idp)
	local isanon = 1
	--Even if an author is logged in, they may post their comment anonymously
	if author and pasteas ~= "Anonymous" then
		isanon = 0
	end
	stmnt_comment_insert:bind_names{
		postid=id,
		authorid = author and authorid or -1,
		isanon = isanon,
		comment_text = comment_text,
	}
	local err = util.do_sql(stmnt_comment_insert)
	stmnt_comment_insert:reset()
	if err ~= sql.DONE then
		http_response(req,500,"Internal error, failed to post comment. Go back and try again.")
	else
		--When we post a comment, we need to dirty the cache for the "comments displayed" page.
		cache.dirty(string.format("%s%s?comments=1",host,path))
		local redir = string.format("https://%s%s?comments=1", config.domain, path)
		http_response_header(req,"Location",redir)
		http_response(req,303,"")
	end

end
return read_post
@@ -0,0 +1,59 @@
local sql = require("lsqlite3")

local db = require("db")
local queries = require("queries")
local util = require("util")
local libtags = require("tags")
local pages = require("pages")
local config = require("config")

local stmnt_search
local oldconfigure = configure
function configure(...)
	stmnt_search = assert(db.conn:prepare(queries.select_post_tags))
	return oldconfigure(...)
end

local function search_get(req)
	local host = http_request_get_host(req)
	local path = http_request_get_path(req)
	http_request_populate_qs(req)
	local tag = http_argument_get_string(req,"tag")
	if tag then
		stmnt_search:bind_names{
			tag = tag
		}
		local results = {}
		local err
		repeat
			err = stmnt_search:step()
			if err == sql.BUSY then
				coroutine.yield()
			elseif err == sql.ROW then
				local id, title, anon, time, author = unpack(stmnt_search:get_values())
				local idp = util.encode_id(id)
				local tags = libtags.get(id)
				table.insert(results,{
					id = idp,
					title = title,
					anon = anon,
					time = os.date("%B %d %Y",tonumber(time)),
					author = author,
					tags = tags
				})
			elseif err == sql.DONE then
				stmnt_search:reset()
			else
				error("Failed to search, sql error:" .. tostring(err))
			end
		until err == sql.DONE
		local ret = pages.search{
			domain = config.domain,
			results = results,
			tag = tag,
		}
		http_response(req,200,ret)
	end
end

return search_get
src/lua/init.lua (1183 lines changed)
File diff suppressed because it is too large
@@ -0,0 +1,7 @@

local parser_names = {"plain","imageboard"}
local parsers = {}
for _,v in pairs(parser_names) do
	parsers[v] = require("parser_" .. v)
end
return parsers
@@ -0,0 +1,13 @@

local queries = {}

setmetatable(queries,{
	__index = function(self,key)
		local f = assert(io.open("sql/" .. key .. ".sql","r"))
		local ret = f:read("*a")
		f:close()
		return ret
	end
})

return queries
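Each key indexed on this table is loaded lazily from a matching file under sql/ (the Makefile above copies src/sql into the chroot's sql/ directory). A short sketch of how the endpoint modules use it; the query name select_post comes from read_get above and is only an example:

local db = require("db")
local queries = require("queries")

--First access reads sql/select_post.sql from disk and returns its text;
--the __index above does not memoize, so every access re-reads the file.
local stmnt_read = assert(db.conn:prepare(queries.select_post))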
@@ -0,0 +1,68 @@
local sql = require("lsqlite3")

local db = require("db")
local util = require("util")
local queries = require("queries")

local oldconfigure = configure
local stmnt_get_session, stmnt_insert_session
function configure(...)
	stmnt_get_session = assert(db.conn:prepare(queries.select_valid_sessions))
	stmnt_insert_session = assert(db.conn:prepare(queries.insert_session))
	return oldconfigure(...)
end

local session = {}

--[[
Retrieve the name and authorid of the logged in person,
or nil+error message if not logged in
]]
function session.get(req)
	http_populate_cookies(req)
	local sessionid = http_request_cookie(req,"session")
	if sessionid == nil then
		return nil, "No session cookie passed by client"
	end
	stmnt_get_session:bind_names{
		key = sessionid
	}
	local err = util.do_sql(stmnt_get_session)
	if err ~= sql.ROW then
		return nil, "No such session by logged in users"
	end
	print("get session err:",err)
	local data = stmnt_get_session:get_values()
	stmnt_get_session:reset()
	local author = data[1]
	local authorid = data[2]
	return author,authorid
end

--[[
Start a session for someone who logged in
]]
function session.start(who)
	local rngf = assert(io.open("/dev/urandom","rb"))
	local session_t = {}
	for i = 1,64 do
		local r = string.byte(rngf:read(1))
		local s = string.char((r % 26) + 65)
		table.insert(session_t,s)
	end
	local session = table.concat(session_t)
	rngf:close()
	print("sessionid:",session)
	print("authorid:",who)
	stmnt_insert_session:bind_names{
		sessionid = session,
		authorid = who
	}
	local err = util.do_sql(stmnt_insert_session)
	stmnt_insert_session:reset()
	print("Err:",err)
	assert(err == sql.DONE)
	return session
end

return session
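The endpoints above call session.get(req) at the top of a handler and branch on whether a name comes back. A minimal guard sketch in that style; the handler name and the redirect target are illustrative, not code from this commit:

local session = require("session")
local config = require("config")

local function some_protected_get(req) --hypothetical endpoint
	local author, authorid = session.get(req)
	if not author then
		--Not logged in: send the client to the login page
		http_response_header(req,"Location",string.format("https://%s/_login",config.domain))
		http_response(req,303,"")
		return
	end
	--author/authorid are now available for owner checks and per-author pages
end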
@ -0,0 +1,60 @@
local sql = require("lsqlite3")

local db = require("db")
local queries = require("queries")
local util = require("util")
local tags = {}

local stmnt_get_tags, stmnt_ins_tag, stmnt_drop_tags

local oldconfigure = configure
function configure(...)
	--Tags for a story
	stmnt_ins_tag = assert(db.conn:prepare(queries.insert_tag))
	stmnt_get_tags = assert(db.conn:prepare(queries.select_tags))
	stmnt_drop_tags = assert(db.conn:prepare(queries.delete_tags))

	return oldconfigure(...)
end

function tags.get(id)
	local ret = {}
	stmnt_get_tags:bind_names{
		id = id
	}
	local err
	repeat
		err = stmnt_get_tags:step()
		if err == sql.BUSY then
			coroutine.yield()
		elseif err == sql.ROW then
			table.insert(ret,stmnt_get_tags:get_value(0))
		elseif err == sql.DONE then
			stmnt_get_tags:reset()
			return ret
		else
			error(string.format("Failed to get tags for story %d : %d", id, err))
		end
	until false
end

function tags.set(storyid,tags)
	assert(stmnt_drop_tags:bind_names{postid = storyid} == sql.OK)
	util.do_sql(stmnt_drop_tags)
	stmnt_drop_tags:reset()
	local err
	for _,tag in pairs(tags) do
		print("Looking at tag",tag)
		assert(stmnt_ins_tag:bind(1,storyid) == sql.OK)
		assert(stmnt_ins_tag:bind(2,tag) == sql.OK)
		err = util.do_sql(stmnt_ins_tag)
		stmnt_ins_tag:reset()
	end
	if err ~= sql.DONE then
		print("Failed to save tags, but paste and raw still went through")
	end
end

return tags
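A small usage sketch (assumptions: configure() has already run, storyid is an existing posts.id, and the tag string is only illustrative):

local tags = require("tags")
local util = require("util")

local storyid = 1                                    -- hypothetical posts.id
local wanted = util.parse_tags("lua; kore;;sqlite")  -- {"Lua", "Kore", "Sqlite"}
tags.set(storyid, wanted)   -- drops the story's previous tags, then inserts these
local saved = tags.get(storyid)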
@ -0,0 +1,123 @@

local sql = require("lsqlite3")

local util = {}

--[[
Runs an sql query and receives the 3 arguments back, prints a nice error
message on fail, and returns true on success.
]]
function util.sqlassert(...)
	local r,errcode,err = ...
	if not r then
		error(string.format("%d: %s",errcode, err))
	end
	return r
end

--[[
Continuously tries to perform an sql statement until it goes through
]]
function util.do_sql(stmnt)
	if not stmnt then error("No statement",2) end
	local err
	local i = 0
	repeat
		err = stmnt:step()
		print("After stepping, err is", err)
		if err == sql.BUSY then
			i = i + 1
			coroutine.yield()
		end
	until(err ~= sql.BUSY or i > 10)
	assert(i < 10, "Database busy")
	return err
end

--[[
Binds an argument to a statement with nice error reporting on failure
stmnt :: sql.stmnt - the prepared sql statement
call :: string - a string "bind" or "bind_blob"
position :: number - the argument position to bind to
data :: string - The data to bind
]]
function util.sqlbind(stmnt,call,position,data)
	assert(call == "bind" or call == "bind_blob","Bad bind call, call was:" .. call)
	local f = stmnt[call](stmnt,position,data)
	if f ~= sql.OK then
		error(string.format("Failed to %s at %d with %q: %s", call, position, data, db:errmsg()),2)
	end
end
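util.sqlassert matches the nil, errcode, errmsg failure convention of lsqlite3's open call, so opening the database can be routed through it; a sketch (the path is hypothetical, not taken from this commit):

local sql = require("lsqlite3")
local util = require("util")

-- turns a failed open into an "errcode: errmsg" error instead of a silent nil
local conn = util.sqlassert(sql.open("data/posts.db"))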

--see https://perishablepress.com/stop-using-unsafe-characters-in-urls/
--no underscore because we use that for our operative pages
local url_characters =
	[[abcdefghijklmnopqrstuvwxyz]]..
	[[ABCDEFGHIJKLMNOPQRSTUVWXYZ]]..
	[[0123456789]]..
	[[$-+!*'(),]]
local url_characters_rev = {}
for i = 1,string.len(url_characters) do
	url_characters_rev[string.sub(url_characters,i,i)] = i
end

--[[
Encode a number to a shorter HTML-safe url path
]]
function util.encode_id(number)
	local result = {}
	local charlen = string.len(url_characters)
	repeat
		local pos = (number % charlen) + 1
		number = math.floor(number / charlen)
		table.insert(result,string.sub(url_characters,pos,pos))
	until number == 0
	return table.concat(result)
end

--[[
Given a short HTML-safe url path, convert it to a storyid
]]
function util.decode_id(s)
	local res, id = pcall(function()
		local n = 0
		local charlen = string.len(url_characters)
		for i = 1,string.len(s) do
			local char = string.sub(s,i,i)
			local pos = url_characters_rev[char] - 1
			n = n + (pos*math.pow(charlen,i-1))
		end
		return n
	end)
	if res then
		return id
	else
		error("Failed to decode id:" .. s)
	end
end
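The alphabet above has 71 characters, and the least significant "digit" is emitted first; a worked example (a sketch, not code from this commit):

local util = require("util")

-- 12345 = 62 + 31*71 + 2*71^2, and characters 63, 32 and 3 of the
-- alphabet are "$", "F" and "c" respectively
print(util.encode_id(12345))                  --> $Fc
print(util.decode_id(util.encode_id(12345)))  --> 12345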

--[[
Parses a semicolon separated string into its parts:
1. separates by semicolon
2. trims whitespace
3. lowercases
4. capitalizes the first letter.
Returns an array of zero or more strings.
There is no blank tag, parsing "one;two;;three" will yield
{"one","two","three"}
]]
function util.parse_tags(str)
	local tags = {}
	for tag in string.gmatch(str,"([^;]+)") do
		assert(tag, "Found a nil or false tag in:" .. str)
		local tag_trimmed = string.match(tag,"%s*(.*)%s*")
		local tag_lower = string.lower(tag_trimmed)
		local tag_capitalized = string.gsub(tag_lower,"^.",string.upper)
		assert(tag_capitalized, "After processing tag:" .. tag .. " it was falsey.")
		if string.len(tag_capitalized) > 0 then
			table.insert(tags, tag_capitalized)
		end
	end
	return tags
end

return util
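Both id helpers and the tag parser are easy to cover with the new busted tests; a possible spec (file name hypothetical, assumes lsqlite3 is available to the test runner so util can be required):

-- spec/util_spec.lua (sketch)
describe("util", function()
	local util = require("util")

	it("round-trips ids through encode_id/decode_id", function()
		for _, n in ipairs({0, 1, 70, 71, 12345}) do
			assert.are.equal(n, util.decode_id(util.encode_id(n)))
		end
	end)

	it("drops empty tags and capitalizes the rest", function()
		assert.are.same({"One", "Two", "Three"}, util.parse_tags("one;two;;three"))
	end)
end)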
@ -1,51 +1,49 @@
 <% assert(author,"No author specified") %>
 <% assert(bio,"No bio included") %>
 <!DOCTYPE html>
 <html lang="en">
 <head>
 <meta charset="utf-8">
 <title>🍑</title>
 <link href="/_css/milligram.css" rel="stylesheet">
 <link href="/_css/style.css" rel="stylesheet">
 </head>
 <body class="container">
+<main class="wrapper">
 <h1 class="title">
 <a href="https://<%= author %>.<%= domain %>"><%= author %></a>.<a href="https://<%= domain %>"><%= domain %></a>
 </h1>
-
+<div class="container">
+<a href="/_paste" class="button">New paste</a>
+</div>
 <div class="content">
 <%= bio %>
 </div>
 <div class="content">
 <% if #stories == 0 then %>
 This author has not made any pastes yet.
 <% else %>
 <table>
 <% for k,v in pairs(stories) do %>
 <tr><td>
 <a href="<%= v.url %>">
 <%- v.title %>
 </a>
 </td><td>
 By <a href="https://<%= author %>.<%= domain %>"><%= author %></a>
 </td><td>
 <ul class="row tag-list">
 <% for i = 1,math.min(#v.tags, 5) do %>
 <li><a class="tag button button-outline" href="https://<%= domain %>/_search?tag=<%= v.tags[i] %>"><%= v.tags[i] %></a></li>
 <% end %>
 </ul>
 </td><td>
 <%= v.posted %>
 </td></tr>
-
-
 <% end %>
 </table>
 <% end %>
 </div>
-
 <footer class="footer">

 </footer>
+</main>
 </body>
 <body>
@ -45,6 +45,9 @@
 <% for i = 1,math.min(#v.tags, 5) do %>
 <li><a class="tag button button-outline" href="https://<%= domain %>/_search?tag=<%= v.tags[i] %>"><%= v.tags[i] %></a></li>
 <% end %>
+<% if #v.tags > 5 then %>
+<li>+<%= #v.tags - 5 %></li>
+<% end %>
 </ul>
 </td><td>
 <%= v.posted %>
@ -22,7 +22,7 @@
 <%- title %>
 </h2>
 <h3>
-<% if isanon then -%>
+<% if isanon or author == nil then -%>
 By Anonymous
 <% else -%>
 By <a href="https://<%= author %>.<%= domain %>"><%= author %></a>

@ -32,7 +32,7 @@
 </article>
 <hr/>
 <p><%= views %> Hits</p>
-<ul class="row tag-list">
+<ul class="tag-list">
 <% for _,tag in pairs(tags) do -%>
 <li><a class="tag button button-outline" href="https://<%= domain %>/_search?tag=<%= tag %>"><%= tag %></a></li>
 <% end -%>
41
src/smr.c

@ -12,6 +12,7 @@
 #include "libkore.h"
 #include "libcrypto.h"
 #include <dirent.h>
+#include <kore/seccomp.h>

 int home(struct http_request *);
 int post_story(struct http_request *);
@ -38,15 +39,37 @@ lua_State *L;
 static / _claim claim
 */

+/*Allow seccomp things for luajit and sqlite*/
+KORE_SECCOMP_FILTER("app",
+	KORE_SYSCALL_ALLOW(pread64),
+	KORE_SYSCALL_ALLOW(pwrite64),
+	KORE_SYSCALL_ALLOW(fdatasync),
+	KORE_SYSCALL_ALLOW(unlinkat),
+	KORE_SYSCALL_ALLOW(mremap),
+	KORE_SYSCALL_ALLOW(newfstatat)
+);
+
 int
 errhandeler(lua_State *L){
-	printf("Error: %s\n",lua_tostring(L,1));
-	lua_getglobal(L,"debug");
-	lua_getglobal(L,"print");
-	lua_getfield(L,-2,"traceback");
-	lua_call(L,0,1);
-	lua_call(L,1,0);
-	lua_pop(L,1);
+	printf("Error: %s\n",lua_tostring(L,1));//"error"
+	lua_getglobal(L,"debug");//"error",{debug}
+	lua_getglobal(L,"print");//"error",{debug},print()
+	lua_getfield(L,-2,"traceback");//"error",{debug},print(),traceback()
+	lua_call(L,0,1);//"error",{debug},print(),"traceback"
+	lua_call(L,1,0);//"error",{debug}
+	printf("Called print()\n");
+	lua_getfield(L,-1,"traceback");//"error",{debug},traceback()
+	printf("got traceback\n");
+	lua_call(L,0,1);//"error",{debug},"traceback"
+	lua_pushstring(L,"\n");
+	printf("called traceback\n");
+	lua_pushvalue(L,-4);//"error",{debug},"traceback","error"
+	printf("pushed error\n");
+	lua_concat(L,3);//"error",{debug},"traceback .. error"
+	printf("concated\n");
+	int ref = luaL_ref(L,LUA_REGISTRYINDEX);//"error",{debug}
+	lua_pop(L,2);//
+	lua_rawgeti(L,LUA_REGISTRYINDEX,ref);//"traceback .. error"
 	return 1;
 }
|
||||||
printf("About to pcall\n");
|
printf("About to pcall\n");
|
||||||
int err = lua_pcall(L,1,0,-3);
|
int err = lua_pcall(L,1,0,-3);
|
||||||
if(err != LUA_OK){
|
if(err != LUA_OK){
|
||||||
|
size_t retlen;
|
||||||
|
const char *ret = lua_tolstring(L,-1,&retlen);
|
||||||
printf("Failed to run %s: %s\n",name,lua_tostring(L,-1));
|
printf("Failed to run %s: %s\n",name,lua_tostring(L,-1));
|
||||||
http_response(req, 500, NULL, 0);
|
http_response(req, 500, ret, retlen);
|
||||||
lua_pop(L,lua_gettop(L));
|
lua_pop(L,lua_gettop(L));
|
||||||
return (KORE_RESULT_OK);
|
return (KORE_RESULT_OK);
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,3 +1,12 @@
+/*
+	If/when an author deletes their account, all posts
+	and comments by that author are also deleted (on
+	delete cascade); this is intentional. This also
+	means that all comments by other users on a post
+	an author makes will also be deleted.
+
+	Post text uses zlib compression
+*/
 CREATE TABLE IF NOT EXISTS posts (
 	id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
 	post_text BLOB,
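One SQLite caveat around the cascade described in that comment: foreign-key enforcement is off by default and has to be switched on per connection, otherwise ON DELETE CASCADE silently does nothing. If the db module does not already do this, the needed call would look something like (a sketch, not part of this commit):

-- hypothetical: run once per connection, right after the database is opened
db.conn:exec("PRAGMA foreign_keys = ON;")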
@ -1,3 +1,8 @@
+/*
+	Store the raw text so people can download it later, maybe
+	we can use it for "download as image" or "download as pdf"
+	in the future too. Still stored zlib compressed
+*/
 CREATE TABLE IF NOT EXISTS raw_text (
 	id INTEGER PRIMARY KEY REFERENCES posts(id) ON DELETE CASCADE,
 	post_text BLOB,
@ -1,3 +1,8 @@
+/*
+	Store a cookie for logged in users. Logged in users can edit
+	their own posts.
+*/
+
 CREATE TABLE IF NOT EXISTS sessions (
 	key TEXT PRIMARY KEY,
 	author REFERENCES authors(id) ON DELETE CASCADE,
@ -1,3 +1,7 @@
+/*
+	Create a fake "anonymous" user, so
+	that no one runs into trouble being able to paste under this account.
+*/
 INSERT OR IGNORE INTO authors (
 	id,
 	name,
@ -1,3 +1,4 @@
+/* Add a new comment to a story */
 INSERT INTO comments(
 	postid,
 	author,
@ -1,3 +1,5 @@
+/* Get the data we need to display a particular author's latest stories */
+
 SELECT
 	posts.id,
 	posts.post_title,
@ -1,3 +1,7 @@
+/*
+	Get the author of a story, used to check when editing that the
+	author really owns the story they're trying to edit
+*/
 SELECT
 	authors.id,
 	authors.name
@ -1,3 +1,4 @@
+/* Retrieve comments on a story */
 SELECT
 	authors.name,
 	comments.isanon,
@ -1,3 +1,7 @@
+/*
+	Select the data we need to read a story (and maybe display an edit button)
+*/
+
 SELECT
 	post_title,
 	post_text,
@ -1,3 +1,4 @@
+/* Select the data we need to display on the front page */
 SELECT
 	posts.id,
 	posts.post_title,
@ -1 +1,2 @@
+/* Update the view counter when someone reads a story */
 UPDATE posts SET views = views + 1 WHERE id = :id;