diff --git a/Makefile b/Makefile
index c2496bb..668d388 100644
--- a/Makefile
+++ b/Makefile
@@ -11,10 +11,13 @@ domain=test.monster
lua_files=$(shell find src/lua -type f)
src_files=$(shell find src -type f) $(shell find conf -type f)
sql_files=$(shell find src/sql -type f)
+test_files=$(shell find spec -type f)
+built_tests=$(test_files:%=$(chroot_dir)%)
built_files=$(lua_files:src/lua/%.lua=$(chroot_dir)%.lua)
page_files=$(shell find src/pages -type f)
built_pages=$(page_files:src/pages/%.etlua=$(chroot_dir)pages/%.etlua)
built_sql=$(sql_files:src/sql/%.sql=$(chroot_dir)sql/%.sql)
+built=$(built_files) $(built_sql) $(built_pages) $(built_tests)
all: $(chroot_dir) smr.so $(built_files) $(built_pages) $(built_sql)
echo $(built_files)
@@ -31,6 +34,7 @@ $(chroot_dir): apk-tools-static-$(version).apk
mkdir -p $(chroot_dir)/pages
mkdir -p $(chroot_dir)/sql
mkdir -p $(chroot_dir)/data
+ mkdir -p $(chroot_dir)/endpoints
#cd $(chroot_dir) && tar -xvzf ../apk-tools-static-*.apk
#cd $(chroot_dir) && sudo ./sbin/apk.static -X $(mirror)latest-stable/main -U --allow-untrusted --root $(chroot_dir) --no-cache --initdb add alpine-base
#ln -s /dev/urandom $(chroot_dir)/dev/random #Prevent an attacker with access to the chroot from exhausting our entropy pool and causing a dos
@@ -69,5 +73,11 @@ $(built_pages): $(chroot_dir)pages/%.etlua : src/pages/%.etlua
$(built_sql): $(chroot_dir)sql/%.sql : src/sql/%.sql
cp $^ $@
+$(built_tests) : $(chroot_dir)% : %
+ cp $^ $@
+
smr.so : $(src_files)
kodev build
+
+test : $(built)
+	cd $(chroot_dir) && busted
diff --git a/spec/home_spec.lua b/spec/home_spec.lua
new file mode 100644
index 0000000..01f120e
--- /dev/null
+++ b/spec/home_spec.lua
@@ -0,0 +1,28 @@
+--[[
+Test the home page
+]]
+
+
+
+
+describe("smr",function()
+ describe("site home page",function()
+ it("detours configure",function()
+ local s = {}
+ local c = false
+ function configure(...)
+ local args = {...}
+ if args[1] == s then
+ c = true
+ end
+ end
+ local oldconfigure = configure
+ local index_get = require("index_get")
+ configure(s)
+ assert(c)
+ end)
+ end)
+ describe("author home page",function()
+
+ end)
+end)
diff --git a/spec/pages_sanity_spec.lua b/spec/pages_sanity_spec.lua
new file mode 100644
index 0000000..35b38da
--- /dev/null
+++ b/spec/pages_sanity_spec.lua
@@ -0,0 +1,106 @@
+
+local pages = {
+ index = {
+ route = "/",
+ name = "home",
+ methods = {
+ GET={}
+ }
+ },
+ paste = {
+ route = "/_paste",
+ name = "post_story",
+ methods = {
+ GET={},
+ POST={}
+ }
+ },
+ edit = {
+ route = "/_edit",
+ name = "edit",
+ methods = {
+ GET={},
+ POST={},
+ }
+ },
+ --TODO:bio
+ login = {
+ route = "/_login",
+ name = "login",
+ methods = {
+ GET={},
+ POST={},
+ }
+ },
+ claim = {
+ route = "/_claim",
+ name = "claim",
+ methods = {
+ GET = {},
+ POST = {}
+ }
+ },
+ download = {
+ route = "/_download",
+ name = "download",
+ methods = {
+ GET = {},
+ }
+ },
+ preview = {
+ route = "/_preview",
+ name = "preview",
+ methods = {
+ POST = {},
+ }
+ },
+ search = {
+ route = "/_search",
+ name = "search",
+ methods = {
+ GET = {},
+ }
+ }
+
+}
+
+local request_stub_m = {
+}
+function http_response(req,errcode,str)
+ s = true
+end
+function http_request_get_host(reqstub)
+ return "localhost:8888"
+end
+function http_request_populate_post(reqstub)
+ reqstub.post_populated = true
+end
+
+describe("smr",function()
+ for name, obj in pairs(pages) do
+ describe("endpoint " .. name,function()
+ for method,parameters in pairs(obj.methods) do
+ describe("method " .. method,function()
+ local fname = string.format("%s_%s",name,string.lower(method))
+ it("should be named appropriately",function()
+				local f = assert(io.open(fname .. ".lua","r")) f:close()
+ end)
+ it("should run without errors",function()
+ require(fname)
+ end)
+ it("should return a function",function()
+ local pagefunc = assert(require(fname))
+ assert(type(pagefunc) == "function")
+ end)
+ it("calls http_response()",function()
+ local pagefunc = require(fname)
+				s = false
+				local reqstub = {}
+				pagefunc(reqstub)
+				assert(s) end)
+
+ end)
+ end
+ end)
+ end
+end)
diff --git a/src/lua/cache.lua b/src/lua/cache.lua
new file mode 100644
index 0000000..4880135
--- /dev/null
+++ b/src/lua/cache.lua
@@ -0,0 +1,94 @@
+local sql = require("lsqlite3")
+
+local queries = require("queries")
+local util = require("util")
+
+local ret = {}
+
+local stmnt_cache, stmnt_insert_cache, stmnt_dirty_cache
+
+local oldconfigure = configure
+function configure(...)
+ local cache = util.sqlassert(sql.open_memory())
+ --A cache table to store rendered pages that do not need to be
+ --rerendered. In theory this could OOM the program eventually and start
+ --swapping to disk. TODO: fixme
+ assert(cache:exec([[
+ CREATE TABLE IF NOT EXISTS cache (
+ path TEXT PRIMARY KEY,
+ data BLOB,
+ updated INTEGER,
+ dirty INTEGER
+ );
+ ]]))
+ stmnt_cache = assert(cache:prepare([[
+ SELECT data
+ FROM cache
+ WHERE
+ path = :path AND
+ ((dirty = 0) OR (strftime('%s','now') - updated) < 20)
+ ;
+ ]]))
+ stmnt_insert_cache = assert(cache:prepare([[
+ INSERT OR REPLACE INTO cache (
+ path, data, updated, dirty
+ ) VALUES (
+ :path, :data, strftime('%s','now'), 0
+ );
+ ]]))
+ stmnt_dirty_cache = assert(cache:prepare([[
+ UPDATE OR IGNORE cache
+ SET dirty = 1
+ WHERE path = :path;
+ ]]))
+ return oldconfigure(...)
+end
+
+--Render a page, with caching. If you need to dirty a cache, call cache.dirty()
+function ret.render(pagename,callback)
+ print("Running render...")
+ stmnt_cache:bind_names{path=pagename}
+ local err = util.do_sql(stmnt_cache)
+ if err == sql.DONE then
+ stmnt_cache:reset()
+ --page is not cached
+ elseif err == sql.ROW then
+ print("Cache hit:" .. pagename)
+		local data = stmnt_cache:get_values()
+ stmnt_cache:reset()
+ return data[1]
+ else --sql.ERROR or sql.MISUSE
+ error("Failed to check cache for page " .. pagename)
+ end
+ --We didn't have the paged cached, render it
+ print("Cache miss, running function")
+ local text = callback()
+ print("Saving data...")
+ --And save the data back into the cache
+ stmnt_insert_cache:bind_names{
+ path=pagename,
+ data=text,
+ }
+ err = util.do_sql(stmnt_insert_cache)
+ if err == sql.ERROR or err == sql.MISUSE then
+ error("Failed to update cache for page " .. pagename)
+ end
+ stmnt_insert_cache:reset()
+ print("returning text from cache.render:",text)
+ return text
+end
+
+function ret.dirty(url)
+ print("Dirtying cache:",url)
+ stmnt_dirty_cache:bind_names{
+ path = url
+ }
+	local err = util.do_sql(stmnt_dirty_cache)
+ stmnt_dirty_cache:reset()
+end
+
+function ret.close()
+
+end
+
+return ret
diff --git a/src/lua/config.lua b/src/lua/config.lua
new file mode 100644
index 0000000..804038f
--- /dev/null
+++ b/src/lua/config.lua
@@ -0,0 +1,5 @@
+
+return {
+ domain = "test.monster:8888",
+ production = false,
+}
diff --git a/src/lua/db.lua b/src/lua/db.lua
index 409dfcb..1655710 100644
--- a/src/lua/db.lua
+++ b/src/lua/db.lua
@@ -40,8 +40,10 @@ function configure(...)
--Store a cookie for logged in users. Logged in users can edit
--their own posts, and edit their biographies.
assert(db.conn:exec(queries.create_table_session))
+
return oldconfigure(...)
end
+configure()
function db.close()
db.conn:close()
diff --git a/src/lua/endpoints/claim_get.lua b/src/lua/endpoints/claim_get.lua
new file mode 100644
index 0000000..cd13079
--- /dev/null
+++ b/src/lua/endpoints/claim_get.lua
@@ -0,0 +1,16 @@
+local cache = require("cache")
+local config = require("config")
+local pages = require("pages")
+
+
+local function claim_get(req)
+ --Get the page to claim a name
+ local cachestr = string.format("%s/_claim",config.domain)
+ local text = cache.render(cachestr,function()
+ print("cache miss, rendering claim page")
+ return pages.claim{err=""}
+ end)
+ http_response(req,200,text)
+end
+
+return claim_get
diff --git a/src/lua/endpoints/claim_post.lua b/src/lua/endpoints/claim_post.lua
new file mode 100644
index 0000000..ae4d149
--- /dev/null
+++ b/src/lua/endpoints/claim_post.lua
@@ -0,0 +1,74 @@
+local sql = require("lsqlite3")
+
+local pages = require("pages")
+local db = require("db")
+local queries = require("queries")
+local util = require("util")
+local sessionlib = require("session")
+local config = require("config")
+
+local stmnt_author_create
+
+local oldconfigure = configure
+function configure(...)
+
+ stmnt_author_create = util.sqlassert(db.conn:prepare(queries.insert_author))
+ return oldconfigure(...)
+end
+
+local function claim_post(req)
+ --Actually claim a name
+ http_request_populate_post(req)
+ local name = assert(http_argument_get_string(req,"user"))
+ local text
+ --What in the world, Kore should be rejecting names that
+ --are not lower case & no symbols, but some still get through somehow.
+	if #name > 30 or not name:match("^[a-z0-9]+$") then
+ print("Bad username:",name)
+ text = pages.claim{
+ err = "Usernames must match ^[a-z0-9]{1,30}$"
+ }
+ http_response(req,200,text)
+ return
+ end
+ local rngf = assert(io.open("/dev/urandom","rb"))
+ local passlength = string.byte(rngf:read(1)) + 64
+ local salt = rngf:read(64)
+ local password = rngf:read(passlength)
+ rngf:close()
+ local hash = sha3(salt .. password)
+ stmnt_author_create:bind_names{
+ name = name,
+ }
+ stmnt_author_create:bind_blob(2,salt)
+ stmnt_author_create:bind_blob(3,hash)
+ local err = util.do_sql(stmnt_author_create)
+ if err == sql.DONE then
+ print("success")
+		--We successfully made the new author
+ local id = stmnt_author_create:last_insert_rowid()
+ stmnt_author_create:reset()
+ --Give them a file back
+ http_response_header(req,"Content-Type","application/octet-stream")
+ http_response_header(req,"Content-Disposition","attachment; filename=\"" .. name .. "." .. config.domain .. ".passfile\"")
+ local session = sessionlib.start(id)
+ text = password
+ print("session started, about to send password:",text)
+ http_response(req,200,text)
+ return
+ elseif err == sql.CONSTRAINT then
+ --If the creation failed, they probably just tried
+ --to use a name that was already taken
+ text = pages.claim {
+ err = "Failed to claim. That name may already be taken."
+ }
+ elseif err == sql.ERROR or err == sql.MISUSE then
+ --This is bad though
+ text = pages.claim {
+ err = "Failed to claim"
+ }
+ end
+ stmnt_author_create:reset()
+ http_response(req,200,text)
+end
+return claim_post
diff --git a/src/lua/endpoints/download_get.lua b/src/lua/endpoints/download_get.lua
new file mode 100644
index 0000000..48f4593
--- /dev/null
+++ b/src/lua/endpoints/download_get.lua
@@ -0,0 +1,45 @@
+local sql = require("lsqlite3")
+local zlib = require("zlib")
+
+local db = require("db")
+local queries = require("queries")
+local util = require("util")
+local pages = require("pages")
+
+local stmnt_download
+local oldconfigure = configure
+function configure(...)
+ stmnt_download = assert(db.conn:prepare(queries.select_download))
+ return oldconfigure(...)
+end
+
+local function download_get(req)
+ local host = http_request_get_host(req)
+ local path = http_request_get_path(req)
+ print("host:",host,"path:",path)
+ http_request_populate_qs(req)
+ local story = assert(http_argument_get_string(req,"story"))
+ local story_id = util.decode_id(story)
+ print("Downloading", story_id)
+ stmnt_download:bind_names{
+ postid = story_id
+ }
+ local err = util.do_sql(stmnt_download)
+ if err == sql.DONE then
+ --No rows, story not found
+		http_response(req,404,pages.nostory{path=story})
+ stmnt_download:reset()
+ return
+ end
+ assert(err == sql.ROW, "after doing download sql, result was not a row, was:" .. tostring(err))
+ local txt_compressed, title = unpack(stmnt_download:get_values())
+ local text = zlib.decompress(txt_compressed)
+ stmnt_download:reset()
+ http_response_header(req,"Content-Type","application/octet-stream")
+ local nicetitle = title:gsub("%W","_")
+ http_response_header(req,"Content-Disposition","attachment; filename=\"" .. nicetitle .. ".txt\"")
+ http_response(req,200,text)
+
+end
+
+return download_get
diff --git a/src/lua/endpoints/edit_get.lua b/src/lua/endpoints/edit_get.lua
new file mode 100644
index 0000000..1881a58
--- /dev/null
+++ b/src/lua/endpoints/edit_get.lua
@@ -0,0 +1,68 @@
+local zlib = require("zlib")
+local sql = require("lsqlite3")
+
+local db = require("db")
+local queries = require("queries")
+local util = require("util")
+local pages = require("pages")
+local tags = require("tags")
+local session = require("session")
+local config = require("config")
+
+local stmnt_edit
+local oldconfigure = configure
+function configure(...)
+ stmnt_edit = assert(db.conn:prepare(queries.select_edit))
+ return oldconfigure(...)
+end
+
+local function edit_get(req)
+ local host = http_request_get_host(req)
+ local path = http_request_get_path(req)
+ local author, authorid = session.get(req)
+
+ http_request_populate_qs(req)
+ local story = assert(http_argument_get_string(req,"story"))
+ local story_id = util.decode_id(story)
+ local ret
+
+ print("we want to edit story:",story)
+ --Check that the logged in user is the owner of the story
+ --sql-side. If we're not the owner, we'll get 0 rows back.
+ stmnt_edit:bind_names{
+ postid = story_id,
+ authorid = authorid
+ }
+ local err = util.do_sql(stmnt_edit)
+ if err == sql.DONE then
+ --No rows, we're probably not the owner (it might
+ --also be because there's no such story)
+ ret = pages.cantedit{
+ path = story,
+ }
+ stmnt_edit:reset()
+ http_response(req,200,ret)
+ return
+ end
+ assert(err == sql.ROW)
+ local data = stmnt_edit:get_values()
+ local txt_compressed, markup, isanon, title = unpack(data)
+ local text = zlib.decompress(txt_compressed)
+ local tags = tags.get(story_id)
+ local tags_txt = table.concat(tags,";")
+ stmnt_edit:reset()
+ ret = pages.edit{
+ title = title,
+ text = text,
+ markup = markup,
+ user = author,
+ isanon = isanon == 1,
+ domain = config.domain,
+ story = story_id,
+ err = "",
+ tags = tags_txt
+ }
+ http_response(req,200,ret)
+end
+
+return edit_get
diff --git a/src/lua/endpoints/edit_post.lua b/src/lua/endpoints/edit_post.lua
new file mode 100644
index 0000000..0e612d8
--- /dev/null
+++ b/src/lua/endpoints/edit_post.lua
@@ -0,0 +1,89 @@
+local sql = require("lsqlite3")
+local zlib = require("zlib")
+
+local db = require("db")
+local queries = require("queries")
+local pages = require("pages")
+local parsers = require("parsers")
+local util = require("util")
+local tagslib = require("tags")
+local cache = require("cache")
+local config = require("config")
+local session = require("session")
+
+local stmnt_author_of, stmnt_update_raw, stmnt_update
+
+local oldconfigure = configure
+function configure(...)
+ stmnt_author_of = assert(db.conn:prepare(queries.select_author_of_post))
+ stmnt_update_raw = assert(db.conn:prepare(queries.update_raw))
+ stmnt_update = assert(db.conn:prepare(queries.update_post))
+ return oldconfigure(...)
+end
+
+local function edit_post(req)
+ local host = http_request_get_host(req)
+ local path = http_request_get_path(req)
+ local author, author_id = session.get(req)
+
+ http_request_populate_post(req)
+ local storyid = tonumber(assert(http_argument_get_string(req,"story")))
+ local title = assert(http_argument_get_string(req,"title"))
+ local text = assert(http_argument_get_string(req,"text"))
+ local pasteas = assert(http_argument_get_string(req,"pasteas"))
+ local markup = assert(http_argument_get_string(req,"markup"))
+ local tags_str = http_argument_get_string(req,"tags")
+ stmnt_author_of:bind_names{
+ id = storyid
+ }
+ local err = util.do_sql(stmnt_author_of)
+ if err ~= sql.ROW then
+ stmnt_author_of:reset()
+ error("No author found for story:" .. storyid)
+ end
+ local data = stmnt_author_of:get_values()
+ stmnt_author_of:reset()
+ local realauthor = data[1]
+ assert(realauthor == author_id) --Make sure the author of the story is the currently logged in user
+ local parsed = parsers[markup](text)
+ local compr_raw = zlib.compress(text)
+ local compr = zlib.compress(parsed)
+ local tags = {}
+ if tags_str then
+ tags = util.parse_tags(tags_str)
+ end
+ assert(stmnt_update_raw:bind_blob(1,compr_raw) == sql.OK)
+ assert(stmnt_update_raw:bind(2,markup) == sql.OK)
+ assert(stmnt_update_raw:bind(3,storyid) == sql.OK)
+ assert(util.do_sql(stmnt_update_raw) == sql.DONE, "Failed to update raw")
+ stmnt_update_raw:reset()
+ assert(stmnt_update:bind(1,title) == sql.OK)
+ assert(stmnt_update:bind_blob(2,compr) == sql.OK)
+ assert(stmnt_update:bind(3,pasteas == "anonymous" and 1 or 0) == sql.OK)
+ assert(stmnt_update:bind(4,storyid) == sql.OK)
+ assert(util.do_sql(stmnt_update) == sql.DONE, "Failed to update text")
+ stmnt_update:reset()
+ tagslib.set(storyid,tags)
+ --[[
+ assert(stmnt_drop_tags:bind_names{postid = storyid} == sql.OK)
+ do_sql(stmnt_drop_tags)
+ stmnt_drop_tags:reset()
+ for _,tag in pairs(tags) do
+ print("Looking at tag",tag)
+ assert(stmnt_ins_tag:bind(1,storyid) == sql.OK)
+ assert(stmnt_ins_tag:bind(2,tag) == sql.OK)
+ err = do_sql(stmnt_ins_tag)
+ stmnt_ins_tag:reset()
+ end
+ ]]
+ local id_enc = util.encode_id(storyid)
+ local loc = string.format("https://%s/%s",config.domain,id_enc)
+ cache.dirty(string.format("%s/%s",config.domain,id_enc)) -- This place to read this post
+ cache.dirty(string.format("%s",config.domain)) -- The site index (ex, if the author changed the paste from their's to "Anonymous", the cache should reflect that).
+ cache.dirty(string.format("%s.%s",author,config.domain)) -- The author's index, same reasoning as above.
+ http_response_header(req,"Location",loc)
+ http_response(req,303,"")
+ return
+end
+
+return edit_post
diff --git a/src/lua/endpoints/index_get.lua b/src/lua/endpoints/index_get.lua
new file mode 100644
index 0000000..104d09b
--- /dev/null
+++ b/src/lua/endpoints/index_get.lua
@@ -0,0 +1,115 @@
+local sql = require("lsqlite3")
+
+local cache = require("cache")
+local queries = require("queries")
+local db = require("db")
+local util = require("util")
+local config = require("config")
+local pages = require("pages")
+local libtags = require("tags")
+
+local stmnt_index, stmnt_author, stmnt_author_bio
+
+local oldconfigure = configure
+function configure(...)
+ stmnt_index = assert(db.conn:prepare(queries.select_site_index))
+ --TODO: actually let authors edit their bio
+ stmnt_author_bio = assert(db.conn:prepare([[
+ SELECT authors.biography FROM authors WHERE authors.name = :author;
+ ]]))
+ stmnt_author = assert(db.conn:prepare(queries.select_author_index))
+ return oldconfigure(...)
+end
+
+local function get_site_home(req)
+ print("Cache miss, rendering index")
+ stmnt_index:bind_names{}
+ local err = util.do_sql(stmnt_index)
+ local latest = {}
+ --err may be sql.ROW or sql.DONE if we don't have any stories yet
+ while err == sql.ROW do
+ local data = stmnt_index:get_values()
+ local storytags = libtags.get(data[1])
+ table.insert(latest,{
+ url = util.encode_id(data[1]),
+ title = data[2],
+ isanon = data[3] == 1,
+ posted = os.date("%B %d %Y",tonumber(data[4])),
+ author = data[5],
+ tags = storytags,
+ })
+ err = stmnt_index:step()
+ end
+ stmnt_index:reset()
+ return pages.index{
+ domain = config.domain,
+ stories = latest
+ }
+end
+local function get_author_home(req)
+ local host = http_request_get_host(req)
+	local subdomain = host:match("([^%.]+)")
+ stmnt_author_bio:bind_names{author=subdomain}
+ local err = util.do_sql(stmnt_author_bio)
+ if err == sql.DONE then
+ print("No such author")
+ stmnt_author_bio:reset()
+ return pages.noauthor{
+ author = subdomain
+ }
+ end
+ print("err:",err)
+ assert(err == sql.ROW,"failed to get author:" .. subdomain .. " error:" .. tostring(err))
+ local data = stmnt_author_bio:get_values()
+ local bio = data[1]
+ stmnt_author_bio:reset()
+ print("Getting author's stories")
+ stmnt_author:bind_names{author=subdomain}
+ err = util.do_sql(stmnt_author)
+ print("err:",err)
+ local stories = {}
+ while err == sql.ROW do
+ local data = stmnt_author:get_values()
+ local id, title, time = unpack(data)
+ local tags = libtags.get(id)
+ table.insert(stories,{
+ url = util.encode_id(id),
+ title = title,
+ posted = os.date("%B %d %Y",tonumber(time)),
+ tags = tags,
+ })
+ err = stmnt_author:step()
+ end
+ stmnt_author:reset()
+ return pages.author_index{
+ domain=config.domain,
+ author=subdomain,
+ stories=stories,
+ bio=bio
+ }
+
+end
+
+local function index_get(req)
+ local method = http_method_text(req)
+ local host = http_request_get_host(req)
+ local path = http_request_get_path(req)
+ --Default home page
+	local subdomain = host:match("([^%.]+)")
+ local text
+ if host == config.domain then
+ local cachepath = string.format("%s",config.domain)
+ text = cache.render(cachepath, function()
+ return get_site_home(req)
+ end)
+ else --author home page
+ local cachepath = string.format("%s.%s",subdomain,config.domain)
+ text = cache.render(cachepath, function()
+ return get_author_home(req)
+ end)
+ end
+ assert(text)
+ http_response(req,200,text)
+end
+
+return index_get
diff --git a/src/lua/endpoints/login_get.lua b/src/lua/endpoints/login_get.lua
new file mode 100644
index 0000000..e9d279d
--- /dev/null
+++ b/src/lua/endpoints/login_get.lua
@@ -0,0 +1,17 @@
+local config = require("config")
+local cache = require("cache")
+local config = require("config")
+local pages = require("pages")
+
+
+local function login_get(req)
+ --Just give them the login page
+ local ret = cache.render(string.format("%s/_login",config.domain),function()
+ return pages.login{
+ err = "",
+ }
+ end)
+ http_response(req,200,ret)
+end
+
+return login_get
diff --git a/src/lua/endpoints/login_post.lua b/src/lua/endpoints/login_post.lua
new file mode 100644
index 0000000..46d7504
--- /dev/null
+++ b/src/lua/endpoints/login_post.lua
@@ -0,0 +1,61 @@
+local sql = require("lsqlite3")
+
+local db = require("db")
+local util = require("util")
+local session = require("session")
+local config = require("config")
+local pages = require("pages")
+
+local stmnt_author_acct
+
+local oldconfigure = configure
+function configure(...)
+ --Get the data we need to check if someone can log in
+ stmnt_author_acct = assert(db.conn:prepare([[
+ SELECT id, salt, passhash FROM authors WHERE name = :name;
+ ]]))
+
+ return oldconfigure(...)
+end
+
+local function login_post(req)
+ --Try to log in
+ http_populate_multipart_form(req)
+ local name = assert(http_argument_get_string(req,"user"))
+ local pass = assert(http_file_get(req,"pass"))
+ stmnt_author_acct:bind_names{
+ name = name
+ }
+ local text
+ local err = util.do_sql(stmnt_author_acct)
+ if err == sql.ROW then
+ local id, salt, passhash = unpack(stmnt_author_acct:get_values())
+ stmnt_author_acct:reset()
+ local todigest = salt .. pass
+ local hash = sha3(todigest)
+ if hash == passhash then
+ local mysession = session.start(id)
+ http_response_cookie(req,"session",mysession,"/",0,0)
+ local loc = string.format("https://%s.%s",name,config.domain)
+ http_response_header(req,"Location",loc)
+ http_response(req,303,"")
+ return
+ else
+ text = pages.login{
+ err = "Incorrect username or password"
+ }
+ end
+ elseif err == sql.DONE then --Allows user enumeration, do we want this?
+ --Probably not a problem since all passwords are forced to be "good"
+ stmnt_author_acct:reset()
+ text = pages.login{
+ err = "Failed to find user:" .. name
+ }
+ else
+ stmnt_author_acct:reset()
+ error("Other sql error during login")
+ end
+ http_response(req,200,text)
+end
+
+return login_post
diff --git a/src/lua/endpoints/paste_get.lua b/src/lua/endpoints/paste_get.lua
new file mode 100644
index 0000000..b371a0b
--- /dev/null
+++ b/src/lua/endpoints/paste_get.lua
@@ -0,0 +1,96 @@
+local config = require("config")
+local session = require("session")
+local pages = require("pages")
+local cache = require("cache")
+
+local function paste_get(req)
+ --Get the paste page
+ local host = http_request_get_host(req)
+ local text
+ local author,_ = session.get(req)
+ if host == config.domain and author then
+ http_response_header(req,"Location",string.format("https://%s.%s/_paste",author,config.domain))
+ http_response(req,303,"")
+ return
+ elseif host == config.domain and author == nil then
+ text = cache.render(string.format("%s/_paste",host),function()
+ print("Cache missing, rendering post page")
+ return pages.paste{
+ domain = config.domain,
+ err = "",
+ }
+ end)
+ http_response(req,200,text)
+ elseif host ~= config.domain and author then
+ text = pages.author_paste{
+ domain = config.domain,
+ user = author,
+ err = "",
+ text="",
+ }
+ elseif host ~= config.domain and author == nil then
+ http_response_header(req,"Location",string.format("https://%s/_paste",config.domain))
+ http_response(req,303,"")
+ else
+ error(string.format(
+ "Unable to find a good case for paste:%s,%s,%s",
+ host,
+ config.domain,
+ author
+ ))
+ end
+ assert(text)
+ http_response(req,200,text)
+ --[=[
+ if host == config.domain then
+ local author,_ = get_session(req)
+ if author then
+ http_response_header(req,"Location",string.format("https://%s.%s/_paste",author,domain))
+ http_response(req,303,"")
+ return
+ else
+ --For an anonymous user
+ ret = cache.render(string.format("%s/_paste",host),function()
+ print("Cache missing, rendering post page")
+ return pages.paste{
+ domain = domain,
+ err = "",
+ }
+ end)
+ end
+
+ else
+ --Or for someone that's logged in
+ print("Looks like a logged in user wants to paste!")
+ local subdomain = host:match("([^%.]+)")
+ local author,_ = session.get(req)
+ print("subdomain:",subdomain,"author:",author)
+ --If they try to paste as an author, but are on the
+ --wrong subdomain, or or not logged in, redirect them
+ --to the right place. Their own subdomain for authors
+ --or the anonymous paste page for not logged in users.
+ if author == nil then
+ http_response_header(req,"Location","https://"..domain.."/_paste")
+ http_response(req,303,"")
+ return
+ end
+ if author ~= subdomain then
+ http_response_header(req,"Location",string.format("https://%s.%s/_paste",author,domain))
+ http_response(req,303,"")
+ return
+ end
+ assert(author == subdomain,"someone wants to paste as someone else")
+ --We're where we want to be, serve up this users's
+ --paste page. No cache, because how often is a user
+ --going to paste?
+ ret = pages.author_paste{
+ domain = domain,
+ user = author,
+ text = "",
+ err = "",
+ }
+ end
+ ]=]
+end
+
+return paste_get
diff --git a/src/lua/endpoints/paste_post.lua b/src/lua/endpoints/paste_post.lua
new file mode 100644
index 0000000..243cf6e
--- /dev/null
+++ b/src/lua/endpoints/paste_post.lua
@@ -0,0 +1,194 @@
+local sql = require("lsqlite3")
+local zlib = require("zlib")
+
+local util = require("util")
+local parsers = require("parsers")
+local config = require("config")
+local queries = require("queries")
+local db = require("db")
+local cache = require("cache")
+local tags = require("tags")
+local session = require("session")
+
+local stmnt_raw,stmnt_paste
+
+local oldconfigure = configure
+function configure(...)
+ stmnt_paste = assert(db.conn:prepare(queries.insert_post))
+ stmnt_raw = assert(db.conn:prepare(queries.insert_raw))
+ return oldconfigure(...)
+end
+
+local function anon_paste(req,ps)
+ --Public paste
+ --[[
+ This doesn't actually do much for IPv4 addresses,
+ since there are only 32 bits of address. Someone who
+ got a copy of the database could
+ just generate all 2^32 hashes and look up who posted
+ what. Use IPv6, Tor or I2P where possible. (but then I
+ guess it's harder to ban spammers... hmm..)
+ ]]
+ --local ip = http_request_get_ip(req)
+ --local iphash = sha3(ip)
+ --Don't store this information for now, until I come up
+	--with a more elegant solution.
+
+ util.sqlbind(stmnt_paste,"bind_blob",1,ps.text)
+ --assert(stmnt_paste:bind_blob(1,text) == sql.OK)
+ util.sqlbind(stmnt_paste,"bind",2,ps.title)
+ --assert(stmnt_paste:bind(2,esctitle) == sql.OK)
+ util.sqlbind(stmnt_paste,"bind",3,-1)
+ --assert(stmnt_paste:bind(3,-1) == sql.OK)
+ util.sqlbind(stmnt_paste,"bind",4,true)
+ --assert(stmnt_paste:bind(4,true) == sql.OK)
+ util.sqlbind(stmnt_paste,"bind_blob",5,"")
+ --assert(stmnt_paste:bind_blob(5,"") == sql.OK)
+	local err = util.do_sql(stmnt_paste)
+ stmnt_paste:reset()
+ if err == sql.DONE then
+ local rowid = stmnt_paste:last_insert_rowid()
+ assert(stmnt_raw:bind(1,rowid) == sql.OK)
+ assert(stmnt_raw:bind_blob(2,ps.raw) == sql.OK)
+ assert(stmnt_raw:bind(3,ps.markup) == sql.OK)
+ err = util.do_sql(stmnt_raw)
+ stmnt_raw:reset()
+ if err ~= sql.DONE then
+			print("Failed to save raw text, but paste still went through")
+ end
+ tags.set(rowid,ps.tags)
+ --[[
+ for _,tag in pairs(ps.tags) do
+ print("tag 1:",stmnt_ins_tag:bind(1,rowid))
+ print("Looking at tag",tag)
+ print("tag 2:",stmnt_ins_tag:bind(2,tag))
+ err = util.do_sql(stmnt_ins_tag)
+ stmnt_ins_tag:reset()
+ end
+ ]]
+ local url = util.encode_id(rowid)
+ local loc = string.format("https://%s/%s",config.domain,url)
+ http_response_header(req,"Location",loc)
+ http_response(req,303,"")
+ cache.dirty(string.format("%s/%s",config.domain,url))
+ cache.dirty(string.format("%s",config.domain))
+ return
+ elseif err == sql.ERROR or err == sql.MISUSE then
+		http_response(req,500,"Failed to paste: " .. tostring(err))
+ else
+ error("Error pasting:" .. tostring(err))
+ end
+ stmnt_paste:reset()
+end
+local function author_paste(req,ps)
+ --Author paste
+ local author, authorid = session.get(req)
+	if author == nil then
+		--Not logged in: redirect to the anonymous paste page instead
+		--of falling through (the old body built a page from undefined
+		--globals and then pasted with a nil author id anyway).
+		http_response_header(req,"Location",
+			string.format("https://%s/_paste",config.domain))
+		http_response(req,303,"")
+		return
+	end
+ local asanon = assert(http_argument_get_string(req,"pasteas"))
+ --No need to check if the author is posting to the
+ --"right" sudomain, just post it to the one they have
+ --the session key for.
+ assert(stmnt_paste:bind_blob(1,ps.text) == sql.OK)
+ assert(stmnt_paste:bind(2,ps.title) == sql.OK)
+ assert(stmnt_paste:bind(3,authorid) == sql.OK)
+ if asanon == "anonymous" then
+ assert(stmnt_paste:bind(4,true) == sql.OK)
+ else
+ assert(stmnt_paste:bind(4,false) == sql.OK)
+ end
+ assert(stmnt_paste:bind_blob(5,"") == sql.OK)
+	local err = util.do_sql(stmnt_paste)
+ stmnt_paste:reset()
+ if err == sql.DONE then
+ local rowid = stmnt_paste:last_insert_rowid()
+ assert(stmnt_raw:bind(1,rowid) == sql.OK)
+ assert(stmnt_raw:bind_blob(2,ps.raw) == sql.OK)
+ assert(stmnt_raw:bind(3,ps.markup) == sql.OK)
+ err = util.do_sql(stmnt_raw)
+ stmnt_raw:reset()
+ if err ~= sql.DONE then
+ print("Failed to save raw text, but paste still went through")
+ end
+ tags.set(rowid,ps.tags)
+ --[[
+ for _,tag in pairs(ps.tags) do
+ print("tag 1:",stmnt_ins_tag:bind(1,rowid))
+ print("Looking at tag",tag)
+ print("tag 2:",stmnt_ins_tag:bind(2,tag))
+ err = do_sql(stmnt_ins_tag)
+ stmnt_ins_tag:reset()
+ end
+ ]]
+ local url = util.encode_id(rowid)
+ local loc
+ if asanon == "anonymous" then
+ loc = string.format("https://%s/%s",config.domain,url)
+ else
+ loc = string.format("https://%s.%s/%s",author,config.domain,url)
+ end
+ http_response_header(req,"Location",loc)
+ http_response(req,303,"")
+ cache.dirty(string.format("%s.%s",author,config.domain))
+ cache.dirty(string.format("%s/%s",config.domain,url))
+ cache.dirty(string.format("%s",config.domain))
+ return
+ elseif err == sql.ERROR or err == sql.MISUSE then
+		http_response(req,500,"Failed to paste: " .. tostring(err))
+ else
+		error("Error pasting:" .. tostring(err))
+ end
+ stmnt_paste:reset()
+
+end
+local function decodeentities(capture)
+ local n = tonumber(capture,16)
+ local c = string.char(n)
+	if escapes and escapes[c] then
+ return escapes[c]
+ else
+ return c
+ end
+end
+local function paste_post(req)
+ local host = http_request_get_host(req)
+ local path = http_request_get_path(req)
+
+ local ps = {}
+	--We're creating a new paste
+ http_request_populate_post(req)
+ local title = assert(http_argument_get_string(req,"title"))
+ local text = assert(http_argument_get_string(req,"text"))
+ ps.markup = assert(http_argument_get_string(req,"markup"))
+ local tag_str = http_argument_get_string(req,"tags")
+ ps.tags = {}
+ if tag_str then
+ ps.tags = util.parse_tags(tag_str)
+ end
+ local pasteas
+ ps.raw = zlib.compress(text)
+ text = string.gsub(text,"%%(%x%x)",decodeentities)
+ text = parsers[ps.markup](text)
+ assert(text,"Failed to parse text")
+ text = zlib.compress(text)
+ assert(text,"Failed to compress text")
+ ps.text = text
+ local esctitle = string.gsub(title,"%%(%x%x)",decodeentities)
+	--Always sanitize the title with the plain parser. No markup
+ --in the title.
+ ps.title = parsers.plain(title)
+ if host == config.domain then
+ anon_paste(req,ps)
+ else
+ author_paste(req,ps)
+ end
+end
+--assert(ret)
+--http_response(req,200,ret)
+return paste_post
diff --git a/src/lua/endpoints/preview_post.lua b/src/lua/endpoints/preview_post.lua
new file mode 100644
index 0000000..e3cb8da
--- /dev/null
+++ b/src/lua/endpoints/preview_post.lua
@@ -0,0 +1,33 @@
+local parsers = require("parsers")
+local tags = require("tags")
+local util = require("util")
+local pages = require("pages")
+local config = require("config")
+
+local function preview_post(req)
+ print("We want to preview a paste!")
+ local host = http_request_get_host(req)
+ local path = http_request_get_path(req)
+ http_request_populate_post(req)
+ local title = assert(http_argument_get_string(req,"title"))
+ local text = assert(http_argument_get_string(req,"text"))
+ local markup = assert(http_argument_get_string(req,"markup"))
+ local tag_str = http_argument_get_string(req,"tags")
+	local tag_list = {}
+	if tag_str then
+		tag_list = util.parse_tags(tag_str)
+	end
+	print("title:",title,"text:",text,"markup:",markup)
+	local parsed = parsers[markup](text)
+	local ret = pages.read{
+		domain = config.domain,
+		title = title,
+		author = "preview",
+		idp = "preview",
+		text = parsed,
+		tags = tag_list,
+	}
+ http_response(req,200,ret)
+end
+
+return preview_post
diff --git a/src/lua/endpoints/read_get.lua b/src/lua/endpoints/read_get.lua
new file mode 100644
index 0000000..1b5d4e7
--- /dev/null
+++ b/src/lua/endpoints/read_get.lua
@@ -0,0 +1,173 @@
+local sql = require("lsqlite3")
+local zlib = require("zlib")
+local session = require("session")
+local tags = require("tags")
+local db = require("db")
+local queries = require("queries")
+local util = require("util")
+local cache = require("cache")
+local pages = require("pages")
+local config = require("config")
+
+local stmnt_read, stmnt_update_views, stmnt_comments
+
+local oldconfigure = configure
+function configure(...)
+ stmnt_read = assert(db.conn:prepare(queries.select_post))
+ stmnt_update_views = assert(db.conn:prepare(queries.update_views))
+ stmnt_comments = assert(db.conn:prepare(queries.select_comments))
+ return oldconfigure(...)
+end
+
+
+--[[
+Increases a story's hit counter by 1
+]]
+local function add_view(storyid)
+ stmnt_update_views:bind_names{
+ id = storyid
+ }
+ local err = util.do_sql(stmnt_update_views)
+ assert(err == sql.DONE, "Failed to update view counter:"..tostring(err))
+ stmnt_update_views:reset()
+end
+
+--[[
+Populates ps with story settings, returns true if story was found,
+or nil if it wasn't
+]]
+local function populate_ps_story(req,ps)
+ --Make sure our story exists
+ stmnt_read:bind_names{
+ id = ps.storyid
+ }
+ local err = util.do_sql(stmnt_read)
+ if err == sql.DONE then
+ --We got no story
+ stmnt_read:reset()
+ print("No story by this name",ps.storyid)
+ return false
+ end
+ --If we've made it here, we have a story. Populate our settings
+ --with title, text, ect.
+ assert(err == sql.ROW)
+ local title, storytext, tauthor, isanon, authorname, views = unpack(
+ stmnt_read:get_values()
+ )
+ ps.title = title
+ ps.text = zlib.decompress(storytext)
+ ps.tauthor = tauthor
+ ps.isanon = isanon == 1
+ ps.author = authorname
+ ps.views = views
+ stmnt_read:reset()
+ --Tags
+ ps.tags = tags.get(ps.storyid)
+ return true
+end
+
+--[[
+Get the comments for a story
+]]
+local function get_comments(req,ps)
+ stmnt_comments:bind_names{
+ id = ps.storyid
+ }
+	local err = util.do_sql(stmnt_comments)
+ local comments = {}
+ while err ~= sql.DONE do
+ local com_author, com_isanon, com_text = unpack(stmnt_comments:get_values())
+ table.insert(comments,{
+ author = com_author,
+ isanon = com_isanon == 1, --int to boolean
+ text = com_text
+ })
+ err = stmnt_comments:step()
+ end
+ stmnt_comments:reset()
+ return comments
+end
+
+--[[
+The author is viewing their own story, give them an edit button
+]]
+local function read_get_author(req,storyid,author,authorid,comments)
+
+end
+
+--[[
+An author is viewing a story, allow them to post comments as themselves
+]]
+local function read_get_loggedin(req,ps)
+	if ps.tauthor == ps.loggedauthorid then
+ --The story exists and we're logged in as the
+ --owner, display the edit button
+ return read_get_author(req,ps)
+ end
+ return pages.read(ps)
+end
+
+
+local function read_get(req)
+ --Pages settings
+ local ps = {
+ domain = config.domain,
+ host = http_request_get_host(req),
+ path = http_request_get_path(req),
+ method = http_method_text(req),
+ }
+ print("reading", ps.path)
+
+ --Get our story id
+ assert(string.len(ps.path) > 0,"Tried to read 0-length story id")
+ ps.idp = string.sub(ps.path,2)--remove leading "/"
+ ps.storyid = util.decode_id(ps.idp)
+ add_view(ps.storyid)
+
+ --If we're logged in, set author and authorid
+ local author, authorid = session.get(req)
+ if author and authorid then
+ ps.loggedauthor = author
+ ps.iam = author
+ ps.loggedauthorid = authorid
+ end
+
+ --If we need to show comments
+ http_request_populate_qs(req)
+ ps.show_comments = http_argument_get_string(req,"comments")
+ if ps.show_comments then
+ ps.comments = get_comments(req,ps)
+ end
+
+ local text
+ --normal story display
+ if (not ps.loggedauthor) then
+ print("not author")
+ local cachestr = string.format("%s%s%s",
+ ps.host,
+ ps.path,
+ ps.show_comments and "?comments=1" or ""
+ )
+ text = cache.render(cachestr,function()
+ if not populate_ps_story(req,ps) then
+ return pages.nostory(ps)
+ end
+ local output = pages.read(ps)
+ assert(output,"failed to read page:" .. cachestr)
+ return output
+ end)
+ else --we are logged in, don't cache
+ print("is author")
+ if not populate_ps_story(req,ps) then
+ return pages.nostory(ps)
+ end
+ print("tauthor was", ps.tauthor, "while author was:",ps.author)
+ ps.owner = (ps.loggedauthorid == ps.tauthor)
+ text = pages.read(ps)
+ end
+ assert(text)
+ http_response(req,200,text)
+ return
+end
+
+return read_get
diff --git a/src/lua/endpoints/read_post.lua b/src/lua/endpoints/read_post.lua
new file mode 100644
index 0000000..2fde5ef
--- /dev/null
+++ b/src/lua/endpoints/read_post.lua
@@ -0,0 +1,53 @@
+local sql = require("lsqlite3")
+
+local cache = require("cache")
+local session = require("session")
+local util = require("util")
+local db = require("db")
+local queries = require("queries")
+local config = require("config")
+
+local stmnt_comment_insert
+
+local oldconfigure = configure
+function configure(...)
+ stmnt_comment_insert = assert(db.conn:prepare(queries.insert_comment))
+ return oldconfigure(...)
+end
+
+local function read_post(req)
+ local host = http_request_get_host(req)
+ local path = http_request_get_path(req)
+ --We're posting a comment
+ http_request_populate_post(req)
+ http_populate_cookies(req)
+ local author, authorid = session.get(req)
+ local comment_text = assert(http_argument_get_string(req,"text"))
+ local pasteas = assert(http_argument_get_string(req,"postas"))
+ local idp = string.sub(path,2)--remove leading "/"
+ local id = util.decode_id(idp)
+ local isanon = 1
+ --Even if an author is logged in, they may post their comment anonymously
+ if author and pasteas ~= "Anonymous" then
+ isanon = 0
+ end
+ stmnt_comment_insert:bind_names{
+ postid=id,
+ authorid = author and authorid or -1,
+ isanon = isanon,
+ comment_text = comment_text,
+ }
+ local err = util.do_sql(stmnt_comment_insert)
+ stmnt_comment_insert:reset()
+ if err ~= sql.DONE then
+ http_response(req,500,"Internal error, failed to post comment. Go back and try again.")
+ else
+ --When we post a comment, we need to dirty the cache for the "comments displayed" page.
+ cache.dirty(string.format("%s%s?comments=1",host,path))
+ local redir = string.format("https://%s%s?comments=1", config.domain, path)
+ http_response_header(req,"Location",redir)
+ http_response(req,303,"")
+ end
+
+end
+return read_post
diff --git a/src/lua/endpoints/search_get.lua b/src/lua/endpoints/search_get.lua
new file mode 100644
index 0000000..4c340a3
--- /dev/null
+++ b/src/lua/endpoints/search_get.lua
@@ -0,0 +1,59 @@
+local sql = require("lsqlite3")
+
+local db = require("db")
+local queries = require("queries")
+local util = require("util")
+local libtags = require("tags")
+local pages = require("pages")
+local config = require("config")
+
+local stmnt_search
+local oldconfigure = configure
+function configure(...)
+ stmnt_search = assert(db.conn:prepare(queries.select_post_tags))
+ return oldconfigure(...)
+end
+
+local function search_get(req)
+ local host = http_request_get_host(req)
+ local path = http_request_get_path(req)
+ http_request_populate_qs(req)
+ local tag = http_argument_get_string(req,"tag")
+ if tag then
+ stmnt_search:bind_names{
+ tag = tag
+ }
+ local results = {}
+ local err
+ repeat
+ err = stmnt_search:step()
+ if err == sql.BUSY then
+ coroutine.yield()
+ elseif err == sql.ROW then
+ local id, title, anon, time, author = unpack(stmnt_search:get_values())
+ local idp = util.encode_id(id)
+ local tags = libtags.get(id)
+ table.insert(results,{
+ id = idp,
+ title = title,
+ anon = anon,
+ time = os.date("%B %d %Y",tonumber(time)),
+ author = author,
+ tags = tags
+ })
+ elseif err == sql.DONE then
+ stmnt_search:reset()
+ else
+ error("Failed to search, sql error:" .. tostring(err))
+ end
+ until err == sql.DONE
+ local ret = pages.search{
+ domain = config.domain,
+ results = results,
+ tag = tag,
+ }
+ http_response(req,200,ret)
+ end
+end
+
+return search_get
diff --git a/src/lua/init.lua b/src/lua/init.lua
index 2a971f7..22983dd 100644
--- a/src/lua/init.lua
+++ b/src/lua/init.lua
@@ -1,491 +1,59 @@
+
print("Really fast print from init.lua")
+--Luarocks libraries
local et = require("etlua")
local sql = require("lsqlite3")
local zlib = require("zlib")
-if PRODUCTION then
- local function print() end --squash prints
-end
-local parser_names = {"plain","imageboard"}
-local parsers = {}
-for _,v in pairs(parser_names) do
- parsers[v] = require("parser_" .. v)
-end
-local db,cache --databases
-local domain = "test.monster:8888" --The domain to write links as
-local pagenames = {
- "index",
- "author_index",
- "claim",
- "paste",
- "edit",
- "read",
- "nostory",
- "cantedit",
- "noauthor",
- "login",
- "author_paste",
- "author_edit",
- "search",
+--stubs for overloading
+function configure(...) end
+
+--smr code
+local cache = require("cache")
+local pages = require("pages")
+local util = require("util")
+local config = require("config")
+local db = require("db")
+
+--Pages
+local endpoint_names = {
+ read = {"get","post"},
+ preview = {"post"},
+ index = {"get"},
+ paste = {"get","post"},
+ download = {"get"},
+ login = {"get","post"},
+ edit = {"get","post"},
+ claim = {"get","post"},
+ search = {"get"},
}
-local pages = {}
-for k,v in pairs(pagenames) do
- print("Compiling page:",v)
- local f = assert(io.open("pages/" .. v .. ".etlua","r"))
- pages[v] = assert(et.compile(f:read("*a")))
- f:close()
-end
-
-local queries = {}
---These are all loaded during startup, won't affect ongoing performance.
-setmetatable(queries,{
- __index = function(self,key)
- local f = assert(io.open("sql/" .. key .. ".sql","r"))
- local ret = f:read("*a")
- f:close()
- return ret
- end
-})
-
----sql queries
-local stmnt_index, stmnt_author_index, stmnt_read, stmnt_paste, stmnt_raw
-local stmnt_update_views
-local stmnt_ins_tag, stmnt_drop_tags, stmnt_get_tags
-local stmnt_author_create, stmnt_author_acct, stmnt_author_bio
-local stmnt_cache, stmnt_insert_cache, stmnt_dirty_cache
-local stmnt_get_session, stmnt_insert_session
-local stmnt_edit, stmnt_update, stmnt_update_raw, stmnt_author_of
-local stmnt_comments, stmnt_comment_insert
-local stmnt_search
---see https://perishablepress.com/stop-using-unsafe-characters-in-urls/
---no underscore because we use that for our operative pages
-local url_characters =
- [[abcdefghijklmnopqrstuvwxyz]]..
- [[ABCDEFGHIJKLMNOPQRSTUVWXYZ]]..
- [[0123456789]]..
- [[$-+!*'(),]]
-local url_characters_rev = {}
-for i = 1,string.len(url_characters) do
- url_characters_rev[string.sub(url_characters,i,i)] = i
-end
-local function decodeentities(capture)
- local n = tonumber(capture,16)
- local c = string.char(n)
- if escapes[c] then
- return escapes[c]
- else
- return c
+local endpoints = {}
+for name, methods in pairs(endpoint_names) do
+ for _,method in pairs(methods) do
+ local epn = string.format("%s_%s",name,method)
+ endpoints[epn] = require("endpoints." .. epn)
end
end
-local function sqlassert(...)
- local r,errcode,err = ...
- if not r then
- error(string.format("%d: %s",errcode, err))
- end
- return r
-end
-
-local function sqlbind(stmnt,call,position,data)
- assert(call == "bind" or call == "bind_blob","Bad bind call, call was:" .. call)
- local f = stmnt[call](stmnt,position,data)
- if f ~= sql.OK then
- error(string.format("Failed to %s at %d with %q: %s", call, position, data, db:errmsg()),2)
- end
-end
-
-
-
print("Hello from init.lua")
-function configure()
- db = sqlassert(sql.open("data/posts.db"))
- --db = sqlassert(sql.open_memory())
- cache = sqlassert(sql.open_memory())
- print("Compiled pages...")
- --Test that compression works
+local oldconfigure = configure
+function configure(...)
+ --Test that compression works. For some reason, the zlib library
+ --fails if this is done as a one-liner
local msg = "test message"
local one = zlib.compress(msg)
local two = zlib.decompress(one)
- --For some reason, the zlib library fails if this is done as a oneliner
assert(two == msg, "zlib not working as expected")
-
- --Create sql tables
- assert(db:exec(queries.create_table_authors))
- --Create a fake "anonymous" user, so we don't run into trouble
- --so that no one runs into touble being able to paste under this account.
- assert(db:exec(queries.insert_anon_author))
- --If/when an author delets their account, all posts
- --and comments by that author are also deleted (on
- --delete cascade) this is intentional. This also
- --means that all comments by other users on a post
- --an author makes will also be deleted.
- --
- --Post text uses zlib compression
- assert(db:exec(queries.create_table_posts))
- --Store the raw text so people can download it later, maybe
- --we can use it for "download as image" or "download as pdf"
- --in the future too. Stil stored zlib compressed
- assert(db:exec(queries.create_table_raw_text))
- assert(db:exec(queries.create_table_images))
- assert(db:exec(queries.create_table_comments))
- assert(db:exec(queries.create_table_tags))
- assert(db:exec(queries.create_index_tags))
- --Store a cookie for logged in users. Logged in users can edit
- --their own posts.
- assert(db:exec(queries.create_table_session))
- print("Created db tables")
-
- --A cache table to store rendered pages that do not need to be
- --rerendered. In theory this could OOM the program eventually and start
- --swapping to disk. TODO: fixme
- assert(cache:exec([[
- CREATE TABLE IF NOT EXISTS cache (
- path TEXT PRIMARY KEY,
- data BLOB,
- updated INTEGER,
- dirty INTEGER
- );
- ]]))
-
- --Select the data we need to display the on the front page
- stmnt_index = assert(db:prepare(queries.select_site_index))
- --Select the data we need to read a story (and maybe display an edit
- --button
- stmnt_read = assert(db:prepare(queries.select_post))
- --Update the view counter when someone reads a story
- stmnt_update_views = assert(db:prepare(queries.update_views))
- --Retreive comments on a story
- stmnt_comments = assert(db:prepare(queries.select_comments))
- --Add a new comment to a story
- stmnt_comment_insert = assert(db:prepare(queries.insert_comment))
- --TODO: actually let authors edit their bio
- stmnt_author_bio = assert(db:prepare([[
- SELECT authors.biography FROM authors WHERE authors.name = :author;
- ]]))
- --Get the author of a story, used to check when editing that the
- --author really owns the story they're trying to edit
- stmnt_author_of = assert(db:prepare(queries.select_author_of_post))
- --Get the data we need to display a particular author's latest
- --stories
- stmnt_author = assert(db:prepare(queries.select_author_index))
- --Get the data we need to check if someone can log in
- stmnt_author_acct = assert(db:prepare([[
- SELECT id, salt, passhash FROM authors WHERE name = :name;
- ]]))
- --Create a new author on the site
- stmnt_author_create = assert(db:prepare(queries.insert_author))
- stmnt_author_login = assert(db:prepare([[
- SELECT name, passhash FROM authors WHERE name = :name;
- ]]))
- --Create a new post
- stmnt_paste = assert(db:prepare(queries.insert_post))
- --Keep a copy of the plain text of a post so we can edit it later
- --It might also be useful for migrations, if that ever needs to happen
- stmnt_raw = assert(db:prepare(queries.insert_raw))
- --Tags for a story
- stmnt_ins_tag = assert(db:prepare(queries.insert_tag))
- stmnt_get_tags = assert(db:prepare(queries.select_tags))
- stmnt_drop_tags = assert(db:prepare(queries.delete_tags))
- --Get the data we need to display the edit screen
- stmnt_edit = assert(db:prepare(queries.select_edit))
- --Get the data we need when someone wants to download a paste
- stmnt_download = assert(db:prepare(queries.select_download))
- --When we update a post, store the plaintext again
- stmnt_update_raw = assert(db:prepare(queries.update_raw))
- --Should we really reset the update time every time someone makes a post?
- --Someone could keep their story on the front page by just editing it a lot.
- --If it gets abused I can disable it I guess.
- stmnt_update = assert(db:prepare(queries.update_post))
- --Check sessions for login support
- stmnt_insert_session = assert(db:prepare(queries.insert_session))
- stmnt_get_session = assert(db:prepare(queries.select_valid_sessions))
- --Search by tag name
- stmnt_search = assert(db:prepare(queries.select_post_tags))
- --only refresh pages at most once every 10 seconds
- stmnt_cache = cache:prepare([[
- SELECT data
- FROM cache
- WHERE
- path = :path AND
- ((dirty = 0) OR (strftime('%s','now') - updated) < 20)
- ;
- ]])
- stmnt_insert_cache = cache:prepare([[
- INSERT OR REPLACE INTO cache (
- path, data, updated, dirty
- ) VALUES (
- :path, :data, strftime('%s','now'), 0
- );
- ]])
- stmnt_dirty_cache = cache:prepare([[
- UPDATE OR IGNORE cache
- SET dirty = 1
- WHERE path = :path;
- ]])
- --[=[
- ]=]
- print("finished running configure()")
+ oldconfigure(...)
end
print("Created configure function")
---[[
-find a string url for a number
-]]
-
-local function encode_id(number)
- local result = {}
- local charlen = string.len(url_characters)
- repeat
- local pos = (number % charlen) + 1
- number = math.floor(number / charlen)
- table.insert(result,string.sub(url_characters,pos,pos))
- until number == 0
- return table.concat(result)
-end
-
-local function decode_id(s)
- local res, id = pcall(function()
- local n = 0
- local charlen = string.len(url_characters)
- for i = 1,string.len(s) do
- local char = string.sub(s,i,i)
- local pos = url_characters_rev[char] - 1
- n = n + (pos*math.pow(charlen,i-1))
- end
- return n
- end)
- if res then
- return id
- else
- error("Failed to decode id:" .. s)
- end
-end
-
-local function do_sql(stmnt)
- if not stmnt then error("No statement",2) end
- local err
- repeat
- err = stmnt:step()
- print("After stepping, err is", err)
- if err == sql.BUSY then
- coroutine.yield()
- end
- until(err ~= sql.BUSY)
- return err
-end
-
-local function get_tags(id)
- local ret = {}
- stmnt_get_tags:bind_names{
- id = id
- }
- local err
- repeat
- err = stmnt_get_tags:step()
- if err == sql.BUSY then
- coroutine.yield()
- elseif err == sql.ROW then
- table.insert(ret,stmnt_get_tags:get_value(0))
- elseif err == sql.DONE then
- stmnt_get_tags:reset()
- return ret
- else
- error(string.format("Failed to get tags for story %d : %d", id, err))
- end
- until false
-end
-
-local function dirty_cache(url)
- print("Dirtying cache:",url)
- stmnt_dirty_cache:bind_names{
- path = url
- }
- err = do_sql(stmnt_dirty_cache)
- stmnt_dirty_cache:reset()
-end
-
-
---[[
-Start a session for someone who logged in
-]]
-local function start_session(who)
- local rngf = assert(io.open("/dev/urandom","rb"))
- local session_t = {}
- for i = 1,64 do
- local r = string.byte(rngf:read(1))
- local s = string.char((r % 26) + 65)
- table.insert(session_t,s)
- end
- local session = table.concat(session_t)
- rngf:close()
- print("sessionid:",session)
- print("authorid:",who)
- stmnt_insert_session:bind_names{
- sessionid = session,
- authorid = who
- }
- local err = do_sql(stmnt_insert_session)
- stmnt_insert_session:reset()
- print("Err:",err)
- assert(err == sql.DONE)
- return session
-end
---[[
-Retreive the name and authorid of the logged in person,
-or nil+error message if not logged in
-]]
-local function get_session(req)
- http_populate_cookies(req)
- local sessionid = http_request_cookie(req,"session")
- if sessionid == nil then
- return nil, "No session cookie passed by client"
- end
- stmnt_get_session:bind_names{
- key = sessionid
- }
- local err = do_sql(stmnt_get_session)
- if err ~= sql.ROW then
- return nil, "No such session by logged in users"
- end
- print("get session err:",err)
- local data = stmnt_get_session:get_values()
- stmnt_get_session:reset()
- local author = data[1]
- local authorid = data[2]
- return author,authorid
-end
-
---Render a page, with cacheing. If you need to dirty a cache, call dirty_cache()
-local function render(pagename,callback)
- print("Running render...")
- stmnt_cache:bind_names{path=pagename}
- local err = do_sql(stmnt_cache)
- if err == sql.DONE then
- stmnt_cache:reset()
- --page is not cached
- elseif err == sql.ROW then
- print("Cache hit:" .. pagename)
- data = stmnt_cache:get_values()
- stmnt_cache:reset()
- return data[1]
- else --sql.ERROR or sql.MISUSE
- error("Failed to check cache for page " .. pagename)
- end
- --We didn't have the paged cached, render it
- print("Cache miss, running function")
- local text = callback()
- print("Saving data...")
- --And save the data back into the cache
- stmnt_insert_cache:bind_names{
- path=pagename,
- data=text,
- }
- err = do_sql(stmnt_insert_cache)
- if err == sql.ERROR or err == sql.MISUSE then
- error("Failed to update cache for page " .. pagename)
- end
- stmnt_insert_cache:reset()
- return text
-end
-
---[[Parses a semicolon seperated string into it's parts, trims whitespace, lowercases, and capitalizes the first letter. Tags will not be empty. Returns an array of tags]]
-local function parse_tags(str)
- local tags = {}
- for tag in string.gmatch(str,"([^;]+)") do
- assert(tag, "Found a nil or false tag in:" .. str)
- local tag_trimmed = string.match(tag,"%s*(.*)%s*")
- local tag_lower = string.lower(tag_trimmed)
- local tag_capitalized = string.gsub(tag_lower,"^%w",string.upper)
- assert(tag_capitalized, "After processing tag:" .. tag .. " it was falsey.")
- if string.len(tag_capitalized) > 0 then
- table.insert(tags, tag_capitalized)
- end
- end
- return tags
-end
-
function home(req)
- print("Hello from lua!")
- print("Method:", http_method_text(req))
local method = http_method_text(req)
- local host = http_request_get_host(req)
- local path = http_request_get_path(req)
- local text
- if host == domain then
- --Default home page
- text = render(string.format("%s",domain),function()
- print("Cache miss, rendering index")
- stmnt_index:bind_names{}
- local err = do_sql(stmnt_index)
- local latest = {}
- --err may be sql.ROW or sql.DONE if we don't have any stories yet
- while err == sql.ROW do
- local data = stmnt_index:get_values()
- local tags = get_tags(data[1])
- table.insert(latest,{
- url = encode_id(data[1]),
- title = data[2],
- isanon = data[3] == 1,
- posted = os.date("%B %d %Y",tonumber(data[4])),
- author = data[5],
- tags = tags,
- })
- err = stmnt_index:step()
- end
- stmnt_index:reset()
- return pages.index{
- domain = domain,
- stories = latest
- }
- end)
- else
- --Home page for an author
- local subdomain = host:match("([^\\.]+)")
- text = render(string.format("%s.%s",subdomain,domain),function()
- print("Cache miss, rendering author:" .. subdomain)
- stmnt_author_bio:bind_names{author=subdomain}
- local err = do_sql(stmnt_author_bio)
- if err == sql.DONE then
- print("No such author")
- stmnt_author_bio:reset()
- return pages.noauthor{
- author = subdomain
- }
- end
- print("err:",err)
- assert(err == sql.ROW,"failed to get author:" .. subdomain .. " error:" .. tostring(err))
- local data = stmnt_author_bio:get_values()
- local bio = data[1]
- stmnt_author_bio:reset()
- print("Getting author's stories")
- stmnt_author:bind_names{author=subdomain}
- err = do_sql(stmnt_author)
- print("err:",err)
- local stories = {}
- while err == sql.ROW do
- local data = stmnt_author:get_values()
- local id, title, time = unpack(data)
- local tags = get_tags(id)
- table.insert(stories,{
- url = encode_id(id),
- title = title,
- posted = os.date("%B %d %Y",tonumber(time)),
- tags = tags,
- })
- err = stmnt_author:step()
- end
- stmnt_author:reset()
- return pages.author_index{
- domain=domain,
- author=subdomain,
- stories=stories,
- bio=bio
- }
- end)
+ if method == "GET" then
+ endpoints.index_get(req)
end
- assert(text)
- http_response(req,200,text)
end
--We prevent people from changing their password file, this way we don't really
@@ -494,724 +62,77 @@ end
--a while, but whatever.
function claim(req)
local method = http_method_text(req)
- local host = http_request_get_host(req)
- local path = http_request_get_path(req)
- if host ~= domain then
- http_response_header(req,"Location",string.format("https://%s/_claim",domain))
- http_response(req,303,"")
- return
- end
- assert(host == domain)
- local text
if method == "GET" then
- --Get the page to claim a name
- text = render(string.format("%s/_claim",domain),function()
- print("cache miss, rendering claim page")
- return pages.claim{err=""}
- end)
+ endpoints.claim_get(req)
elseif method == "POST" then
- --Actually claim a name
- http_request_populate_post(req)
- local name = assert(http_argument_get_string(req,"user"))
- --What in the world, Kore should be rejecting names that
- --are not lower case & no symbols, but some still get through somehow.
- if not name:match("^[a-z0-9]*$") then
- print("Bad username:",name)
- text = pages.claim{
- err = "Usernames must match ^[a-z0-9]{1,30}$"
- }
- http_response(req,200,text)
- return
- end
- local rngf = assert(io.open("/dev/urandom","rb"))
- local passlength = string.byte(rngf:read(1)) + 64
- local salt = rngf:read(64)
- local password = rngf:read(passlength)
- rngf:close()
- local hash = sha3(salt .. password)
- stmnt_author_create:bind_names{
- name = name,
- }
- stmnt_author_create:bind_blob(2,salt)
- stmnt_author_create:bind_blob(3,hash)
- local err = do_sql(stmnt_author_create)
- if err == sql.DONE then
- --We sucessfully made athe new author
- local id = stmnt_author_create:last_insert_rowid()
- stmnt_author_create:reset()
- --Give them a file back
- http_response_header(req,"Content-Type","application/octet-stream")
- http_response_header(req,"Content-Disposition","attachment; filename=\"" .. name .. "." .. domain .. ".passfile\"")
- local session = start_session(id)
- text = password
- elseif err == sql.CONSTRAINT then
- --If the creation failed, they probably just tried
- --to use a name that was already taken
- text = pages.claim {
- err = "Failed to claim. That name may already be taken."
- }
- elseif err == sql.ERROR or err == sql.MISUSE then
- --This is bad though
- text = pages.claim {
- err = "Failed to claim"
- }
- end
- stmnt_author_create:reset()
+ endpoints.claim_post(req)
end
- assert(text)
- http_response(req,200,text)
end
+--Create a new paste on the site
function paste(req)
- local host = http_request_get_host(req)
- local path = http_request_get_path(req)
local method = http_method_text(req)
- local err
- local ret
if method == "GET" then
- --Get the paste page
- if host == domain then
- local author,_ = get_session(req)
- if author then
- http_response_header(req,"Location",string.format("https://%s.%s/_paste",author,domain))
- http_response(req,303,"")
- return
- else
- --For an anonymous user
- ret = render(string.format("%s/_paste",host),function()
- print("Cache missing, rendering post page")
- return pages.paste{
- domain = domain,
- err = "",
- }
- end)
- end
-
- else
- --Or for someone that's logged in
- print("Looks like a logged in user wants to paste!")
- local subdomain = host:match("([^%.]+)")
- local author,_ = get_session(req)
- print("subdomain:",subdomain,"author:",author)
- --If they try to paste as an author, but are on the
- --wrong subdomain, or or not logged in, redirect them
- --to the right place. Their own subdomain for authors
- --or the anonymous paste page for not logged in users.
- if author == nil then
- http_response_header(req,"Location","https://"..domain.."/_paste")
- http_response(req,303,"")
- return
- end
- if author ~= subdomain then
- http_response_header(req,"Location",string.format("https://%s.%s/_paste",author,domain))
- http_response(req,303,"")
- return
- end
- assert(author == subdomain,"someone wants to paste as someone else")
- --We're where we want to be, serve up this users's
- --paste page. No cache, because how often is a user
- --going to paste?
- ret = pages.author_paste{
- domain = domain,
- user = author,
- text = "",
- err = "",
- }
- end
+ endpoints.paste_get(req)
elseif method == "POST" then
- --We're creatinga new paste
- http_request_populate_post(req)
- local title = assert(http_argument_get_string(req,"title"))
- local text = assert(http_argument_get_string(req,"text"))
- local markup = assert(http_argument_get_string(req,"markup"))
- local tag_str = http_argument_get_string(req,"tags")
- local tags = {}
- if tag_str then
- tags = parse_tags(tag_str)
- end
- local pasteas
- local raw = zlib.compress(text)
- text = string.gsub(text,"%%(%x%x)",decodeentities)
- text = parsers[markup](text)
- text = zlib.compress(text)
- local esctitle = string.gsub(title,"%%(%x%x)",decodeentities)
- --Always sanatize the title with the plain parser. no markup
- --in the title.
- esctitle = parsers.plain(title)
- if host == domain then
- --Public paste
- --[[
- This doesn't actually do much for IPv4 addresses,
- since there are only 32 bits of address. Someone who
- got a copy of the database could
- just generate all 2^32 hashes and look up who posted
- what. Use IPv6, Tor or I2P where possible. (but then I
- guess it's harder to ban spammers... hmm..)
- ]]
- --local ip = http_request_get_ip(req)
- --local iphash = sha3(ip)
- --Don't store this information for now, until I come up
- --with a more elegent solution.
-
- sqlbind(stmnt_paste,"bind_blob",1,text)
- --assert(stmnt_paste:bind_blob(1,text) == sql.OK)
- sqlbind(stmnt_paste,"bind",2,esctitle)
- --assert(stmnt_paste:bind(2,esctitle) == sql.OK)
- sqlbind(stmnt_paste,"bind",3,-1)
- --assert(stmnt_paste:bind(3,-1) == sql.OK)
- sqlbind(stmnt_paste,"bind",4,true)
- --assert(stmnt_paste:bind(4,true) == sql.OK)
- sqlbind(stmnt_paste,"bind_blob",5,"")
- --assert(stmnt_paste:bind_blob(5,"") == sql.OK)
- err = do_sql(stmnt_paste)
- stmnt_paste:reset()
- if err == sql.DONE then
- local rowid = stmnt_paste:last_insert_rowid()
- assert(stmnt_raw:bind(1,rowid) == sql.OK)
- assert(stmnt_raw:bind_blob(2,raw) == sql.OK)
- assert(stmnt_raw:bind(3,markup) == sql.OK)
- err = do_sql(stmnt_raw)
- stmnt_raw:reset()
- if err ~= sql.DONE then
- print("Failed to save raw text, but paste still went though")
- end
- for _,tag in pairs(tags) do
- print("tag 1:",stmnt_ins_tag:bind(1,rowid))
- print("Looking at tag",tag)
- print("tag 2:",stmnt_ins_tag:bind(2,tag))
- err = do_sql(stmnt_ins_tag)
- stmnt_ins_tag:reset()
- end
- local url = encode_id(rowid)
- local loc = string.format("https://%s/%s",domain,url)
- http_response_header(req,"Location",loc)
- http_response(req,303,"")
- dirty_cache(string.format("%s/%s",domain,url))
- dirty_cache(string.format("%s",domain))
- return
- elseif err == sql.ERROR or err == sql.MISUSE then
- ret = "Failed to paste: " .. tostring(err)
- else
- error("Error pasting:" .. tostring(err))
- end
- stmnt_paste:reset()
-
- else
- --Author paste
- local author, authorid = get_session(req)
- if author == nil then
- ret = pages.author_paste{
- domain = domain,
- author = subdomain,
- err = "You are not logged in, you must be logged in to post as " .. subdomain .. ".",
- text = text
- }
- end
- local asanon = assert(http_argument_get_string(req,"pasteas"))
- --No need to check if the author is posting to the
- --"right" sudomain, just post it to the one they have
- --the session key for.
- assert(stmnt_paste:bind_blob(1,text) == sql.OK)
- assert(stmnt_paste:bind(2,esctitle) == sql.OK)
- assert(stmnt_paste:bind(3,authorid) == sql.OK)
- if asanon == "anonymous" then
- assert(stmnt_paste:bind(4,true) == sql.OK)
- else
- assert(stmnt_paste:bind(4,false) == sql.OK)
- end
- assert(stmnt_paste:bind_blob(5,"") == sql.OK)
- err = do_sql(stmnt_paste)
- stmnt_paste:reset()
- if err == sql.DONE then
- local rowid = stmnt_paste:last_insert_rowid()
- assert(stmnt_raw:bind(1,rowid) == sql.OK)
- assert(stmnt_raw:bind_blob(2,raw) == sql.OK)
- assert(stmnt_raw:bind(3,markup) == sql.OK)
- err = do_sql(stmnt_raw)
- stmnt_raw:reset()
- for _,tag in pairs(tags) do
- print("tag 1:",stmnt_ins_tag:bind(1,rowid))
- print("Looking at tag",tag)
- print("tag 2:",stmnt_ins_tag:bind(2,tag))
- err = do_sql(stmnt_ins_tag)
- stmnt_ins_tag:reset()
- end
- if err ~= sql.DONE then
- print("Failed to save raw text, but paste still went through")
- end
- local url = encode_id(rowid)
- local loc
- if asanon == "anonymous" then
- loc = string.format("https://%s/%s",domain,url)
- else
- loc = string.format("https://%s.%s/%s",author,domain,url)
- end
- http_response_header(req,"Location",loc)
- http_response(req,303,"")
- dirty_cache(string.format("%s.%s",author,domain))
- dirty_cache(string.format("%s/%s",domain,url))
- dirty_cache(string.format("%s",domain))
- return
- elseif err == sql.ERROR or err == sql.MISUSE then
- ret = "Failed to paste: " .. tostring(err)
- else
- error("Error pasting:",err)
- end
- stmnt_paste:reset()
- end
- end
- assert(ret)
- http_response(req,200,ret)
-end
-
---A helper function for below
-local function read_story(host,path,idp,show_comments,iam)
- local cachestr
- if show_comments then
- cachestr = string.format("%s%s?comments=1",host,path)
- else
- cachestr = string.format("%s%s",host,path)
- end
- local id = decode_id(idp)
- stmnt_update_views:bind_names{
- id = id
- }
- print("update:",do_sql(stmnt_update_views))
- stmnt_update_views:reset()
- dirty_cache(cachestr)
- print("cachestr was:",cachestr)
- local readstoryf = function()
- stmnt_read:bind_names{
- id = id
- }
- local err = do_sql(stmnt_read)
- if err == sql.DONE then
- stmnt_read:reset()
- return pages.nostory{
- path = path
- }
- end
- local tags = get_tags(id)
- assert(err == sql.ROW,"Could not get row:" .. tostring(id) .. " Error:" .. tostring(err))
- local title, text, authorid, isanon, authorname, views = unpack(stmnt_read:get_values())
- stmnt_comments:bind_names{
- id = id
- }
- err = do_sql(stmnt_comments)
- local comments = {}
- while err ~= sql.DONE do
- local com_author, com_isanon, com_text = unpack(stmnt_comments:get_values())
- table.insert(comments,{
- author = com_author,
- isanon = com_isanon == 1, --int to boolean
- text = com_text
- })
- err = stmnt_comments:step()
- end
- stmnt_comments:reset()
- text = zlib.decompress(text)
- stmnt_read:reset()
- return pages.read{
- domain = domain,
- title = title,
- text = text,
- idp = idp,
- isanon = isanon == 1,
- author = authorname,
- comments = comments,
- show_comments = show_comments,
- iam = iam,
- tags = tags,
- views = views,
- }
- end
- --Don't cache if we're logged in, someone might see dirty cache information on the page.
- --(I.e. When the user has loaded comments, the form to past a comment may contain a username,
- --which is not the user's, from whoever loaded the cache last) to fix this bug, don't cache
- --pages when the user is logged in. All non-logged-in users can see the same page no problem.
- if not iam then
- return render(cachestr,readstoryf)
- else
- return readstoryf()
+ endpoints.paste_post(req)
end
end
function read(req)
- local host = http_request_get_host(req)
- local path = http_request_get_path(req)
local method = http_method_text(req)
if method == "GET" then
- local idp = string.sub(path,2)--remove leading "/"
- assert(string.len(path) > 0,"Tried to read 0-length story id")
- local author, authorid = get_session(req)
- http_request_populate_qs(req)
- local show_comments = http_argument_get_string(req,"comments")
- --parameters needed for the read page
- local text
- if author then
- --We're logged in as someone
- local id = decode_id(idp)
- stmnt_read:bind_names{
- id = id
- }
- local err = do_sql(stmnt_read)
- local tags = get_tags(id)
- if err == sql.DONE then
- --We got no story
- stmnt_read:reset()
- text = pages.nostory{
- path = path
- }
- else
- --If we can edit this story, we don't want to cache
- --the page, since it'll have an edit button on it.
- assert(err == sql.ROW)
- local title, storytext, tauthor, isanon, authorname, views = unpack(stmnt_read:get_values())
- stmnt_update_views:bind_names{
- id = id
- }
- print("update:",do_sql(stmnt_update_views))
- stmnt_update_views:reset()
- storytext = zlib.decompress(storytext)
- stmnt_read:reset()
- if tauthor == authorid then
- --The story exists and we're logged in as the
- --owner, display the edit button
- text = pages.read{
- domain = domain,
- title = title,
- text = storytext,
- idp = idp,
- isanon = isanon == 1,
- author = authorname,
- iam = authorname,
- owner = true,
- tags = tags,
- views = views,
- }
-
- else
- text = read_story(host,path,idp,show_comments,author)
- end
- end
- else
- --We're not logged in as anyone
- http_request_populate_qs(req)
- text = read_story(host,path,idp,show_comments,author)
- end
- assert(text)
- http_response(req,200,text)
- return
+ endpoints.read_get(req)
elseif method == "POST" then
- --We're posting a comment
- http_request_populate_post(req)
- http_populate_cookies(req)
- local author, authorid = get_session(req)
- local comment_text = assert(http_argument_get_string(req,"text"))
- local pasteas = assert(http_argument_get_string(req,"postas"))
- local idp = string.sub(path,2)--remove leading "/"
- local id = decode_id(idp)
- local isanon = 1
- --Even if an author is logged in, they may post their comment anonymously
- if author and pasteas ~= "Anonymous" then
- isanon = 0
- end
- stmnt_comment_insert:bind_names{
- postid=id,
- authorid = author and authorid or -1,
- isanon = isanon,
- comment_text = comment_text,
- }
- local err = do_sql(stmnt_comment_insert)
- stmnt_comment_insert:reset()
- if err ~= sql.DONE then
- http_response(req,500,"Internal error, failed to post comment. Go back and try again.")
- else
- --When we post a comment, we need to dirty the cache for the "comments displayed" page.
- dirty_cache(string.format("%s%s?comments=1",host,path))
- local redir = string.format("https://%s%s?comments=1", domain, path)
- http_response_header(req,"Location",redir)
- http_response(req,303,"")
- end
+ endpoints.read_post(req)
end
end
function login(req)
- local host = http_request_get_host(req)
- local path = http_request_get_path(req)
local method = http_method_text(req)
- if host ~= domain then
- --Don't allow logging into subdomains, I guess
- http_response_header(req,"Location",string.format("https://%s/_login",domain))
- http_response(req,303,"")
- return
- end
- local text
if method == "GET" then
- --Just give them the login page
- text = render(string.format("%s/_login",domain),function()
- return pages.login{
- err = "",
- }
- end)
+ endpoints.login_get(req)
elseif method == "POST" then
- --Try to log in
- http_populate_multipart_form(req)
- local name = assert(http_argument_get_string(req,"user"))
- local pass = assert(http_file_get(req,"pass"))
- stmnt_author_acct:bind_names{
- name = name
- }
- local err = do_sql(stmnt_author_acct)
- if err == sql.ROW then
- local id, salt, passhash = unpack(stmnt_author_acct:get_values())
- stmnt_author_acct:reset()
- local todigest = salt .. pass
- local hash = sha3(todigest)
- if hash == passhash then
- local session = start_session(id)
- http_response_cookie(req,"session",session,"/",0,0)
- local loc = string.format("https://%s.%s",name,domain)
- http_response_header(req,"Location",loc)
- http_response(req,303,"")
- return
- else
- text = pages.login{
- err = "Incorrect username or password"
- }
- end
- elseif err == sql.DONE then --Allows user enumeration, do we want this?
- --Probably not a problem since all passwords are forced to be "good"
- stmnt_author_acct:reset()
- text = pages.login{
- err = "Failed to find user:" .. name
- }
- else
- stmnt_author_acct:reset()
- error("Other sql error during login")
- end
+ endpoints.login_post(req)
end
- assert(text)
- http_response(req,200,text)
end
--Edit a story
function edit(req)
- local host = http_request_get_host(req)
- local path = http_request_get_path(req)
local method = http_method_text(req)
- local author, author_id = get_session(req)
- local ret
if method == "GET" then
- http_request_populate_qs(req)
- local story = assert(http_argument_get_string(req,"story"))
- local story_id = decode_id(story)
- print("we want to edit story:",story)
- --Check that the logged in user is the owner of the story
- --sql-side. If we're not the owner, we'll get 0 rows back.
- stmnt_edit:bind_names{
- postid = story_id,
- authorid = author_id
- }
- local err = do_sql(stmnt_edit)
- if err == sql.DONE then
- --No rows, we're probably not the owner (it might
- --also be because there's no such story)
- ret = pages.cantedit{
- path = story,
- }
- stmnt_edit:reset()
- http_response(req,200,ret)
- return
- end
- assert(err == sql.ROW)
- local data = stmnt_edit:get_values()
- local txt_compressed, markup, isanon, title = unpack(data)
- local text = zlib.decompress(txt_compressed)
- local tags = get_tags(story_id)
- local tags_txt = table.concat(tags,";")
- stmnt_edit:reset()
- ret = pages.edit{
- title = title,
- text = text,
- markup = markup,
- user = author,
- isanon = isanon == 1,
- domain = domain,
- story = story_id,
- err = "",
- tags = tags_txt
- }
+ endpoints.edit_get(req)
elseif method == "POST" then
- http_request_populate_post(req)
- local storyid = tonumber(assert(http_argument_get_string(req,"story")))
- local title = assert(http_argument_get_string(req,"title"))
- local text = assert(http_argument_get_string(req,"text"))
- local pasteas = assert(http_argument_get_string(req,"pasteas"))
- local markup = assert(http_argument_get_string(req,"markup"))
- local tags_str = http_argument_get_string(req,"tags")
- stmnt_author_of:bind_names{
- id = storyid
- }
- local err = do_sql(stmnt_author_of)
- if err ~= sql.ROW then
- stmnt_author_of:reset()
- error("No author found for story:" .. storyid)
- end
- local data = stmnt_author_of:get_values()
- stmnt_author_of:reset()
- local realauthor = data[1]
- assert(realauthor == author_id) --Make sure the author of the story is the currently logged in user
- local parsed = parsers[markup](text)
- local compr_raw = zlib.compress(text)
- local compr = zlib.compress(parsed)
- local tags = {}
- if tags_str then
- tags = parse_tags(tags_str)
- end
- assert(stmnt_update_raw:bind_blob(1,compr_raw) == sql.OK)
- assert(stmnt_update_raw:bind(2,markup) == sql.OK)
- assert(stmnt_update_raw:bind(3,storyid) == sql.OK)
- assert(do_sql(stmnt_update_raw) == sql.DONE, "Failed to update raw")
- stmnt_update_raw:reset()
- assert(stmnt_update:bind(1,title) == sql.OK)
- assert(stmnt_update:bind_blob(2,compr) == sql.OK)
- assert(stmnt_update:bind(3,pasteas == "anonymous" and 1 or 0) == sql.OK)
- assert(stmnt_update:bind(4,storyid) == sql.OK)
- assert(do_sql(stmnt_update) == sql.DONE, "Failed to update text")
- stmnt_update:reset()
- assert(stmnt_drop_tags:bind_names{postid = storyid} == sql.OK)
- do_sql(stmnt_drop_tags)
- stmnt_drop_tags:reset()
-
- for _,tag in pairs(tags) do
- print("Looking at tag",tag)
- assert(stmnt_ins_tag:bind(1,storyid) == sql.OK)
- assert(stmnt_ins_tag:bind(2,tag) == sql.OK)
- err = do_sql(stmnt_ins_tag)
- stmnt_ins_tag:reset()
- end
- local id_enc = encode_id(storyid)
- local loc = string.format("https://%s/%s",domain,id_enc)
- dirty_cache(string.format("%s/%s",domain,id_enc)) -- This place to read this post
- dirty_cache(string.format("%s",domain)) -- The site index (ex, if the author changed the paste from their's to "Anonymous", the cache should reflect that).
- dirty_cache(string.format("%s.%s",author,domain)) -- The author's index, same reasoning as above.
- http_response_header(req,"Location",loc)
- http_response(req,303,"")
- return
+ endpoints.edit_post(req)
end
- assert(ret)
- http_response(req,200,ret)
end
--TODO
function edit_bio()
- print("we want to edit bio")
+ error("Not yet implemented")
end
function teardown()
print("Exiting...")
if db then
- db:close()
+ db.close()
end
if cache then
- cache:close()
+ cache.close()
end
print("Finished lua teardown")
end
function download(req)
- local host = http_request_get_host(req)
- local path = http_request_get_path(req)
- print("host:",host,"path:",path)
- http_request_populate_qs(req)
- local story = assert(http_argument_get_string(req,"story"))
- local story_id = decode_id(story)
- stmnt_download:bind_names{
- postid = story_id
- }
- local err = do_sql(stmnt_download)
- if err == sql.DONE then
- --No rows, story not found
- http_responose(req,404,pages.nostory{path=story})
- stmnt_download:reset()
- return
- end
- local txt_compressed, title = unpack(stmnt_download:get_values())
- local text = zlib.decompress(txt_compressed)
- stmnt_download:reset()
- http_response_header(req,"Content-Type","application/octet-stream")
- local nicetitle = title:gsub("%W","_")
- http_response_header(req,"Content-Disposition","attachment; filename=\"" .. nicetitle .. ".txt\"")
- http_response(req,200,text)
+ endpoints.download_get(req)
end
function preview(req)
- print("We want to preview a paste!")
- local host = http_request_get_host(req)
- local path = http_request_get_path(req)
- http_request_populate_post(req)
- local title = assert(http_argument_get_string(req,"title"))
- local text = assert(http_argument_get_string(req,"text"))
- local markup = assert(http_argument_get_string(req,"markup"))
- local tag_str = http_argument_get_string(req,"tags")
- local tags = {}
- if tag_str then
- tags = parse_tags(tag_str)
- end
- print("title:",title,"text:",text,"markup:",markup)
- local parsed = parsers[markup](text)
- local ret = pages.read{
- domain = domain,
- title = title,
- author = "preview",
- idp = "preview",
- text = parsed,
- tags = tags,
- }
- http_response(req,200,ret)
+ endpoints.preview_post(req)
end
function search(req)
- local host = http_request_get_host(req)
- local path = http_request_get_path(req)
- http_request_populate_qs(req)
- local tag = http_argument_get_string(req,"tag")
- if tag then
- stmnt_search:bind_names{
- tag = tag
- }
- local results = {}
- local err
- repeat
- err = stmnt_search:step()
- if err == sql.BUSY then
- coroutine.yield()
- elseif err == sql.ROW then
- local id, title, anon, time, author = unpack(stmnt_search:get_values())
- local idp = encode_id(id)
- local tags = get_tags(id)
- table.insert(results,{
- id = idp,
- title = title,
- anon = anon,
- time = os.date("%B %d %Y",tonumber(time)),
- author = author,
- tags = tags
- })
- elseif err == sql.DONE then
- stmnt_search:reset()
- else
- error("Failed to search, sql error:" .. tostring(err))
- end
- until err == sql.DONE
- local ret = pages.search{
- domain = domain,
- results = results,
- tag = tag,
- }
- http_response(req,200,ret)
- end
+ endpoints.search_get(req)
end
print("Done with init.lua")
diff --git a/src/lua/parsers.lua b/src/lua/parsers.lua
new file mode 100644
index 0000000..1f679c9
--- /dev/null
+++ b/src/lua/parsers.lua
@@ -0,0 +1,7 @@
+
+local parser_names = {"plain","imageboard"}
+local parsers = {}
+for _,v in pairs(parser_names) do
+ parsers[v] = require("parser_" .. v)
+end
+return parsers
diff --git a/src/lua/queries.lua b/src/lua/queries.lua
new file mode 100644
index 0000000..3d4e500
--- /dev/null
+++ b/src/lua/queries.lua
@@ -0,0 +1,13 @@
+
+local queries = {}
+
+setmetatable(queries,{
+ __index = function(self,key)
+ local f = assert(io.open("sql/" .. key .. ".sql","r"))
+ local ret = f:read("*a")
+ f:close()
+ return ret
+ end
+})
+
+return queries
diff --git a/src/lua/session.lua b/src/lua/session.lua
new file mode 100644
index 0000000..b6a5c4a
--- /dev/null
+++ b/src/lua/session.lua
@@ -0,0 +1,68 @@
+local sql = require("lsqlite3")
+
+local db = require("db")
+local util = require("util")
+local queries = require("queries")
+
+local oldconfigure = configure
+local stmnt_get_session, stmnt_insert_session
+function configure(...)
+ stmnt_get_session = assert(db.conn:prepare(queries.select_valid_sessions))
+ stmnt_insert_session = assert(db.conn:prepare(queries.insert_session))
+ return oldconfigure(...)
+end
+
+local session = {}
+
+--[[
+Retrieve the name and authorid of the logged-in person,
+or nil+error message if not logged in
+]]
+function session.get(req)
+ http_populate_cookies(req)
+ local sessionid = http_request_cookie(req,"session")
+ if sessionid == nil then
+ return nil, "No session cookie passed by client"
+ end
+ stmnt_get_session:bind_names{
+ key = sessionid
+ }
+ local err = util.do_sql(stmnt_get_session)
+ if err ~= sql.ROW then
+ return nil, "No such session by logged in users"
+ end
+ print("get session err:",err)
+ local data = stmnt_get_session:get_values()
+ stmnt_get_session:reset()
+ local author = data[1]
+ local authorid = data[2]
+ return author,authorid
+end
+
+--[[
+Start a session for someone who logged in
+]]
+function session.start(who)
+ local rngf = assert(io.open("/dev/urandom","rb"))
+ local session_t = {}
+ for i = 1,64 do
+ local r = string.byte(rngf:read(1))
+ local s = string.char((r % 26) + 65)
+ table.insert(session_t,s)
+ end
+ local session = table.concat(session_t)
+ rngf:close()
+ print("sessionid:",session)
+ print("authorid:",who)
+ stmnt_insert_session:bind_names{
+ sessionid = session,
+ authorid = who
+ }
+ local err = util.do_sql(stmnt_insert_session)
+ stmnt_insert_session:reset()
+ print("Err:",err)
+ assert(err == sql.DONE)
+ return session
+end
+
+return session
diff --git a/src/lua/tags.lua b/src/lua/tags.lua
new file mode 100644
index 0000000..9ee026b
--- /dev/null
+++ b/src/lua/tags.lua
@@ -0,0 +1,60 @@
+local sql = require("lsqlite3")
+
+local db = require("db")
+local queries = require("queries")
+local util = require("util")
+local tags = {}
+
+local stmnt_get_tags, stmnt_ins_tag, stmnt_drop_tags
+
+local oldconfigure = configure
+function configure(...)
+ --Tags for a story
+ stmnt_ins_tag = assert(db.conn:prepare(queries.insert_tag))
+ stmnt_get_tags = assert(db.conn:prepare(queries.select_tags))
+ stmnt_drop_tags = assert(db.conn:prepare(queries.delete_tags))
+
+ return oldconfigure(...)
+end
+
+
+function tags.get(id)
+ local ret = {}
+ stmnt_get_tags:bind_names{
+ id = id
+ }
+ local err
+ repeat
+ err = stmnt_get_tags:step()
+ if err == sql.BUSY then
+ coroutine.yield()
+ elseif err == sql.ROW then
+ table.insert(ret,stmnt_get_tags:get_value(0))
+ elseif err == sql.DONE then
+ stmnt_get_tags:reset()
+ return ret
+ else
+ error(string.format("Failed to get tags for story %d : %d", id, err))
+ end
+ until false
+end
+
+function tags.set(storyid,tags)
+ assert(stmnt_drop_tags:bind_names{postid = storyid} == sql.OK)
+ util.do_sql(stmnt_drop_tags)
+ stmnt_drop_tags:reset()
+ local err
+ for _,tag in pairs(tags) do
+ print("Looking at tag",tag)
+ assert(stmnt_ins_tag:bind(1,storyid) == sql.OK)
+ assert(stmnt_ins_tag:bind(2,tag) == sql.OK)
+ err = util.do_sql(stmnt_ins_tag)
+ stmnt_ins_tag:reset()
+ end
+ if err ~= sql.DONE then
+ print("Failed to save tags, but paste and raw still went through")
+ end
+
+end
+
+return tags
diff --git a/src/lua/util.lua b/src/lua/util.lua
new file mode 100644
index 0000000..32da51f
--- /dev/null
+++ b/src/lua/util.lua
@@ -0,0 +1,123 @@
+
+local sql = require("lsqlite3")
+
+local util = {}
+
+--[[
+Runs an sql query and receives the 3 arguments back, prints a nice error
+message on fail, and returns true on success.
+]]
+function util.sqlassert(...)
+ local r,errcode,err = ...
+ if not r then
+ error(string.format("%d: %s",errcode, err))
+ end
+ return r
+end
+
+--[[
+Continuously tries to perform an sql statement until it goes through
+]]
+function util.do_sql(stmnt)
+ if not stmnt then error("No statement",2) end
+ local err
+ local i = 0
+ repeat
+ err = stmnt:step()
+ print("After stepping, err is", err)
+ if err == sql.BUSY then
+ i = i + 1
+ coroutine.yield()
+ end
+ until(err ~= sql.BUSY or i > 10)
+ assert(i < 10, "Database busy")
+ return err
+end
+
+--[[
+Binds an argument to a statement with nice error reporting on failure
+stmnt :: sql.stmnt - the prepared sql statement
+call :: string - a string "bind" or "bind_blob"
+position :: number - the argument position to bind to
+data :: string - The data to bind
+]]
+function util.sqlbind(stmnt,call,position,data)
+ assert(call == "bind" or call == "bind_blob","Bad bind call, call was:" .. call)
+ local f = stmnt[call](stmnt,position,data)
+ if f ~= sql.OK then
+		error(string.format("Failed to %s at %d with %q", call, position, data),2)
+ end
+end
+
+--see https://perishablepress.com/stop-using-unsafe-characters-in-urls/
+--no underscore because we use that for our operative pages
+local url_characters =
+ [[abcdefghijklmnopqrstuvwxyz]]..
+ [[ABCDEFGHIJKLMNOPQRSTUVWXYZ]]..
+ [[0123456789]]..
+ [[$-+!*'(),]]
+local url_characters_rev = {}
+for i = 1,string.len(url_characters) do
+ url_characters_rev[string.sub(url_characters,i,i)] = i
+end
+--[[
+Encode a number to a shorter HTML-safe url path
+]]
+function util.encode_id(number)
+ local result = {}
+ local charlen = string.len(url_characters)
+ repeat
+ local pos = (number % charlen) + 1
+ number = math.floor(number / charlen)
+ table.insert(result,string.sub(url_characters,pos,pos))
+ until number == 0
+ return table.concat(result)
+end
+
+--[[
+Given a short HTML-safe url path, convert it to a storyid
+]]
+function util.decode_id(s)
+ local res, id = pcall(function()
+ local n = 0
+ local charlen = string.len(url_characters)
+ for i = 1,string.len(s) do
+ local char = string.sub(s,i,i)
+ local pos = url_characters_rev[char] - 1
+ n = n + (pos*math.pow(charlen,i-1))
+ end
+ return n
+ end)
+ if res then
+ return id
+ else
+ error("Failed to decode id:" .. s)
+ end
+end
+
+--[[
+Parses a semicolon-separated string into its parts:
+1. seperates by semicolon
+2. trims whitespace
+3. lowercases
+4. capitalizes the first letter.
+Returns an array of zero or more strings.
+There is no blank tag, parsing "one;two;;three" will yield
+{"one","two","three"}
+]]
+function util.parse_tags(str)
+ local tags = {}
+ for tag in string.gmatch(str,"([^;]+)") do
+ assert(tag, "Found a nil or false tag in:" .. str)
+		local tag_trimmed = string.match(tag,"^%s*(.-)%s*$")
+ local tag_lower = string.lower(tag_trimmed)
+ local tag_capitalized = string.gsub(tag_lower,"^.",string.upper)
+ assert(tag_capitalized, "After processing tag:" .. tag .. " it was falsey.")
+ if string.len(tag_capitalized) > 0 then
+ table.insert(tags, tag_capitalized)
+ end
+ end
+ return tags
+end
+
+return util
diff --git a/src/pages/author_index.etlua b/src/pages/author_index.etlua
index 8654483..98b8f76 100644
--- a/src/pages/author_index.etlua
+++ b/src/pages/author_index.etlua
@@ -1,51 +1,49 @@
-<% assert(author,"No author specified") %>
-<% assert(bio,"No bio included") %>
-
-
-
-
<% end -%>
diff --git a/src/smr.c b/src/smr.c
index 6030602..c1486e9 100644
--- a/src/smr.c
+++ b/src/smr.c
@@ -12,6 +12,7 @@
#include "libkore.h"
#include "libcrypto.h"
#include
+#include
int home(struct http_request *);
int post_story(struct http_request *);
@@ -38,15 +39,37 @@ lua_State *L;
static / _claim claim
*/
+/*Allow seccomp things for luajit and sqlite*/
+KORE_SECCOMP_FILTER("app",
+ KORE_SYSCALL_ALLOW(pread64),
+ KORE_SYSCALL_ALLOW(pwrite64),
+ KORE_SYSCALL_ALLOW(fdatasync),
+ KORE_SYSCALL_ALLOW(unlinkat),
+ KORE_SYSCALL_ALLOW(mremap),
+ KORE_SYSCALL_ALLOW(newfstatat)
+);
+
int
errhandeler(lua_State *L){
- printf("Error: %s\n",lua_tostring(L,1));
- lua_getglobal(L,"debug");
- lua_getglobal(L,"print");
- lua_getfield(L,-2,"traceback");
- lua_call(L,0,1);
- lua_call(L,1,0);
- lua_pop(L,1);
+ printf("Error: %s\n",lua_tostring(L,1));//"error"
+ lua_getglobal(L,"debug");//"error",{debug}
+ lua_getglobal(L,"print");//"error",{debug},print()
+ lua_getfield(L,-2,"traceback");//"error",{debug},print(),traceback()
+ lua_call(L,0,1);//"error",{debug},print(),"traceback"
+ lua_call(L,1,0);//"error",{debug}
+ printf("Called print()\n");
+ lua_getfield(L,-1,"traceback");//"error",{debug},traceback()
+ printf("got traceback\n");
+ lua_call(L,0,1);//"error",{debug},"traceback"
+ lua_pushstring(L,"\n");
+ printf("called traceback\n");
+ lua_pushvalue(L,-4);//"error",{debug},"traceback","error"
+ printf("pushed error\n");
+ lua_concat(L,3);//"error",{debug},"traceback .. error"
+ printf("concated\n");
+ int ref = luaL_ref(L,LUA_REGISTRYINDEX);//"error",{debug}
+ lua_pop(L,2);//
+ lua_rawgeti(L,LUA_REGISTRYINDEX,ref);//"traceback .. error"
return 1;
}
@@ -66,8 +89,10 @@ do_lua(struct http_request *req, const char *name){
printf("About to pcall\n");
int err = lua_pcall(L,1,0,-3);
if(err != LUA_OK){
+ size_t retlen;
+ const char *ret = lua_tolstring(L,-1,&retlen);
printf("Failed to run %s: %s\n",name,lua_tostring(L,-1));
- http_response(req, 500, NULL, 0);
+ http_response(req, 500, ret, retlen);
lua_pop(L,lua_gettop(L));
return (KORE_RESULT_OK);
}
diff --git a/src/sql/create_table_posts.sql b/src/sql/create_table_posts.sql
index 76cc858..1f6f5dc 100644
--- a/src/sql/create_table_posts.sql
+++ b/src/sql/create_table_posts.sql
@@ -1,3 +1,12 @@
+/*
+If/when an author deletes their account, all posts
+and comments by that author are also deleted (on
+delete cascade) this is intentional. This also
+means that all comments by other users on a post
+an author makes will also be deleted.
+
+Post text uses zlib compression
+*/
CREATE TABLE IF NOT EXISTS posts (
id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
post_text BLOB,
diff --git a/src/sql/create_table_raw_text.sql b/src/sql/create_table_raw_text.sql
index a424f9a..5ca8c51 100644
--- a/src/sql/create_table_raw_text.sql
+++ b/src/sql/create_table_raw_text.sql
@@ -1,3 +1,8 @@
+/*
+Store the raw text so people can download it later, maybe
+we can use it for "download as image" or "download as pdf"
+in the future too. Still stored zlib compressed
+*/
CREATE TABLE IF NOT EXISTS raw_text (
id INTEGER PRIMARY KEY REFERENCES posts(id) ON DELETE CASCADE,
post_text BLOB,
diff --git a/src/sql/create_table_session.sql b/src/sql/create_table_session.sql
index 28e5f11..e4f4c8c 100644
--- a/src/sql/create_table_session.sql
+++ b/src/sql/create_table_session.sql
@@ -1,3 +1,8 @@
+/*
+Store a cookie for logged in users. Logged in users can edit
+their own posts.
+*/
+
CREATE TABLE IF NOT EXISTS sessions (
key TEXT PRIMARY KEY,
author REFERENCES authors(id) ON DELETE CASCADE,
diff --git a/src/sql/insert_anon_author.sql b/src/sql/insert_anon_author.sql
index b2ff0e6..0036f83 100644
--- a/src/sql/insert_anon_author.sql
+++ b/src/sql/insert_anon_author.sql
@@ -1,3 +1,7 @@
+/*
+Create a fake "anonymous" user, so
+that no one runs into trouble being able to paste under this account.
+*/
INSERT OR IGNORE INTO authors (
id,
name,
diff --git a/src/sql/insert_comment.sql b/src/sql/insert_comment.sql
index 0adf012..3986b31 100644
--- a/src/sql/insert_comment.sql
+++ b/src/sql/insert_comment.sql
@@ -1,3 +1,4 @@
+/* Add a new comment to a story */
INSERT INTO comments(
postid,
author,
diff --git a/src/sql/select_author_index.sql b/src/sql/select_author_index.sql
index ac536f8..319da48 100644
--- a/src/sql/select_author_index.sql
+++ b/src/sql/select_author_index.sql
@@ -1,3 +1,5 @@
+/* Get the data we need to display a particular author's latest stories */
+
SELECT
posts.id,
posts.post_title,
diff --git a/src/sql/select_author_of_post.sql b/src/sql/select_author_of_post.sql
index 8527b90..e875211 100644
--- a/src/sql/select_author_of_post.sql
+++ b/src/sql/select_author_of_post.sql
@@ -1,3 +1,7 @@
+/*
+Get the author of a story, used to check when editing that the
+author really owns the story they're trying to edit
+*/
SELECT
authors.id,
authors.name
diff --git a/src/sql/select_comments.sql b/src/sql/select_comments.sql
index 05b5419..d147f79 100644
--- a/src/sql/select_comments.sql
+++ b/src/sql/select_comments.sql
@@ -1,3 +1,4 @@
+/* Retrieve comments on a story */
SELECT
authors.name,
comments.isanon,
diff --git a/src/sql/select_post.sql b/src/sql/select_post.sql
index c614596..9e36fdc 100644
--- a/src/sql/select_post.sql
+++ b/src/sql/select_post.sql
@@ -1,3 +1,7 @@
+/*
+Select the data we need to read a story (and maybe display an edit button)
+*/
+
SELECT
post_title,
post_text,
diff --git a/src/sql/select_site_index.sql b/src/sql/select_site_index.sql
index 024b692..99d8abc 100644
--- a/src/sql/select_site_index.sql
+++ b/src/sql/select_site_index.sql
@@ -1,3 +1,4 @@
+/* Select the data we need to display on the front page */
SELECT
posts.id,
posts.post_title,
diff --git a/src/sql/update_views.sql b/src/sql/update_views.sql
index 3af22f7..685f6fe 100644
--- a/src/sql/update_views.sql
+++ b/src/sql/update_views.sql
@@ -1 +1,2 @@
+/* Update the view counter when someone reads a story */
UPDATE posts SET views = views + 1 WHERE id = :id;