print("very quick hello from init.lua")
|
||
|
local et = require("etlua")
|
||
|
local sql = require("lsqlite3")
|
||
|
local zlib = require("zlib")
|
||
|
--local function print() end --squash prints
|
||
|
print("Hello from init.lua")
|
||
|
local parser_names = {"plain","imageboard"}
|
||
|
local parsers = {}
|
||
|
for _,v in pairs(parser_names) do
|
||
|
parsers[v] = require("parser_" .. v)
|
||
|
end
|
||
|
local db,cache
|
||
|
local domain = "test.monster:8888"
|
||
|
local pagenames = {
|
||
|
"index",
|
||
|
"author_index",
|
||
|
"claim",
|
||
|
"paste",
|
||
|
"read",
|
||
|
"nostory",
|
||
|
"noauthor",
|
||
|
"login",
|
||
|
"author_paste",
|
||
|
"author_edit",
|
||
|
}
|
||
|
local pages = {}
|
||
|
local stmnt_index, stmnt_author_index, stmnt_read, stmnt_paste, stmnt_raw
|
||
|
local stmnt_author_create, stmnt_author_acct, stmnt_author_bio
|
||
|
local stmnt_cache, stmnt_insert_cache, stmnt_dirty_cache
|
||
|
local stmnt_get_session, stmnt_insert_session
|
||
|
local stmnt_edit
|
||
|
--see https://perishablepress.com/stop-using-unsafe-characters-in-urls/
|
||
|
--no underscore because we use that for our operative pages
|
||
|
local url_characters =
|
||
|
[[abcdefghijklmnopqrstuvwxyz]]..
|
||
|
[[ABCDEFGHIJKLMNOPQRSTUVWXYZ]]..
|
||
|
[[0123456789]]..
|
||
|
[[$-+!*'(),]]
|
||
|
local url_characters_rev = {}
|
||
|
for i = 1,string.len(url_characters) do
|
||
|
url_characters_rev[string.sub(url_characters,i,i)] = i
|
||
|
end
|
||
|
local function decodeentities(capture)
|
||
|
local n = tonumber(capture,16)
|
||
|
local c = string.char(n)
|
||
|
if escapes[c] then
|
||
|
return escapes[c]
|
||
|
else
|
||
|
return c
|
||
|
end
|
||
|
end
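--[[
Illustration (not executed anywhere): decodeentities is used as a gsub callback
on percent-encoded form data, as in paste() below:

	local decoded = string.gsub("hello%20world%21", "%%(%x%x)", decodeentities)
	-- "%20" decodes to a space and "%21" to "!", so decoded == "hello world!"
	-- (assuming the global escapes table has no entries for these characters)
]]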

local function sqlassert(...)
	local r,errcode,err = ...
	if not r then
		error(string.format("%d: %s",errcode, err))
	end
	return r
end

print("Hello from init.lua")
function configure()
	db = sqlassert(sql.open("data/posts.db"))
	cache = sqlassert(sql.open_memory())
	for k,v in pairs(pagenames) do
		print("Compiling page:",v)
		local f = assert(io.open("pages/" .. v .. ".etlua","r"))
		pages[v] = assert(et.compile(f:read("*a")))
		f:close()
	end
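	--Sketch of how the compiled templates get used (assuming etlua's compile()
	--returns a render function): each entry of `pages` is called with a table
	--of values and returns the finished HTML string, e.g.
	--	local html = pages.index{ domain = domain, stories = {} }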
	print("Compiled pages...")
	local msg = "test message"
	local one = zlib.compress(msg)
	local two = zlib.decompress(one)
	--For some reason, the zlib library fails if this is done as a oneliner
	assert(two == msg, "zlib not working as expected")
	print("zlib seems to work...")
	assert(db:exec([[
		CREATE TABLE IF NOT EXISTS authors (
			id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
			name TEXT UNIQUE ON CONFLICT FAIL,
			salt BLOB,
			passhash BLOB,
			joindate INTEGER,
			biography TEXT
		);
	]]))
	--Create a fake "anonymous" user so that no one runs into trouble by being
	--able to paste under this account.
	assert(db:exec([[
		INSERT OR IGNORE INTO authors (
			name,
			salt,
			passhash,
			joindate,
			biography
		) VALUES (
			'anonymous',
			'',
			'',
			strftime('%s','1970-01-01 00:00:00'),
			''
		);
	]]))
	assert(db:exec([[
		REPLACE INTO authors (name,salt,passhash,joindate,biography) VALUES (
			'anonymous',
			'',
			'',
			strftime('%s','1970-01-01 00:00:00'),
			''
		);
	]]))
	--If/when an author deletes their account, all posts
	--and comments by that author are also deleted (on
	--delete cascade); this is intentional. This also
	--means that all comments by other users on a post
	--an author makes will also be deleted.
	--
	--Post text uses zlib compression
	assert(db:exec([[
		CREATE TABLE IF NOT EXISTS posts (
			id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
			post_text BLOB,
			post_title TEXT,
			authorid REFERENCES authors(id) ON DELETE CASCADE,
			isanon INTEGER,
			hashedip BLOB,
			post_time INTEGER
		);
	]]))
	--Store the raw text so people can download it later; maybe
	--we can use it for "download as image" or "download as pdf"
	--in the future too. Still stored zlib compressed.
	assert(db:exec([[
		CREATE TABLE IF NOT EXISTS raw_text (
			id INTEGER PRIMARY KEY REFERENCES posts(id) ON DELETE CASCADE,
			post_text BLOB,
			markup TEXT
		);]]))
	assert(db:exec([[
		CREATE TABLE IF NOT EXISTS images (
			id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
			name TEXT,
			image BLOB,
			authorid REFERENCES authors(id) ON DELETE CASCADE,
			upload_time INTEGER,
			hashedip BLOB
		);
	]]))
	assert(db:exec([[
		CREATE TABLE IF NOT EXISTS comments (
			id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
			postid REFERENCES posts(id) ON DELETE CASCADE,
			author REFERENCES authors(id) ON DELETE CASCADE,
			isanon INTEGER,
			comment_text TEXT,
			hashedip BLOB,
			post_time INTEGER
		);
	]]))
	assert(db:exec([[
		CREATE TABLE IF NOT EXISTS tags (
			id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
			postid REFERENCES posts(id) ON DELETE CASCADE,
			tag TEXT
		);
	]]))
	assert(db:exec([[
		CREATE TABLE IF NOT EXISTS sessions (
			key TEXT PRIMARY KEY,
			author REFERENCES authors(id) ON DELETE CASCADE,
			start INTEGER
		);
	]]))
	print("Created db tables")
	assert(cache:exec([[
		CREATE TABLE IF NOT EXISTS cache (
			path TEXT PRIMARY KEY,
			data BLOB,
			updated INTEGER,
			dirty INTEGER
		);
	]]))
	stmnt_index = assert(db:prepare([[
		SELECT
			posts.id,
			posts.post_title,
			posts.isanon,
			posts.post_time,
			authors.name
		FROM
			posts,
			authors
		WHERE
			posts.authorid = authors.id
		UNION
		SELECT
			posts.id,
			posts.post_title,
			posts.isanon,
			posts.post_time,
			'Anonymous'
		FROM
			posts
		WHERE
			posts.authorid = -1
		ORDER BY
			posts.post_time DESC
		LIMIT 10;
	]]))
	stmnt_read = assert(db:prepare([[
		SELECT
			post_title,
			post_text,
			posts.authorid,
			posts.isanon,
			authors.name
		FROM
			posts,authors
		WHERE
			posts.authorid = authors.id AND
			posts.id = :id;
	]]))
	stmnt_author_bio = assert(db:prepare([[
		SELECT authors.biography FROM authors WHERE authors.name = :author;
	]]))
	stmnt_author = assert(db:prepare([[
		SELECT
			posts.id,
			posts.post_title,
			posts.post_time
		FROM
			posts,
			authors
		WHERE
			posts.isanon = 0 AND
			posts.authorid = authors.id AND
			authors.name = :author
		ORDER BY
			posts.post_time DESC
		LIMIT 10;
	]]))
	stmnt_author_acct = assert(db:prepare([[
		SELECT id, salt, passhash FROM authors WHERE name = :name;
	]]))
	stmnt_author_create = assert(db:prepare([[
		INSERT OR FAIL INTO authors (
			name,
			salt,
			passhash,
			joindate,
			biography
		) VALUES (
			:name,
			:salt,
			:hash,
			strftime('%s','now'),
			''
		);
	]]))
	stmnt_author_login = assert(db:prepare([[
		SELECT name, passhash FROM authors WHERE name = :name;
	]]))
	stmnt_paste = assert(db:prepare([[
		INSERT INTO posts (
			post_text,
			post_title,
			authorid,
			isanon,
			hashedip,
			post_time
		) VALUES (
			?,
			?,
			?,
			?,
			?,
			strftime('%s','now')
		);
	]]))
	stmnt_raw = assert(db:prepare([[
		INSERT INTO raw_text (
			id, post_text, markup
		) VALUES (
			?, ?, ?
		);
	]]))
	stmnt_edit = assert(db:prepare([[
		SELECT
			raw_text.post_text, raw_text.markup, posts.isanon
		FROM
			raw_text, posts
		WHERE
			raw_text.id = posts.id AND
			raw_text.id = :postid;
	]]))
	stmnt_insert_session = assert(db:prepare([[
		INSERT INTO sessions (
			key,
			author,
			start
		) VALUES (
			:sessionid,
			:authorid,
			strftime('%s','now')
		);
	]]))
	stmnt_get_session = assert(db:prepare([[
		SELECT authors.name, authors.id
		FROM authors, sessions
		WHERE
			sessions.key = :key AND
			sessions.author = authors.id AND
			sessions.start - strftime('%s','now') < 60*60*24;
	]]))
	--only refresh pages at most once every 10 seconds
	stmnt_cache = cache:prepare([[
		SELECT data
		FROM cache
		WHERE
			path = :path AND
			((dirty = 0) OR (strftime('%s','now') - updated) < 10)
		;
	]])
	stmnt_insert_cache = cache:prepare([[
		INSERT OR REPLACE INTO cache (
			path, data, updated, dirty
		) VALUES (
			:path, :data, strftime('%s','now'), 0
		);
	]])
	stmnt_dirty_cache = cache:prepare([[
		UPDATE OR IGNORE cache
		SET dirty = 1
		WHERE path = :path;
	]])
	--[=[
	]=]
end
print("Created configure function")

--[[
find a string url for a number
]]
local function encode_id(number)
	local result = {}
	local charlen = string.len(url_characters)
	repeat
		local pos = (number % charlen) + 1
		number = math.floor(number / charlen)
		table.insert(result,string.sub(url_characters,pos,pos))
	until number == 0
	return table.concat(result)
end

local function decode_id(s)
	local n = 0
	local charlen = string.len(url_characters)
	for i = 1,string.len(s) do
		local char = string.sub(s,i,i)
		local pos = url_characters_rev[char] - 1
		n = n + (pos*math.pow(charlen,i-1))
	end
	return n
end
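--[[
Worked example (illustration only, not executed): the alphabet above has
26+26+10+9 = 71 characters, so ids are written least-significant digit first
in base 71. For number = 71:
	pos = (71 % 71) + 1 = 1  -> "a",  number becomes math.floor(71/71) = 1
	pos = (1 % 71)  + 1 = 2  -> "b",  number becomes 0, loop stops
so encode_id(71) == "ab", and decode_id("ab") == (1-1)*71^0 + (2-1)*71^1 = 71.
]]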

local function do_sql(stmnt)
	if not stmnt then error("No statement",2) end
	local err
	repeat
		err = stmnt:step()
		print("After stepping, err is", err)
		if err == sql.BUSY then
			coroutine.yield()
		end
	until(err ~= sql.BUSY)
	return err
end
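--[[
Usage sketch: every prepared statement in this file is driven through do_sql so
that a BUSY database yields instead of spinning. This assumes the surrounding
server runs each handler inside a coroutine that it can resume later (a plain
call to coroutine.yield() outside a coroutine would raise an error). Typical
call pattern, as used in the handlers below:

	stmnt_index:bind_names{}
	local err = do_sql(stmnt_index)
	if err == sql.ROW then
		local row = stmnt_index:get_values()
	end
	stmnt_index:reset()
]]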

local function dirty_cache(url)
	stmnt_dirty_cache:bind_names{
		path = string.format("%s/%s",domain,url)
	}
	local err = do_sql(stmnt_dirty_cache)
	stmnt_dirty_cache:reset()
end

--[[
Start a session for someone who logged in
]]
local function start_session(who)
	local rngf = assert(io.open("/dev/urandom","rb"))
	local session_t = {}
	for i = 1,64 do
		local r = string.byte(rngf:read(1))
		local s = string.char((r % 26) + 65)
		table.insert(session_t,s)
	end
	local session = table.concat(session_t)
	rngf:close()
	print("sessionid:",session)
	print("authorid:",who)
	stmnt_insert_session:bind_names{
		sessionid = session,
		authorid = who
	}
	local err = do_sql(stmnt_insert_session)
	print("Err:",err)
	assert(err == sql.DONE)
	return session
end
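--[[
Back-of-the-envelope on the key space (illustration only): each of the 64
characters above is drawn from the 26 letters A-Z, so a session key carries at
most 64 * log2(26), roughly 300 bits of entropy (slightly less in practice,
because (byte % 26) is not perfectly uniform over 0..255).
]]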
--[[
Retrieve the name and authorid of the logged in person,
or nil+error message if not logged in
]]
local function get_session(req)
	http_populate_cookies(req)
	local sessionid = http_request_cookie(req,"session")
	if sessionid == nil then
		return nil, "No session cookie passed by client"
	end
	stmnt_get_session:bind_names{
		key = sessionid
	}
	local err = do_sql(stmnt_get_session)
	if err ~= sql.ROW then
		stmnt_get_session:reset()
		return nil, "No such session by logged in users"
	end
	print("get session err:",err)
	local data = stmnt_get_session:get_values()
	stmnt_get_session:reset()
	local author = data[1]
	local authorid = data[2]
	return author,authorid
end
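--[[
Usage sketch, mirroring the handlers below: callers treat a nil first return
value as "not logged in" (the second value is then the error message) and
either redirect to the page on the main domain or fall back to an anonymous
rendering, e.g.

	local author, authorid = get_session(req)
	if author == nil then
		http_response_header(req,"Location","https://"..domain.."/_login")
		http_response(req,303,"")
		return
	end
]]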

--Render a page, with caching. If you need to dirty a cache, call dirty_cache()
local function render(pagename,callback)
	print("Running render...")
	stmnt_cache:bind_names{path=pagename}
	local err = do_sql(stmnt_cache)
	if err == sql.DONE then
		stmnt_cache:reset()
		--page is not cached
	elseif err == sql.ROW then
		print("Cache hit!")
		local data = stmnt_cache:get_values()
		stmnt_cache:reset()
		return data[1]
	else --sql.ERROR or sql.MISUSE
		error("Failed to check cache for page " .. pagename)
	end
	--We didn't have the page cached, render it
	print("Cache miss, running function")
	local text = callback()
	print("Saving data...")
	--And save the data back into the cache
	stmnt_insert_cache:bind_names{
		path=pagename,
		data=text,
	}
	err = do_sql(stmnt_insert_cache)
	if err == sql.ERROR or err == sql.MISUSE then
		error("Failed to update cache for page " .. pagename)
	end
	stmnt_insert_cache:reset()
	return text
end
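--[[
Usage sketch: the handlers below wrap their page builds in render(), keyed by
host..path, e.g.

	local text = render(host..path, function()
		return pages.index{ domain = domain, stories = {} }
	end)

dirty_cache(url) marks the entry stored under domain.."/"..url as dirty, so
once it is more than 10 seconds old the next render() call rebuilds it.
]]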

local function author_page(name)

end

function home(req)
	print("Hello from lua!")
	print("Method:", http_method_text(req))
	local method = http_method_text(req)
	local host = http_request_get_host(req)
	local path = http_request_get_path(req)
	local text
	if host == domain then
		text = render(host..path,function()
			print("Cache miss, rendering index")
			stmnt_index:bind_names{}
			local err = do_sql(stmnt_index)
			print("err:",err)
			local latest = {}
			while err == sql.ROW do
				local data = stmnt_index:get_values()
				for k,v in pairs(data) do
					print(k,":",v)
				end
				table.insert(latest,{
					url = encode_id(data[1]),
					title = data[2],
					isanon = data[3] == 1,
					posted = os.date("%B %d %Y",tonumber(data[4])),
					author = data[5],
				})
				err = stmnt_index:step()
			end
			stmnt_index:reset()
			--[[
			local latest = stmnt_index:get_values()
			print("latest:",latest)
			for k,v in pairs(latest) do
				print(k,":",v)
			end
			]]
			print("returning...\n")
			return pages.index{
				domain = domain,
				stories = latest
			}
		end)
	else
		print("Author login")
		local subdomain = host:match("([^\\.]+)")
		text = render(host..path,function()
			print("Cache miss, rendering author:" .. subdomain)
			stmnt_author_bio:bind_names{author=subdomain}
			local err = do_sql(stmnt_author_bio)
			if err == sql.DONE then
				print("No such author")
				stmnt_author_bio:reset()
				return pages.noauthor{
					author = subdomain
				}
			end
			print("err:",err)
			assert(err == sql.ROW,"failed to get author:" .. subdomain .. " error:" .. tostring(err))
			local data = stmnt_author_bio:get_values()
			local bio = data[1]
			stmnt_author_bio:reset()
			print("Getting author's stories")
			stmnt_author:bind_names{author=subdomain}
			err = do_sql(stmnt_author)
			print("err:",err)
			local stories = {}
			while err ~= sql.DONE do
				local data = stmnt_author:get_values()
				print("Added story:",data)
				for k,v in pairs(data) do
					print(k,":",v)
				end
				table.insert(stories,{
					url = encode_id(data[1]),
					title = data[2],
					posted = os.date("%B %d %Y",tonumber(data[3]))
				})
				err = stmnt_author:step()
			end
			stmnt_author:reset()
			return pages.author_index{
				domain=domain,
				author=subdomain,
				stories=stories,
				bio=bio
			}
		end)
	end
	print("Host:",http_request_get_host(req))
	print("Path:",http_request_get_path(req))
	print("subdomain:",subdomain)
	print("index:",pages.index)
	--local text = pages.index({domain = domain})
	print("returning:",text)
	assert(text)
	http_response(req,200,text)
end

--We prevent people from changing their password file; this way we don't really
--need to worry about logged-in accounts being hijacked if someone gets at the
--database. The attacker can still paste from the logged-in account for a while,
--but whatever.
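--[[
Credential flow implemented by claim() and login() below, in short: claim()
generates a random salt and a random password, stores only the salt plus
sha3(salt .. password), and hands the password back once as a downloadable
"passfile"; login() reads the uploaded passfile and accepts it when
sha3(salt .. upload) matches the stored hash. (sha3 is assumed to be a digest
function provided by the host application; it is not defined in this file.)
]]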
function claim(req)
	local method = http_method_text(req)
	local host = http_request_get_host(req)
	local path = http_request_get_path(req)
	if host ~= domain then
		http_response_header(req,"Location",string.format("https://%s/_claim",domain))
		http_response(req,303,"")
		return
	end
	assert(host == domain)
	local text
	print("method:",method)
	if method == "GET" then
		print("render is:",render)
		text = render(host..path,function()
			print("cache miss, rendering claim page")
			return pages.claim{}
		end)
	elseif method == "POST" then
		http_request_populate_post(req)
		local name = assert(http_argument_get_string(req,"user"))
		local rngf = assert(io.open("/dev/urandom","rb"))
		local passlength = string.byte(rngf:read(1)) + 64
		local salt = rngf:read(64)
		local password = rngf:read(passlength)
		rngf:close()
		print("Starting session:",session)
		print("About to xor")
		print("About to hash")
		local hash = sha3(salt .. password)
		print("done hashing")
		stmnt_author_create:bind_names{
			name = name,
		}
		stmnt_author_create:bind_blob(2,salt)
		stmnt_author_create:bind_blob(3,hash)
		print("Everything bound, ready to go")
		local err = do_sql(stmnt_author_create)
		print("Error:",err)
		print("DONE",sql.DONE)
		print("ERROR",sql.ERROR)
		print("MISUSE",sql.MISUSE)
		print("ROW",sql.ROW)
		if err == sql.DONE then
			print("New author:",name)
			local id = stmnt_author_create:last_insert_rowid()
			print("ID:",id)
			stmnt_author_create:reset()
			http_response_header(req,"Content-Type","application/octet-stream")
			http_response_header(req,"Content-Disposition","attachment; filename=\"" .. domain .. "." .. name .. ".passfile\"")
			local session = start_session(id)

			http_response_cookie(req,"session",session,"/",0,0)
			text = password
		elseif err == sql.CONSTRAINT then
			text = pages.claim {
				err = "Failed to claim. That name may already be taken."
			}
		elseif err == sql.ERROR or err == sql.MISUSE then
			text = pages.claim {
				err = "Failed to claim"
			}
		end
		stmnt_author_create:reset()
	end
	assert(text)
	http_response(req,200,text)
end

function paste(req)
	local host = http_request_get_host(req)
	local path = http_request_get_path(req)
	local method = http_method_text(req)
	local err
	local ret
	if method == "GET" then
		if host == domain then
			ret = render(host..path,function()
				print("Cache missing, rendering post page")
				return pages.paste{
					domain = domain,
				}
			end)
		else
			print("Looks like a logged in user wants to paste!")
			local subdomain = host:match("([^\\.]+)")
			local author,_ = get_session(req)
			if author == nil then
				print("sessionid was nil")
				http_response_header(req,"Location","https://"..domain.."/_paste")
				http_response(req,303,"")
				return
			end
			if author ~= subdomain then
				http_response_header(req,"Location",string.format("https://%s.%s/_paste",author,domain))
				http_response(req,303,"")
				return
			end
			assert(author == subdomain,"someone wants to paste as someone else")
			ret = pages.author_paste{
				domain = domain,
				user = author,
				text = "",
			}
		end
	elseif method == "POST" then
		http_request_populate_post(req)
		local title = assert(http_argument_get_string(req,"title"))
		local text = assert(http_argument_get_string(req,"text"))
		local markup = assert(http_argument_get_string(req,"markup"))
		local pasteas
		local raw = zlib.compress(text)
		print("text1",text)
		--text = string.gsub(text,"(%+)"," ")
		print("text2",text)
		text = string.gsub(text,"%%(%x%x)",decodeentities)
		print("After decode:\n",text)
		text = parsers[markup](text)
		print("After markup:",text)
		--text = string.gsub(text,escapematch,sanitize)
		text = zlib.compress(text)
		print("After deflate:",text)
		print("inflating this data, we would get", zlib.decompress(text))
		local esctitle = string.gsub(title,"%%(%x%x)",decodeentities)
		esctitle = parsers.plain(title)
		print("title:",esctitle)
		--TODO:paste to author page
		if host == domain then
			print("got text:",text)
			--[[
			This doesn't actually do much for IPv4 addresses,
			since there are only 32 bits of address, someone could
			just generate all 2^32 hashes and look up who posted
			what. Use IPv6, Tor or I2P where possible. (but then I
			guess it's harder to ban spammers... hmm..)
			]]
			--local ip = http_request_get_ip(req)
			--local iphash = sha3(ip)
			assert(stmnt_paste:bind_blob(1,text) == sql.OK)
			assert(stmnt_paste:bind(2,esctitle) == sql.OK)
			assert(stmnt_paste:bind(3,-1) == sql.OK)
			assert(stmnt_paste:bind(4,true) == sql.OK)
			assert(stmnt_paste:bind_blob(5,"") == sql.OK)
			err = do_sql(stmnt_paste)
			print("err:",err)
			if err == sql.DONE then
				local rowid = stmnt_paste:last_insert_rowid()
				assert(stmnt_raw:bind(1,rowid) == sql.OK)
				assert(stmnt_raw:bind_blob(2,raw) == sql.OK)
				err = do_sql(stmnt_raw)
				if err ~= sql.DONE then
					print("Failed to save raw text, but paste still went through")
				end
				print("Successful paste, rowid:", rowid)
				local url = encode_id(rowid)
				local loc = string.format("https://%s/%s",domain,url)
				http_response_header(req,"Location",loc)
				http_response(req,303,"")
				stmnt_paste:reset()
				stmnt_raw:reset()
				dirty_cache(string.format("%s/%s",domain,url))
				dirty_cache(string.format("%s",domain))
				return
			elseif err == sql.ERROR or err == sql.MISUSE then
				ret = "Failed to paste: " .. tostring(err)
			else
				error("Error pasting: " .. tostring(err))
			end
			stmnt_paste:reset()
		else
			local subdomain = host:match("([^\\.]+)")
			--http_populate_cookies(req)
			--local sessionid = http_request_cookie(req,"session")
			--if sessionid == nil then --If someone not logged in tries to paste as someone else, give them an error
			--ret = pages.author_paste{
			--domain = domain,
			--author = subdomain,
			--err = "You are not logged in, you must be logged in to post as " .. subdomain .. ".",
			--text = text
			--}
			--end
			--print("Got cookie:",sessionid)
			--stmnt_get_session:bind_names{
			--key = sessionid
			--}
			--err = do_sql(stmnt_get_session)
			--print("err:",err)
			--local data = stmnt_get_session:get_values()
			--stmnt_get_session:reset()
			--print("got data:",data)
			--for k,v in pairs(data) do
			--print(k,":",v)
			--end
			local author, authorid = get_session(req)
			if author == nil then
				ret = pages.author_paste{
					domain = domain,
					author = subdomain,
					err = "You are not logged in, you must be logged in to post as " .. subdomain .. ".",
					text = text
				}
			else
				--local author = data[1]
				--local authorid = data[2]
				local asanon = assert(http_argument_get_string(req,"pasteas"))
				--No need to check if the author is posting to the
				--right subdomain, just post it to the one they have
				--the key for.
				assert(stmnt_paste:bind_blob(1,text) == sql.OK)
				assert(stmnt_paste:bind(2,esctitle) == sql.OK)
				assert(stmnt_paste:bind(3,authorid) == sql.OK)
				if asanon == "anonymous" then
					assert(stmnt_paste:bind(4,true) == sql.OK)
				else
					assert(stmnt_paste:bind(4,false) == sql.OK)
				end
				assert(stmnt_paste:bind_blob(5,"") == sql.OK)
				err = do_sql(stmnt_paste)
				if err == sql.DONE then
					local rowid = stmnt_paste:last_insert_rowid()
					assert(stmnt_raw:bind(1,rowid) == sql.OK)
					assert(stmnt_raw:bind_blob(2,raw) == sql.OK)
					assert(stmnt_raw:bind(3,markup) == sql.OK)
					err = do_sql(stmnt_raw)
					if err ~= sql.DONE then
						print("Failed to save raw text, but paste still went through")
					end
					local url = encode_id(rowid)
					local loc
					if asanon == "anonymous" then
						loc = string.format("https://%s/%s",domain,url)
					else
						loc = string.format("https://%s.%s/%s",author,domain,url)
					end
					http_response_header(req,"Location",loc)
					http_response(req,303,"")
					stmnt_paste:reset()
					stmnt_raw:reset()
					dirty_cache(string.format("%s/%s",domain,url))
					dirty_cache(string.format("%s",domain))
					return
				elseif err == sql.ERROR or err == sql.MISUSE then
					ret = "Failed to paste: " .. tostring(err)
				else
					error("Error pasting: " .. tostring(err))
				end
				stmnt_paste:reset()
			end
		end
	end
	assert(ret)
	http_response(req,200,ret)
end

function read(req)
	local host = http_request_get_host(req)
	local path = http_request_get_path(req)
	print("host:",host)
	print("path:",path)
	local idp = string.sub(path,2) --remove leading "/"
	assert(string.len(path) > 0,"Tried to read 0-length story id")
	local author, authorid = get_session(req)
	print("author is:",author)
	local text
	if author then
		local id = decode_id(idp)
		stmnt_read:bind_names{
			id = id
		}
		local err = do_sql(stmnt_read)
		print("err:",err)
		if err == sql.DONE then
			stmnt_read:reset()
			return pages.nostory{
				path = path
			}
		end
		assert(err == sql.ROW)
		local title, storytext, tauthor, isanon, authorname = unpack(stmnt_read:get_values())
		stmnt_read:reset()
		if tauthor == authorid then
			print("We're the owner of this story!")
			local uncompressed = zlib.decompress(storytext)
			text = pages.read{
				domain = domain,
				title = title,
				text = uncompressed,
				idp = idp,
				isanon = isanon == 1,
				author = authorname,
				owner = true
			}
		else
			print("We're logged in, but not the owner of this story!")
		end
	else
		text = render(host..path,function()
			print("Trying to read, id is",idp,":",decode_id(idp))
			local id = decode_id(idp)
			print("id:",id,type(id))
			stmnt_read:bind_names{
				id = id
			}
			local err = do_sql(stmnt_read)
			print("err:",err)
			if err == sql.ROW then

			elseif err == sql.DONE then
				stmnt_read:reset()
				return pages.nostory{
					path = path
				}
			end
			assert(err == sql.ROW,"Could not get row:" .. tostring(id) .. " Error:" .. tostring(err))
			print("get_values:")
			local title, text, authorid, isanon, authorname = unpack(stmnt_read:get_values())
			print("Got text from unpack:",text)
			text = zlib.decompress(text)
			print("inflated text:",text)
			print("title:",title)
			print("text:",text)
			print("idp:",idp)
			stmnt_read:reset()
			return pages.read{
				domain = domain,
				title = title,
				text = text,
				idp = idp,
				isanon = isanon == 1,
				author = authorname
			}
		end)
	end
	assert(text)
	http_response(req,200,text)
end

function login(req)
	print("Logging in")
	local host = http_request_get_host(req)
	local path = http_request_get_path(req)
	local method = http_method_text(req)
	if host ~= domain then
		http_response_header(req,"Location",string.format("https://%s/_login",domain))
		http_response(req,303,"")
		return
	end
	local text
	if method == "GET" then
		text = render(host..path,function()
			return pages.login{}
		end)
	elseif method == "POST" then
		--http_request_populate_post(req)
		http_populate_multipart_form(req)
		local name = assert(http_argument_get_string(req,"user"))
		local pass = assert(http_file_get(req,"pass"))
		print("name:",name)
		print("pass:",pass)
		stmnt_author_acct:bind_names{
			name = name
		}
		local err = do_sql(stmnt_author_acct)
		print("err:",err)
		if err == sql.ROW then
			local id, salt, passhash = unpack(stmnt_author_acct:get_values())
			stmnt_author_acct:reset()
			print("salt:",salt)
			print("passhash:",passhash)
			local todigest = salt .. pass
			local hash = sha3(todigest)
			print("hash:",hash)
			print("passhash:",passhash)
			if hash == passhash then
				print("Passfile accepted")
				local session = start_session(id)
				http_response_cookie(req,"session",session,"/",0,0)
				local loc = string.format("https://%s.%s",name,domain)
				http_response_header(req,"Location",loc)
				http_response(req,303,"")
				return
			else
				text = pages.login{
					err = "Incorrect username or password"
				}
			end
		elseif err == sql.DONE then --Allows user enumeration, do we want this?
			--Probably not a problem since all passwords are forced to be "good"
			text = pages.login{
				err = "Failed to find user:" .. name
			}
		else
			error("Other sql error during login")
		end
	end
	assert(text)
	http_response(req,200,text)
end

function edit(req)
	local host = http_request_get_host(req)
	local path = http_request_get_path(req)
	local method = http_method_text(req)
	if method == "GET" then
		http_request_populate_qs(req)
		local story = assert(http_argument_get_string(req,"story"))
		local story_id = decode_id(story)
		print("we want to edit story:",story)
		stmnt_edit:bind_names{
			postid = story_id
		}
		local err = do_sql(stmnt_edit)
		print("err:",err)
		if err == sql.DONE then
			print("No such story to edit:",story_id)
		end
		assert(err == sql.ROW)
		local data = stmnt_edit:get_values()
		local txt_compressed, markup, isanon = unpack(data)
		for k,v in pairs(data) do
			print(k,":",v)
		end
	elseif method == "POST" then
		--TODO: same as paste?
		--nope, need to replace the story instead of inserting a new one.
		http_request_populate_post(req)
	end
end

function edit_bio()
	print("we want to edit bio")
end

function teardown()
	print("Exiting...")
	if db then
		db:close()
	end
	if cache then
		cache:close()
	end
	print("Finished lua teardown")
end

print("Done with init.lua")
--[==[
]==]