Move sql files into their own directory
Moved all of the sql out of the init file and into its own directory.
parent 48b4173a7a
commit 6c18829de6
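For context, the new init.lua stops embedding SQL strings inline and instead reads each statement from src/sql/<name>.sql through an __index metamethod (see the src/lua/init.lua hunk below). A minimal sketch of that loading pattern, assuming the sql/ directory sits next to the script as the Makefile's chroot layout arranges:

-- Sketch of the lazy SQL loader introduced in src/lua/init.lua:
-- indexing queries.create_table_posts returns the text of sql/create_table_posts.sql.
local queries = setmetatable({}, {
	__index = function(self, key)
		local f = assert(io.open("sql/" .. key .. ".sql", "r"))
		local text = f:read("*a")
		f:close()
		return text
	end,
})
-- Statements are then prepared from the loaded text during configure(), e.g.:
-- stmnt_index = assert(db:prepare(queries.select_site_index))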
Makefile: 69 lines changed
@@ -1 +1,68 @@
-The slash.monster server code
+# Config
+chroot_dir=kore_chroot/
+mirror=http://dl-cdn.alpinelinux.org/alpine/
+arch=aarch64
+version=2.10.5-r0
+#certbot_email=--register-unsafely-without-email
+certbot_email=-m you@cock.li
+domain=test.monster
+
+#Probably don't change stuff past here
+lua_files=$(shell find src/lua -type f)
+src_files=$(shell find src -type f) $(shell find conf -type f)
+sql_files=$(shell find src/sql -type f)
+built_files=$(lua_files:src/lua/%.lua=$(chroot_dir)%.lua)
+page_files=$(shell find src/pages -type f)
+built_pages=$(page_files:src/pages/%.etlua=$(chroot_dir)pages/%.etlua)
+built_sql=$(sql_files:src/sql/%.sql=$(chroot_dir)sql/%.sql)
+
+all: $(chroot_dir) smr.so $(built_files) $(built_pages) $(built_sql)
+	echo $(built_files)
+	kodev run
+
+apk-tools-static-$(version).apk: Makefile
+	wget -q $(mirror)latest-stable/main/$(arch)/apk-tools-static-$(version).apk
+
+clean:
+	kodev clean
+
+$(chroot_dir): #apk-tools-static-$(version).apk
+	mkdir -p $(chroot_dir)
+	mkdir -p $(chroot_dir)/pages
+	mkdir -p $(chroot_dir)/sql
+	#cd $(chroot_dir) && tar -xvzf ../../apk-tools-static-*.apk
+	#cd $(chroot_dir) && sudo ./sbin/apk.static -X $(mirror)latest-stable/main -U --allow-untrusted --root $(chroot_dir) --no-cache --initdb add alpine-base
+	#ln -s /dev/urandom $(chroot_dir)/dev/random #Prevent an attacker with access to the chroot from exhausting our entropy pool and causing a dos
+	#ln -s /dev/urandom $(chroot_dir)/dev/urandom
+	#mount /dev/ $(chroot_dir)/dev --bind
+	#mount -o remount,ro,bind $(chroot_dir)/dev
+	#mount -t proc none $(chroot_dir)/proc
+	#mount -o bind /sys $(chroot_dir)/sys
+	#cp /etc/resolv.conf $(chroot_dir)/etc/resolv.conf
+	#cp /etc/apk/repositories $(chroot_dir)/etc/apk/repositories
+	#mkdir $(chroot_dir)/var/sm
+	## Things to build lua libraries
+	#chroot $(chroot_dir) apk add luarocks5.1 sqlite sqlite-dev lua5.1-dev build-base zlib zlib-dev
+	#chroot $(chroot_dir) luarocks-5.1 install etlua
+	#chroot $(chroot_dir) luarocks-5.1 install lsqlite3
+	#chroot $(chroot_dir) luarocks-5.1 install lzlib ZLIB_LIBDIR=/lib #for some reason lzlib looks in /usr/lib for libz, when it needs to look at /lib
+	## Once we've built + installed everything, delete extra stuff from the chroot
+	#chroot $(chroot_dir) apk del sqlite-dev lua5.1-dev build-base zlib-dev
+	## SSL certificates, if you don't trust EFF (they have an antifa black block member as their favicon at time of writing) you may want to replace this.
+	#chroot $(chroot_dir) apk add certbot
+	## After chroot, apk add luarocks5.1 sqlite sqlite-dev lua5.1-dev build-base
+	## After chroot, luarocks install etlua; luarocks install lsqlite3
+
+code : $(built_files)
+
+$(built_files): $(chroot_dir)%.lua : src/lua/%.lua
+	cp $^ $@
+
+$(built_pages): $(chroot_dir)pages/%.etlua : src/pages/%.etlua
+	cp $^ $@
+
+$(built_sql): $(chroot_dir)sql/%.sql : src/sql/%.sql
+	cp $^ $@
+
+smr.so : $(src_files)
+	kodev build
@@ -26,8 +26,8 @@ dev {
 	# These flags are added to the shared ones when
 	# you build the "dev" flavor.
 	ldflags=-llua
-	cflags=-g
-	cxxflags=-g
+	cflags=-g -Wextra
+	cxxflags=-g -Wextra
 }

 prod {
@@ -28,8 +28,7 @@ domain * {
 	certkey key.pem

 	#I run kore behind a lighttpd reverse proxy, so this is a bit useless to me
-	#accesslog /dev/null
-	accesslog kore_access.log
+	accesslog /dev/null

 	route / home
 	route /_css/style.css asset_serve_style_css
@@ -1,4 +1,11 @@
 typedef unsigned char u8;
 typedef unsigned long long int u64;
 typedef unsigned int ui;
+void FIPS202_SHAKE128(const u8 *in, u64 inLen, u8 *out, u64 outLen);
+void FIPS202_SHAKE256(const u8 *in, u64 inLen, u8 *out, u64 outLen);
+void FIPS202_SHA3_224(const u8 *in, u64 inLen, u8 *out);
+void FIPS202_SHA3_256(const u8 *in, u64 inLen, u8 *out);
+void FIPS202_SHA3_384(const u8 *in, u64 inLen, u8 *out);
 void FIPS202_SHA3_512(const u8 *in, u64 inLen, u8 *out);
+int LFSR86540(u8 *R);
+void KeccakF1600(void *s);
@@ -16,13 +16,13 @@ sha3(data::string)::string
 int
 lsha3(lua_State *L){
 	size_t len;
-	char out[64];
-	const char *data = luaL_checklstring(L,-1,&len);
+	unsigned char out[64];
+	const unsigned char *data = (const unsigned char*)luaL_checklstring(L,-1,&len);
 	lua_pop(L,1);
 	printf("All data gotten, about to hash\n");
 	FIPS202_SHA3_512(data, len, out);
 	printf("Finished hashing\n");
-	lua_pushlstring(L,out,64);
+	lua_pushlstring(L,(char*)out,64);
 	printf("Finished pushing string to lua\n");
 	return 1;
 }
@@ -41,7 +41,7 @@ lsxor(lua_State *L){
 	const char *shorter = la > lb ? b : a;
 	const char *longer = la > lb ? a : b;
 	char out[outsize];
-	int i;
+	size_t i;
 	for(i = 0; i < loopsize; i++)
 		out[i] = shorter[i] ^ longer[i];
 	for(;i < outsize; i++)
@@ -170,9 +170,6 @@ lhttp_request_get_ip(lua_State *L){
 	struct http_request *req = luaL_checkrequest(L,-1);
 	lua_pop(L,1);
 	char addr[INET6_ADDRSTRLEN];
-	printf("AF_INET:%d\n",AF_INET);
-	printf("AF_INET6:%d\n",AF_INET6);
-	printf("AF_UNIX:%d\n",AF_UNIX);
 	switch(req->owner->family){
 		case AF_INET:
 			inet_ntop(
@@ -242,7 +239,6 @@ lhttp_file_get(lua_State *L){
 		lua_concat(L,2);
 		lua_error(L);
 	}
-	printf("file length: %d\n", f->length);
 	char s[f->length + 1];
 	size_t read = http_file_read(f,s,f->length);
 	if(read < f->length){
@@ -5,6 +5,7 @@ int lhttp_method_text(lua_State *L);
 int lhttp_request_get_path(lua_State *L);
 int lhttp_request_get_host(lua_State *L);
 int lhttp_request_populate_post(lua_State *L);
+int lhttp_request_populate_qs(lua_State *L);
 int lhttp_response_cookie(lua_State *L);
 int lhttp_request_cookie(lua_State *L);
 int lhttp_argument_get_string(lua_State *L);
src/lua/init.lua: 627 lines changed
|
@ -1,34 +1,53 @@
|
||||||
print("very quick hello from init.lua")
|
|
||||||
local et = require("etlua")
|
local et = require("etlua")
|
||||||
local sql = require("lsqlite3")
|
local sql = require("lsqlite3")
|
||||||
local zlib = require("zlib")
|
local zlib = require("zlib")
|
||||||
--local function print() end --squash prints
|
local function print() end --squash prints
|
||||||
print("Hello from init.lua")
|
|
||||||
local parser_names = {"plain","imageboard"}
|
local parser_names = {"plain","imageboard"}
|
||||||
local parsers = {}
|
local parsers = {}
|
||||||
for _,v in pairs(parser_names) do
|
for _,v in pairs(parser_names) do
|
||||||
parsers[v] = require("parser_" .. v)
|
parsers[v] = require("parser_" .. v)
|
||||||
end
|
end
|
||||||
local db,cache
|
local db,cache --databases
|
||||||
local domain = "test.monster:8888"
|
local domain = "test.monster:8888" --The domain to write links as
|
||||||
local pagenames = {
|
local pagenames = {
|
||||||
"index",
|
"index",
|
||||||
"author_index",
|
"author_index",
|
||||||
"claim",
|
"claim",
|
||||||
"paste",
|
"paste",
|
||||||
|
"edit",
|
||||||
"read",
|
"read",
|
||||||
"nostory",
|
"nostory",
|
||||||
|
"cantedit",
|
||||||
"noauthor",
|
"noauthor",
|
||||||
"login",
|
"login",
|
||||||
"author_paste",
|
"author_paste",
|
||||||
"author_edit",
|
"author_edit",
|
||||||
}
|
}
|
||||||
local pages = {}
|
local pages = {}
|
||||||
|
for k,v in pairs(pagenames) do
|
||||||
|
print("Compiling page:",v)
|
||||||
|
local f = assert(io.open("pages/" .. v .. ".etlua","r"))
|
||||||
|
pages[v] = assert(et.compile(f:read("*a")))
|
||||||
|
f:close()
|
||||||
|
end
|
||||||
|
|
||||||
|
local queries = {}
|
||||||
|
--These are all loaded during startup, won't affect ongoing performance.
|
||||||
|
setmetatable(queries,{
|
||||||
|
__index = function(self,key)
|
||||||
|
local f = assert(io.open("sql/" .. key .. ".sql","r"))
|
||||||
|
local ret = f:read("*a")
|
||||||
|
f:close()
|
||||||
|
return ret
|
||||||
|
end
|
||||||
|
})
|
||||||
|
|
||||||
|
---sql queries
|
||||||
local stmnt_index, stmnt_author_index, stmnt_read, stmnt_paste, stmnt_raw
|
local stmnt_index, stmnt_author_index, stmnt_read, stmnt_paste, stmnt_raw
|
||||||
local stmnt_author_create, stmnt_author_acct, stmnt_author_bio
|
local stmnt_author_create, stmnt_author_acct, stmnt_author_bio
|
||||||
local stmnt_cache, stmnt_insert_cache, stmnt_dirty_cache
|
local stmnt_cache, stmnt_insert_cache, stmnt_dirty_cache
|
||||||
local stmnt_get_session, stmnt_insert_session
|
local stmnt_get_session, stmnt_insert_session
|
||||||
local stmnt_edit
|
local stmnt_edit, stmnt_update, stmnt_update_raw, stmnt_author_of
|
||||||
--see https://perishablepress.com/stop-using-unsafe-characters-in-urls/
|
--see https://perishablepress.com/stop-using-unsafe-characters-in-urls/
|
||||||
--no underscore because we use that for our operative pages
|
--no underscore because we use that for our operative pages
|
||||||
local url_characters =
|
local url_characters =
|
||||||
|
@ -62,55 +81,18 @@ print("Hello from init.lua")
|
||||||
function configure()
|
function configure()
|
||||||
db = sqlassert(sql.open("data/posts.db"))
|
db = sqlassert(sql.open("data/posts.db"))
|
||||||
cache = sqlassert(sql.open_memory())
|
cache = sqlassert(sql.open_memory())
|
||||||
for k,v in pairs(pagenames) do
|
|
||||||
print("Compiling page:",v)
|
|
||||||
local f = assert(io.open("pages/" .. v .. ".etlua","r"))
|
|
||||||
pages[v] = assert(et.compile(f:read("*a")))
|
|
||||||
f:close()
|
|
||||||
end
|
|
||||||
print("Compiled pages...")
|
print("Compiled pages...")
|
||||||
local msg = "test message"
|
local msg = "test message"
|
||||||
local one = zlib.compress(msg)
|
local one = zlib.compress(msg)
|
||||||
local two = zlib.decompress(one)
|
local two = zlib.decompress(one)
|
||||||
--For some reason, the zlib library fails if this is done as a oneliner
|
--For some reason, the zlib library fails if this is done as a oneliner
|
||||||
assert(two == msg, "zlib not working as expected")
|
assert(two == msg, "zlib not working as expected")
|
||||||
print("zlib seems to work...")
|
|
||||||
assert(db:exec([[
|
--Create sql tables
|
||||||
CREATE TABLE IF NOT EXISTS authors (
|
assert(db:exec(queries.create_table_authors))
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
|
|
||||||
name TEXT UNIQUE ON CONFLICT FAIL,
|
|
||||||
salt BLOB,
|
|
||||||
passhash BLOB,
|
|
||||||
joindate INTEGER,
|
|
||||||
biography TEXT
|
|
||||||
);
|
|
||||||
]]))
|
|
||||||
--Create a fake "anonymous" user, so we don't run into trouble
|
--Create a fake "anonymous" user, so we don't run into trouble
|
||||||
--so that no one runs into touble being able to paste under this account.
|
--so that no one runs into touble being able to paste under this account.
|
||||||
assert(db:exec([[
|
assert(db:exec(queries.insert_anon_author))
|
||||||
INSERT OR IGNORE INTO authors (
|
|
||||||
name,
|
|
||||||
salt,
|
|
||||||
passhash,
|
|
||||||
joindate,
|
|
||||||
biography
|
|
||||||
) VALUES (
|
|
||||||
'anonymous',
|
|
||||||
'',
|
|
||||||
'',
|
|
||||||
strftime('%s','1970-01-01 00:00:00'),
|
|
||||||
''
|
|
||||||
);
|
|
||||||
]]))
|
|
||||||
assert(db:exec([[
|
|
||||||
REPLACE INTO authors (name,salt,passhash,joindate,biography) VALUES (
|
|
||||||
'anonymous',
|
|
||||||
'',
|
|
||||||
'',
|
|
||||||
strftime('%s','1970-01-01 00:00:00'),
|
|
||||||
'',
|
|
||||||
);
|
|
||||||
]]))
|
|
||||||
--If/when an author delets their account, all posts
|
--If/when an author delets their account, all posts
|
||||||
--and comments by that author are also deleted (on
|
--and comments by that author are also deleted (on
|
||||||
--delete cascade) this is intentional. This also
|
--delete cascade) this is intentional. This also
|
||||||
|
@ -118,62 +100,22 @@ function configure()
|
||||||
--an author makes will also be deleted.
|
--an author makes will also be deleted.
|
||||||
--
|
--
|
||||||
--Post text uses zlib compression
|
--Post text uses zlib compression
|
||||||
assert(db:exec([[
|
assert(db:exec(queries.create_table_posts))
|
||||||
CREATE TABLE IF NOT EXISTS posts (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
|
|
||||||
post_text BLOB,
|
|
||||||
post_title TEXT,
|
|
||||||
authorid REFERENCES authors(id) ON DELETE CASCADE,
|
|
||||||
isanon INTEGER,
|
|
||||||
hashedip BLOB,
|
|
||||||
post_time INTEGER
|
|
||||||
);
|
|
||||||
]]))
|
|
||||||
--Store the raw text so people can download it later, maybe
|
--Store the raw text so people can download it later, maybe
|
||||||
--we can use it for "download as image" or "download as pdf"
|
--we can use it for "download as image" or "download as pdf"
|
||||||
--in the future too. Stil stored zlib compressed
|
--in the future too. Stil stored zlib compressed
|
||||||
assert(db:exec([[
|
assert(db:exec(queries.create_table_raw_text))
|
||||||
CREATE TABLE IF NOT EXISTS raw_text (
|
assert(db:exec(queries.create_table_images)) --TODO
|
||||||
id INTEGER PRIMARY KEY REFERENCES posts(id) ON DELETE CASCADE,
|
assert(db:exec(queries.create_table_comments)) --TODO
|
||||||
post_text BLOB,
|
assert(db:exec(queries.create_table_tags)) --TODO
|
||||||
markup TEXT
|
--Store a cookie for logged in users. Logged in users can edit
|
||||||
);]]))
|
--their own posts.
|
||||||
assert(db:exec([[
|
assert(db:exec(queries.create_table_session))
|
||||||
CREATE TABLE IF NOT EXISTS images (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
|
|
||||||
name TEXT,
|
|
||||||
image BLOB,
|
|
||||||
authorid REFERENCES authors(id) ON DELETE CASCADE,
|
|
||||||
upload_time INTEGER,
|
|
||||||
hashedip BLOB
|
|
||||||
);
|
|
||||||
]]))
|
|
||||||
assert(db:exec([[
|
|
||||||
CREATE TABLE IF NOT EXISTS comments (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
|
|
||||||
postid REFERENCES posts(id) ON DELETE CASCADE,
|
|
||||||
author REFERENCES authors(id) ON DELETE CASCADE,
|
|
||||||
isanon INTEGER,
|
|
||||||
comment_text TEXT,
|
|
||||||
hashedip BLOB,
|
|
||||||
post_time INTEGER
|
|
||||||
);
|
|
||||||
]]))
|
|
||||||
assert(db:exec([[
|
|
||||||
CREATE TABLE IF NOT EXISTS tags (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
|
|
||||||
postid REFERENCES posts(id) ON DELETE CASCADE,
|
|
||||||
tag TEXT
|
|
||||||
);
|
|
||||||
]]))
|
|
||||||
assert(db:exec([[
|
|
||||||
CREATE TABLE IF NOT EXISTS sessions (
|
|
||||||
key TEXT PRIMARY KEY,
|
|
||||||
author REFERENCES authors(id) ON DELETE CASCADE,
|
|
||||||
start INTEGER
|
|
||||||
);
|
|
||||||
]]))
|
|
||||||
print("Created db tables")
|
print("Created db tables")
|
||||||
|
|
||||||
|
--A cache table to store rendered pages that do not need to be
|
||||||
|
--rerendered. In theory this could OOM the program eventually and start
|
||||||
|
--swapping to disk. TODO: fixme
|
||||||
assert(cache:exec([[
|
assert(cache:exec([[
|
||||||
CREATE TABLE IF NOT EXISTS cache (
|
CREATE TABLE IF NOT EXISTS cache (
|
||||||
path TEXT PRIMARY KEY,
|
path TEXT PRIMARY KEY,
|
||||||
|
@ -182,138 +124,46 @@ function configure()
|
||||||
dirty INTEGER
|
dirty INTEGER
|
||||||
);
|
);
|
||||||
]]))
|
]]))
|
||||||
stmnt_index = assert(db:prepare([[
|
|
||||||
SELECT
|
--Select the data we need to display the on the front page
|
||||||
posts.id,
|
stmnt_index = assert(db:prepare(queries.select_site_index))
|
||||||
posts.post_title,
|
--Select the data we need to read a story (and maybe display an edit
|
||||||
posts.isanon,
|
--button
|
||||||
posts.post_time,
|
stmnt_read = assert(db:prepare(queries.select_post))
|
||||||
authors.name
|
--TODO: actually let authors edit their bio
|
||||||
FROM
|
|
||||||
posts,
|
|
||||||
authors
|
|
||||||
WHERE
|
|
||||||
posts.authorid = authors.id
|
|
||||||
UNION
|
|
||||||
SELECT
|
|
||||||
posts.id,
|
|
||||||
posts.post_title,
|
|
||||||
posts.isanon,
|
|
||||||
posts.post_time,
|
|
||||||
'Anonymous'
|
|
||||||
FROM
|
|
||||||
posts
|
|
||||||
WHERE
|
|
||||||
posts.authorid = -1
|
|
||||||
ORDER BY
|
|
||||||
posts.post_time DESC
|
|
||||||
LIMIT 10;
|
|
||||||
]]))
|
|
||||||
stmnt_read = assert(db:prepare([[
|
|
||||||
SELECT
|
|
||||||
post_title,
|
|
||||||
post_text,
|
|
||||||
posts.authorid,
|
|
||||||
posts.isanon,
|
|
||||||
authors.name
|
|
||||||
FROM
|
|
||||||
posts,authors
|
|
||||||
WHERE
|
|
||||||
posts.authorid = authors.id AND
|
|
||||||
posts.id = :id;
|
|
||||||
]]))
|
|
||||||
stmnt_author_bio = assert(db:prepare([[
|
stmnt_author_bio = assert(db:prepare([[
|
||||||
SELECT authors.biography FROM authors WHERE authors.name = :author;
|
SELECT authors.biography FROM authors WHERE authors.name = :author;
|
||||||
]]))
|
]]))
|
||||||
stmnt_author = assert(db:prepare([[
|
--Get the author of a story, used to check when editing that the
|
||||||
SELECT
|
--author really owns the story they're trying to edit
|
||||||
posts.id,
|
stmnt_author_of = assert(db:prepare(queries.select_author_of_post))
|
||||||
posts.post_title,
|
--Get the data we need to display a particular author's latest
|
||||||
posts.post_time
|
--stories
|
||||||
FROM
|
stmnt_author = assert(db:prepare(queries.select_author_index))
|
||||||
posts,
|
--Get the data we need to check if someone can log in
|
||||||
authors
|
|
||||||
WHERE
|
|
||||||
posts.isanon = 0 AND
|
|
||||||
posts.authorid = authors.id AND
|
|
||||||
authors.name = :author
|
|
||||||
ORDER BY
|
|
||||||
posts.post_time DESC
|
|
||||||
LIMIT 10;
|
|
||||||
]]))
|
|
||||||
stmnt_author_acct = assert(db:prepare([[
|
stmnt_author_acct = assert(db:prepare([[
|
||||||
SELECT id, salt, passhash FROM authors WHERE name = :name;
|
SELECT id, salt, passhash FROM authors WHERE name = :name;
|
||||||
]]))
|
]]))
|
||||||
stmnt_author_create = assert(db:prepare([[
|
--Create a new author on the site
|
||||||
INSERT OR FAIL INTO authors (
|
stmnt_author_create = assert(db:prepare(queries.insert_author))
|
||||||
name,
|
|
||||||
salt,
|
|
||||||
passhash,
|
|
||||||
joindate,
|
|
||||||
biography
|
|
||||||
) VALUES (
|
|
||||||
:name,
|
|
||||||
:salt,
|
|
||||||
:hash,
|
|
||||||
strftime('%s','now'),
|
|
||||||
''
|
|
||||||
);
|
|
||||||
]]))
|
|
||||||
stmnt_author_login = assert(db:prepare([[
|
stmnt_author_login = assert(db:prepare([[
|
||||||
SELECT name, passhash FROM authors WHERE name = :name;
|
SELECT name, passhash FROM authors WHERE name = :name;
|
||||||
]]))
|
]]))
|
||||||
stmnt_paste = assert(db:prepare([[
|
--Create a new post
|
||||||
INSERT INTO posts (
|
stmnt_paste = assert(db:prepare(queries.insert_post))
|
||||||
post_text,
|
--Keep a copy of the plain text of a post so we can edit it later
|
||||||
post_title,
|
--It might also be useful for migrations, if that ever needs to happen
|
||||||
authorid,
|
stmnt_raw = assert(db:prepare(queries.insert_raw))
|
||||||
isanon,
|
--Get the data we need to display the edit screen
|
||||||
hashedip,
|
stmnt_edit = assert(db:prepare(queries.select_edit))
|
||||||
post_time
|
--When we update a post, store the plaintext again
|
||||||
) VALUES (
|
stmnt_update_raw = assert(db:prepare(queries.update_raw))
|
||||||
?,
|
--Should we really reset the update time every time someone makes a post?
|
||||||
?,
|
--Someone could keep their story on the front page by just editing it a lot.
|
||||||
?,
|
--If it gets abused I can disable it I guess.
|
||||||
?,
|
stmnt_update = assert(db:prepare(queries.update_post))
|
||||||
?,
|
stmnt_insert_session = assert(db:prepare(queries.insert_session))
|
||||||
strftime('%s','now')
|
stmnt_get_session = assert(db:prepare(queries.select_valid_sessions))
|
||||||
);
|
|
||||||
]]))
|
|
||||||
stmnt_raw = assert(db:prepare([[
|
|
||||||
INSERT INTO raw_text (
|
|
||||||
id, post_text, markup
|
|
||||||
) VALUES (
|
|
||||||
?, ?, ?
|
|
||||||
);
|
|
||||||
]]))
|
|
||||||
stmnt_edit = assert(db:prepare([[
|
|
||||||
SELECT
|
|
||||||
raw_text.post_text, raw_text.markup, posts.isanon
|
|
||||||
FROM
|
|
||||||
raw_text, posts
|
|
||||||
WHERE
|
|
||||||
raw_text.id = posts.id AND
|
|
||||||
raw_text.id = :postid;
|
|
||||||
]]))
|
|
||||||
stmnt_insert_session = assert(db:prepare([[
|
|
||||||
INSERT INTO sessions (
|
|
||||||
key,
|
|
||||||
author,
|
|
||||||
start
|
|
||||||
) VALUES (
|
|
||||||
:sessionid,
|
|
||||||
:authorid,
|
|
||||||
strftime('%s','now')
|
|
||||||
);
|
|
||||||
]]))
|
|
||||||
stmnt_get_session = assert(db:prepare([[
|
|
||||||
SELECT authors.name, authors.id
|
|
||||||
FROM authors, sessions
|
|
||||||
WHERE
|
|
||||||
sessions.key = :key AND
|
|
||||||
sessions.author = authors.id AND
|
|
||||||
sessions.start - strftime('%s','now') < 60*60*24;
|
|
||||||
]]))
|
|
||||||
--only refresh pages at most once every 10 seconds
|
--only refresh pages at most once every 10 seconds
|
||||||
stmnt_cache = cache:prepare([[
|
stmnt_cache = cache:prepare([[
|
||||||
SELECT data
|
SELECT data
|
||||||
|
@ -408,6 +258,7 @@ local function start_session(who)
|
||||||
authorid = who
|
authorid = who
|
||||||
}
|
}
|
||||||
local err = do_sql(stmnt_insert_session)
|
local err = do_sql(stmnt_insert_session)
|
||||||
|
stmnt_insert_session:reset()
|
||||||
print("Err:",err)
|
print("Err:",err)
|
||||||
assert(err == sql.DONE)
|
assert(err == sql.DONE)
|
||||||
return session
|
return session
|
||||||
|
@ -470,10 +321,6 @@ local function render(pagename,callback)
|
||||||
return text
|
return text
|
||||||
end
|
end
|
||||||
|
|
||||||
local function author_page(name)
|
|
||||||
|
|
||||||
end
|
|
||||||
|
|
||||||
function home(req)
|
function home(req)
|
||||||
print("Hello from lua!")
|
print("Hello from lua!")
|
||||||
print("Method:", http_method_text(req))
|
print("Method:", http_method_text(req))
|
||||||
|
@ -482,17 +329,15 @@ function home(req)
|
||||||
local path = http_request_get_path(req)
|
local path = http_request_get_path(req)
|
||||||
local text
|
local text
|
||||||
if host == domain then
|
if host == domain then
|
||||||
|
--Default home page
|
||||||
text = render(host..path,function()
|
text = render(host..path,function()
|
||||||
print("Cache miss, rendering index")
|
print("Cache miss, rendering index")
|
||||||
stmnt_index:bind_names{}
|
stmnt_index:bind_names{}
|
||||||
local err = do_sql(stmnt_index)
|
local err = do_sql(stmnt_index)
|
||||||
print("err:",err)
|
|
||||||
local latest = {}
|
local latest = {}
|
||||||
|
--err may be sql.ROW or sql.DONE if we don't have any stories yet
|
||||||
while err == sql.ROW do
|
while err == sql.ROW do
|
||||||
local data = stmnt_index:get_values()
|
local data = stmnt_index:get_values()
|
||||||
for k,v in pairs(data) do
|
|
||||||
print(k,":",v)
|
|
||||||
end
|
|
||||||
table.insert(latest,{
|
table.insert(latest,{
|
||||||
url = encode_id(data[1]),
|
url = encode_id(data[1]),
|
||||||
title = data[2],
|
title = data[2],
|
||||||
|
@ -503,23 +348,15 @@ function home(req)
|
||||||
err = stmnt_index:step()
|
err = stmnt_index:step()
|
||||||
end
|
end
|
||||||
stmnt_index:reset()
|
stmnt_index:reset()
|
||||||
--[[
|
|
||||||
local latest = stmnt_index:get_values()
|
|
||||||
print("latest:",latest)
|
|
||||||
for k,v in pairs(latest) do
|
|
||||||
print(k,":",v)
|
|
||||||
end
|
|
||||||
]]
|
|
||||||
print("returning...\n")
|
|
||||||
return pages.index{
|
return pages.index{
|
||||||
domain = domain,
|
domain = domain,
|
||||||
stories = latest
|
stories = latest
|
||||||
}
|
}
|
||||||
end)
|
end)
|
||||||
else
|
else
|
||||||
print("Author login")
|
--Home page for an author
|
||||||
local subdomain = host:match("([^\\.]+)")
|
local subdomain = host:match("([^\\.]+)")
|
||||||
text = render(host..path,function()
|
text = render(string.format("%s.%s",subdomain,domain),function()
|
||||||
print("Cache miss, rendering author:" .. subdomain)
|
print("Cache miss, rendering author:" .. subdomain)
|
||||||
stmnt_author_bio:bind_names{author=subdomain}
|
stmnt_author_bio:bind_names{author=subdomain}
|
||||||
local err = do_sql(stmnt_author_bio)
|
local err = do_sql(stmnt_author_bio)
|
||||||
|
@ -540,16 +377,13 @@ function home(req)
|
||||||
err = do_sql(stmnt_author)
|
err = do_sql(stmnt_author)
|
||||||
print("err:",err)
|
print("err:",err)
|
||||||
local stories = {}
|
local stories = {}
|
||||||
while err ~= sql.DONE do
|
while err == sql.ROW do
|
||||||
local data = stmnt_author:get_values()
|
local data = stmnt_author:get_values()
|
||||||
print("Added story:",data)
|
local id, title, time = unpack(data)
|
||||||
for k,v in pairs(data) do
|
|
||||||
print(k,":",v)
|
|
||||||
end
|
|
||||||
table.insert(stories,{
|
table.insert(stories,{
|
||||||
url = encode_id(data[1]),
|
url = encode_id(id),
|
||||||
title = data[2],
|
title = title,
|
||||||
posted = os.date("%B %d %Y",tonumber(data[3]))
|
posted = os.date("%B %d %Y",tonumber(time))
|
||||||
})
|
})
|
||||||
err = stmnt_author:step()
|
err = stmnt_author:step()
|
||||||
end
|
end
|
||||||
|
@ -562,20 +396,14 @@ function home(req)
|
||||||
}
|
}
|
||||||
end)
|
end)
|
||||||
end
|
end
|
||||||
print("Host:",http_request_get_host(req))
|
|
||||||
print("Path:",http_request_get_path(req))
|
|
||||||
print("subdomain:",subdomain)
|
|
||||||
print("index:",pages.index)
|
|
||||||
--local text = pages.index({domain = domain})
|
|
||||||
print("returning:",text)
|
|
||||||
assert(text)
|
assert(text)
|
||||||
http_response(req,200,text)
|
http_response(req,200,text)
|
||||||
end
|
end
|
||||||
|
|
||||||
--We prevent people from changing their password file, this way we don't really
|
--We prevent people from changing their password file, this way we don't really
|
||||||
--need to worry about logged in accounts being hijacked if someone gets at the
|
--need to worry about logged in accounts being hijacked if someone gets at the
|
||||||
--database. The attacker can still paste from the logged in account for a while,
|
--database. The attacker can still paste & edit from the logged in account for
|
||||||
--but whatever.
|
--a while, but whatever.
|
||||||
function claim(req)
|
function claim(req)
|
||||||
local method = http_method_text(req)
|
local method = http_method_text(req)
|
||||||
local host = http_request_get_host(req)
|
local host = http_request_get_host(req)
|
||||||
|
@ -587,14 +415,14 @@ function claim(req)
|
||||||
end
|
end
|
||||||
assert(host == domain)
|
assert(host == domain)
|
||||||
local text
|
local text
|
||||||
print("method:",method)
|
|
||||||
if method == "GET" then
|
if method == "GET" then
|
||||||
print("render is:",render)
|
--Get the page to claim a name
|
||||||
text = render(host..path,function()
|
text = render(host..path,function()
|
||||||
print("cache miss, rendering claim page")
|
print("cache miss, rendering claim page")
|
||||||
return pages.claim{}
|
return pages.claim{}
|
||||||
end)
|
end)
|
||||||
elseif method == "POST" then
|
elseif method == "POST" then
|
||||||
|
--Actually claim a name
|
||||||
http_request_populate_post(req)
|
http_request_populate_post(req)
|
||||||
local name = assert(http_argument_get_string(req,"user"))
|
local name = assert(http_argument_get_string(req,"user"))
|
||||||
local rngf = assert(io.open("/dev/urandom","rb"))
|
local rngf = assert(io.open("/dev/urandom","rb"))
|
||||||
|
@ -602,39 +430,30 @@ function claim(req)
|
||||||
local salt = rngf:read(64)
|
local salt = rngf:read(64)
|
||||||
local password = rngf:read(passlength)
|
local password = rngf:read(passlength)
|
||||||
rngf:close()
|
rngf:close()
|
||||||
print("Starting session:",session)
|
|
||||||
print("About to xor")
|
|
||||||
print("About to hash")
|
|
||||||
local hash = sha3(salt .. password)
|
local hash = sha3(salt .. password)
|
||||||
print("done hashing")
|
|
||||||
stmnt_author_create:bind_names{
|
stmnt_author_create:bind_names{
|
||||||
name = name,
|
name = name,
|
||||||
}
|
}
|
||||||
stmnt_author_create:bind_blob(2,salt)
|
stmnt_author_create:bind_blob(2,salt)
|
||||||
stmnt_author_create:bind_blob(3,hash)
|
stmnt_author_create:bind_blob(3,hash)
|
||||||
print("Everything bound, ready to go")
|
|
||||||
local err = do_sql(stmnt_author_create)
|
local err = do_sql(stmnt_author_create)
|
||||||
print("Error:",err)
|
|
||||||
print("DONE",sql.DONE)
|
|
||||||
print("ERROR",sql.ERROR)
|
|
||||||
print("MISUSE",sql.MISUSE)
|
|
||||||
print("ROW",sql.ROW)
|
|
||||||
if err == sql.DONE then
|
if err == sql.DONE then
|
||||||
print("New author:",name)
|
--We sucessfully made athe new author
|
||||||
local id = stmnt_author_create:last_insert_rowid()
|
local id = stmnt_author_create:last_insert_rowid()
|
||||||
print("ID:",id)
|
|
||||||
stmnt_author_create:reset()
|
stmnt_author_create:reset()
|
||||||
|
--Give them a file back
|
||||||
http_response_header(req,"Content-Type","application/octet-stream")
|
http_response_header(req,"Content-Type","application/octet-stream")
|
||||||
http_response_header(req,"Content-Disposition","attachment; filename=\"" .. domain .. "." .. name .. ".passfile\"")
|
http_response_header(req,"Content-Disposition","attachment; filename=\"" .. name .. "." .. domain .. ".passfile\"")
|
||||||
local session = start_session(id)
|
local session = start_session(id)
|
||||||
|
|
||||||
http_response_cookie(req,"session",session,"/",0,0)
|
|
||||||
text = password
|
text = password
|
||||||
elseif err == sql.CONSTRAINT then
|
elseif err == sql.CONSTRAINT then
|
||||||
|
--If the creation failed, they probably just tried
|
||||||
|
--to use a name that was already taken
|
||||||
text = pages.claim {
|
text = pages.claim {
|
||||||
err = "Failed to claim. That name may already be taken."
|
err = "Failed to claim. That name may already be taken."
|
||||||
}
|
}
|
||||||
elseif err == sql.ERROR or err == sql.MISUSE then
|
elseif err == sql.ERROR or err == sql.MISUSE then
|
||||||
|
--This is bad though
|
||||||
text = pages.claim {
|
text = pages.claim {
|
||||||
err = "Failed to claim"
|
err = "Failed to claim"
|
||||||
}
|
}
|
||||||
|
@ -652,7 +471,9 @@ function paste(req)
|
||||||
local err
|
local err
|
||||||
local ret
|
local ret
|
||||||
if method == "GET" then
|
if method == "GET" then
|
||||||
|
--Get the paste page
|
||||||
if host == domain then
|
if host == domain then
|
||||||
|
--For an anonymous user
|
||||||
ret = render(host..path,function()
|
ret = render(host..path,function()
|
||||||
print("Cache missing, rendering post page")
|
print("Cache missing, rendering post page")
|
||||||
return pages.paste{
|
return pages.paste{
|
||||||
|
@ -660,9 +481,14 @@ function paste(req)
|
||||||
}
|
}
|
||||||
end)
|
end)
|
||||||
else
|
else
|
||||||
|
--Or for someone that's logged in
|
||||||
print("Looks like a logged in user wants to paste!")
|
print("Looks like a logged in user wants to paste!")
|
||||||
local subdomain = host:match("([^\\.]+)")
|
local subdomain = host:match("([^\\.]+)")
|
||||||
local author,_ = get_session(req)
|
local author,_ = get_session(req)
|
||||||
|
--If they try to paste as an author, but are on the
|
||||||
|
--wrong subdomain, or or not logged in, redirect them
|
||||||
|
--to the right place. Their own subdomain for authors
|
||||||
|
--or the anonymous paste page for not logged in users.
|
||||||
if author == nil then
|
if author == nil then
|
||||||
print("sessionid was nil")
|
print("sessionid was nil")
|
||||||
http_response_header(req,"Location","https://"..domain.."/_paste")
|
http_response_header(req,"Location","https://"..domain.."/_paste")
|
||||||
|
@ -675,6 +501,9 @@ function paste(req)
|
||||||
return
|
return
|
||||||
end
|
end
|
||||||
assert(author == subdomain,"someone wants to paste as someone else")
|
assert(author == subdomain,"someone wants to paste as someone else")
|
||||||
|
--We're where we want to be, serve up this users's
|
||||||
|
--paste page. No cache, because how often is a user
|
||||||
|
--going to paste?
|
||||||
ret = pages.author_paste{
|
ret = pages.author_paste{
|
||||||
domain = domain,
|
domain = domain,
|
||||||
user = author,
|
user = author,
|
||||||
|
@ -682,45 +511,40 @@ function paste(req)
|
||||||
}
|
}
|
||||||
end
|
end
|
||||||
elseif method == "POST" then
|
elseif method == "POST" then
|
||||||
|
--We're creatinga new paste
|
||||||
http_request_populate_post(req)
|
http_request_populate_post(req)
|
||||||
local title = assert(http_argument_get_string(req,"title"))
|
local title = assert(http_argument_get_string(req,"title"))
|
||||||
local text = assert(http_argument_get_string(req,"text"))
|
local text = assert(http_argument_get_string(req,"text"))
|
||||||
local markup = assert(http_argument_get_string(req,"markup"))
|
local markup = assert(http_argument_get_string(req,"markup"))
|
||||||
local pasteas
|
local pasteas
|
||||||
local raw = zlib.compress(text)
|
local raw = zlib.compress(text)
|
||||||
print("text1",text)
|
|
||||||
--text = string.gsub(text,"(%+)"," ")
|
|
||||||
print("text2",text)
|
|
||||||
text = string.gsub(text,"%%(%x%x)",decodeentities)
|
text = string.gsub(text,"%%(%x%x)",decodeentities)
|
||||||
print("After decode:\n",text)
|
|
||||||
text = parsers[markup](text)
|
text = parsers[markup](text)
|
||||||
print("After markup:",text)
|
|
||||||
--text = string.gsub(text,escapematch,sanitize)
|
|
||||||
text = zlib.compress(text)
|
text = zlib.compress(text)
|
||||||
print("After deflate:",text)
|
|
||||||
print("inflating this data, we would get", zlib.decompress(text))
|
|
||||||
local esctitle = string.gsub(title,"%%(%x%x)",decodeentities)
|
local esctitle = string.gsub(title,"%%(%x%x)",decodeentities)
|
||||||
|
--Always sanatize the title with the plain parser. no markup
|
||||||
|
--in the title.
|
||||||
esctitle = parsers.plain(title)
|
esctitle = parsers.plain(title)
|
||||||
print("title:",esctitle)
|
|
||||||
--TODO:paste to author page
|
|
||||||
if host == domain then
|
if host == domain then
|
||||||
print("got text:",text)
|
--Public paste
|
||||||
--[[
|
--[[
|
||||||
This doesn't actually do much for IPv4 addresses,
|
This doesn't actually do much for IPv4 addresses,
|
||||||
since there are only 32 bits of address, someone could
|
since there are only 32 bits of address. Someone who
|
||||||
|
got a copy of the database could
|
||||||
just generate all 2^32 hashes and look up who posted
|
just generate all 2^32 hashes and look up who posted
|
||||||
what. Use IPv6, Tor or I2P where possible. (but then I
|
what. Use IPv6, Tor or I2P where possible. (but then I
|
||||||
guess it's harder to ban spammers... hmm..)
|
guess it's harder to ban spammers... hmm..)
|
||||||
]]
|
]]
|
||||||
--local ip = http_request_get_ip(req)
|
--local ip = http_request_get_ip(req)
|
||||||
--local iphash = sha3(ip)
|
--local iphash = sha3(ip)
|
||||||
|
--Don't store this information for now, until I come up
|
||||||
|
--with a more elegent solution.
|
||||||
assert(stmnt_paste:bind_blob(1,text) == sql.OK)
|
assert(stmnt_paste:bind_blob(1,text) == sql.OK)
|
||||||
assert(stmnt_paste:bind(2,esctitle) == sql.OK)
|
assert(stmnt_paste:bind(2,esctitle) == sql.OK)
|
||||||
assert(stmnt_paste:bind(3,-1) == sql.OK)
|
assert(stmnt_paste:bind(3,-1) == sql.OK)
|
||||||
assert(stmnt_paste:bind(4,true) == sql.OK)
|
assert(stmnt_paste:bind(4,true) == sql.OK)
|
||||||
assert(stmnt_paste:bind_blob(5,"") == sql.OK)
|
assert(stmnt_paste:bind_blob(5,"") == sql.OK)
|
||||||
err = do_sql(stmnt_paste)
|
err = do_sql(stmnt_paste)
|
||||||
print("err:",err)
|
|
||||||
if err == sql.DONE then
|
if err == sql.DONE then
|
||||||
local rowid = stmnt_paste:last_insert_rowid()
|
local rowid = stmnt_paste:last_insert_rowid()
|
||||||
assert(stmnt_raw:bind(1,rowid) == sql.OK)
|
assert(stmnt_raw:bind(1,rowid) == sql.OK)
|
||||||
|
@ -747,29 +571,7 @@ function paste(req)
|
||||||
stmnt_paste:reset()
|
stmnt_paste:reset()
|
||||||
|
|
||||||
else
|
else
|
||||||
--local subdomain = host:match("([^\\.]+)")
|
--Author paste
|
||||||
--http_populate_cookies(req)
|
|
||||||
--local sessionid = http_request_cookie(req,"session")
|
|
||||||
--if sessionid == nil then --If someone not logged in tries to paste as someone else, send give them an error
|
|
||||||
--ret = pages.author_paste{
|
|
||||||
--domain = domain,
|
|
||||||
--author = subdomain,
|
|
||||||
--err = "You are not logged in, you must be logged in to post as " .. subdomain .. ".",
|
|
||||||
--text = text
|
|
||||||
--}
|
|
||||||
--end
|
|
||||||
--print("Got cookie:",sessionid)
|
|
||||||
--stmnt_get_session:bind_names{
|
|
||||||
--key = sessionid
|
|
||||||
--}
|
|
||||||
--err = do_sql(stmnt_get_session)
|
|
||||||
--print("err:",err)
|
|
||||||
--local data = stmnt_get_session:get_values()
|
|
||||||
--stmnt_get_session:reset()
|
|
||||||
--print("got data:",data)
|
|
||||||
--for k,v in pairs(data) do
|
|
||||||
--print(k,":",v)
|
|
||||||
--end
|
|
||||||
local author, authorid = get_session(req)
|
local author, authorid = get_session(req)
|
||||||
if author == nil then
|
if author == nil then
|
||||||
ret = pages.author_paste{
|
ret = pages.author_paste{
|
||||||
|
@ -779,13 +581,10 @@ function paste(req)
|
||||||
text = text
|
text = text
|
||||||
}
|
}
|
||||||
end
|
end
|
||||||
|
|
||||||
--local author = data[1]
|
|
||||||
--local authorid = data[2]
|
|
||||||
local asanon = assert(http_argument_get_string(req,"pasteas"))
|
local asanon = assert(http_argument_get_string(req,"pasteas"))
|
||||||
--No need to check if the author is posting to the
|
--No need to check if the author is posting to the
|
||||||
--right sudomain, just post it to the one they have
|
--"right" sudomain, just post it to the one they have
|
||||||
--the key for.
|
--the session key for.
|
||||||
assert(stmnt_paste:bind_blob(1,text) == sql.OK)
|
assert(stmnt_paste:bind_blob(1,text) == sql.OK)
|
||||||
assert(stmnt_paste:bind(2,esctitle) == sql.OK)
|
assert(stmnt_paste:bind(2,esctitle) == sql.OK)
|
||||||
assert(stmnt_paste:bind(3,authorid) == sql.OK)
|
assert(stmnt_paste:bind(3,authorid) == sql.OK)
|
||||||
|
@ -831,50 +630,9 @@ function paste(req)
|
||||||
http_response(req,200,ret)
|
http_response(req,200,ret)
|
||||||
end
|
end
|
||||||
|
|
||||||
function read(req)
|
--A helper function for below
|
||||||
local host = http_request_get_host(req)
|
local function read_story(host,path,idp)
|
||||||
local path = http_request_get_path(req)
|
return render(host..path,function()
|
||||||
print("host:",host)
|
|
||||||
print("path:",path)
|
|
||||||
local idp = string.sub(path,2)--remove leading "/"
|
|
||||||
assert(string.len(path) > 0,"Tried to read 0-length story id")
|
|
||||||
local author, authorid = get_session(req)
|
|
||||||
print("author is:",author)
|
|
||||||
local text
|
|
||||||
if author then
|
|
||||||
local id = decode_id(idp)
|
|
||||||
stmnt_read:bind_names{
|
|
||||||
id = id
|
|
||||||
}
|
|
||||||
local err = do_sql(stmnt_read)
|
|
||||||
print("err:",err)
|
|
||||||
if err == sql.DONE then
|
|
||||||
stmnt_read:reset()
|
|
||||||
return pages.nostory{
|
|
||||||
path = path
|
|
||||||
}
|
|
||||||
end
|
|
||||||
assert(err == sql.ROW)
|
|
||||||
local title, storytext, tauthor, isanon, authorname = unpack(stmnt_read:get_values())
|
|
||||||
stmnt_read:reset()
|
|
||||||
if tauthor == authorid then
|
|
||||||
print("We're the owner of this story!")
|
|
||||||
local uncompressed = zlib.decompress(storytext)
|
|
||||||
text = pages.read{
|
|
||||||
domain = domain,
|
|
||||||
title = title,
|
|
||||||
text = text,
|
|
||||||
idp = idp,
|
|
||||||
isanon = isanon == 1,
|
|
||||||
author = authorname,
|
|
||||||
owner = true
|
|
||||||
}
|
|
||||||
|
|
||||||
else
|
|
||||||
print("We're logged in, but not the owner of this story!")
|
|
||||||
end
|
|
||||||
else
|
|
||||||
text = render(host..path,function()
|
|
||||||
print("Trying to read, id is",idp,":",decode_id(idp))
|
print("Trying to read, id is",idp,":",decode_id(idp))
|
||||||
local id = decode_id(idp)
|
local id = decode_id(idp)
|
||||||
print("id:",id,type(id))
|
print("id:",id,type(id))
|
||||||
|
@ -882,24 +640,15 @@ function read(req)
|
||||||
id = id
|
id = id
|
||||||
}
|
}
|
||||||
local err = do_sql(stmnt_read)
|
local err = do_sql(stmnt_read)
|
||||||
print("err:",err)
|
if err == sql.DONE then
|
||||||
if err == sql.ROW then
|
|
||||||
|
|
||||||
elseif err == sql.DONE then
|
|
||||||
stmnt_read:reset()
|
stmnt_read:reset()
|
||||||
return pages.nostory{
|
return pages.nostory{
|
||||||
path = path
|
path = path
|
||||||
}
|
}
|
||||||
end
|
end
|
||||||
assert(err == sql.ROW,"Could not get row:" .. tostring(id) .. " Error:" .. tostring(err))
|
assert(err == sql.ROW,"Could not get row:" .. tostring(id) .. " Error:" .. tostring(err))
|
||||||
print("get_values:")
|
|
||||||
local title, text, authorid, isanon, authorname = unpack(stmnt_read:get_values())
|
local title, text, authorid, isanon, authorname = unpack(stmnt_read:get_values())
|
||||||
print("Got text from unpack:",text)
|
|
||||||
text = zlib.decompress(text)
|
text = zlib.decompress(text)
|
||||||
print("inflated text:",text)
|
|
||||||
print("title:",title)
|
|
||||||
print("text:",text)
|
|
||||||
print("idp:",idp)
|
|
||||||
stmnt_read:reset()
|
stmnt_read:reset()
|
||||||
return pages.read{
|
return pages.read{
|
||||||
domain = domain,
|
domain = domain,
|
||||||
|
@ -910,49 +659,88 @@ function read(req)
|
||||||
author = authorname
|
author = authorname
|
||||||
}
|
}
|
||||||
end)
|
end)
|
||||||
|
|
||||||
|
end
|
||||||
|
|
||||||
|
function read(req)
|
||||||
|
local host = http_request_get_host(req)
|
||||||
|
local path = http_request_get_path(req)
|
||||||
|
local idp = string.sub(path,2)--remove leading "/"
|
||||||
|
assert(string.len(path) > 0,"Tried to read 0-length story id")
|
||||||
|
local author, authorid = get_session(req)
|
||||||
|
local text
|
||||||
|
if author then
|
||||||
|
--We're logged in as someone
|
||||||
|
local id = decode_id(idp)
|
||||||
|
stmnt_read:bind_names{
|
||||||
|
id = id
|
||||||
|
}
|
||||||
|
local err = do_sql(stmnt_read)
|
||||||
|
if err == sql.DONE then
|
||||||
|
--We got no story
|
||||||
|
stmnt_read:reset()
|
||||||
|
return pages.nostory{
|
||||||
|
path = path
|
||||||
|
}
|
||||||
|
end
|
||||||
|
assert(err == sql.ROW)
|
||||||
|
local title, storytext, tauthor, isanon, authorname = unpack(stmnt_read:get_values())
|
||||||
|
storytext = zlib.decompress(storytext)
|
||||||
|
stmnt_read:reset()
|
||||||
|
if tauthor == authorid then
|
||||||
|
--The story exists and we're logged in as the
|
||||||
|
--owner, display the edit button
|
||||||
|
text = pages.read{
|
||||||
|
domain = domain,
|
||||||
|
title = title,
|
||||||
|
text = storytext,
|
||||||
|
idp = idp,
|
||||||
|
isanon = isanon == 1,
|
||||||
|
author = authorname,
|
||||||
|
owner = true
|
||||||
|
}
|
||||||
|
|
||||||
|
else
|
||||||
|
text = read_story(host,path,idp)
|
||||||
|
end
|
||||||
|
else
|
||||||
|
text = read_story(host,path,idp)
|
||||||
end
|
end
|
||||||
assert(text)
|
assert(text)
|
||||||
http_response(req,200,text)
|
http_response(req,200,text)
|
||||||
end
|
end
|
||||||
|
|
||||||
function login(req)
|
function login(req)
|
||||||
print("Logging in")
|
|
||||||
local host = http_request_get_host(req)
|
local host = http_request_get_host(req)
|
||||||
local path = http_request_get_path(req)
|
local path = http_request_get_path(req)
|
||||||
local method = http_method_text(req)
|
local method = http_method_text(req)
|
||||||
if host ~= domain then
|
if host ~= domain then
|
||||||
|
--Don't allow logging into subdomains, I guess
|
||||||
http_response_header(req,"Location",string.format("https://%s/_login",domain))
|
http_response_header(req,"Location",string.format("https://%s/_login",domain))
|
||||||
http_response(req,303,"")
|
http_response(req,303,"")
|
||||||
return
|
return
|
||||||
end
|
end
|
||||||
local text
|
local text
|
||||||
if method == "GET" then
|
if method == "GET" then
|
||||||
|
--Just give them the login page
|
||||||
text = render(host..path,function()
|
text = render(host..path,function()
|
||||||
return pages.login{}
|
return pages.login{}
|
||||||
end)
|
end)
|
||||||
elseif method == "POST" then
|
elseif method == "POST" then
|
||||||
--http_request_populate_post(req)
|
--Try to log in
|
||||||
http_populate_multipart_form(req)
|
http_populate_multipart_form(req)
|
||||||
local name = assert(http_argument_get_string(req,"user"))
|
local name = assert(http_argument_get_string(req,"user"))
|
||||||
local pass = assert(http_file_get(req,"pass"))
|
local pass = assert(http_file_get(req,"pass"))
|
||||||
print("name:",name)
|
|
||||||
print("pass:",pass)
|
|
||||||
stmnt_author_acct:bind_names{
|
stmnt_author_acct:bind_names{
|
||||||
name = name
|
name = name
|
||||||
}
|
}
|
||||||
local err = do_sql(stmnt_author_acct)
|
local err = do_sql(stmnt_author_acct)
|
||||||
print("err:",err)
|
|
||||||
if err == sql.ROW then
|
if err == sql.ROW then
|
||||||
local id, salt, passhash = unpack(stmnt_author_acct:get_values())
|
local id, salt, passhash = unpack(stmnt_author_acct:get_values())
|
||||||
stmnt_author_acct:reset()
|
stmnt_author_acct:reset()
|
||||||
print("salt:",salt)
|
|
||||||
print("passhash:",passhash)
|
|
||||||
local todigest = salt .. pass
|
local todigest = salt .. pass
|
||||||
local hash = sha3(todigest)
|
local hash = sha3(todigest)
|
||||||
print("hash:",hash)
|
|
||||||
print("passhash:",passhash)
|
|
||||||
if hash == passhash then
|
if hash == passhash then
|
||||||
print("Passfile accepted")
|
|
||||||
local session = start_session(id)
|
local session = start_session(id)
|
||||||
http_response_cookie(req,"session",session,"/",0,0)
|
http_response_cookie(req,"session",session,"/",0,0)
|
||||||
local loc = string.format("https://%s.%s",name,domain)
|
local loc = string.format("https://%s.%s",name,domain)
|
||||||
|
@ -966,10 +754,12 @@ function login(req)
|
||||||
end
|
end
|
||||||
elseif err == sql.DONE then --Allows user enumeration, do we want this?
|
elseif err == sql.DONE then --Allows user enumeration, do we want this?
|
||||||
--Probably not a problem since all passwords are forced to be "good"
|
--Probably not a problem since all passwords are forced to be "good"
|
||||||
|
stmnt_author_acct:reset()
|
||||||
text = pages.login{
|
text = pages.login{
|
||||||
err = "Failed to find user:" .. name
|
err = "Failed to find user:" .. name
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
|
stmnt_author_acct:reset()
|
||||||
error("Other sql error during login")
|
error("Other sql error during login")
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
@ -977,36 +767,97 @@ function login(req)
|
||||||
http_response(req,200,text)
|
http_response(req,200,text)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
--Edit a story
|
||||||
function edit(req)
|
function edit(req)
|
||||||
local host = http_request_get_host(req)
|
local host = http_request_get_host(req)
|
||||||
local path = http_request_get_path(req)
|
local path = http_request_get_path(req)
|
||||||
local method = http_method_text(req)
|
local method = http_method_text(req)
|
||||||
|
local author, author_id = get_session(req)
|
||||||
|
local ret
|
||||||
if method == "GET" then
|
if method == "GET" then
|
||||||
http_request_populate_qs(req)
|
http_request_populate_qs(req)
|
||||||
local story = assert(http_argument_get_string(req,"story"))
|
local story = assert(http_argument_get_string(req,"story"))
|
||||||
local story_id = decode_id(story)
|
local story_id = decode_id(story)
|
||||||
print("we want to edit story:",story)
|
print("we want to edit story:",story)
|
||||||
|
--Check that the logged in user is the owner of the story
|
||||||
|
--sql-side. If we're not the owner, we'll get 0 rows back.
|
||||||
stmnt_edit:bind_names{
|
stmnt_edit:bind_names{
|
||||||
postid = story_id
|
postid = story_id,
|
||||||
|
authorid = author_id
|
||||||
}
|
}
|
||||||
local err = do_sql(stmnt_edit)
|
local err = do_sql(stmnt_edit)
|
||||||
print("err:",err)
|
|
||||||
if err == sql.DONE then
|
if err == sql.DONE then
|
||||||
print("No such story to edit:",story_id)
|
--No rows, we're probably not the owner (it might
|
||||||
|
--also be because there's no such story)
|
||||||
|
ret = pages.cantedit{
|
||||||
|
path = story,
|
||||||
|
}
|
||||||
|
stmnt_edit:reset()
|
||||||
|
http_response(req,200,ret)
|
||||||
|
return
|
||||||
end
|
end
|
||||||
assert(err == sql.ROW)
|
assert(err == sql.ROW)
|
||||||
local data = stmnt_edit:get_values()
|
local data = stmnt_edit:get_values()
|
||||||
local txt_compressed, markup, isanon = unpack(data)
|
local txt_compressed, markup, isanon, title = unpack(data)
|
||||||
for k,v in pairs(data) do
|
local text = zlib.decompress(txt_compressed)
|
||||||
print(k,":",v)
|
stmnt_edit:reset()
|
||||||
end
|
ret = pages.edit{
|
||||||
|
title = title,
|
||||||
|
text = text,
|
||||||
|
markup = markup,
|
||||||
|
user = author,
|
||||||
|
isanon = isanon == 1,
|
||||||
|
domain = domain,
|
||||||
|
story = story_id,
|
||||||
|
err = "",
|
||||||
|
}
|
||||||
elseif method == "POST" then
|
elseif method == "POST" then
|
||||||
--TODO: same as paste?
|
|
||||||
--nope, need to replace the story instead of inserting a new one.
|
|
||||||
http_request_populate_post(req)
|
http_request_populate_post(req)
|
||||||
|
local storyid = tonumber(assert(http_argument_get_string(req,"story")))
|
||||||
|
local title = assert(http_argument_get_string(req,"title"))
|
||||||
|
local text = assert(http_argument_get_string(req,"text"))
|
||||||
|
local pasteas = assert(http_argument_get_string(req,"pasteas"))
|
||||||
|
local markup = assert(http_argument_get_string(req,"markup"))
|
||||||
|
stmnt_author_of:bind_names{
|
||||||
|
id = storyid
|
||||||
|
}
|
||||||
|
local err = do_sql(stmnt_author_of)
|
||||||
|
if err ~= sql.ROW then
|
||||||
|
stmnt_author_of:reset()
|
||||||
|
error("No author found for story:" .. storyid)
|
||||||
end
|
end
|
||||||
|
local data = stmnt_author_of:get_values()
|
||||||
|
stmnt_author_of:reset()
|
||||||
|
local realauthor = data[1]
|
||||||
|
assert(realauthor == author_id) --Make sure the author of the story is the currently logged in user
|
||||||
|
local parsed = parsers[markup](text)
|
||||||
|
local compr_raw = zlib.compress(text)
|
||||||
|
local compr = zlib.compress(parsed)
|
||||||
|
assert(stmnt_update_raw:bind_blob(1,compr_raw) == sql.OK)
|
||||||
|
assert(stmnt_update_raw:bind(2,markup) == sql.OK)
|
||||||
|
assert(stmnt_update_raw:bind(3,storyid) == sql.OK)
|
||||||
|
assert(do_sql(stmnt_update_raw) == sql.DONE, "Failed to update raw")
|
||||||
|
stmnt_update_raw:reset()
|
||||||
|
assert(stmnt_update:bind(1,title) == sql.OK)
|
||||||
|
assert(stmnt_update:bind_blob(2,compr) == sql.OK)
|
||||||
|
assert(stmnt_update:bind(3,pasteas == "anonymous" and 1 or 0) == sql.OK)
|
||||||
|
assert(stmnt_update:bind(4,storyid) == sql.OK)
|
||||||
|
assert(do_sql(stmnt_update) == sql.DONE, "Failed to update text")
|
||||||
|
stmnt_update:reset()
|
||||||
|
local id_enc = encode_id(storyid)
|
||||||
|
local loc = string.format("https://%s/%s",domain,id_enc)
|
||||||
|
dirty_cache(string.format("%s/%s",domain,id_enc)) -- This place to read this post
|
||||||
|
dirty_cache(string.format("%s",domain)) -- The site index (ex, if the author changed the paste from their's to "Anonymous", the cache should reflect that).
|
||||||
|
dirty_cache(string.format("%s.%s",author,domain)) -- The author's index, same reasoning as above.
|
||||||
|
http_response_header(req,"Location",loc)
|
||||||
|
http_response(req,303,"")
|
||||||
|
return
|
||||||
|
end
|
||||||
|
assert(ret)
|
||||||
|
http_response(req,200,ret)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
--TODO
|
||||||
function edit_bio()
|
function edit_bio()
|
||||||
print("we want to edit bio")
|
print("we want to edit bio")
|
||||||
end
|
end
|
||||||
|
@ -1023,5 +874,3 @@ function teardown()
|
||||||
end
|
end
|
||||||
|
|
||||||
print("Done with init.lua")
|
print("Done with init.lua")
|
||||||
--[==[
|
|
||||||
]==]
|
|
||||||
|
|
|
@@ -21,7 +21,7 @@ local function sanitize(text)
 	return ret
 end

--Grammer
+--Grammar
 local space = S" \t\r"^0
 local special = P{
 	P"**" + P"''" + P"'''" +
@@ -33,14 +33,14 @@ local word = Cs((1 - special)^1) * space / sanitize

 --Generates a pattern that formats text inside matching 'seq' tags with format
 --ex wrap("^^",[[<sup>%s</sup>]])
---will wrapp text "5^^3^^" as "5<sup>3</sup>"
+--will wrap text "5^^3^^" as "5<sup>3</sup>"
 local function wrap(seq,format)
 	return P(seq) * Cs(((1 - P(seq)) * space)^1) * P(seq) * space / function(a)
 		return string.format(format,sanitize(a))
 	end
 end

---Generates a pattern that formats text inside openinig and closing "name" tags
+--Generates a pattern that formats text inside opening and closing "name" tags
 --with a format, BB forum style
 local function tag(name,format)
 	local start_tag = P(string.format("[%s]",name))
@@ -50,7 +50,7 @@ local function tag(name,format)
 	end
 end

-local grammer = P{
+local grammar = P{
 	"chunk";
 	--regular
 	spoiler = wrap("**",[[<span class="spoiler">%s</span>]]),
@ -76,36 +76,36 @@ local grammer = P{
|
||||||
chunk = V"line"^0 * V"plainline" * V"ending"
|
chunk = V"line"^0 * V"plainline" * V"ending"
|
||||||
}
|
}
|
||||||
|
|
||||||
--local text = [[
|
local text = [[
|
||||||
--this is **a big** test with ''italics''!
|
this is **a big** test with ''italics''!
|
||||||
--we need to > sanitize < things that could be tags
|
we need to > sanitize < things that could be tags
|
||||||
--like really <b> badly </b>
|
like really <b> badly </b>
|
||||||
--words can include any'single item without=penalty
|
words can include any'single item without=penalty
|
||||||
--Can you use '''one tag ==within== another tag'''?
|
Can you use '''one tag ==within== another tag'''?
|
||||||
--let's see if [spoiler]spoiler tags work[/spoiler]
|
let's see if [spoiler]spoiler tags work[/spoiler]
|
||||||
--things might even __go over
|
things might even __go over
|
||||||
--multiple lines__ blah
|
multiple lines__ blah
|
||||||
--Let's test out those [code]
|
Let's test out those [code]
|
||||||
--code tag,s and see how well
|
code tag,s and see how well
|
||||||
--they work
|
they work
|
||||||
--here's ome
|
here's ome
|
||||||
--preformated <with injection>
|
preformated <with injection>
|
||||||
--text
|
text
|
||||||
--[/code]
|
[/code]
|
||||||
--> Or have blank lines
|
> Or have blank lines
|
||||||
|
|
||||||
--one important thing is that greentext > should not start in the middle of a line
|
one important thing is that greentext > should not start in the middle of a line
|
||||||
--> this next line is a green text, what if I include **markup** inside it?
|
> this next line is a green text, what if I include **markup** inside it?
|
||||||
--< and after '''it is''' a pinktext
|
< and after '''it is''' a pinktext
|
||||||
--> because of some of these restrictions **bold text
|
> because of some of these restrictions **bold text
|
||||||
--cannot go over multiple lines** in a green text
|
cannot go over multiple lines** in a green text
|
||||||
--__and finally__ there might be some text with '''
|
__and finally__ there might be some text with '''
|
||||||
--incomplete syntax <b> with injection</b> !!!!
|
incomplete syntax <b> with injection</b> !!!!
|
||||||
--]]
|
]]
|
||||||
|
|
||||||
return function(text)
|
--return function(text)
|
||||||
return table.concat({grammer:match(text .. "\n")}," ")
|
--return table.concat({grammar:match(text .. "\n")}," ")
|
||||||
end
|
|
||||||
--for k,v in pairs({grammer:match(text)}) do
|
|
||||||
--print(k,":",v)
|
|
||||||
--end
|
--end
|
||||||
|
for k,v in pairs({grammar:match(text)}) do
|
||||||
|
print(k,":",v)
|
||||||
|
end
|
||||||
|
|
|
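For readers unfamiliar with LPeg, here is a minimal standalone sketch of the pattern-building idea behind the wrap() helper above. It is not part of the commit; it only assumes the lpeg rock is available and it skips the module's sanitize step.

-- Sketch of the wrap() technique (assumes only lpeg; not the module above).
local lpeg = require "lpeg"
local P, C = lpeg.P, lpeg.C

-- Capture the text between two copies of `seq` and splice it into `format`.
local function wrap(seq, format)
	return P(seq) * C((1 - P(seq))^1) * P(seq) / function(a)
		return string.format(format, a)
	end
end

local sup = wrap("^^", "<sup>%s</sup>")
print(lpeg.match(sup, "^^3^^")) --> <sup>3</sup>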
@@ -6,8 +6,8 @@
 <head>
 <meta charset="utf-8">
 <title>🍑</title>
-<link href="/_css/style.css" rel="stylesheet">
 <link href="/_css/milligram.css" rel="stylesheet">
+<link href="/_css/style.css" rel="stylesheet">
 </head>
 <body class="container">
 <h1 class="title">
@@ -6,8 +6,8 @@
 <head>
 <meta charset="utf-8">
 <title>🍑</title>
-<link href="/_css/style.css" rel="stylesheet">
 <link href="/_css/milligram.css" rel="stylesheet">
+<link href="/_css/style.css" rel="stylesheet">
 </head>
 <body class="container">
 <h1 class="title">
@@ -4,8 +4,8 @@
 <head>
 <meta charset="utf-8">
 <title>🍑</title>
-<link href="/_css/style.css" rel="stylesheet">
 <link href="/_css/milligram.css" rel="stylesheet">
+<link href="/_css/style.css" rel="stylesheet">
 </head>
 <body>
 <main class="wrapper">
@@ -0,0 +1,22 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+<meta charset="utf-8">
+<title>🙁</title>
+<link href="/_css/milligram.css" rel="stylesheet">
+<link href="/_css/style.css" rel="stylesheet">
+</head>
+<body class="container">
+<main class="wrapper">
+<h1 class="title">
+🙁
+</h1>
+<div class="container">
+<p>
+You don't have permission to edit: <%= path %>
+</p>
+</div>
+</main>
+</body>
+<body>
@@ -4,8 +4,8 @@
 <head>
 <meta charset="utf-8">
 <title>🍑</title>
-<link href="/_css/style.css" rel="stylesheet">
 <link href="/_css/milligram.css" rel="stylesheet">
+<link href="/_css/style.css" rel="stylesheet">
 </head>
 <body class="container">
 <main class="wrapper">
@@ -4,8 +4,8 @@
 <head>
 <meta charset="utf-8">
 <title>🍑</title>
-<link href="/_css/style.css" rel="stylesheet">
 <link href="/_css/milligram.css" rel="stylesheet">
+<link href="/_css/style.css" rel="stylesheet">
 </head>
 <body>
 <main class="wrapper">
@@ -13,16 +13,23 @@
 Paste
 </h1>
 <% if err then %><em class="error"><%= err %></em><% end %>
-<form action="https://<%= user %>.<%= domain %>/_paste" method="post" class="container">
+<form action="https://<%= user %>.<%= domain %>/_edit" method="post" class="container">
 <fieldset>
 <div class="row">
-<input type="text" name="title" placeholder="Title" class="column column-80"></input>
-<select id="pasteas" class="column column-10">
+<input type="text" name="title" placeholder="Title" class="column column-80" value="<%= title %>"></input>
+<input type="hidden" name="story" value="<%= story %>">
+<select id="pasteas" name="pasteas" class="column column-10">
+<% if isanon then %>
 <option value="<%= user %>"><%= user %></option>
-<option value="anonymous">anonymous</option>
+<option value="anonymous" selected>Anonymous</option>
+<% else %>
+<option value="<%= user %>" selected><%= user %></option>
+<option value="anonymous">Anonymous</option>
+<% end %>
 </select>
-<select id="markup" class="column column-10">
+<select id="markup" name="markup" class="column column-10">
 <option value="plain">Plain</option>
+<option value="imageboard">Imageboard</option>
 </select>
 </div>
 <div class="row">
@@ -3,8 +3,8 @@
 <head>
 <meta charset="utf-8">
 <title>🍑</title>
-<link href="/_css/style.css" rel="stylesheet">
 <link href="/_css/milligram.css" rel="stylesheet">
+<link href="/_css/style.css" rel="stylesheet">
 </head>
 <body class="container">
 <main class="wrapper">
@@ -4,8 +4,8 @@
 <head>
 <meta charset="utf-8">
 <title>🍑</title>
-<link href="/_css/style.css" rel="stylesheet">
 <link href="/_css/milligram.css" rel="stylesheet">
+<link href="/_css/style.css" rel="stylesheet">
 </head>
 <body class="container">
 <main class="wrapper">
@@ -19,7 +19,7 @@
 <input type="text" name="user" id="user" placeholder="name">
 <label for="pass">Passfile:</label>
 <input type="file" name="pass" id="pass">
-<input type="submit">
+<input type="submit" value="Log In"/>
 </fieldset>
 </form>
 <footer class="footer">
@@ -3,8 +3,8 @@
 <head>
 <meta charset="utf-8">
 <title>🙁</title>
-<link href="/_css/style.css" rel="stylesheet">
 <link href="/_css/milligram.css" rel="stylesheet">
+<link href="/_css/style.css" rel="stylesheet">
 </head>
 <body class="container">
 <main class="wrapper">
@@ -3,8 +3,8 @@
 <head>
 <meta charset="utf-8">
 <title>🙁</title>
-<link href="/_css/style.css" rel="stylesheet">
 <link href="/_css/milligram.css" rel="stylesheet">
+<link href="/_css/style.css" rel="stylesheet">
 </head>
 <body class="container">
 <main class="wrapper">
@@ -4,8 +4,8 @@
 <head>
 <meta charset="utf-8">
 <title>🍑</title>
-<link href="/_css/style.css" rel="stylesheet">
 <link href="/_css/milligram.css" rel="stylesheet">
+<link href="/_css/style.css" rel="stylesheet">
 </head>
 <body>
 <main class="wrapper">
@@ -3,8 +3,8 @@
 <head>
 <meta charset="utf-8">
 <title><%= title %></title>
-<link href="/_css/style.css" rel="stylesheet">
 <link href="/_css/milligram.css" rel="stylesheet">
+<link href="/_css/style.css" rel="stylesheet">
 </head>
 <body class="container">
 <main class="wrapper">
@@ -0,0 +1,8 @@
+CREATE TABLE IF NOT EXISTS authors (
+id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+name TEXT UNIQUE ON CONFLICT FAIL,
+salt BLOB,
+passhash BLOB,
+joindate INTEGER,
+biography TEXT
+);
@@ -0,0 +1,9 @@
+CREATE TABLE IF NOT EXISTS comments (
+id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+postid REFERENCES posts(id) ON DELETE CASCADE,
+author REFERENCES authors(id) ON DELETE CASCADE,
+isanon INTEGER,
+comment_text TEXT,
+hashedip BLOB,
+post_time INTEGER
+);
@@ -0,0 +1,8 @@
+CREATE TABLE IF NOT EXISTS images (
+id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+name TEXT,
+image BLOB,
+authorid REFERENCES authors(id) ON DELETE CASCADE,
+upload_time INTEGER,
+hashedip BLOB
+);
@@ -0,0 +1,9 @@
+CREATE TABLE IF NOT EXISTS posts (
+id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+post_text BLOB,
+post_title TEXT,
+authorid REFERENCES authors(id) ON DELETE CASCADE,
+isanon INTEGER,
+hashedip BLOB,
+post_time INTEGER
+);
@@ -0,0 +1,4 @@
+CREATE TABLE IF NOT EXISTS raw_text (
+id INTEGER PRIMARY KEY REFERENCES posts(id) ON DELETE CASCADE,
+post_text BLOB,
+markup TEXT
@@ -0,0 +1,5 @@
+CREATE TABLE IF NOT EXISTS sessions (
+key TEXT PRIMARY KEY,
+author REFERENCES authors(id) ON DELETE CASCADE,
+start INTEGER
+);
@@ -0,0 +1,5 @@
+CREATE TABLE IF NOT EXISTS tags (
+id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+postid REFERENCES posts(id) ON DELETE CASCADE,
+tag TEXT
+);
@@ -0,0 +1,13 @@
+INSERT OR IGNORE INTO authors (
+name,
+salt,
+passhash,
+joindate,
+biography
+) VALUES (
+'anonymous',
+'',
+'',
+strftime('%s','1970-01-01 00:00:00'),
+''
+);
@@ -0,0 +1,13 @@
+INSERT OR FAIL INTO authors (
+name,
+salt,
+passhash,
+joindate,
+biography
+) VALUES (
+:name,
+:salt,
+:hash,
+strftime('%s','now'),
+''
+);
@@ -0,0 +1,15 @@
+INSERT INTO posts (
+post_text,
+post_title,
+authorid,
+isanon,
+hashedip,
+post_time
+) VALUES (
+?,
+?,
+?,
+?,
+?,
+strftime('%s','now')
+);
@@ -0,0 +1,5 @@
+INSERT INTO raw_text (
+id, post_text, markup
+) VALUES (
+?, ?, ?
+);
@@ -0,0 +1,9 @@
+INSERT INTO sessions (
+key,
+author,
+start
+) VALUES (
+:sessionid,
+:authorid,
+strftime('%s','now')
+);
@@ -0,0 +1,14 @@
+SELECT
+posts.id,
+posts.post_title,
+posts.post_time
+FROM
+posts,
+authors
+WHERE
+posts.isanon = 0 AND
+posts.authorid = authors.id AND
+authors.name = :author
+ORDER BY
+posts.post_time DESC
+LIMIT 10;
@@ -0,0 +1,9 @@
+SELECT
+authors.id,
+authors.name
+FROM
+authors,
+posts
+WHERE
+posts.authorid = authors.id AND
+posts.id = :id;
@@ -0,0 +1,8 @@
+SELECT
+raw_text.post_text, raw_text.markup, posts.isanon, posts.post_title
+FROM
+raw_text, posts
+WHERE
+raw_text.id = posts.id AND
+raw_text.id = :postid AND
+posts.authorid = :authorid;
@@ -0,0 +1,11 @@
+SELECT
+post_title,
+post_text,
+posts.authorid,
+posts.isanon,
+authors.name
+FROM
+posts,authors
+WHERE
+posts.authorid = authors.id AND
+posts.id = :id;
@@ -0,0 +1,25 @@
+SELECT
+posts.id,
+posts.post_title,
+posts.isanon,
+posts.post_time,
+authors.name
+FROM
+posts,
+authors
+WHERE
+posts.authorid = authors.id
+UNION
+SELECT
+posts.id,
+posts.post_title,
+posts.isanon,
+posts.post_time,
+'Anonymous'
+FROM
+posts
+WHERE
+posts.authorid = -1
+ORDER BY
+posts.post_time DESC
+LIMIT 10;
@@ -0,0 +1,6 @@
+SELECT authors.name, authors.id
+FROM authors, sessions
+WHERE
+sessions.key = :key AND
+sessions.author = authors.id AND
+sessions.start - strftime('%s','now') < 60*60*24;
@@ -0,0 +1,9 @@
+UPDATE
+posts
+SET
+post_title = ?,
+post_text = ?,
+isanon = ?,
+post_time = strftime('%s','now')
+WHERE
+posts.id = ?;
@@ -0,0 +1,7 @@
+UPDATE
+raw_text
+SET
+post_text = ?,
+markup = ?
+WHERE
+id = ?;
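The statements above use positional (?) and named (:param) placeholders. As a hedged sketch of how one of these .sql files might be read and bound with lsqlite3 (the rock the Makefile installs): the database path and the sql/insert_author.sql file name below are illustrative assumptions, not taken from this commit.

-- Sketch only: assumes lsqlite3; path and file names are placeholders.
local sqlite3 = require("lsqlite3")

-- Read a whole .sql file into a string.
local function read_sql(path)
	local f = assert(io.open(path, "r"))
	local text = f:read("*a")
	f:close()
	return text
end

local db = assert(sqlite3.open("posts.db"))                         -- assumed path
local stmt = assert(db:prepare(read_sql("sql/insert_author.sql")))  -- assumed file name
stmt:bind_names{ name = "alice", salt = "somesalt", hash = "somehash" }  -- matches :name, :salt, :hash
assert(stmt:step() == sqlite3.DONE)
stmt:finalize()
db:close()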