Overview
| Comment: | move from webp to svg except where necessary |
|---|---|
| SHA3-256: | aa17a033218589d685cb7e420c45a4b5 |
| User & Date: | lexi on 2021-01-01 16:24:44 |
Context
2021-01-01

| Time | Comment | Check-in | User | Tags |
|---|---|---|---|---|
| 16:42 | handle (some) deletions in live.js | 53ef86f7ff | lexi | trunk |
| 16:24 | move from webp to svg except where necessary | aa17a03321 | lexi | trunk |
| 04:33 | add live updates, system to only update when necessary almost works | 24ec409083 | lexi | trunk |
Changes
Modified backend/pgsql.t from [3e99c3d4ab] to [30375d8380].
Before (lines 758-778):
subject = { `subj, `sblen };
acl = {`r:string(row,4), `r:len(row,4)+1};
body = {`r:string(row,5), `r:len(row,5)+1};
convoheaduri = { `cvhu, `cvhlen }; --FIXME
}) ]
p.ptr.id = r:int(uint64,row,1)
p.ptr.author = r:int(uint64,row,2)
p.ptr.posted = r:int(uint64,row,6)
p.ptr.discovered = r:int(uint64,row,7)
p.ptr.edited = r:int(uint64,row,8)
if r:null(row,9)
then p.ptr.parent = 0
else p.ptr.parent = r:int(uint64,row,9)
end
if r:null(row,11)
then p.ptr.chgcount = 0
else p.ptr.chgcount = r:int(uint32,row,11)
end
p.ptr.accent = r:int(int16,row,12)
p.ptr.localpost = r:bool(row,0)
After (lines 758-784):
subject = { `subj, `sblen };
acl = {`r:string(row,4), `r:len(row,4)+1};
body = {`r:string(row,5), `r:len(row,5)+1};
convoheaduri = { `cvhu, `cvhlen }; --FIXME
}) ]
p.ptr.id = r:int(uint64,row,1)
p.ptr.author = r:int(uint64,row,2)
if r:null(row,6)
then p.ptr.posted = 0
else p.ptr.posted = r:int(uint64,row,6)
end
if r:null(row,7)
then p.ptr.discovered = 0
else p.ptr.discovered = r:int(uint64,row,7)
end
if r:null(row,8)
then p.ptr.edited = 0
else p.ptr.edited = r:int(uint64,row,8)
end
p.ptr.parent = r:int(uint64,row,9)
if r:null(row,11)
then p.ptr.chgcount = 0
else p.ptr.chgcount = r:int(uint32,row,11)
end
p.ptr.accent = r:int(int16,row,12)
p.ptr.localpost = r:bool(row,0)
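The functional change in this hunk is that the posted, discovered, and edited columns are now read through a NULL check that falls back to 0, the same guard already used for chgcount (while the guard around parent is dropped). A minimal Lua sketch of that pattern, using a plain table as a stand-in for the pgsql result row; the real code goes through the backend's r:null/r:int accessors, so the names below are illustrative only:

```lua
-- stand-in for the backend accessor: a NULL column is represented as nil
local function int_or_zero(row, col)
  if row[col] == nil then
    return 0          -- NULL column: fall back to zero, as the new hunk does
  end
  return row[col]     -- non-NULL: use the stored integer directly
end

-- hypothetical row: column 6 set, columns 7 and 8 NULL
local row = { [1] = 42, [6] = 1609517084 }
assert(int_or_zero(row, 6) == 1609517084)
assert(int_or_zero(row, 7) == 0)
assert(int_or_zero(row, 8) == 0)
```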
Modified config.lua from [cd48dd2db6] to [6a4b9180fe].
Before (lines 45-68):
};
feat = {};
debug = u.tobool(default('parsav_enable_debug',true));
backends = defaultlist('parsav_backends', 'pgsql');
braingeniousmode = false;
embeds = {
-- TODO with gzip compression, svg is dramatically superior to webp
-- we should have a build-time option to serve svg so instances
-- proxied behind nginx can serve svgz, or possibly just straight-up
-- add support for content-encoding headers and pre-compress the
-- damn things before compiling
{'style.css', 'text/css'};
{'live.js', 'text/javascript'}; -- rrrrrrrr
{'default-avatar.webp', 'image/webp'};
{'padlock.webp', 'image/webp'};
{'warn.webp', 'image/webp'};
{'query.webp', 'image/webp'};
};
default_ui_accent = tonumber(default('parsav_ui_default_accent',323));
}
if os.getenv('parsav_let_me_be_an_idiot') == "i know what i'm doing" then
conf.braingeniousmode = true -- SOUND GENERAL QUARTERS
end
if u.ping '.fslckout' or u.ping '_FOSSIL_' then
After (lines 45-66):
};
feat = {};
debug = u.tobool(default('parsav_enable_debug',true));
backends = defaultlist('parsav_backends', 'pgsql');
braingeniousmode = false;
embeds = {
-- TODO with gzip compression, svg is dramatically superior to webp
-- we should add support for content-encoding headers and pre-compress
-- the damn things before compiling (also making the binary smaller)
{'style.css', 'text/css'};
{'live.js', 'text/javascript'}; -- rrrrrrrr
{'default-avatar.webp', 'image/webp'}; -- needs inkscape-exclusive svg features
{'padlock.svg', 'image/svg+xml'};
{'warn.svg', 'image/svg+xml'};
{'query.svg', 'image/svg+xml'};
};
default_ui_accent = tonumber(default('parsav_ui_default_accent',323));
}
if os.getenv('parsav_let_me_be_an_idiot') == "i know what i'm doing" then
conf.braingeniousmode = true -- SOUND GENERAL QUARTERS
end
if u.ping '.fslckout' or u.ping '_FOSSIL_' then
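For context on the config block above: default and defaultlist appear to pull settings from parsav_* environment variables, falling back to a built-in value when the variable is unset. A rough, self-contained Lua sketch of what such helpers might look like; the real implementations live elsewhere in the tree, and the comma-separated list format here is an assumption:

```lua
-- assumed behaviour: read an environment variable, fall back when unset/empty
local function default(var, fallback)
  local v = os.getenv(var)
  if v == nil or v == '' then return fallback end
  return v
end

-- assumed behaviour: same, but split a comma-separated value into a list
local function defaultlist(var, fallback)
  local v = default(var, fallback)
  local list = {}
  for item in string.gmatch(v, '[^,]+') do
    list[#list + 1] = item
  end
  return list
end

print(default('parsav_ui_default_accent', 323))            -- 323 unless overridden
for _, b in ipairs(defaultlist('parsav_backends', 'pgsql')) do
  print(b)                                                  -- 'pgsql' by default
end
```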
Modified makefile from [8946539e56] to [e6a5371547].
Before (lines 1-11, 17-31):
dl = git
dbg-flags = $(if $(dbg),-g)
images = $(addsuffix .webp, $(basename $(wildcard static/*.svg)))
styles = $(addsuffix .css, $(basename $(wildcard static/*.scss)))

parsav parsavd: parsav.t config.lua pkgdata.lua $(images) $(styles)
	terra $(dbg-flags) $<

parsav.o parsavd.o: parsav.t config.lua pkgdata.lua $(images) $(styles)
	env parsav_link=no terra $(dbg-flags) $<

parsav.ll parsavd.ll: parsav.t config.lua pkgdata.lua $(images) $(styles)
................................................................................
	cwebp -q 90 $< -o $@

static/%.png: static/%.svg
	inkscape -f $< -C -d 180 -e $@

static/%.css: static/%.scss
	sassc -t compressed $< $@

clean:
	rm parsav parsav.o

install: parsav
	mkdir $(prefix)/bin
	cp $< $(prefix)/bin/

dep: dep.mbedtls dep.mongoose dep.json-c

dep.mbedtls: lib/mbedtls/library/libmbedtls.a \
After (lines 1-12, 18-32):
dl = git
dbg-flags = $(if $(dbg),-g)
images = static/default-avatar.webp #$(addsuffix .webp, $(basename $(wildcard static/*.svg)))
styles = $(addsuffix .css, $(basename $(wildcard static/*.scss)))

parsav parsavd: parsav.t config.lua pkgdata.lua $(images) $(styles)
	terra $(dbg-flags) $<

parsav.o parsavd.o: parsav.t config.lua pkgdata.lua $(images) $(styles)
	env parsav_link=no terra $(dbg-flags) $<

parsav.ll parsavd.ll: parsav.t config.lua pkgdata.lua $(images) $(styles)
................................................................................
	cwebp -q 90 $< -o $@

static/%.png: static/%.svg
	inkscape -f $< -C -d 180 -e $@

static/%.css: static/%.scss
	sassc -t compressed $< $@

clean:
	rm parsav parsav.o $(images) $(styles)

install: parsav
	mkdir $(prefix)/bin
	cp $< $(prefix)/bin/

dep: dep.mbedtls dep.mongoose dep.json-c

dep.mbedtls: lib/mbedtls/library/libmbedtls.a \
Modified parsav.md from [bfa0a26bd5] to [bd4297f1d2].
Before (lines 17-31):
* postgresql-libs
* compile-time
* cmark (commonmark implementation), for transformation of the help files, whose source is in commonmark. online documentation transforms these into html and embeds them in the binary; cmark is also used to produce the troff source which is used to build the offline documentation. disable with `parsav_online_documentation=no parsav_offline_documentation=no`
* troff implementation (tested with groff but as far as i know we don't need any groff-specific extensions) to produce PDFs and manpages from the cmark-generated intermediate forms. disable with `parsav_offline_documentation=no`
additional preconfigure dependencies are necessary if you are building directly from trunk, rather than from a release tarball that includes certain build artifacts which need to be embedded in the binary:
* inkscape, for rendering out UI graphics
* cwebp (libwebp package), for transforming inkscape PNGs to webp
* sassc, for compiling the SCSS stylesheet into its final CSS
all builds require terra, which, unfortunately, requires installing an older version of llvm, v9 at the latest (which i develop parsav under). with any luck, your distro will be clever enough to package terra and its dependencies properly (it's trivial on nix, tho you'll need to tweak the terra expression to select a more recent llvm package); Arch Linux is one of those distros which is not so clever, and whose (AUR) terra package is totally broken. due to these unfortunate circumstances, terra is distributed not just in source form, but also in the form of LLVM IR. distributions will also be made in the form of tarballed object code and assembly listings for various common platforms, currently including x86-32/64, arm7hf, aarch64, riscv, mips32/64, and ppc64/64le.
i've noticed that terra (at least with llvm9) seems to get a bit cantankerous and trigger llvm to fail with bizarre errors when you try to cross-compile parsav from x86-64 to any other platform, even x86-32. i don't know if this problem exists on other architectures or in what form, but as a workaround, the current cross-compile process consists of generating LLVM IR (ostensibly for x86-64, though this is in reality an architecture-independent language), and then compiling that down to an object file with llc. this is an enormous hassle; hopefully the terra (or llvm?) people will fix this eventually.
After (lines 17-31):
* postgresql-libs
* compile-time
* cmark (commonmark implementation), for transformation of the help files, whose source is in commonmark. online documentation transforms these into html and embeds them in the binary; cmark is also used to produce the troff source which is used to build the offline documentation. disable with `parsav_online_documentation=no parsav_offline_documentation=no`
* troff implementation (tested with groff but as far as i know we don't need any groff-specific extensions) to produce PDFs and manpages from the cmark-generated intermediate forms. disable with `parsav_offline_documentation=no`
additional preconfigure dependencies are necessary if you are building directly from trunk, rather than from a release tarball that includes certain build artifacts which need to be embedded in the binary:
* inkscape, for rendering out some of the UI graphics that can't be represented with standard svg
* cwebp (libwebp package), for transforming inkscape PNGs to webp
* sassc, for compiling the SCSS stylesheet into its final CSS
all builds require terra, which, unfortunately, requires installing an older version of llvm, v9 at the latest (which i develop parsav under). with any luck, your distro will be clever enough to package terra and its dependencies properly (it's trivial on nix, tho you'll need to tweak the terra expression to select a more recent llvm package); Arch Linux is one of those distros which is not so clever, and whose (AUR) terra package is totally broken. due to these unfortunate circumstances, terra is distributed not just in source form, but also in the form of LLVM IR. distributions will also be made in the form of tarballed object code and assembly listings for various common platforms, currently including x86-32/64, arm7hf, aarch64, riscv, mips32/64, and ppc64/64le.
i've noticed that terra (at least with llvm9) seems to get a bit cantankerous and trigger llvm to fail with bizarre errors when you try to cross-compile parsav from x86-64 to any other platform, even x86-32. i don't know if this problem exists on other architectures or in what form, but as a workaround, the current cross-compile process consists of generating LLVM IR (ostensibly for x86-64, though this is in reality an architecture-independent language), and then compiling that down to an object file with llc. this is an enormous hassle; hopefully the terra (or llvm?) people will fix this eventually.
Modified render/profile.t from [ae13f6f2b7] to [5ac1497f7a].
Before (lines 41-55):
end
var fullname = lib.render.nym(actor,0) defer fullname:free()
var profile = data.view.profile {
nym = fullname;
bio = bio;
xid = cs(actor.xid);
avatar = lib.trn(actor.origin == 0, pstr{ptr=avistr.buf,ct=avistr.sz},
cs(lib.coalesce(actor.avatar, '/s/default-avatar.webp')));
nposts = sn_posts, nfollows = sn_follows;
nfollowers = sn_followers, nmutuals = sn_mutuals;
tweetday = cs(timestr);
timephrase = lib.trn(actor.origin == 0, lib.str.plit'joined', lib.str.plit'known since');
auxbtn = auxp;
After (lines 41-55):
end
var fullname = lib.render.nym(actor,0) defer fullname:free()
var profile = data.view.profile {
nym = fullname;
bio = bio;
xid = cs(actor.xid);
avatar = lib.trn(actor.origin == 0, pstr{ptr=avistr.buf,ct=avistr.sz},
cs(lib.coalesce(actor.avatar, '/s/default-avatar.svg')));
nposts = sn_posts, nfollows = sn_follows;
nfollowers = sn_followers, nmutuals = sn_mutuals;
tweetday = cs(timestr);
timephrase = lib.trn(actor.origin == 0, lib.str.plit'joined', lib.str.plit'known since');
auxbtn = auxp;
Modified render/tweet.t from [77ab77b2db] to [2b64155fcc].
Before (lines 29-43):
var fullname = lib.render.nym(author,0) defer fullname:free()
var tpl = data.view.tweet {
text = bhtml;
subject = cs(lib.coalesce(p.subject,''));
nym = fullname;
when = cs(&timestr[0]);
avatar = cs(lib.trn(author.origin == 0, avistr.buf,
lib.coalesce(author.avatar, '/s/default-avatar.webp')));
acctlink = cs(author.xid);
permalink = permalink:finalize();
attr = ''
}
var attrbuf: int8[32]
if p.accent ~= -1 and p.accent ~= co.ui_hue then
After (lines 29-43):
var fullname = lib.render.nym(author,0) defer fullname:free()
var tpl = data.view.tweet {
text = bhtml;
subject = cs(lib.coalesce(p.subject,''));
nym = fullname;
when = cs(&timestr[0]);
avatar = cs(lib.trn(author.origin == 0, avistr.buf,
lib.coalesce(author.avatar, '/s/default-avatar.svg')));
acctlink = cs(author.xid);
permalink = permalink:finalize();
attr = ''
}
var attrbuf: int8[32]
if p.accent ~= -1 and p.accent ~= co.ui_hue then
Modified srv.t from [675eda18a7] to [b74b4804fc].
Before (lines 205-219, 258-272):
lib.http.header { key = 'Cache-Control', value = 'no-store' },
lib.http.header {
key = 'X-Live-Newest-Artifact';
value = lib.math.decstr(lastup, &nbuf[20]);
},
lib.http.header { key = 'Content-Length', value = '0' }
)
if self.live_last ~= 0 and self.live_last <= lastup then
lib.net.mg_printf(self.con, 'HTTP/1.1 %s', lib.http.codestr(200))
for i = 0, [hdrs.type.N] do
lib.net.mg_printf(self.con, '%s: %s\r\n', hdrs[i].key, hdrs[i].value)
end
lib.net.mg_printf(self.con, '\r\n')
else
self:rawpage(200, pg, [lib.mem.ptr(lib.http.header)] {
................................................................................
self:reroute_cookie(dest, &sesskey[0])
end
terra convo:complain(code: uint16, title: rawstring, msg: rawstring)
if msg == nil then msg = "i'm sorry, dave. i can't let you do that" end
var ti: lib.str.acc ti:compose('error :: ', title)
var bo: lib.str.acc bo:compose('<div class="message"><img class="icon" src="/s/warn.webp"><h1>',title,'</h1><p>',msg,'</p></div>')
var body = [convo.page] {
title = ti:finalize();
body = bo:finalize();
class = lib.str.plit 'error';
cache = false;
}
After (lines 205-219, 258-272):
lib.http.header { key = 'Cache-Control', value = 'no-store' },
lib.http.header {
key = 'X-Live-Newest-Artifact';
value = lib.math.decstr(lastup, &nbuf[20]);
},
lib.http.header { key = 'Content-Length', value = '0' }
)
if self.live_last ~= 0 and self.live_last >= lastup then
lib.net.mg_printf(self.con, 'HTTP/1.1 %s', lib.http.codestr(200))
for i = 0, [hdrs.type.N] do
lib.net.mg_printf(self.con, '%s: %s\r\n', hdrs[i].key, hdrs[i].value)
end
lib.net.mg_printf(self.con, '\r\n')
else
self:rawpage(200, pg, [lib.mem.ptr(lib.http.header)] {
................................................................................
self:reroute_cookie(dest, &sesskey[0])
end
terra convo:complain(code: uint16, title: rawstring, msg: rawstring)
if msg == nil then msg = "i'm sorry, dave. i can't let you do that" end
var ti: lib.str.acc ti:compose('error :: ', title)
var bo: lib.str.acc bo:compose('<div class="message"><img class="icon" src="/s/warn.svg"><h1>',title,'</h1><p>',msg,'</p></div>')
var body = [convo.page] {
title = ti:finalize();
body = bo:finalize();
class = lib.str.plit 'error';
cache = false;
}
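The only behavioural change in this hunk is the flipped comparison: the server now answers with a header-only 200 when the client's last-seen artifact is already at least as new as lastup, and renders the full page otherwise. A small Lua restatement of that decision, with placeholder names rather than the actual srv.t structures:

```lua
-- live_last: the X-Live-Last-Arrival value the client sent (0 = first poll)
-- lastup:    the newest artifact the server knows about
local function live_response(live_last, lastup)
  if live_last ~= 0 and live_last >= lastup then
    return 'headers-only'   -- client is already current, skip the body
  end
  return 'full-page'        -- client is behind (or polling for the first time)
end

assert(live_response(100, 100) == 'headers-only')
assert(live_response(90, 100)  == 'full-page')
assert(live_response(0, 100)   == 'full-page')
```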
Modified static/live.js from [6fb4c9ec70] to [15d09c6d9b].
Before (lines 16-51):
* data-live property, an html element registers itself for live
* updates from the server. this is pretty straightforward: we
* retrieve this url from the server as a get request, create a
* tree from its html, find the element in question, ferret out
* any deltas, and apply them. */
document.querySelectorAll('*[data-live]').forEach(function(container) {
let interv = parseFloat(container.attributes.getNamedItem('data-live').nodeValue) * 1000;
container._liveLastArrival = '0'; /* TODO include header for this */
window.setInterval(function() {
var req = new Request(window.location, {
method: 'GET',
headers: {
'X-Live-Last-Arrival': container._liveLastArrival
}
})
fetch(req).then(function(resp) {
if (!resp.ok) return;
let newest = resp.headers.get('X-Live-Newest-Artifact');
if (newest <= container._liveLastArrival) {
resp.body.cancel();
return;
}
container._liveLastArrival = newest
resp.text().then(function(htmlbody) {
var parser = new DOMParser();
var newdoc = parser.parseFromString(htmlbody,'text/html')
// console.log(newdoc.getElementById(container.id).innerHTML)
container.innerHTML = newdoc.getElementById(container.id).innerHTML
})
})
}, interv)
});
});
After (lines 16-50):
* data-live property, an html element registers itself for live
* updates from the server. this is pretty straightforward: we
* retrieve this url from the server as a get request, create a
* tree from its html, find the element in question, ferret out
* any deltas, and apply them. */
document.querySelectorAll('*[data-live]').forEach(function(container) {
let interv = parseFloat(container.attributes.getNamedItem('data-live').nodeValue) * 1000;
container._liveLastArrival = 0; /* TODO include initial value in document */
window.setInterval(function() {
var req = new Request(window.location, {
method: 'GET',
headers: {
'X-Live-Last-Arrival': container._liveLastArrival
}
})
fetch(req).then(function(resp) {
if (!resp.ok) return;
let newest = parseInt(resp.headers.get('X-Live-Newest-Artifact'));
if (newest <= container._liveLastArrival) {
resp.body.cancel();
return;
}
container._liveLastArrival = newest
resp.text().then(function(htmlbody) {
var parser = new DOMParser();
var newdoc = parser.parseFromString(htmlbody,'text/html')
container.innerHTML = newdoc.getElementById(container.id).innerHTML
})
})
}, interv)
});
});
Modified static/style.scss from [0e6b10a9e2] to [a256539ae3].
Before (lines 414-432):
font-weight: bold;
text-decoration: none;
cursor: help;
}
input.acl {
@extend %teletype;
background: url(/s/padlock.webp) no-repeat;
background-size: 20pt;
background-position: 0.05in 50%;
&:focus {
background: url(/s/padlock.webp) no-repeat, $grad-ui-focus;
background-size: 20pt;
background-position: 0.05in 50%;
};
padding-left: 0.40in;
}
div.modal {
After (lines 414-432):
font-weight: bold;
text-decoration: none;
cursor: help;
}
input.acl {
@extend %teletype;
background: url(/s/padlock.svg) no-repeat;
background-size: 20pt;
background-position: 0.05in 50%;
&:focus {
background: url(/s/padlock.svg) no-repeat, $grad-ui-focus;
background-size: 20pt;
background-position: 0.05in 50%;
};
padding-left: 0.40in;
}
div.modal {
Modified view/confirm.tpl from [0d2952df9c] to [3b921f59eb].
Before (lines 1-9):
<form class="message" method="post">
<img class="icon" src="/s/query.webp">
<h1>@title</h1>
<p>@query</p>
<menu class="horizontal choice">
<a class="button" href="@:cancel">cancel</a>
<button name="act" value="confirm">confirm</button>
</menu>
</form>
After (lines 1-9):
<form class="message" method="post">
<img class="icon" src="/s/query.svg">
<h1>@title</h1>
<p>@query</p>
<menu class="horizontal choice">
<a class="button" href="@:cancel">cancel</a>
<button name="act" value="confirm">confirm</button>
</menu>
</form>