sirsem.lua, before the change (diff hunks at original lines 17-30, 112-143, 1170-1184, 1336-1349):

		end
		return pkg
	end
	ss = namespace 'sirsem'
	ss.namespace = namespace
end
function ss.map(fn, lst)
	local new = {}
	for k,v in pairs(lst) do
		table.insert(new, fn(v,k))
	end
	return new
end
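
-- Usage sketch (illustration, not part of sirsem.lua; assumes the module has
-- been loaded so that ss is in scope): ss.map applies fn(value, key) to every
-- entry of lst and gathers the results into a fresh sequence via table.insert.
local doubled = ss.map(function(v) return v * 2 end, {10, 20, 30})
-- doubled now holds {20, 40, 60}; because pairs() drives the loop, results for
-- any non-array keys are appended in unspecified order.
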
................................................................................
	-- tbl (lightweight alternative to shallow copies)
	tpl = tpl or {}
	return setmetatable({}, {__index=tbl})
end
ss.str = {}
function ss.str.begins(str, pfx)
	-- appallingly, this is actually ~2/5ths faster than either
	-- of the below. i hate scripting languages so much
	return string.find(str, pfx, 1, true) == 1
	-- to my shock, disgust, and horror, even writing my own
	-- string scanning library for lua IN C only sped this up by
	-- a tiny fraction. i am just speechless.
	-- return string.sub(str, 1, #pfx) == pfx
	-- local pl = string.len(pfx)
	-- local sl = string.len(str)
	-- if sl < pl then return false end
	-- for i=1,pl do
	-- if string.byte(str,i) ~= string.byte(pfx,i) then
	-- return false
	-- end
	-- end
	-- return true
end
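
-- Rough benchmark sketch for the performance claim above (illustration only;
-- absolute numbers vary by Lua implementation). os.clock() reports CPU time.
local function bench(label, fn)
	local start = os.clock()
	for _ = 1, 1000000 do fn('cortav: some prefix-tested string', 'cortav:') end
	print(label, os.clock() - start)
end
bench('plain find ', function(s, p) return string.find(s, p, 1, true) == 1 end)
bench('sub+compare', function(s, p) return string.sub(s, 1, #p) == p end)
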
function ss.enum(syms)
	local e = {}
	for i,v in pairs(syms) do
		e[v] = i
		e[i] = v
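
-- Usage sketch (illustration; the hunk cuts off here, but the loop above
-- builds a two-way mapping, assuming the full function returns e):
local colors = ss.enum {'red', 'green', 'blue'}
-- colors.red == 1 and colors[1] == 'red': names and indices resolve both ways.
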
................................................................................
local fetchableProtocols = {
	http = {
		proto = {
			{'http'};
			{'https'};
			{'http', 'tls'};
		};
		fetch = function(uri)
			fetchexn('cortav must be compiled with the C shim and libcurl support to use http fetch'):throw()
		end;
	};
	file = {
		proto = {
			{'file'};
			{'file', 'txt'};
................................................................................
		class = nil;
		namespace = nil;
		path = nil;
		query = nil;
		frag = nil;
		auth = nil;
	} end;
	construct = function(me, str)
		local enc = ss.str.enc.utf8
		-- URIs must be either ASCII or utf8, so we read and
		-- store as UTF8. to use a URI in another encoding, it
		-- must be manually converted to and fro using the
		-- appropriate functions, such as encodeUCS
		if not str then return end

sirsem.lua, after the change (diff hunks at lines 17-32, 114-151, 1178-1209, 1361-1377):

		end
		return pkg
	end
	ss = namespace 'sirsem'
	ss.namespace = namespace
end
local native = _G.native
function ss.map(fn, lst)
	local new = {}
	for k,v in pairs(lst) do
		table.insert(new, fn(v,k))
	end
	return new
end
................................................................................
	-- tbl (lightweight alternative to shallow copies)
	tpl = tpl or {}
	return setmetatable({}, {__index=tbl})
end
ss.str = {}
if native then
	function ss.str.begins(str, pfx)
		return native.strutils.rangematch(str,1,pfx)
	end
else
	function ss.str.begins(str, pfx)
		-- appallingly, this is actually ~2/5ths faster than either
		-- of the below. i hate scripting languages so much
		return string.find(str, pfx, 1, true) == 1
		-- to my shock, disgust, and horror, even writing my own
		-- string scanning library for lua IN C only sped this up by
		-- a tiny fraction. i am just speechless.
		-- return string.sub(str, 1, #pfx) == pfx
		-- local pl = string.len(pfx)
		-- local sl = string.len(str)
		-- if sl < pl then return false end
		-- for i=1,pl do
		-- if string.byte(str,i) ~= string.byte(pfx,i) then
		-- return false
		-- end
		-- end
		-- return true
	end
end
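
-- Behavioral sketch (illustration): whichever branch gets installed at load
-- time, ss.str.begins answers whether str starts with pfx; rangematch comes
-- from cortav's C shim, while the fallback uses a plain-text string.find
-- anchored at position 1.
assert(ss.str.begins('file.txt', 'file'))
assert(not ss.str.begins('file.txt', 'txt'))
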
function ss.enum(syms)
	local e = {}
	for i,v in pairs(syms) do
		e[v] = i
		e[i] = v
................................................................................
local fetchableProtocols = {
	http = {
		proto = {
			{'http'};
			{'https'};
			{'http', 'tls'};
		};
		fetch = native and native.net and function(uri)
			-- translate to a curl-compatible URI
			if uri.path and uri.path ~= '' and uri.path:sub(1,1) ~= '/' then
				fetchexn('relative HTTP URIs like “%s” are not fetchable', uri):throw()
			end
			uri = uri:clone()
			if uri.class[2] == 'tls' then
				uri.class = {'https'}
			end
			if not uri.namespace then
				uri.namespace = 'localhost'
			end
			local body, e = native.net.fetchURI(tostring(uri))
			if e then
				fetchexn('could not fetch URI “%s”: %s',uri,e):throw()
			end
			return body
		end or function(uri)
			fetchexn('cortav must be compiled with the C shim and libcurl support to use http fetch'):throw()
		end;
	};
	file = {
		proto = {
			{'file'};
			{'file', 'txt'};
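
-- Sketch of the normalization the new fetch branch performs before handing the
-- URI to native.net.fetchURI (an illustrative rewrite over a plain table, not
-- the real ss.uri object; normalizeForCurl is a hypothetical name): relative
-- paths are rejected, an 'http'+'tls' class collapses to 'https', and a
-- missing namespace (host) defaults to localhost.
local function normalizeForCurl(u) -- u is a table like {class={...}, namespace=..., path=...}
	assert(u.path == nil or u.path == '' or u.path:sub(1,1) == '/',
		'relative HTTP URIs are not fetchable')
	if u.class[2] == 'tls' then u.class = {'https'} end
	u.namespace = u.namespace or 'localhost'
	return u
end
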
................................................................................
		class = nil;
		namespace = nil;
		path = nil;
		query = nil;
		frag = nil;
		auth = nil;
	} end;
	clonesetup = function(me)
		me.class = ss.clone(me.class)
	end;
	construct = function(me, str)
		local enc = ss.str.enc.utf8
		-- URIs must be either ASCII or utf8, so we read and
		-- store as UTF8. to use a URI in another encoding, it
		-- must be manually converted to and fro using the
		-- appropriate functions, such as encodeUCS
		if not str then return end
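
-- Mechanism sketch for the new clonesetup hook (standalone illustration, not
-- the cortav API itself): giving each clone its own copy of the class table
-- means in-place edits to a clone's class can never leak into the original.
local function shallowcopy(t)
	local n = {}
	for k, v in pairs(t) do n[k] = v end
	return n
end
local original = { class = {'http', 'tls'} }
local clone = shallowcopy(original)      -- roughly what uri:clone() produces
clone.class = shallowcopy(clone.class)   -- roughly what clonesetup adds
clone.class[1], clone.class[2] = 'https', nil
assert(original.class[1] == 'http' and original.class[2] == 'tls')
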