157 lines
3 KiB
Hare
157 lines
3 KiB
Hare
use errors;
|
|
use bufio;
|
|
use io;
|
|
use os;
|
|
use strconv;
|
|
use strings;
|
|
use types;
|
|
|
|
// Creates a stream which reads the body of an HTTP response from "conn",
// choosing the appropriate decoding from the response's Content-Length and
// Transfer-Encoding headers. Bytes which the scanner has already buffered
// past the header section are handed to the body reader. Per RFC 7230
// §3.3.3, Transfer-Encoding takes precedence over Content-Length when both
// headers are present. Returns protoerr for a malformed framing header and
// errors::unsupported for a transfer coding we cannot decode.
export fn new_reader(
	conn: io::handle,
	resp: *response,
	scan: *bufio::scanner,
) (*io::stream | errors::unsupported | protoerr) = {
	// TODO: Content-Encoding support
	const cl = header_get(&resp.header, "Content-Length");
	const te = header_get(&resp.header, "Transfer-Encoding");

	if (te == "") {
		// No Transfer-Encoding: the body is either Content-Length
		// bytes long or, absent that header, runs until EOF.
		let length = types::SIZE_MAX;
		if (cl != "") {
			length = match (strconv::stoz(cl)) {
			case let z: size =>
				yield z;
			case =>
				// Malformed Content-Length value
				return protoerr;
			};
		};
		const remain = bufio::scan_buffer(scan);
		return new_identity_reader(conn, remain, length);
	};

	let stream: io::handle = conn;
	let buffer: []u8 = bufio::scan_buffer(scan);
	// Tracks whether at least one valid transfer coding wrapped the
	// stream; more reliable than testing the tagged union type of
	// "stream", since "conn" itself may be an *io::stream.
	let wrapped = false;
	const iter = strings::tokenize(te, ",");
	for (true) {
		const te = match (strings::next_token(&iter)) {
		case let tok: str =>
			yield strings::trim(tok);
		case void =>
			break;
		};
		if (te == "") {
			// Tolerate empty list elements, e.g. "chunked,"
			continue;
		};

		// XXX: We could add lzw support if someone added it to
		// hare-compress
		switch (te) {
		case "chunked" =>
			stream = new_chunked_reader(stream, buffer);
			// Only the innermost reader consumes the scanner's
			// read-ahead buffer
			buffer = [];
			wrapped = true;
		case "deflate" =>
			abort(); // TODO
		case "gzip" =>
			abort(); // TODO
		case =>
			return errors::unsupported;
		};
	};

	if (!wrapped) {
		// Transfer-Encoding header present but contained no
		// recognizable codings
		return protoerr;
	};
	return stream as *io::stream;
};
|
|
|
|
// State for a body reader using the identity (null) transfer coding: body
// bytes are passed through verbatim until Content-Length is exhausted.
export type identity_reader = struct {
	// Must be first so that *identity_reader casts to *io::stream
	vtable: io::stream,
	// Underlying connection the body is read from
	conn: io::handle,
	// Read-ahead storage; seeded with bytes the header scanner had
	// already consumed from the connection
	buffer: [os::BUFSIZ]u8,
	// Number of valid bytes currently held at the front of "buffer"
	pending: size,
	// Body bytes still owed to the caller; types::SIZE_MAX when no
	// Content-Length was given (read until EOF)
	length: size,
};
|
|
|
|
// io::stream vtable for [[identity_reader]]; read-only, no writer or
// closer is provided.
const identity_reader_vtable = io::vtable {
	reader = &identity_read,
	...
};
|
|
|
|
// Creates a new reader that reads data until the response's Content-Length is
// reached; i.e. the null Transport-Encoding. "buffer" holds body bytes which
// the header scanner already consumed from the connection; they are replayed
// before any further reads hit "conn".
fn new_identity_reader(
	conn: io::handle,
	buffer: []u8,
	content_length: size,
) *io::stream = {
	const state = alloc(identity_reader {
		vtable = &identity_reader_vtable,
		conn = conn,
		pending = len(buffer),
		length = content_length,
		...
	});
	// Seed the internal buffer with the scanner's read-ahead
	state.buffer[..len(buffer)] = buffer[..];
	return state;
};
|
|
|
|
// Reader implementation for [[identity_reader]]: drains any buffered
// read-ahead first, then reads from the connection, returning io::EOF once
// Content-Length bytes have been delivered.
fn identity_read(
	s: *io::stream,
	buf: []u8,
) (size | io::EOF | io::error) = {
	let rd = s: *identity_reader;
	assert(rd.vtable == &identity_reader_vtable);

	if (rd.length == 0) {
		// Content-Length exhausted
		return io::EOF;
	};

	if (rd.pending == 0) {
		// Refill the internal buffer, but never read past the
		// remaining content length: trailing bytes on the connection
		// may belong to a subsequent response.
		let nread = rd.length;
		if (nread > len(rd.buffer)) {
			nread = len(rd.buffer);
		};

		match (io::read(rd.conn, rd.buffer[..nread])?) {
		case let n: size =>
			rd.pending = n;
		case io::EOF =>
			return io::EOF;
		};
	};

	let n = len(buf);
	if (n > rd.pending) {
		n = rd.pending;
	};
	if (n > rd.length) {
		// The scanner's read-ahead can extend beyond this message
		// body; without this clamp "rd.length -= n" would underflow
		// the unsigned size and deliver foreign bytes to the caller.
		n = rd.length;
	};
	buf[..n] = rd.buffer[..n];
	// Shift the unconsumed remainder to the front of the buffer
	rd.buffer[..len(rd.buffer) - n] = rd.buffer[n..];
	rd.pending -= n;
	rd.length -= n;
	return n;
};
|
|
|
|
// State for a body reader using the chunked transfer coding.
// NOTE(review): decoding is not implemented yet — see chunked_read below.
export type chunked_reader = struct {
	// Must be first so that *chunked_reader casts to *io::stream
	vtable: io::stream,
	// Underlying connection (or inner transfer-coding stream)
	conn: io::handle,
	// Read-ahead storage for chunk data and framing
	buffer: [os::BUFSIZ]u8,
	// Number of valid bytes currently held in "buffer"
	pending: size,
};
|
|
|
|
// Creates a new reader which decodes the chunked transfer coding from
// "conn". "buffer" holds bytes already consumed from the connection by the
// header scanner. Unimplemented: currently aborts unconditionally.
fn new_chunked_reader(
	conn: io::handle,
	buffer: []u8,
) *io::stream = {
	abort(); // TODO
};
|
|
|
|
// io::stream vtable for [[chunked_reader]]; read-only, no writer or
// closer is provided.
const chunked_reader_vtable = io::vtable {
	reader = &chunked_read,
	...
};
|
|
|
|
// Reader implementation for [[chunked_reader]]. Unimplemented: currently
// aborts unconditionally.
fn chunked_read(
	s: *io::stream,
	buf: []u8,
) (size | io::EOF | io::error) = {
	abort(); // TODO
};
|