v/vlib/net/http/http.v

// Copyright (c) 2019-2020 Alexander Medvednikov. All rights reserved.
// Use of this source code is governed by an MIT license
// that can be found in the LICENSE file.
module http
import net.urllib
import net.http.chunked
import strings
import net
const (
max_redirects = 4
content_type_default = 'text/plain'
bufsize = 1536
)
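// Typical usage, as a minimal sketch (the URL is illustrative; both calls return
// an optional Response, so failures are handled in `or` blocks):
//
//    resp := http.get('https://example.com') or {
//        eprintln('http.get failed: $err')
//        return
//    }
//    println(resp.status_code)
//    println(resp.text)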
pub struct Request {
pub mut:
method Method
headers map[string]string
cookies map[string]string
data string
url string
user_agent string = 'v.http'
verbose bool
user_ptr voidptr
ws_func voidptr
}
pub struct FetchConfig {
pub mut:
method Method
data string
params map[string]string
headers map[string]string
cookies map[string]string
user_agent string = 'v.http'
verbose bool = false
}
pub struct Response {
pub:
text string
headers map[string]string // original response headers, 'Set-Cookie' or 'set-Cookie', etc.
lheaders map[string]string // same as headers, but with normalized lowercased keys, like 'set-cookie'
cookies map[string]string
status_code int
}
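// new_request creates a Request with the given method, URL and body data.
// For .get requests, data is appended to the URL as a query string rather than sent as a body.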
pub fn new_request(method Method, url_, data string) ?Request {
url := if method == .get { url_ + '?' + data } else { url_ }
//println('new req() method=$method url="$url" data="$data"')
return Request{
method: method
url: url
data: data
/*
headers: {
'Accept-Encoding': 'compress'
}
*/
}
}
fn (methods []Method) contains(m Method) bool {
for method in methods {
if method == m {
return true
}
}
return false
}
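// get sends a GET request to the given url and returns the parsed Response.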
pub fn get(url string) ?Response {
return fetch_with_method(.get, url, FetchConfig{})
}
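// post sends data as a 'text/plain' POST request body to the given url.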
pub fn post(url, data string) ?Response {
return fetch_with_method(.post, url, {
data: data
headers: {
'Content-Type': content_type_default
}
})
}
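// post_json sends data as an 'application/json' POST request body to the given url.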
pub fn post_json(url, data string) ?Response {
return fetch_with_method(.post, url, {
data: data
headers: {
'Content-Type': 'application/json'
}
})
}
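// post_form URL-encodes the given form fields and sends them as an
// 'application/x-www-form-urlencoded' POST request body.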
pub fn post_form(url string, data map[string]string) ?Response {
return fetch_with_method(.post, url, {
headers: {
'Content-Type': 'application/x-www-form-urlencoded'
}
data: url_encode_form_data(data)
})
}
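// put sends data as a 'text/plain' PUT request body to the given url.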
pub fn put(url, data string) ?Response {
return fetch_with_method(.put, url, {
data: data
headers: {
'Content-Type': content_type_default
}
})
}
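// patch sends data as a 'text/plain' PATCH request body to the given url.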
pub fn patch(url, data string) ?Response {
return fetch_with_method(.patch, url, {
data: data
headers: {
'Content-Type': content_type_default
}
})
}
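// head sends a HEAD request to the given url.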
pub fn head(url string) ?Response {
return fetch_with_method(.head, url, FetchConfig{})
}
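// delete sends a DELETE request to the given url.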
pub fn delete(url string) ?Response {
return fetch_with_method(.delete, url, FetchConfig{})
}
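// fetch sends an HTTP request to the given url, using the method, headers, cookies,
// params and data from config, and returns the parsed Response.
// A minimal sketch of a custom request (the URL and header values are illustrative):
//
//    resp := http.fetch('https://example.com/api', {
//        method: .post
//        data: '{"lang":"V"}'
//        headers: {
//            'Content-Type': 'application/json'
//        }
//    }) or {
//        eprintln('fetch failed: $err')
//        return
//    }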
pub fn fetch(_url string, config FetchConfig) ?Response {
if _url == '' {
return error('http.fetch: empty url')
}
url := build_url_from_fetch(_url, config) or {
return error('http.fetch: invalid url ${_url}')
}
data := config.data
req := Request{
method: config.method
url: url
data: data
headers: config.headers
cookies: config.cookies
user_agent: config.user_agent
ws_func: 0
user_ptr: 0
verbose: config.verbose
}
res := req.do()?
return res
}
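// get_text sends a GET request to the given url and returns the response body,
// or an empty string if the request fails.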
pub fn get_text(url string) string {
resp := fetch(url, {
method: .get
}) or {
return ''
}
return resp.text
}
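// url_encode_form_data query-escapes the given keys and values and joins them
// into a single 'key=value&key2=value2' string.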
pub fn url_encode_form_data(data map[string]string) string {
mut pieces := []string{}
for key_, value_ in data {
key := urllib.query_escape(key_)
value := urllib.query_escape(value_)
pieces << '$key=$value'
}
return pieces.join('&')
}
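// fetch_with_method is a small helper that overrides config.method before delegating to fetch.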
fn fetch_with_method(method Method, url string, _config FetchConfig) ?Response {
mut config := _config
config.method = method
return fetch(url, config)
}
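// build_url_from_fetch parses and validates _url and appends config.params to its query string.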
fn build_url_from_fetch(_url string, config FetchConfig) ?string {
mut url := urllib.parse(_url)?
params := config.params
if params.keys().len == 0 {
return url.str()
}
mut pieces := []string{}
for key in params.keys() {
pieces << '${key}=${params[key]}'
}
mut query := pieces.join('&')
if url.raw_query.len > 1 {
query = url.raw_query + '&' + query
}
url.raw_query = query
return url.str()
}
fn (mut req Request) free() {
unsafe {
req.headers.free()
}
}
fn (mut resp Response) free() {
unsafe {
resp.headers.free()
}
}
// add_header adds the given key and value to the HTTP request headers
pub fn (mut req Request) add_header(key, val string) {
req.headers[key] = val
}
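// parse_headers parses 'Key: value' lines into a header map,
// skipping the first line (the status/request line).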
pub fn parse_headers(lines []string) map[string]string {
mut headers := map[string]string{}
for i, line in lines {
if i == 0 {
continue
}
words := line.split(': ')
if words.len != 2 {
continue
}
headers[words[0]] = words[1]
}
return headers
}
// do sends the HTTP request and returns `http.Response` as soon as the response is received
pub fn (req &Request) do() ?Response {
mut url := urllib.parse(req.url) or {
return error('http.Request.do: invalid url ${req.url}')
}
mut rurl := url
mut resp := Response{}
mut no_redirects := 0
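// Keep requesting until a non-redirect status is returned, or until max_redirects is reached.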
for {
if no_redirects == max_redirects {
return error('http.request.do: maximum number of redirects reached ($max_redirects)')
}
qresp := req.method_and_url_to_response(req.method, rurl)?
resp = qresp
if resp.status_code !in [301, 302, 303, 307, 308] {
break
}
// follow any redirects
mut redirect_url := resp.lheaders['location']
if redirect_url.len > 0 && redirect_url[0] == `/` {
url.set_path(redirect_url) or {
return error('http.request.do: invalid path in redirect: "$redirect_url"')
}
redirect_url = url.str()
}
qrurl := urllib.parse(redirect_url) or {
return error('http.request.do: invalid URL in redirect "$redirect_url"')
}
rurl = qrurl
no_redirects++
}
return resp
}
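// method_and_url_to_response resolves the host, port (80 for http, 443 for https when
// unspecified) and path from url, then dispatches the request to ssl_do or http_do.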
fn (req &Request) method_and_url_to_response(method Method, url urllib.URL) ?Response {
host_name := url.hostname()
scheme := url.scheme
p := url.path.trim_left('/')
path := if url.query().len > 0 { '/$p?${url.query().encode()}' } else { '/$p' }
mut nport := url.port().int()
if nport == 0 {
if scheme == 'http' {
nport = 80
}
if scheme == 'https' {
nport = 443
}
}
// println('fetch $method, $scheme, $host_name, $nport, $path ')
if scheme == 'https' {
// println('ssl_do( $nport, $method, $host_name, $path )')
res := req.ssl_do(nport, method, host_name, path)?
return res
} else if scheme == 'http' {
// println('http_do( $nport, $method, $host_name, $path )')
res := req.http_do(nport, method, host_name, path)?
return res
}
return error('http.request.method_and_url_to_response: unsupported scheme: "$scheme"')
}
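// parse_response splits a raw HTTP response into its status code, headers (both as
// received and lowercased), cookies and body text, decoding chunked bodies when needed.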
fn parse_response(resp string) Response {
// TODO: Header data type
mut headers := map[string]string{}
mut lheaders := map[string]string{}
// TODO: Cookie data type
mut cookies := map[string]string{}
first_header := resp.all_before('\n')
mut status_code := 0
if first_header.contains('HTTP/') {
val := first_header.find_between(' ', ' ')
status_code = val.int()
}
mut text := ''
// Build resp headers map and separate the body
mut nl_pos := 3
mut i := 1
for {
old_pos := nl_pos
nl_pos = resp.index_after('\n', nl_pos + 1)
if nl_pos == -1 {
break
}
h := resp[old_pos + 1..nl_pos]
// End of headers
if h.len <= 1 {
text = resp[nl_pos + 1..]
break
}
i++
pos := h.index(':') or {
continue
}
// if h.contains('Content-Type') {
// continue
// }
mut key := h[..pos]
lkey := key.to_lower()
val := h[pos + 2..]
if lkey == 'set-cookie' {
parts := val.trim_space().split('=')
cookies[parts[0]] = parts[1]
}
tval := val.trim_space()
headers[key] = tval
lheaders[lkey] = tval
}
if lheaders['transfer-encoding'] == 'chunked' || lheaders['content-length'] == '' {
text = chunked.decode(text)
2019-08-07 03:57:47 +02:00
}
return Response{
status_code: status_code
headers: headers
lheaders: lheaders
cookies: cookies
text: text
}
}
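// build_request_headers serializes the request line, headers, cookies and body into
// raw HTTP/1.1 request text (a 'Connection: close' header is always appended).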
fn (req &Request) build_request_headers(method Method, host_name, path string) string {
ua := req.user_agent
mut uheaders := []string{}
if 'Host' !in req.headers {
uheaders << 'Host: $host_name\r\n'
}
if 'User-Agent' !in req.headers {
uheaders << 'User-Agent: $ua\r\n'
}
if req.data.len > 0 && 'Content-Length' !in req.headers {
uheaders << 'Content-Length: ${req.data.len}\r\n'
}
for key, val in req.headers {
if key == 'Cookie' {
continue
}
uheaders << '${key}: ${val}\r\n'
}
uheaders << req.build_request_cookies_header()
return '$method $path HTTP/1.1\r\n' + uheaders.join('') + 'Connection: close\r\n\r\n' +
req.data
}
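// build_request_cookies_header renders req.cookies, together with any explicit
// 'Cookie' header, into a single 'Cookie: ...' header line.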
fn (req &Request) build_request_cookies_header() string {
if req.cookies.keys().len < 1 {
return ''
}
mut cookie := []string{}
for key, val in req.cookies {
cookie << '$key=$val'
}
if 'Cookie' in req.headers && req.headers['Cookie'] != '' {
cookie << req.headers['Cookie']
}
return 'Cookie: ' + cookie.join('; ') + '\r\n'
}
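// The four helpers below have been superseded; each one only panics with a hint
// about its replacement.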
pub fn unescape_url(s string) string {
panic('http.unescape_url() was replaced with urllib.query_unescape()')
}
pub fn escape_url(s string) string {
panic('http.escape_url() was replaced with urllib.query_escape()')
}
pub fn unescape(s string) string {
panic('http.unescape() was replaced with http.unescape_url()')
}
pub fn escape(s string) string {
panic('http.escape() was replaced with http.escape_url()')
}
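// http_do sends the request over a plain TCP connection and reads the response
// in bufsize chunks before parsing it.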
fn (req &Request) http_do(port int, method Method, host_name, path string) ?Response {
rbuffer := [bufsize]byte{}
mut sb := strings.new_builder(100)
s := req.build_request_headers(method, host_name, path)
client := net.dial(host_name, port)?
client.send(s.str, s.len) or {
}
for {
readbytes := client.crecv(rbuffer, bufsize)
if readbytes < 0 {
return error('http.request.http_do: error reading response. readbytes=$readbytes')
}
if readbytes == 0 {
break
}
sb.write(tos(rbuffer, readbytes))
}
client.close() or {
}
return parse_response(sb.str())
}
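// referer returns the 'Referer' header of the request, or an empty string if it is not set.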
pub fn (req &Request) referer() string {
return req.headers['Referer']
}