v/vlib/net/http/http.v

// Copyright (c) 2019-2020 Alexander Medvednikov. All rights reserved.
// Use of this source code is governed by an MIT license
// that can be found in the LICENSE file.
module http
import net.urllib
import net.http.chunked
const (
max_redirects = 4
content_type_default = 'text/plain'
)
pub struct Request {
pub mut:
method string
headers map[string]string
cookies map[string]string
data string
url string
user_agent string = 'v.http'
verbose bool
user_ptr voidptr
ws_func voidptr
}
pub struct FetchConfig {
pub mut:
method string
data string
params map[string]string
headers map[string]string
cookies map[string]string
user_agent string = 'v.http'
verbose bool
}
pub struct Response {
pub:
text string
headers map[string]string
cookies map[string]string
status_code int
}
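// new_request creates a Request with the given method, url and data.
// For a GET request, `data` is appended to the url as its query string.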
pub fn new_request(method, url_, data string) ?Request {
url := if method.to_upper() == 'GET' { url_ + '?' + data } else { url_ }
// println('new req() method=$method url="$url" data="$data"')
return Request{
method: method.to_upper()
url: url
data: data
/*
headers: {
'Accept-Encoding': 'compress'
}
*/
}
}
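// get sends a GET request to `url`.
// Usage sketch (illustrative only; the URL is a placeholder):
//	resp := http.get('https://example.com/') or {
//		println('http.get failed: $err')
//		return
//	}
//	println(resp.status_code)
//	println(resp.text)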
pub fn get(url string) ?Response {
return fetch_with_method('GET', url, FetchConfig{})
}
pub fn post(url, data string) ?Response {
return fetch_with_method('POST', url, {
data: data
headers: {
'Content-Type': content_type_default
}
})
}
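// post_form sends `data` to `url` as an urlencoded form body.
// Usage sketch (illustrative; URL and field names are placeholders):
//	resp := http.post_form('https://example.com/login', {
//		'username': 'alice'
//		'password': 'secret'
//	}) or {
//		println('http.post_form failed: $err')
//		return
//	}
//	println(resp.status_code)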
pub fn post_form(url string, data map[string]string) ?Response {
return fetch_with_method('POST', url, {
headers: {
'Content-Type': 'application/x-www-form-urlencoded'
}
data: url_encode_form_data(data)
})
}
pub fn put(url, data string) ?Response {
return fetch_with_method('PUT', url, {
data: data
headers: {
'Content-Type': content_type_default
}
})
}
pub fn patch(url, data string) ?Response {
return fetch_with_method('PATCH', url, {
data: data
headers: {
'Content-Type': content_type_default
}
})
}
pub fn head(url string) ?Response {
return fetch_with_method('HEAD', url, FetchConfig{})
}
pub fn delete(url string) ?Response {
return fetch_with_method('DELETE', url, FetchConfig{})
}
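// fetch sends an HTTP request to `_url`, using the method, query params,
// headers, cookies, body and user agent from `config`.
// Usage sketch (illustrative; URL, params and header values are placeholders):
//	resp := http.fetch('https://example.com/search', {
//		method: 'GET'
//		params: {
//			'q': 'vlang'
//		}
//		headers: {
//			'Accept': 'application/json'
//		}
//	}) or {
//		println('http.fetch failed: $err')
//		return
//	}
//	println(resp.text)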
pub fn fetch(_url string, config FetchConfig) ?Response {
if _url == '' {
return error('http.fetch: empty url')
}
url := build_url_from_fetch(_url, config) or {
return error('http.fetch: invalid url ${_url}')
}
data := config.data
method := config.method.to_upper()
req := Request{
method: method
url: url
data: data
headers: config.headers
cookies: config.cookies
user_agent: config.user_agent
ws_func: 0
user_ptr: 0
verbose: config.verbose
}
res := req.do() or {
return error(err)
}
return res
}
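// get_text fetches `url` and returns the response body, or '' on any error.
// Usage sketch (illustrative; URL is a placeholder):
//	body := http.get_text('https://example.com/robots.txt')
//	if body == '' {
//		println('request failed or the response was empty')
//	}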
pub fn get_text(url string) string {
resp := fetch(url, {
method: 'GET'
}) or {
return ''
}
return resp.text
}
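// url_encode_form_data escapes each key and value and joins the pairs into a query string.
// Sketch (illustrative input/output):
//	s := http.url_encode_form_data({
//		'name': 'john'
//		'lang': 'V'
//	})
//	// s == 'name=john&lang=V' (pair order follows map iteration order)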
pub fn url_encode_form_data(data map[string]string) string {
mut pieces := []string{}
for _key, _value in data {
key := urllib.query_escape(_key)
value := urllib.query_escape(_value)
pieces << '$key=$value'
}
return pieces.join('&')
}
fn fetch_with_method(method, url string, _config FetchConfig) ?Response {
mut config := _config
config.method = method
return fetch(url, config)
}
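// build_url_from_fetch appends `config.params` to any query string already present in `_url`.
// For example (illustrative), 'https://example.com/s?a=1' with params {'b': '2'}
// becomes 'https://example.com/s?a=1&b=2'. Note that the params are not query-escaped here.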
fn build_url_from_fetch(_url string, config FetchConfig) ?string {
mut url := urllib.parse(_url) or {
return error(err)
}
params := config.params
if params.keys().len == 0 {
return url.str()
}
mut pieces := []string{}
for key in params.keys() {
pieces << '${key}=${params[key]}'
}
mut query := pieces.join('&')
if url.raw_query.len > 1 {
query = url.raw_query + '&' + query
}
url.raw_query = query
return url.str()
}
fn (mut req Request) free() {
req.headers.free()
}
fn (mut resp Response) free() {
resp.headers.free()
}
// add_header adds a key-value pair to the request headers
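// Usage sketch (illustrative values):
//	mut req := http.Request{
//		method: 'GET'
//		url: 'https://example.com/'
//	}
//	req.add_header('Accept', 'application/json')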
pub fn (mut req Request) add_header(key, val string) {
req.headers[key] = val
}
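// parse_headers builds a header map from raw header lines, skipping the first (status) line.
// Sketch (illustrative input):
//	h := http.parse_headers([
//		'HTTP/1.1 200 OK',
//		'Content-Type: text/html',
//		'Content-Length: 42',
//	])
//	// h['Content-Type'] == 'text/html'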
pub fn parse_headers(lines []string) map[string]string {
mut headers := map[string]string{}
for i, line in lines {
if i == 0 {
continue
}
words := line.split(': ')
if words.len != 2 {
continue
}
headers[words[0]] = words[1]
}
return headers
}
// do sends the HTTP request and returns the `http.Response` as soon as the response is received
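// Usage sketch (illustrative; URL is a placeholder):
//	req := http.Request{
//		method: 'GET'
//		url: 'https://example.com/'
//	}
//	resp := req.do() or {
//		println('request failed: $err')
//		return
//	}
//	println(resp.status_code)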
pub fn (req &Request) do() ?Response {
mut url := urllib.parse(req.url) or {
return error('http.Request.do: invalid url ${req.url}')
}
mut rurl := url
mut resp := Response{}
mut no_redirects := 0
for {
if no_redirects == max_redirects {
return error('http.request.do: maximum number of redirects reached ($max_redirects)')
}
qresp := req.method_and_url_to_response(req.method, rurl) or {
return error(err)
}
resp = qresp
if resp.status_code !in [301, 302, 303, 307, 308] {
break
}
// follow any redirects
mut redirect_url := resp.headers['Location']
if redirect_url.len > 0 && redirect_url[0] == `/` {
url.set_path(redirect_url) or {
return error('http.request.do: invalid path in redirect: "$redirect_url"')
}
redirect_url = url.str()
}
qrurl := urllib.parse(redirect_url) or {
return error('http.request.do: invalid URL in redirect "$redirect_url"')
}
rurl = qrurl
no_redirects++
}
return resp
}
fn (req &Request) method_and_url_to_response(method string, url urllib.URL) ?Response {
host_name := url.hostname()
scheme := url.scheme
p := url.path.trim_left('/')
path := if url.query().size > 0 { '/$p?${url.query().encode()}' } else { '/$p' }
mut nport := url.port().int()
if nport == 0 {
if scheme == 'http' {
nport = 80
}
if scheme == 'https' {
nport = 443
}
2019-08-21 19:04:06 +02:00
}
// println('fetch $method, $scheme, $host_name, $nport, $path ')
if scheme == 'https' {
// println('ssl_do( $nport, $method, $host_name, $path )')
res := req.ssl_do(nport, method, host_name, path) or {
return error(err)
}
return res
} else if scheme == 'http' {
// println('http_do( $nport, $method, $host_name, $path )')
res := req.http_do(nport, method, host_name, path) or {
return error(err)
}
return res
}
return error('http.request.method_and_url_to_response: unsupported scheme: "$scheme"')
}
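// parse_response extracts the status code, headers, cookies and body from a raw
// HTTP response string. For example (illustrative):
//	'HTTP/1.1 200 OK\r\nContent-Type: text/plain\r\nContent-Length: 5\r\n\r\nhello'
// yields status_code 200, headers['Content-Type'] == 'text/plain' and text 'hello'.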
fn parse_response(resp string) Response {
// TODO: Header data type
mut headers := map[string]string{}
// TODO: Cookie data type
mut cookies := map[string]string{}
first_header := resp.all_before('\n')
mut status_code := 0
if first_header.contains('HTTP/') {
val := first_header.find_between(' ', ' ')
status_code = val.int()
}
mut text := ''
// Build resp headers map and separate the body
mut nl_pos := 3
mut i := 1
for {
old_pos := nl_pos
nl_pos = resp.index_after('\n', nl_pos + 1)
if nl_pos == -1 {
break
}
h := resp[old_pos + 1..nl_pos]
// End of headers
if h.len <= 1 {
text = resp[nl_pos + 1..]
break
}
i++
pos := h.index(':') or {
continue
}
// if h.contains('Content-Type') {
// continue
// }
key := h[..pos]
val := h[pos + 2..]
if key == 'Set-Cookie' {
parts := val.trim_space().split('=')
cookies[parts[0]] = parts[1]
}
headers[key] = val.trim_space()
}
if headers['Transfer-Encoding'] == 'chunked' || headers['Content-Length'] == '' {
text = chunked.decode(text)
}
return Response{
status_code: status_code
headers: headers
cookies: cookies
text: text
}
}
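// build_request_headers renders the raw request text that is sent over the socket.
// A GET of path '/' with no extra headers or data (illustrative) produces:
//	'GET / HTTP/1.1\r\nHost: example.com\r\nUser-Agent: v.http\r\nConnection: close\r\n\r\n'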
fn (req &Request) build_request_headers(method, host_name, path string) string {
ua := req.user_agent
mut uheaders := []string{}
if 'Host' !in req.headers {
uheaders << 'Host: $host_name\r\n'
}
if 'User-Agent' !in req.headers {
uheaders << 'User-Agent: $ua\r\n'
}
if req.data.len > 0 && 'Content-Length' !in req.headers {
uheaders << 'Content-Length: ${req.data.len}\r\n'
}
for key, val in req.headers {
if key == 'Cookie' {
continue
}
uheaders << '${key}: ${val}\r\n'
}
uheaders << req.build_request_cookies_header()
return '$method $path HTTP/1.1\r\n' + uheaders.join('') + 'Connection: close\r\n\r\n' +
req.data
}
fn (req &Request) build_request_cookies_header() string {
if req.cookies.keys().len < 1 {
return ''
}
mut cookie := []string{}
for key, val in req.cookies {
cookie << '$key: $val'
}
if 'Cookie' in req.headers && req.headers['Cookie'] != '' {
cookie << req.headers['Cookie']
}
return 'Cookie: ' + cookie.join('; ') + '\r\n'
}
pub fn unescape_url(s string) string {
panic('http.unescape_url() was replaced with urllib.query_unescape()')
}
pub fn escape_url(s string) string {
panic('http.escape_url() was replaced with urllib.query_escape()')
}
pub fn unescape(s string) string {
panic('http.unescape() was replaced with http.unescape_url()')
}
pub fn escape(s string) string {
panic('http.escape() was replaced with http.escape_url()')
}