http: refactor and reduce duplication

pull/3484/head
Chris Watson 2020-01-16 10:16:11 -07:00 committed by Alexander Medvednikov
parent 756c6d2858
commit 84a6c019e8
8 changed files with 274 additions and 88 deletions

View File

@ -57,7 +57,7 @@ jobs:
run: ./v -d network test vlib/
windows-msvc:
network-windows-msvc:
runs-on: windows-2019
env:
VFLAGS: -cc msvc

View File

@ -1,7 +1,7 @@
import net.http
fn main() {
html := http.get_text('https://news.ycombinator.com')
html := http.get_text('https://news.ycombinator.com')
mut pos := 0
for {
pos = html.index_after('https://', pos + 1)
@ -12,4 +12,3 @@ fn main() {
println(html[pos..end])
}
}

View File

@ -2,16 +2,11 @@ module main
import net.ftp
fn test_ftp_client() {
make_client_connection()
assert true
}
// NB: this function makes network calls to external servers,
// that is why it is not a very good idea to run it in CI.
// If you want to run it manually, use `v -d network vlib/net/ftp/ftp_test.v`
[if network]
fn make_client_connection() {
fn test_ftp_client() {
// $if !network ? { return }
mut ftp := ftp.new()
defer {
ftp.close()

View File

@ -13,4 +13,3 @@ pub fn download_file(url, out string) bool {
return true
// download_file_with_progress(url, out, empty, empty)
}

View File

@ -8,55 +8,109 @@ import net.http.chunked
const (
max_redirects = 4
content_type_default = 'text/plain'
)
pub struct Request {
pub:
headers map[string]string
method string
// cookies map[string]string
h string
cmd string
typ string // GET POST
headers map[string]string
cookies map[string]string
data string
url string
verbose bool
user_agent string
verbose bool
mut:
user_ptr voidptr
ws_func voidptr
}
// FetchConfig holds the per-request options consumed by fetch()
// and the verb helpers (get/post/put/patch/head/delete).
pub struct FetchConfig {
pub mut:
// HTTP verb; normalized with .to_upper() inside fetch().
method string
// Raw request body.
data string=''
// Query parameters merged into the url by build_url_from_fetch().
params map[string]string=map[string]string
headers map[string]string=map[string]string
cookies map[string]string=map[string]string
// NOTE(review): fetch() hardcodes user_agent: 'v' on the Request it
// builds, so this field appears to be ignored there — confirm.
user_agent string='v'
verbose bool=false
}
// Response is the parsed result of an HTTP request: the body text,
// the response headers/cookies, and the numeric status code.
pub struct Response {
pub:
text string
headers map[string]string
// Parsed from 'Set-Cookie' response headers (name -> value).
cookies map[string]string
status_code int
}
pub fn get(url string) ?Response {
return method('GET', url, '')
}
pub fn head(url string) ?Response {
return method('HEAD', url, '')
}
pub fn delete(url string) ?Response {
return method('DELETE', url, '')
}
pub fn patch(url string) ?Response {
return method('PATCH', url, '')
}
pub fn put(url string) ?Response {
return method('PUT', url, '')
// get performs an HTTP GET request on `url` with default options.
pub fn get(url string) ?Response {
return fetch_with_method('GET', url, FetchConfig{})
}
pub fn post(url, data string) ?Response {
req := new_request('POST', url, data) or {
return error(err)
return fetch_with_method('POST', url, {
data: data
headers: {
'Content-Type': content_type_default
}
})
}
// post_form performs an HTTP POST request on `url`, sending `data`
// as an application/x-www-form-urlencoded body
// (encoded via url_encode_form_data).
pub fn post_form(url string, data map[string]string) ?Response {
return fetch_with_method('POST', url, {
headers: {
'Content-Type': 'application/x-www-form-urlencoded'
}
data: url_encode_form_data(data)
})
}
// put performs an HTTP PUT request on `url` with `data` as the body,
// using the default content type (content_type_default, 'text/plain').
pub fn put(url, data string) ?Response {
return fetch_with_method('PUT', url, {
data: data
headers: {
'Content-Type': content_type_default
}
})
}
// patch performs an HTTP PATCH request on `url` with `data` as the
// body, using the default content type (content_type_default).
pub fn patch(url, data string) ?Response {
return fetch_with_method('PATCH', url, {
data: data
headers: {
'Content-Type': content_type_default
}
})
}
// head performs an HTTP HEAD request on `url` with default options.
pub fn head(url string) ?Response {
return fetch_with_method('HEAD', url, FetchConfig{})
}
// delete performs an HTTP DELETE request on `url` with default options.
pub fn delete(url string) ?Response {
return fetch_with_method('DELETE', url, FetchConfig{})
}
pub fn fetch(_url string, config FetchConfig) ?Response {
if _url == '' {
return error('http.fetch: empty url')
}
url := build_url_from_fetch(_url, config) or {
return error('http.fetch: invalid url ${_url}')
}
data := config.data
method := config.method.to_upper()
req := Request{
method: method
url: url
data: data
headers: config.headers
cookies: config.cookies
user_agent: 'v'
ws_func: 0
user_ptr: 0
verbose: config.verbose
}
res := req.do() or {
return error(err)
@ -64,42 +118,51 @@ pub fn post(url, data string) ?Response {
return res
}
pub fn method(mname string, url string, data string) ?Response {
req := new_request(mname, url, data) or { return error(err) }
res := req.do() or { return error(err)}
return res
}
// new_request creates a new HTTP request
pub fn new_request(typ, _url, _data string) ?Request {
if _url == '' {
return error('http.new_request: empty url')
}
mut url := _url
mut data := _data
// req.headers['User-Agent'] = 'V $VERSION'
if typ == 'GET' && !url.contains('?') && data != '' {
url = '$url?$data'
data = ''
}
return Request{
typ: typ
url: url
data: data
ws_func: 0
user_ptr: 0
headers: map[string]string
user_agent: 'v'
}
}
// get_text fetches `url` via GET and returns only the response body.
// Any error (including an empty url) is swallowed and yields ''.
pub fn get_text(url string) string {
resp := fetch(url, {
method: 'GET'
}) or {
return ''
}
return resp.text
}
// url_encode_form_data encodes a map as 'k1=v1&k2=v2', with both
// keys and values escaped via urllib.query_escape.
// NOTE(review): pair order follows map iteration order — confirm no
// caller depends on a stable ordering.
pub fn url_encode_form_data(data map[string]string) string {
mut pieces := []string
for _key, _value in data {
key := urllib.query_escape(_key)
value := urllib.query_escape(_value)
pieces << '$key=$value'
}
return pieces.join('&')
}
// fetch_with_method is the shared core of the verb helpers: it forces
// `config.method` to the given verb, then delegates to fetch().
fn fetch_with_method(method string, url string, _config FetchConfig) ?Response {
mut config := _config
config.method = method
return fetch(url, config)
}
// build_url_from_fetch parses `_url` and merges config.params into
// its query string, preserving any query already present on the url.
// Returns the final url rendered back to a string; errors if `_url`
// cannot be parsed.
fn build_url_from_fetch(_url string, config FetchConfig) ?string {
mut url := urllib.parse(_url) or {
return error(err)
}
params := config.params
// No params: nothing to merge, return the parsed url unchanged.
if params.keys().len == 0 {
return url.str()
}
mut pieces := []string
for key in params.keys() {
// NOTE(review): keys/values are not query-escaped here, unlike
// url_encode_form_data — confirm that is intentional.
pieces << '${key}=${params[key]}'
}
mut query := pieces.join('&')
// Append after the pre-existing query, if any.
if url.raw_query.len > 1 {
query = url.raw_query + '&' + query
}
url.raw_query = query
return url.str()
}
// free releases the memory owned by the request's headers map.
fn (req mut Request) free() {
req.headers.free()
}
@ -130,11 +193,8 @@ pub fn parse_headers(lines []string) map[string]string {
// do will send the HTTP request and returns `http.Response` as soon as the response is recevied
pub fn (req &Request) do() ?Response {
if req.typ == 'POST' {
// req.headers << 'Content-Type: application/x-www-form-urlencoded'
}
url := urllib.parse(req.url) or {
return error('http.request.do: invalid URL "$req.url"')
return error('http.Request.do: invalid url ${req.url}')
}
mut rurl := url
mut resp := Response{}
@ -143,7 +203,7 @@ pub fn (req &Request) do() ?Response {
if no_redirects == max_redirects {
return error('http.request.do: maximum number of redirects reached ($max_redirects)')
}
qresp := req.method_and_url_to_response(req.typ, rurl) or {
qresp := req.method_and_url_to_response(req.method, rurl) or {
return error(err)
}
resp = qresp
@ -194,7 +254,10 @@ fn (req &Request) method_and_url_to_response(method string, url net_dot_urllib.U
}
fn parse_response(resp string) Response {
// TODO: Header data type
mut headers := map[string]string
// TODO: Cookie data type
mut cookies := map[string]string
first_header := resp.all_before('\n')
mut status_code := 0
if first_header.contains('HTTP/') {
@ -226,6 +289,10 @@ fn parse_response(resp string) Response {
// }
key := h[..pos]
val := h[pos + 2..]
if key == 'Set-Cookie' {
parts := val.trim_space().split('=')
cookies[parts[0]] = parts[1]
}
headers[key] = val.trim_space()
}
if headers['Transfer-Encoding'] == 'chunked' {
@ -234,6 +301,7 @@ fn parse_response(resp string) Response {
return Response{
status_code: status_code
headers: headers
cookies: cookies
text: text
}
}
@ -241,13 +309,37 @@ fn parse_response(resp string) Response {
fn (req &Request) build_request_headers(method, host_name, path string) string {
ua := req.user_agent
mut uheaders := []string
for key, val in req.headers {
uheaders << '${key}: ${val}\r\n'
if !('Host' in req.headers) {
uheaders << 'Host: $host_name\r\n'
}
if req.data.len > 0 {
if !('User-Agent' in req.headers) {
uheaders << 'User-Agent: $ua\r\n'
}
if req.data.len > 0 && !('Content-Length' in req.headers) {
uheaders << 'Content-Length: ${req.data.len}\r\n'
}
return '$method $path HTTP/1.1\r\n' + 'Host: $host_name\r\n' + 'User-Agent: $ua\r\n' + uheaders.join('') + 'Connection: close\r\n\r\n' + req.data
for key, val in req.headers {
if key == 'Cookie' {
continue
}
uheaders << '${key}: ${val}\r\n'
}
uheaders << req.build_request_cookies_header()
return '$method $path HTTP/1.1\r\n' + uheaders.join('') + 'Connection: close\r\n\r\n' + req.data
}
// build_request_cookies_header renders req.cookies (plus any literal
// 'Cookie' value already present in req.headers) as a single
// 'Cookie: k=v; k2=v2\r\n' request-header line.
// Returns '' when there are no cookies to send.
fn (req &Request) build_request_cookies_header() string {
if req.cookies.keys().len < 1 {
return ''
}
mut cookie := []string
for key, val in req.cookies {
// RFC 6265 cookie-pairs are 'name=value', not 'name: value';
// the previous '$key: $val' form was unparseable by servers.
cookie << '$key=$val'
}
// Preserve a caller-supplied raw Cookie header value verbatim.
if 'Cookie' in req.headers && req.headers['Cookie'] != '' {
cookie << req.headers['Cookie']
}
return 'Cookie: ' + cookie.join('; ') + '\r\n'
}
pub fn unescape_url(s string) string {

View File

@ -0,0 +1,101 @@
module http //internal tests have access to *everything in the module*
import json
// HttpbinResponseBody mirrors the JSON echo document returned by
// httpbin.org endpoints, for json.decode in the tests below.
struct HttpbinResponseBody {
args map[string]string
data string
files map[string]string
form map[string]string
headers map[string]string
// Optional: present only when httpbin echoes a JSON body.
json ?map[string]string
origin string
url string
}
// http_fetch_mock issues one request per verb against
// https://httpbin.org/<verb> with the given config and collects the
// responses. An empty `_methods` defaults to GET/POST/PATCH/PUT/DELETE.
// Makes real network calls — not a true mock despite the name.
fn http_fetch_mock(_methods []string, _config FetchConfig) ?[]Response {
url := 'https://httpbin.org/'
// Note: httpbin doesn't support head
methods := if _methods.len == 0 { ['GET', 'POST', 'PATCH', 'PUT', 'DELETE'] } else { _methods }
mut config := _config
mut result := []Response
for method in methods {
// httpbin routes are lowercase: /get, /post, ...
lmethod := method.to_lower()
config.method = method
res := fetch(url + lmethod, config) or {
return error(err)
}
// TODO
// body := json.decode(HttpbinResponseBody,res.text) or {
// return error(err)
// }
result << res
}
return result
}
// test_http_fetch_bare checks that a default-config fetch of every
// supported verb against httpbin returns HTTP 200. Network-dependent.
fn test_http_fetch_bare() {
// $if !network ? { return }
responses := http_fetch_mock([], FetchConfig{}) or {
panic(err)
}
for response in responses {
assert response.status_code == 200
}
}
// test_http_fetch_with_data checks that a request body survives the
// round trip: httpbin echoes it back in the `data` field of its JSON
// response. Network-dependent.
fn test_http_fetch_with_data() {
// $if !network ? { return }
responses := http_fetch_mock(['POST', 'PUT', 'PATCH', 'DELETE'], {
data: 'hello world'
}) or {
panic(err)
}
for response in responses {
payload := json.decode(HttpbinResponseBody,response.text) or {
panic(err)
}
assert payload.data == 'hello world'
}
}
// test_http_fetch_with_params checks that requests carrying query
// params still succeed (status 200). Echo verification of the params
// is still TODO. Network-dependent.
fn test_http_fetch_with_params() {
// $if !network ? { return }
responses := http_fetch_mock([], {
params: {
'a': 'b',
'c': 'd'
}
}) or {
panic(err)
}
for response in responses {
// payload := json.decode(HttpbinResponseBody,response.text) or {
// panic(err)
// }
assert response.status_code == 200
// TODO
// assert payload.args['a'] == 'b'
// assert payload.args['c'] == 'd'
}
}
// test_http_fetch_with_headers checks that requests carrying a custom
// header still succeed (status 200). Echo verification of the header
// is still TODO. Network-dependent.
fn test_http_fetch_with_headers() {
// $if !network ? { return }
responses := http_fetch_mock([], {
headers: {
'Test-Header': 'hello world'
}
}) or {
panic(err)
}
for response in responses {
// payload := json.decode(HttpbinResponseBody,response.text) or {
// panic(err)
// }
assert response.status_code == 200
// TODO
// assert payload.headers['Test-Header'] == 'hello world'
}
}

View File

@ -1,22 +1,13 @@
// import net.urllib
import net.http
// test_escape_unescape is currently disabled: its whole body is
// commented out (the escape/unescape round-trip check lives in the
// net.urllib tests instead).
fn test_escape_unescape() {
/*
original := 'те ст: т\\%'
escaped := urllib.query_escape(original) or { assert false return}
assert escaped == '%D1%82%D0%B5%20%D1%81%D1%82%3A%20%D1%82%5C%25'
unescaped := urllib.query_unescape(escaped) or { assert false return }
assert unescaped == original
*/
}
// test_http_get checks http.get_text against the live vlang.io
// version endpoint. Network-dependent and pinned to a specific
// version string, so it is brittle by design.
fn test_http_get() {
// $if !network ? { return }
assert http.get_text('https://vlang.io/version') == '0.1.5'
println('http ok')
}
fn test_http_get_from_vlang_utc_now() {
fn test_http_get_from_vlang_utc_now() {
// $if !network ? { return }
urls := ['http://vlang.io/utc_now', 'https://vlang.io/utc_now']
for url in urls {
println('Test getting current time from $url by http.get')
@ -29,6 +20,7 @@ fn test_http_get_from_vlang_utc_now() {
}
fn test_public_servers() {
// $if !network ? { return }
urls := [
'http://github.com/robots.txt',
'http://google.com/robots.txt',

View File

@ -28,3 +28,11 @@ fn test_str() {
}
assert url.str() == 'https://en.wikipedia.org/wiki/Brazil_(1985_film)'
}
// test_escape_unescape round-trips a non-ASCII string through
// query_escape/query_unescape. Note the expected form encodes spaces
// as '+' (form-style query escaping), not '%20'.
fn test_escape_unescape() {
original := 'те ст: т\\%'
escaped := urllib.query_escape(original)
assert escaped == '%D1%82%D0%B5+%D1%81%D1%82%3A+%D1%82%5C%25'
unescaped := urllib.query_unescape(escaped) or { assert false return }
assert unescaped == original
}