enable `$if !network {` checks in net tests

pull/3471/head
Delyan Angelov 2020-01-16 20:44:16 +02:00 committed by Alexander Medvednikov
parent e1132156f5
commit cc606623bb
3 changed files with 8 additions and 8 deletions
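
This commit uncomments the `$if !network ? { return }` guard at the top of each network-dependent test. The guard is V's compile-time check for a custom `-d` define: unless a test file is built with `v -d network ...`, the `!network` branch is included and the test returns before touching the network; with the flag, the branch is compiled out and the real assertions run. Below is a minimal sketch of the pattern, not part of the diff; the test name is hypothetical, while `net.http` and `http.get_text` are the same module and call used in the changed files.

import net.http

// Runs only when compiled with `v -d network some_test.v`;
// without the flag, `!network` holds and the test returns early.
fn test_needs_network() {
	$if !network ? {
		return
	}
	assert http.get_text('https://vlang.io/version').len > 0
}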

View File

@@ -6,7 +6,7 @@ import net.ftp
// that is why it is not a very good idea to run it in CI.
// If you want to run it manually, use `v -d network vlib/net/ftp/ftp_test.v`
fn test_ftp_client() {
-// $if !network ? { return }
+$if !network ? { return }
mut ftp := ftp.new()
defer {
ftp.close()

View File

@@ -36,7 +36,7 @@ fn http_fetch_mock(_methods []string, _config FetchConfig) ?[]Response {
}
fn test_http_fetch_bare() {
-// $if !network ? { return }
+$if !network ? { return }
responses := http_fetch_mock([], FetchConfig{}) or {
panic(err)
}
@@ -46,7 +46,7 @@ fn test_http_fetch_bare() {
}
fn test_http_fetch_with_data() {
-// $if !network ? { return }
+$if !network ? { return }
responses := http_fetch_mock(['POST', 'PUT', 'PATCH', 'DELETE'], {
data: 'hello world'
}) or {
@@ -61,7 +61,7 @@ fn test_http_fetch_with_data() {
}
fn test_http_fetch_with_params() {
-// $if !network ? { return }
+$if !network ? { return }
responses := http_fetch_mock([], {
params: {
'a': 'b',
@@ -82,7 +82,7 @@ fn test_http_fetch_with_params() {
}
fn test_http_fetch_with_headers() {
-// $if !network ? { return }
+$if !network ? { return }
responses := http_fetch_mock([], {
headers: {
'Test-Header': 'hello world'

View File

@@ -1,13 +1,13 @@
import net.http
fn test_http_get() {
-// $if !network ? { return }
+$if !network ? { return }
assert http.get_text('https://vlang.io/version') == '0.1.5'
println('http ok')
}
fn test_http_get_from_vlang_utc_now() {
-// $if !network ? { return }
+$if !network ? { return }
urls := ['http://vlang.io/utc_now', 'https://vlang.io/utc_now']
for url in urls {
println('Test getting current time from $url by http.get')
@@ -20,7 +20,7 @@ fn test_http_get_from_vlang_utc_now() {
}
fn test_public_servers() {
-// $if !network ? { return }
+$if !network ? { return }
urls := [
'http://github.com/robots.txt',
'http://google.com/robots.txt',