Non persistent connections #296

Open: wants to merge 4 commits into master
11 changes: 9 additions & 2 deletions pyresttest/resttest.py
@@ -110,6 +110,7 @@ class TestConfig:
interactive = False
verbose = False
ssl_insecure = False
+ non_persistent = False # Disables keep-alive pycurl option
Review comment (on the line above):
Would be nice to specify testing vs. benchmarking here. Not sure how hard that would be, though.

skip_term_colors = False # Turn off output term colors

# Binding and creation of generators
@@ -331,6 +332,8 @@ def run_test(mytest, test_config=TestConfig(), context=None, curl_handle=None, *
if test_config.ssl_insecure:
curl.setopt(pycurl.SSL_VERIFYPEER, 0)
curl.setopt(pycurl.SSL_VERIFYHOST, 0)
+ if test_config.non_persistent:
+     curl.setopt(pycurl.TCP_KEEPALIVE, 0)

result.passed = None

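For context, here is a minimal standalone sketch of what the new option does to a raw pycurl handle. It is illustrative only and not part of the patch: the URL, response buffer, and request flow are assumptions; only the TCP_KEEPALIVE setopt mirrors what the patch adds when non_persistent is true.

import pycurl
from io import BytesIO

curl = pycurl.Curl()
buf = BytesIO()
curl.setopt(pycurl.URL, 'http://localhost:8000/api/ping')  # hypothetical endpoint, not from the patch
curl.setopt(pycurl.WRITEFUNCTION, buf.write)                # collect the response body
curl.setopt(pycurl.TCP_KEEPALIVE, 0)                        # the option the patch sets when non_persistent is true
curl.perform()
print(curl.getinfo(pycurl.RESPONSE_CODE), len(buf.getvalue()))
curl.close()

Note that TCP_KEEPALIVE governs TCP keep-alive probing on the socket; connection reuse across tests is also tied to the shared curl handle removed in the hunks below.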
@@ -629,7 +632,6 @@ def run_testsets(testsets):
group_failure_counts = dict()
total_failures = 0
myinteractive = False
- curl_handle = pycurl.Curl()

for testset in testsets:
mytests = testset.tests
@@ -659,7 +661,7 @@
group_results[test.group] = list()
group_failure_counts[test.group] = 0

- result = run_test(test, test_config=myconfig, context=context, curl_handle=curl_handle)
+ result = run_test(test, test_config=myconfig, context=context)
result.body = None # Remove the body, save some memory!

if not result.passed: # Print failure, increase failure counts for that test group
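The second half of the change removes the single pycurl.Curl() handle that run_testsets previously created up front and passed into every run_test call; without it, each run_test presumably falls back to creating its own handle, so libcurl no longer keeps one connection open across the whole test set. A rough sketch of the difference, using a hypothetical fetch helper and URL that are not taken from the project:

import pycurl
from io import BytesIO

def fetch(url, handle=None):
    # With a caller-supplied handle, libcurl may reuse the TCP connection across
    # calls; with a fresh handle per call, every request opens a new connection.
    curl = handle if handle is not None else pycurl.Curl()
    buf = BytesIO()
    curl.setopt(pycurl.URL, url)
    curl.setopt(pycurl.WRITEFUNCTION, buf.write)
    curl.perform()
    body = buf.getvalue()
    if handle is None:
        curl.close()
    return body

shared = pycurl.Curl()
fetch('http://localhost:8000/api/ping', handle=shared)  # connection may be reused
fetch('http://localhost:8000/api/ping')                 # always a fresh connection
shared.close()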
@@ -853,6 +855,9 @@ def main(args):

if 'ssl_insecure' in args and args['ssl_insecure'] is not None:
t.config.ssl_insecure = safe_to_bool(args['ssl_insecure'])

+ if 'non_persistent' in args and args['non_persistent'] is not None:
+     t.config.non_persistent = safe_to_bool(args['non_persistent'])

if 'skip_term_colors' in args and args['skip_term_colors'] is not None:
t.config.skip_term_colors = safe_to_bool(args['skip_term_colors'])
@@ -887,6 +892,8 @@ def parse_command_line_args(args_in):
action='store_true', default=False, dest="verbose")
parser.add_option(u'--ssl-insecure', help='Disable cURL host and peer cert verification',
action='store_true', default=False, dest="ssl_insecure")
+ parser.add_option(u'--non-persistent', help='Disables persistent connections',
+     action='store_true', default=False, dest="non_persistent")
parser.add_option(u'--absolute-urls', help='Enable absolute URLs in tests instead of relative paths',
action="store_true", dest="absolute_urls")
parser.add_option(u'--skip_term_colors', help='Turn off the output term colors',
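Usage note: assuming the usual resttest.py invocation with a base URL and a test file (both placeholders here, not taken from this diff), the new flag would be passed as:

python resttest.py http://localhost:8000 my_tests.yaml --non-persistent

which sets TestConfig.non_persistent via safe_to_bool and, in turn, disables the keep-alive option on each test's curl handle.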