diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 816c502313..908772b8b3 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -45,6 +45,7 @@ jobs: verify_docker_build: name: Always - Docker verify, push to tag 'master' if on master runs-on: ubuntu-latest + if: github.event_name != 'pull_request' # PR build doesn't get proper version, so don't try to build it steps: - uses: actions/checkout@v2 with: diff --git a/docs/increase-performance.rst b/docs/increase-performance.rst index f8ee3ecabc..b201e41ae7 100644 --- a/docs/increase-performance.rst +++ b/docs/increase-performance.rst @@ -1,48 +1,38 @@ .. _increase-performance: ============================================================== -Increase Locust's performance with a faster HTTP client +Increase performance with a faster HTTP client ============================================================== Locust's default HTTP client uses `python-requests `_. -The reason for this is that requests is a very well-maintained python package, that -provides a really nice API, that many python developers are familiar with. Therefore, -in many cases, we recommend that you use the default :py:class:`HttpUser ` -which uses requests. However, if you're planning to run really large scale tests, -Locust comes with an alternative HTTP client, -:py:class:`FastHttpUser ` which -uses `geventhttpclient `_ instead of requests. -This client is significantly faster, and we've seen 5x-6x performance increases for making -HTTP-requests. This does not necessarily mean that the number of users one can simulate -per CPU core will automatically increase 5x-6x, since it also depends on what else -the load testing script does. However, if your locust scripts are spending most of their -CPU time in making HTTP-requests, you are likely to see significant performance gains. 
- -It is impossible to say what your particular hardware can handle, but in a *best case* scenario -you should be able to do close to 5000 requests per second per core, instead of around 850 for -the normal HttpUser (tested on a 2018 MacBook Pro i7 2.6GHz) +It provides a nice API that many python developers are familiar with, and is very well-maintained. But if you're planning to run tests with very high throughput and have limited hardware for running Locust, it is sometimes not efficient enough. + +Because of this, Locust also comes with :py:class:`FastHttpUser ` which +uses `geventhttpclient `_ instead. +It provides a very similar API and uses significantly less CPU time, sometimes increasing the maximum number of requests per second on a given hardware by as much as 5x-6x. + +It is impossible to say what your particular hardware can handle, but in a best case scenario +a test using FastHttpUsers will be able to do close to 5000 requests per second per core, instead of around 850 for HttpUser (tested on a 2018 MacBook Pro i7 2.6GHz). In reality your results may vary, and you'll see smaller gains if your load tests also do other CPU-intensive things. + +.. note:: + + As long as your load generator CPU is not overloaded, FastHttpUser's response times should be almost identical to those of HttpUser. It is not "faster" in that sense. And of course, it cannot speed up the system you are testing. How to use FastHttpUser =========================== -Subclass FastHttpUser instead of HttpUser:: +Just subclass FastHttpUser instead of HttpUser:: - from locust import task, between - from locust.contrib.fasthttp import FastHttpUser + from locust import task, FastHttpUser class MyUser(FastHttpUser): - wait_time = between(2, 5) - @task def index(self): response = self.client.get("/") - .. note:: - Because FastHttpUser uses a different client implementation with a slightly different API, - it may not always work as a drop-in replacement for HttpUser. 
- + FastHttpUser/geventhttpclient is very similar to HttpUser/python-requests, but sometimes there are subtle differences. This is particularly true if you work with the client library's internals, e.g. when manually managing cookies. API === diff --git a/examples/fast_http_locust.py b/examples/fast_http_locust.py index 3df86fec14..b2148caf40 100644 --- a/examples/fast_http_locust.py +++ b/examples/fast_http_locust.py @@ -1,5 +1,4 @@ -from locust import HttpUser, TaskSet, task, between -from locust.contrib.fasthttp import FastHttpUser +from locust import FastHttpUser, task class WebsiteUser(FastHttpUser): @@ -9,7 +8,6 @@ class WebsiteUser(FastHttpUser): """ host = "http://127.0.0.1:8089" - wait_time = between(2, 5) # some things you can configure on FastHttpUser # connection_timeout = 60.0 # insecure = True diff --git a/locust/__init__.py b/locust/__init__.py index a44d178f12..98ba11ea33 100644 --- a/locust/__init__.py +++ b/locust/__init__.py @@ -7,6 +7,7 @@ from .user import wait_time from .user.task import task, tag, TaskSet from .user.users import HttpUser, User +from .contrib.fasthttp import FastHttpUser from .user.wait_time import between, constant, constant_pacing from .shape import LoadTestShape @@ -21,6 +22,7 @@ "tag", "TaskSet", "HttpUser", +Â "FastHttpUser", "User", "between", "constant", diff --git a/locust/test/test_fasthttp.py b/locust/test/test_fasthttp.py index b32dce8ce2..21dcb9349a 100644 --- a/locust/test/test_fasthttp.py +++ b/locust/test/test_fasthttp.py @@ -3,7 +3,8 @@ from tempfile import NamedTemporaryFile from locust.user import task, TaskSet -from locust.contrib.fasthttp import FastHttpSession, FastHttpUser +from locust.contrib.fasthttp import FastHttpSession +from locust import FastHttpUser from locust.exception import CatchResponseError, InterruptTaskSet, ResponseError from locust.main import is_user_class from .testcases import WebserverTestCase, LocustTestCase