patx/mrhttp-asgi
Starting dynamic caching
Commit 3f98f85 · Mark Reed · 2024-03-22T12:05:26-07:00
Comments
No comments yet.
Diff
diff --git a/dotests.py b/dotests.py
index e6bb305..17486e8 100644
--- a/dotests.py
+++ b/dotests.py
@@ -114,6 +114,7 @@ async def run_benchmarks():
print(" Pipelined")
print (" Hello ", await run_wrk(loop, 'http://localhost:8080/',lua='tests/lua/pipeline.lua'), "Requests/second" )
+ print (" Hello ", await run_wrk(loop, 'http://localhost:8080/',lua='tests/lua/pipeline.lua'), "Requests/second" )
print (" More hdrs ", await run_wrk(loop, 'http://localhost:8080/',options=more_headers,lua='tests/lua/pipeline.lua'), "Requests/second" )
print (" Sessions ", await run_wrk(loop, 'http://localhost:8080/s',lua='tests/lua/q-session.lua'), "Requests/second" )
print (" File Upload ", await run_wrk(loop, 'http://localhost:8080/upload',lua='tests/lua/q-upload.lua'), "Requests/second" )
diff --git a/src/mrhttp/internals/protocol.c b/src/mrhttp/internals/protocol.c
index 379e432..682bf0b 100644
--- a/src/mrhttp/internals/protocol.c
+++ b/src/mrhttp/internals/protocol.c
@@ -541,7 +541,10 @@ Protocol* Protocol_handle_request(Protocol* self, Request* request, Route* r) {
// If we have cached bytes
if ( r->cached ) {
- if(!protocol_write_response(self, request, r->cached)) goto error;
+ if ( PyBytes_Check( r->cached ) ) {
+ if(!protocol_write_response(self, request, r->cached)) goto error;
+ }
+ // Else dynamic cache
}
if(!(result = protocol_callPageHandler(self, r->func, request)) ) {
diff --git a/tests/s_bench.py b/tests/s_bench.py
index 63d1fbf..0a6b6c5 100755
--- a/tests/s_bench.py
+++ b/tests/s_bench.py
@@ -38,6 +38,9 @@ app.session_backend = "mrcache"
@app.route('/')
def index(r):
return 'Hello World!'
+@app.route('/cached', options=['cache'])
+def cached_index(r):
+ return 'Hello World!'
@app.route('/print/{}')
def pr(r,foo):