progress: count amount of data "delivered" to application

... and apply the CURLOPT_MAXFILESIZE limit (if set) on that as well.
This effectively protects the user against "zip bombs".

Test case 1618 verifies this using a 14-byte brotli payload that otherwise
decompresses to 102400 zero bytes.
This commit is contained in:
Daniel Stenberg 2026-03-02 11:02:03 +01:00
parent 4b583b7585
commit d332b9057f
No known key found for this signature in database
GPG key ID: 5CC908FDB71E12C2
10 changed files with 115 additions and 7 deletions

View file

@ -144,3 +144,6 @@ and secure algorithms.
When asking curl or libcurl to automatically decompress data on arrival, there
is a risk that the size of the output from the decompression process ends up
many times larger than the input data size.
Since curl 8.20.0, users can mitigate this risk by setting the max filesize
option that also covers the decompressed size.

View file

@ -37,3 +37,6 @@ threshold during transfer.
Starting in curl 8.19.0, the maximum size can be specified using a fraction as
in `2.5M` for two and a half megabytes. It only works with a period (`.`)
delimiter, independent of what your locale might prefer.
Since 8.20.0, this option also stops ongoing transfers that would reach this
threshold due to automatic decompression using --compressed.

View file

@ -41,6 +41,9 @@ If you want a limit above 2GB, use CURLOPT_MAXFILESIZE_LARGE(3).
Since 8.4.0, this option also stops ongoing transfers if they reach this
threshold.
Since 8.20.0, this option also stops ongoing transfers that would reach this
threshold due to automatic decompression using CURLOPT_ACCEPT_ENCODING(3).
# DEFAULT
0, meaning disabled.

View file

@ -42,6 +42,9 @@ ends up being larger than this given limit.
Since 8.4.0, this option also stops ongoing transfers if they reach this
threshold.
Since 8.20.0, this option also stops ongoing transfers that would reach this
threshold due to automatic decompression using CURLOPT_ACCEPT_ENCODING(3).
# DEFAULT
0, meaning disabled.

View file

@ -31,7 +31,7 @@
#include "transfer.h"
#include "cw-out.h"
#include "cw-pause.h"
#include "progress.h"
/**
* OVERALL DESIGN of this client writer
@ -228,8 +228,8 @@ static CURLcode cw_out_ptr_flush(struct cw_out_ctx *ctx,
curl_write_callback wcb = NULL;
void *wcb_data;
size_t max_write, min_write;
size_t wlen, nwritten;
CURLcode result;
size_t wlen, nwritten = 0;
CURLcode result = CURLE_OK;
/* If we errored once, we do not invoke the client callback again */
if(ctx->errored)
@ -253,10 +253,15 @@ static CURLcode cw_out_ptr_flush(struct cw_out_ctx *ctx,
if(!flush_all && blen < min_write)
break;
wlen = max_write ? CURLMIN(blen, max_write) : blen;
result = cw_out_cb_write(ctx, data, wcb, wcb_data, otype,
buf, wlen, &nwritten);
if(otype == CW_OUT_BODY)
result = Curl_pgrs_deliver_check(data, wlen);
if(!result)
result = cw_out_cb_write(ctx, data, wcb, wcb_data, otype,
buf, wlen, &nwritten);
if(result)
return result;
if(otype == CW_OUT_BODY)
Curl_pgrs_deliver_inc(data, nwritten);
*pconsumed += nwritten;
blen -= nwritten;
buf += nwritten;

View file

@ -211,6 +211,7 @@ void Curl_pgrsReset(struct Curl_easy *data)
Curl_pgrsSetUploadSize(data, -1);
Curl_pgrsSetDownloadSize(data, -1);
data->progress.speeder_c = 0; /* reset speed records */
data->progress.deliver = 0;
pgrs_speedinit(data);
}
@ -339,6 +340,26 @@ void Curl_pgrsStartNow(struct Curl_easy *data)
p->ul_size_known = FALSE;
}
/* check that the 'delta' amount of bytes are okay to deliver to the
application, or return error if not. */
/* Verify that delivering 'delta' more bytes to the application stays within
   the configured CURLOPT_MAXFILESIZE limit (0 means no limit). Returns
   CURLE_FILESIZE_EXCEEDED when the limit would be crossed, CURLE_OK
   otherwise. */
CURLcode Curl_pgrs_deliver_check(struct Curl_easy *data, size_t delta)
{
  curl_off_t room;

  if(!data->set.max_filesize)
    return CURLE_OK; /* no limit set */

  /* bytes still allowed before hitting the cap; may be <= 0 already */
  room = data->set.max_filesize - data->progress.deliver;
  if((curl_off_t)delta > room) {
    failf(data, "Would have exceeded max file size");
    return CURLE_FILESIZE_EXCEEDED;
  }
  return CURLE_OK;
}
/* Account for 'delta' bytes handed over to the application. With automatic
   decompression enabled, this total can exceed the number of bytes actually
   downloaded from the network. */
void Curl_pgrs_deliver_inc(struct Curl_easy *data, size_t delta)
{
  data->progress.deliver += (curl_off_t)delta;
}
void Curl_pgrs_download_inc(struct Curl_easy *data, size_t delta)
{
if(delta) {

View file

@ -50,7 +50,8 @@ int Curl_pgrsDone(struct Curl_easy *data);
void Curl_pgrsStartNow(struct Curl_easy *data);
void Curl_pgrsSetDownloadSize(struct Curl_easy *data, curl_off_t size);
void Curl_pgrsSetUploadSize(struct Curl_easy *data, curl_off_t size);
CURLcode Curl_pgrs_deliver_check(struct Curl_easy *data, size_t delta);
void Curl_pgrs_deliver_inc(struct Curl_easy *data, size_t delta);
void Curl_pgrs_download_inc(struct Curl_easy *data, size_t delta);
void Curl_pgrs_upload_inc(struct Curl_easy *data, size_t delta);
void Curl_pgrsSetUploadCounter(struct Curl_easy *data, curl_off_t size);

View file

@ -796,6 +796,7 @@ struct Progress {
force redraw at next call */
struct pgrs_dir ul;
struct pgrs_dir dl;
curl_off_t deliver; /* amount of data delivered to application */
curl_off_t current_speed; /* uses the currently fastest transfer */
curl_off_t earlydata_sent;

View file

@ -213,7 +213,7 @@ test1580 test1581 test1582 test1583 test1584 test1585 \
test1590 test1591 test1592 test1593 test1594 test1595 test1596 test1597 \
test1598 test1599 test1600 test1601 test1602 test1603 test1604 test1605 \
test1606 test1607 test1608 test1609 test1610 test1611 test1612 test1613 \
test1614 test1615 test1616 test1617 \
test1614 test1615 test1616 test1617 test1618 \
test1620 test1621 test1622 test1623 \
\
test1630 test1631 test1632 test1633 test1634 test1635 test1636 test1637 \

68
tests/data/test1618 Normal file
View file

@ -0,0 +1,68 @@
<?xml version="1.0" encoding="US-ASCII"?>
<testcase>
<info>
<keywords>
HTTP
HTTP GET
compressed
brotli
</keywords>
</info>
<reply>
<data crlf="headers" nonewline="yes">
HTTP/1.1 200 OK
Date: Mon, 29 Nov 2004 21:56:53 GMT
Vary: Accept-Encoding
Content-Encoding: br
Content-Length: 14
%hex[%81%fa%7f%0c%fc%13%00%f1%58%20%90%7b%18%00]hex%
</data>
<datacheck crlf="headers">
HTTP/1.1 200 OK
Date: Mon, 29 Nov 2004 21:56:53 GMT
Vary: Accept-Encoding
Content-Encoding: br
Content-Length: 14
</datacheck>
</reply>
# Client-side
<client>
<features>
brotli
</features>
<server>
http
</server>
<name>
HTTP GET brotli compression bomb
</name>
<command>
http://%HOSTIP:%HTTPPORT/%TESTNUMBER --compressed --max-filesize=1000
</command>
</client>
# Verify data after the test has been "shot"
<verify>
<strippart>
s/^Accept-Encoding: [a-zA-Z, ]*/Accept-Encoding: xxx/
</strippart>
<protocol crlf="headers">
GET /%TESTNUMBER HTTP/1.1
Host: %HOSTIP:%HTTPPORT
User-Agent: curl/%VERSION
Accept: */*
Accept-Encoding: xxx
</protocol>
<errorcode>
63
</errorcode>
</verify>
</testcase>