Skip to content

Commit

Permalink
Merge branch 'trunk' into 10125-template-recursion-error
Browse files Browse the repository at this point in the history
  • Loading branch information
glyph authored Apr 3, 2021
2 parents cba18c6 + 022659c commit 9a7b064
Show file tree
Hide file tree
Showing 9 changed files with 137 additions and 52 deletions.
7 changes: 4 additions & 3 deletions .coveragerc
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,14 @@
branch = True
parallel = True
source = twisted
omit =
.tox/*/tmp/_trial_temp/*

[paths]
source=
src/twisted
build/*/lib/python*/site-packages/twisted
build/*/Lib/site-packages/twisted
build/pypy*/site-packages/twisted
*/site-packages/twisted
*\site-packages\twisted

[report]
precision = 2
Expand Down
1 change: 0 additions & 1 deletion .coveralls.yml

This file was deleted.

37 changes: 30 additions & 7 deletions .github/workflows/test.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@ jobs:
runs-on: ubuntu-20.04
env:
TOXENV: "${{ matrix.tox-env }}"
CODECOV_OPTIONS: "-n 'lnx-${{ matrix.python-version }}-${{ matrix.tox-env }}${{ matrix.noipv6 }}'"
TRIAL_ARGS: "${{ matrix.trial-args }}"
name: ${{ matrix.python-version }}${{ matrix.noipv6 }}-${{ matrix.tox-env }}
strategy:
Expand All @@ -48,13 +47,14 @@ jobs:
# end to end functional test usage for non-distributed trial runs.
- python-version: 3.6.7
tox-env: nodeps-withcov-posix
trial-args: ''
trial-args: '-j 4'
# `noipv6` is created to make sure all is OK on an OS which doesn't
# have IPv6 available.
# Any supported Python version is OK for this job.
- python-version: 3.6
tox-env: alldeps-withcov-posix
noipv6: -noipv6
trial-args: '-j 4'
# On PYPY concurrent test jobs result in random failures so for now
# run non-distributed tests.
- python-version: pypy-3.6
Expand Down Expand Up @@ -99,18 +99,41 @@ jobs:
- uses: twisted/python-info-action@v1
- name: Install dependencies
run: |
python -m pip install --upgrade pip tox
python -m pip install --upgrade pip tox coveralls
tox --notest
- name: Test
run: |
python --version
tox -q
- name: Publish coverage
# We want to publish coverage even on failure.
if: contains(matrix['tox-env'], 'withcov') || failure()
run: tox -e coverage-prepare,codecov-push,coveralls-push
- name: Prepare coverage
if: always()
continue-on-error: true
run: |
# Sub-process coverage results are generated in separate files, so we
# combine them to get a unified coverage report for the local run.
# The XML is generated for use with 3rd-party tools like diff-cover.
python -m coverage combine
python -m coverage xml -o coverage.xml -i
python -m coverage report
- uses: codecov/codecov-action@v1
if: always()
continue-on-error: true
with:
files: coverage.xml
name: lnx-${{ matrix.python-version }}-${{ matrix.tox-env }}${{ matrix.noipv6 }}
fail_ci_if_error: true
functionalities: gcov,search

- name: Publish to Coveralls
if: always()
continue-on-error: true
run: |
python -m coveralls -v
env:
# NOTE(review): repo token committed in plaintext — rotate it and move it to a CI secret.
COVERALLS_REPO_TOKEN: 'JFDTIRUVOQ8jCM3zcajrZALlpKXyiXGAX'


pypi-publish:
Expand Down
46 changes: 40 additions & 6 deletions azure-pipelines/run_test_steps.yml
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,9 @@ parameters:
steps:
- checkout: self
clean: true
fetchDepth: 1
# A minimum depth of 2 is required, as the HEAD is the auto-merge commit
# and we need the last commit from the PR to report the coverage.
fetchDepth: 2

- task: UsePythonVersion@0
displayName: "Use Python ${{ parameters.pythonVersion }}"
Expand All @@ -45,7 +47,7 @@ steps:
python -c "import os; [ print(e,v) for (e,v) in os.environ.items() ]"
displayName: 'Get Python Information'

- script: 'python -m pip install -U pip setuptools tox'
- script: 'python -m pip install -U pip setuptools tox coverage coveralls'
displayName: 'Update pip & install tox'

- ${{ if eq(parameters.platform, 'macos') }}:
Expand All @@ -62,7 +64,39 @@ steps:
env:
TWISTED_REACTOR: ${{ parameters.windowsReactor }}

- bash: |
python -m tox -e coverage-prepare,coveralls-push
bash <(curl -s https://codecov.io/bash) -n "${{ parameters.platform }}-${{ parameters.pythonVersion }}-alldeps-withcov"
displayName: 'Report coverage'
- bash: |
# Sub-process coverage results are generated in separate files, so we
# combine them to get a unified coverage report for the local run.
# The XML is generated for use with 3rd-party tools like diff-cover.
python -m coverage combine
python -m coverage xml -o coverage.xml -i
python -m coverage report
displayName: 'Prepare coverage'
condition: always()
continueOnError: true

- bash: |
bash <(curl -s https://codecov.io/bash) -X search -X gcov -f coverage.xml -n "${{ parameters.platform }}-${{ parameters.pythonVersion }}"
displayName: 'Report to Codecov'
condition: always()
continueOnError: true

# We are using a third-party tool to upload to Coveralls.
# See https://github.com/TheKevJames/coveralls-python
# It has no support for Azure, so we pretend to be CircleCI so that
# we can differentiate this run from GitHub Actions.
# https://github.com/TheKevJames/coveralls-python/blob/04b6a2876e4e7ab2e8cf0778f88ce23f94679931/coveralls/api.py#L147
# https://github.com/TheKevJames/coveralls-python/blob/04b6a2876e4e7ab2e8cf0778f88ce23f94679931/coveralls/git.py#L31
- bash: |
export CI_BRANCH=$SYSTEM_PULLREQUEST_SOURCEBRANCH
export CI_PULL_REQUEST=$SYSTEM_PULLREQUEST_PULLREQUESTNUMBER
export CI_BUILD_URL=$SYSTEM_PULLREQUEST_SOURCEREPOSITORYURI/pull/$CI_PULL_REQUEST
python -m coveralls -v
displayName: 'Report to Coveralls'
condition: always()
continueOnError: true
env:
CIRCLE_WORKFLOW_ID: $(Build.BuildNumber)
CIRCLECI: 1
# NOTE(review): repo token committed in plaintext — rotate it and move it to a CI secret.
COVERALLS_REPO_TOKEN: 'JFDTIRUVOQ8jCM3zcajrZALlpKXyiXGAX'
Empty file.
35 changes: 33 additions & 2 deletions src/twisted/web/http.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,12 @@
pre-condition (for example, the condition represented by an I{If-None-Match}
header is present in the request) has failed. This should typically
indicate that the server has not taken the requested action.
"""
@var maxChunkSizeLineLength: Maximum allowable length of the CRLF-terminated
line that indicates the size of a chunk and the extensions associated with
it, as in the HTTP 1.1 chunked I{Transfer-Encoding} (RFC 7230 section 4.1).
This limits how much data may be buffered when decoding the line.
"""

__all__ = [
"SWITCHING",
Expand Down Expand Up @@ -1782,6 +1786,9 @@ def noMoreData(self):
raise _DataLoss()


maxChunkSizeLineLength = 1024


class _ChunkedTransferDecoder:
"""
Protocol for decoding I{chunked} Transfer-Encoding, as defined by RFC 7230,
Expand Down Expand Up @@ -1819,6 +1826,17 @@ class _ChunkedTransferDecoder:
read. For C{'BODY'}, the contents of a chunk are being read. For
C{'FINISHED'}, the last chunk has been completely read and no more
input is valid.
@ivar _buffer: Accumulated received data for the current state. At each
state transition this is truncated at the front so that index 0 is
where the next state shall begin.
@ivar _start: While in the C{'CHUNK_LENGTH'} state, tracks the index into
the buffer at which search for CRLF should resume. Resuming the search
at this position avoids doing quadratic work if the chunk length line
arrives over many calls to C{dataReceived}.
Not used in any other state.
"""

state = "CHUNK_LENGTH"
Expand All @@ -1841,10 +1859,23 @@ def _dataReceived_CHUNK_LENGTH(self) -> bool:
C{self._buffer}. C{False} when more data is required.
@raises _MalformedChunkedDataError: when the chunk size cannot be
decoded.
decoded or the length of the line exceeds L{maxChunkSizeLineLength}.
"""
eolIndex = self._buffer.find(b"\r\n", self._start)

if eolIndex >= maxChunkSizeLineLength or (
eolIndex == -1 and len(self._buffer) > maxChunkSizeLineLength
):
raise _MalformedChunkedDataError(
"Chunk size line exceeds maximum of {} bytes.".format(
maxChunkSizeLineLength
)
)

if eolIndex == -1:
# Restart the search upon receipt of more data at the start of the
# new data, minus one in case the last character of the buffer is
# CR.
self._start = len(self._buffer) - 1
return False

Expand Down
1 change: 1 addition & 0 deletions src/twisted/web/newsfragments/10144.bugfix
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
The server-side HTTP/1.1 chunking implementation now limits the length of the chunk size line (which includes chunk extensions) to twisted.web.http.maxChunkSizeLineLength — 1 KiB — so that it may not consume an unbounded amount of memory.
28 changes: 28 additions & 0 deletions src/twisted/web/test/test_http.py
Original file line number Diff line number Diff line change
Expand Up @@ -1289,6 +1289,34 @@ def test_extensions(self):
p.dataReceived(b"3; x-foo=bar\r\nabc\r\n")
self.assertEqual(L, [b"abc"])

def test_oversizedChunkSizeLine(self):
    """
    L{_ChunkedTransferDecoder.dataReceived} raises
    L{_MalformedChunkedDataError} when the chunk size line exceeds
    L{http.maxChunkSizeLineLength} (1024 bytes, i.e. 1 KiB — not 4 KiB).
    This applies even when the data has already been received and buffered
    so that behavior is consistent regardless of how bytes are framed.
    """
    p = http._ChunkedTransferDecoder(None, None)
    # A line of maxChunkSizeLineLength filler after "3;" is guaranteed to
    # push the CRLF past the limit, so the decoder must reject it.
    self.assertRaises(
        http._MalformedChunkedDataError,
        p.dataReceived,
        b"3;" + b"." * http.maxChunkSizeLineLength + b"\r\nabc\r\n",
    )

def test_oversizedChunkSizeLinePartial(self):
    """
    L{_ChunkedTransferDecoder.dataReceived} raises
    L{_MalformedChunkedDataError} when the amount of data buffered while
    looking for the end of the chunk size line exceeds
    L{http.maxChunkSizeLineLength} (1024 bytes, i.e. 1 KiB — not 4 KiB) so
    that buffering does not continue without bound.
    """
    p = http._ChunkedTransferDecoder(None, None)
    # One byte over the limit with no CRLF at all: the decoder must give
    # up rather than keep buffering while waiting for the line to end.
    self.assertRaises(
        http._MalformedChunkedDataError,
        p.dataReceived,
        b"." * (http.maxChunkSizeLineLength + 1),
    )

def test_malformedChunkSize(self):
"""
L{_ChunkedTransferDecoder.dataReceived} raises
Expand Down
34 changes: 1 addition & 33 deletions tox.ini
Original file line number Diff line number Diff line change
Expand Up @@ -15,15 +15,6 @@
;
; See README.rst for example tox commands.
;
; There are also various non-default environments used by the continuous
; integration system: the `codecov-push` and `coveralls-push` push the coverage
; results to codecov.io and coveralls.io, respectively. They should be called
; after running both some number of `-withcov` environments and also
; `coverage-prepare`.
;
; For compatibility with the current infrastructure, `codecov-publish`
; combines `coverage-prepare` and `codecov-push` into a single step.
;
[tox]
minversion=3.21.4
requires=
Expand Down Expand Up @@ -60,20 +51,10 @@ extras =

serial: serial

{withcov,coverage-prepare,codecov-publish}: dev
{withcov}: dev

;; dependencies that are not specified as extras
deps =
; We end up with a bit of deps duplication as we can't install
; coverage deps via `extras` as we run them with `skip_install`.
{coverage-prepare,codecov-publish}: coverage ~= 5.5

{codecov-push,codecov-publish}: codecov ~= 2.1

coveralls-push: coveralls
coveralls-push: PyYAML


lint: pre-commit

; All environment variables are passed.
Expand All @@ -90,9 +71,6 @@ setenv =
{windows,serial}: TWISTED_FORCE_SERIAL_TESTS = 1

skip_install =
coverage-prepare: True
codecov: True
coveralls: True
lint: True

commands =
Expand All @@ -112,16 +90,6 @@ commands =
withcov: coverage erase
withcov: coverage run -p --rcfile={toxinidir}/.coveragerc -m twisted.trial --temp-directory={envtmpdir}/_trial_temp --reactor={env:TWISTED_REACTOR:default} --reporter={env:TRIAL_REPORTER:verbose} {env:TRIAL_ARGS:} {posargs:twisted}

; Prepare coverage reports for publication.
{coverage-prepare,codecov-publish}: coverage combine
{coverage-prepare,codecov-publish}: coverage xml -o coverage.xml -i

; Publish coverage reports to codecov.
{codecov-push,codecov-publish}: codecov {env:CODECOV_OPTIONS:} -X search -X gcov -f coverage.xml

; Publish coverage reports to coveralls.
coveralls-push: coveralls

lint: pre-commit {posargs:run --all-files --show-diff-on-failure}

newsfragment: python {toxinidir}/bin/admin/check-newsfragment "{toxinidir}"
Expand Down

0 comments on commit 9a7b064

Please sign in to comment.