diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 8e35f4eead..14bfa86a89 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -25,8 +25,7 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, macos-latest, windows-latest] - python-version: [3.7, 3.8, 3.9, "3.10"] - pyopenssl: [0, 1] + python-version: [3.7, 3.8, 3.9, "3.10", "3.11", "3.12"] runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v3 @@ -39,12 +38,8 @@ jobs: python -m pip install --upgrade pip wheel python -m pip install --upgrade '.[dev]' python -m pytest --verbose ./httpie ./tests - env: - HTTPIE_TEST_WITH_PYOPENSSL: ${{ matrix.pyopenssl }} - name: Linux & Mac setup if: matrix.os != 'windows-latest' run: | make install make test - env: - HTTPIE_TEST_WITH_PYOPENSSL: ${{ matrix.pyopenssl }} diff --git a/CHANGELOG.md b/CHANGELOG.md index fd80c096f2..02c5545917 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,9 +3,21 @@ This document records all notable changes to [HTTPie](https://httpie.io). This project adheres to [Semantic Versioning](https://semver.org/). -## [3.3.0-dev](https://github.com/httpie/cli/compare/3.2.2...master) (unreleased) - -- Make it possible to [unset](https://httpie.io/docs/cli/default-request-headers) the `User-Agent`, `Accept-Encoding`, and `Host` request headers. ([#1502](https://github.com/httpie/cli/issues/1502)) +## [4.0.0.b1](https://github.com/httpie/cli/compare/3.2.2...master) (unreleased) + +- Make it possible to [unset](https://httpie.io/docs/cli/default-request-headers) the `User-Agent`, and `Accept-Encoding` headers. ([#1502](https://github.com/httpie/cli/issues/1502)) +- Dependency on requests was changed in favor of compatible niquests. ([#1531](https://github.com/httpie/cli/pull/1531)) +- Added support for HTTP/2, and HTTP/3 protocols. 
([#523](https://github.com/httpie/cli/issues/523)) ([#692](https://github.com/httpie/cli/issues/692)) ([#1531](https://github.com/httpie/cli/pull/1531)) +- Added request metadata for the TLS certificate, negotiated version with cipher, and its revocation status and the remote peer IP address. ([#1495](https://github.com/httpie/cli/issues/1495)) ([#1023](https://github.com/httpie/cli/issues/1023)) ([#826](https://github.com/httpie/cli/issues/826)) ([#1531](https://github.com/httpie/cli/pull/1531)) +- Added support to load the operating system trust store for the peer certificate validation. ([#480](https://github.com/httpie/cli/issues/480)) ([#1531](https://github.com/httpie/cli/pull/1531)) +- Added detailed timings in response metadata with DNS resolution, established, TLS handshake, and request sending delays. ([#1023](https://github.com/httpie/cli/issues/1023)) ([#1531](https://github.com/httpie/cli/pull/1531)) +- Added support for using an alternative DNS resolver using `--resolver`. DNS over HTTPS, DNS over TLS, DNS over QUIC, and DNS over UDP are accepted. ([#99](https://github.com/httpie/cli/issues/99)) ([#1531](https://github.com/httpie/cli/pull/1531)) +- Added support for binding to a specific network adapter with `--interface`. ([#1422](https://github.com/httpie/cli/issues/1422)) ([#1531](https://github.com/httpie/cli/pull/1531)) +- Added support for forcing either IPv4 or IPv6 to reach the remote HTTP server with `-6` or `-4`. ([#94](https://github.com/httpie/cli/issues/94)) ([#1531](https://github.com/httpie/cli/pull/1531)) +- Removed support for pyopenssl. ([#1531](https://github.com/httpie/cli/pull/1531)) +- Dropped dependency on `requests_toolbelt` in favor of directly including `MultipartEncoder` into HTTPie due to its direct dependency on requests. ([#1531](https://github.com/httpie/cli/pull/1531)) +- Dropped dependency on `multidict` in favor of implementing an internal one due to often missing pre-built wheels. 
([#1522](https://github.com/httpie/cli/issues/1522)) ([#1531](https://github.com/httpie/cli/pull/1531)) +- Fixed the case when multiple headers where concatenated in the response output. ([#1413](https://github.com/httpie/cli/issues/1413)) ([#1531](https://github.com/httpie/cli/pull/1531)) ## [3.2.2](https://github.com/httpie/cli/compare/3.2.1...3.2.2) (2022-05-19) diff --git a/docs/README.md b/docs/README.md index 3b12e98f4c..a55c179e30 100644 --- a/docs/README.md +++ b/docs/README.md @@ -1562,9 +1562,9 @@ be printed via several options: |---------------------------:|----------------------------------------------------------------------------------------------------| | `--headers, -h` | Only the response headers are printed | | `--body, -b` | Only the response body is printed | -| `--meta, -m` | Only the [response metadata](#response-meta) is printed | +| `--meta, -m` | Only the [request, response metadata](#response-meta) are printed | | `--verbose, -v` | Print the whole HTTP exchange (request and response). This option also enables `--all` (see below) | -| `--verbose --verbose, -vv` | Just like `-v`, but also include the response metadata. | +| `--verbose --verbose, -vv` | Just like `-v`, but also include the request, and response metadata. | | `--print, -p` | Selects parts of the HTTP exchange | | `--quiet, -q` | Don’t print anything to `stdout` and `stderr` | @@ -1573,13 +1573,13 @@ be printed via several options: All the other [output options](#output-options) are under the hood just shortcuts for the more powerful `--print, -p`. 
It accepts a string of characters each of which represents a specific part of the HTTP exchange: -| Character | Stands for | -|----------:|---------------------------------| -| `H` | request headers | -| `B` | request body | -| `h` | response headers | -| `b` | response body | -| `m` | [response meta](#response-meta) | +| Character | Stands for | +|----------:|------------------------------------------| +| `H` | request headers | +| `B` | request body | +| `h` | response headers | +| `b` | response body | +| `m` | [request, response meta](#response-meta) | Print request and response headers: @@ -1592,27 +1592,49 @@ $ http --print=Hh PUT pie.dev/put hello=world The response metadata section currently includes the total time elapsed. It’s the number of seconds between opening the network connection and downloading the last byte of response the body. -To _only_ show the response metadata, use `--meta, -m` (analogically to `--headers, -h` and `--body, -b`): +To _only_ show the request, and response metadata, use `--meta, -m` (analogically to `--headers, -h` and `--body, -b`): ```bash $ http --meta pie.dev/delay/1 ``` ```console -Elapsed time: 1.099171542s +Connected to: 2a06:98c1:3120::2 port 443 +Connection secured using: TLSv1.3 with AES-256-GCM-SHA384 +Server certificate: commonName="pie.dev"; DNS="*.pie.dev"; DNS="pie.dev" +Certificate validity: "Nov 11 01:14:24 2023 UTC" to "Feb 09 01:14:23 2024 UTC" +Issuer: countryName="US"; organizationName="Let's Encrypt"; commonName="E1" +Revocation status: Good + +Elapsed DNS: 0.11338s +Elapsed established connection: 3.8e-05s +Elapsed TLS handshake: 0.057503s +Elapsed emitting request: 0.000275s +Elapsed time: 0.292854214s ``` The [extra verbose `-vv` output](#extra-verbose-output) includes the meta section by default. You can also show it in combination with other parts of the exchange via [`--print=m`](#what-parts-of-the-http-exchange-should-be-printed). 
For example, here we print it together with the response headers: ```bash -$ http --print=hm pie.dev/get +$ https --print=hm pie.dev/get ``` ```http -HTTP/1.1 200 OK +Connected to: 2a06:98c1:3120::2 port 443 +Connection secured using: TLSv1.3 with AES-256-GCM-SHA384 +Server certificate: commonName="pie.dev"; DNS="*.pie.dev"; DNS="pie.dev" +Certificate validity: "Nov 11 01:14:24 2023 UTC" to "Feb 09 01:14:23 2024 UTC" +Issuer: countryName="US"; organizationName="Let's Encrypt"; commonName="E1" +Revocation status: Good + +HTTP/2 200 OK Content-Type: application/json -Elapsed time: 0.077538375s +Elapsed DNS: 0.11338s +Elapsed established connection: 3.8e-05s +Elapsed TLS handshake: 0.057503s +Elapsed emitting request: 0.000275s +Elapsed time: 0.292854214s ``` @@ -1626,19 +1648,19 @@ If you [use `--style` with one of the Pie themes](#colors-and-formatting), you `--verbose` can often be useful for debugging the request and generating documentation examples: ```bash -$ http --verbose PUT pie.dev/put hello=world -PUT /put HTTP/1.1 +$ https --verbose PUT pie.dev/put hello=world +PUT /put HTTP/2 Accept: application/json, */*;q=0.5 Accept-Encoding: gzip, deflate Content-Type: application/json Host: pie.dev -User-Agent: HTTPie/0.2.7dev +User-Agent: HTTPie/4.0.0 { "hello": "world" } -HTTP/1.1 200 OK +HTTP/2 200 OK Connection: keep-alive Content-Length: 477 Content-Type: application/json @@ -1652,10 +1674,10 @@ Server: gunicorn/0.13.4 #### Extra verbose output -If you run HTTPie with `-vv` or `--verbose --verbose`, then it would also display the [response metadata](#response-meta). +If you run HTTPie with `-vv` or `--verbose --verbose`, then it would also display the [response and request metadata](#response-meta). 
```bash -# Just like the above, but with additional columns like the total elapsed time +# Just like the above, but with additional columns like the total elapsed time, remote peer connection information $ http -vv pie.dev/get ``` @@ -1833,6 +1855,73 @@ $ http --chunked pie.dev/post @files/data.xml $ cat files/data.xml | http --chunked pie.dev/post ``` +## Disable HTTP/2, or HTTP/3 + +You can at your own discretion toggle on and off HTTP/2, and/or HTTP/3. + +```bash +$ https --disable-http2 PUT pie.dev/put hello=world +``` + +```bash +$ https --disable-http3 PUT pie.dev/put hello=world +``` + +## Force HTTP/3 + +In contrast to the previous section, you can force the HTTP/3 negotiation. + +```bash +$ https --http3 pie.dev/get +``` + +By default, HTTPie cannot negotiate HTTP/3 without a first HTTP/1.1, or HTTP/2 successful response unless the +remote host specified a DNS HTTPS record that indicates its support. + +The remote server yields its support for HTTP/3 in the Alt-Svc header; if present, HTTPie will issue +the successive requests via HTTP/3. You may use that argument in case the remote peer does not support +either HTTP/1.1 or HTTP/2. + +## Custom DNS resolver + +### Using DNS url + +You can specify one or many custom DNS resolvers using the `--resolver` flag. They will be tested in +the presented order to resolve the given hostname. + +```bash +$ https --resolver "doh+cloudflare://" pie.dev/get +``` + +To know more about DNS URLs and supported protocols, visit [Niquests documentation](https://niquests.readthedocs.io/en/stable/user/quickstart.html#dns-resolution). + +### Forcing hostname to resolve with a manual entry + +It is possible to fake DNS resolution using a virtual resolver. We'll make use of the `--resolver` flag +using the `in-memory` provider. + +```bash +$ https --resolver "in-memory://default/?hosts=pie.dev:10.10.4.1" pie.dev/get +``` + +In that example, `pie.dev` will resolve to `10.10.4.1`. The TLS HELLO / SNI will be set with host = `pie.dev`. 
+ +## Attach to a specific network adapter + +In order to bind emitted request from a specific network adapter you can use the `--interface` flag. + +```bash +$ https --interface 172.17.0.1 pie.dev/get +``` + +## Enforcing IPv4 or IPv6 + +Since HTTPie 4, you may pass the flags `--ipv4, -4` or `--ipv6, -6` to enforce connecting to an IPv4 or IPv6 address. + +```bash +$ https -4 pie.dev/get +``` + ## Compressed request body You can use the `--compress, -x` flag to instruct HTTPie to use `Content-Encoding: deflate` and compress the request data: @@ -2556,7 +2645,7 @@ HTTPie has the following community channels: Under the hood, HTTPie uses these two amazing libraries: -- [Requests](https://requests.readthedocs.io/en/latest/) — Python HTTP library for humans +- [Niquests](https://niquests.readthedocs.io/en/latest/) — Python HTTP library for humans - [Pygments](https://pygments.org/) — Python syntax highlighter #### HTTPie friends diff --git a/docs/contributors/fetch.py b/docs/contributors/fetch.py index ba94c28183..1ea1e8d05a 100644 --- a/docs/contributors/fetch.py +++ b/docs/contributors/fetch.py @@ -1,7 +1,7 @@ """ Generate the contributors database. -FIXME: replace `requests` calls with the HTTPie API, when available. +FIXME: replace `niquests` calls with the HTTPie API, when available. 
""" import json import os @@ -14,7 +14,7 @@ from time import sleep from typing import Any, Dict, Optional, Set -import requests +import niquests FullNames = Set[str] GitHubLogins = Set[str] @@ -197,10 +197,10 @@ def fetch(url: str, params: Optional[Dict[str, str]] = None) -> UserInfo: } for retry in range(1, 6): debug(f'[{retry}/5]', f'{url = }', f'{params = }') - with requests.get(url, params=params, headers=headers) as req: + with niquests.get(url, params=params, headers=headers) as req: try: req.raise_for_status() - except requests.exceptions.HTTPError as exc: + except niquests.exceptions.HTTPError as exc: if exc.response.status_code == 403: # 403 Client Error: rate limit exceeded for url: ... now = int(datetime.utcnow().timestamp()) diff --git a/httpie/__init__.py b/httpie/__init__.py index ffe0d35419..b1c1a48bcc 100644 --- a/httpie/__init__.py +++ b/httpie/__init__.py @@ -3,7 +3,7 @@ """ -__version__ = '3.2.2' -__date__ = '2022-05-06' +__version__ = '4.0.0.b1' +__date__ = '2024-01-01' __author__ = 'Jakub Roztocil' __licence__ = 'BSD' diff --git a/httpie/adapters.py b/httpie/adapters.py index 8e2dd7397f..fa6cfcec89 100644 --- a/httpie/adapters.py +++ b/httpie/adapters.py @@ -1,5 +1,5 @@ from httpie.cli.dicts import HTTPHeadersDict -from requests.adapters import HTTPAdapter +from niquests.adapters import HTTPAdapter class HTTPieHTTPAdapter(HTTPAdapter): diff --git a/httpie/cli/argparser.py b/httpie/cli/argparser.py index 9bf09b3b73..34dc401081 100644 --- a/httpie/cli/argparser.py +++ b/httpie/cli/argparser.py @@ -7,7 +7,7 @@ from textwrap import dedent from urllib.parse import urlsplit -from requests.utils import get_netrc_auth +from niquests.utils import get_netrc_auth from .argtypes import ( AuthCredentials, SSLCredentials, KeyValueArgType, diff --git a/httpie/cli/definition.py b/httpie/cli/definition.py index 843b29c9cf..bda9a67f95 100644 --- a/httpie/cli/definition.py +++ b/httpie/cli/definition.py @@ -726,6 +726,20 @@ def 
format_auth_help(auth_plugins_mapping, *, isolation_mode: bool = False): """, ) +network.add_argument( + '--ipv6', + '-6', + default=False, + action='store_true', + short_help='Force using a IPv6 address to reach the remote peer.' +) +network.add_argument( + '--ipv4', + '-4', + default=False, + action='store_true', + short_help='Force using a IPv4 address to reach the remote peer.' +) network.add_argument( '--follow', '-F', @@ -802,6 +816,52 @@ def format_auth_help(auth_plugins_mapping, *, isolation_mode: bool = False): 'The Transfer-Encoding header is set to chunked.' ) ) +network.add_argument( + "--disable-http2", + default=False, + action="store_true", + short_help="Disable the HTTP/2 protocol." +) +network.add_argument( + "--disable-http3", + default=False, + action="store_true", + short_help="Disable the HTTP/3 over QUIC protocol." +) +network.add_argument( + "--http3", + default=False, + dest="force_http3", + action="store_true", + short_help="Use the HTTP/3 protocol for the request.", + help=""" + By default, HTTPie cannot negotiate HTTP/3 without a first HTTP/1.1, or HTTP/2 successful response unless the + remote host specified a DNS HTTPS record that indicate its support. + + The remote server yield its support for HTTP/3 in the Alt-Svc header, if present HTTPie will issue + the successive requests via HTTP/3. You may use that argument in case the remote peer does not support + either HTTP/1.1 or HTTP/2. + + """ +) +network.add_argument( + "--resolver", + default=[], + action='append', + short_help="Specify a DNS resolver url to resolve hostname.", + help=""" + By default, HTTPie use the system DNS through Python standard library. + You can specify an alternative DNS server to be used. (e.g. doh://cloudflare-dns.com or doh://google.dns). + You can specify multiple resolvers with different protocols. The environment + variable $NIQUESTS_DNS_URL is supported as well. 
+ + """ +) +network.add_argument( + "--interface", + default=None, + short_help="Bind to a specific network interface.", +) ####################################################################### # SSL diff --git a/httpie/cli/dicts.py b/httpie/cli/dicts.py index 6b6d4736d2..53faa234a0 100644 --- a/httpie/cli/dicts.py +++ b/httpie/cli/dicts.py @@ -1,49 +1,168 @@ +from __future__ import annotations + +import typing from collections import OrderedDict +from typing import Union, TypeVar -from multidict import MultiDict, CIMultiDict +T = TypeVar("T") -class BaseMultiDict(MultiDict): +class BaseMultiDictKeyView: """ - Base class for all MultiDicts. + Basic key view for BaseMultiDict. """ + def __init__(self, o: BaseMultiDict) -> None: + self._container = o + + def __iter__(self): + for key in self._container: + yield key + + def __contains__(self, item: str) -> bool: + return item in self._container -class HTTPHeadersDict(CIMultiDict, BaseMultiDict): + +class BaseMultiDict(typing.MutableMapping[str, Union[str, bytes]]): """ - Headers are case-insensitive and multiple values are supported - through the `add()` API. + This follow the multidict (case-insensitive) implementation but does not implement it fully. + We scoped this class according to our needs. In the future we should be able to refactor + HTTPie in order to use either kiss_headers.Headers or urllib3.HTTPHeaderDict. + The main constraints are: We use bytes sometime in values, and relly on multidict specific behaviors. """ - def add(self, key, value): - """ - Add or update a new header. + def __init__(self, d: BaseMultiDict | typing.MutableMapping[str, str | bytes] | None = None, **kwargs: str | bytes) -> None: + super().__init__() + self._container: typing.MutableMapping[str, list[tuple[str, str | bytes]] | str] = {} - If the given `value` is `None`, then all the previous - values will be overwritten and the value will be set - to `None`. 
- """ - if value is None: - self[key] = value + if d is not None: + self.update(d) + + for key, value in kwargs.items(): + self.add(key, value) + + def items(self) -> typing.Iterator[str, str | bytes | None]: + for key_i in self._container: + + if isinstance(self._container[key_i], str): + yield key_i, None + continue + + for original_key, value in self._container[key_i]: + yield original_key, value + + def keys(self) -> BaseMultiDictKeyView: + return BaseMultiDictKeyView(self) + + def copy(self: T) -> T: + return BaseMultiDict(self) + + def __delitem__(self, __key: str) -> None: + del self._container[__key.lower()] + + def __len__(self) -> int: + return len(self._container) + + def __iter__(self) -> typing.Iterator[str]: + for key_i in self._container: + if isinstance(self._container[key_i], list): + yield self._container[key_i][0][0] + else: + yield self._container[key_i] + + def __contains__(self, item: str) -> bool: + return item.lower() in self._container + + def update(self, __m, **kwargs) -> None: + if hasattr(__m, "items"): + for k in __m: + self[k] = None + for k, v in __m.items(): + self.add(k, v) + else: + for k, v in __m: + self.add(k, v) + + def getlist(self, key: str) -> list[str | bytes]: + key_lower = key.lower() + values = self._container[key_lower] + + if isinstance(values, str): + return [] + + return [_[-1] for _ in self._container[key_lower]] + + def __setitem__(self, key: str | bytes, val: str | bytes | None) -> None: + if isinstance(key, bytes): + key = key.decode("latin-1") + if val is not None: + self._container[key.lower()] = [(key, val,)] + else: + self._container[key.lower()] = key + + def __getitem__(self, key: str) -> str | None: + values = self._container[key.lower()] + if isinstance(values, str): return None + return ",".join([_[-1].decode() if isinstance(_[-1], bytes) else _[-1] for _ in values]) + + def popone(self, key: str) -> str | bytes: + key_lower = key.lower() + + val = self._container[key_lower].pop() + + if not 
self._container[key_lower]: + self._container[key_lower] = key + + return val[-1] + + def popall(self, key: str) -> list[str]: + key_lower = key.lower() + values = self._container[key_lower] + + self._container[key_lower] = values[0][0] + + return [_[-1] for _ in values] + + def add(self, key: str | bytes, val: str | bytes | None) -> None: + if isinstance(key, bytes): + key = key.decode("latin-1") - # If the previous value for the given header is `None` - # then discard it since we are explicitly giving a new - # value for it. - if key in self and self.getone(key) is None: - self.popone(key) + key_lower = key.lower() - super().add(key, value) + if val is None: + self._container[key_lower] = key + return - def remove_item(self, key, value): + if key_lower not in self._container or isinstance(self._container[key_lower], str): + self._container[key_lower] = [] + + self._container[key_lower].append((key, val,)) + + def remove_item(self, key: str, value: str | bytes) -> None: """ Remove a (key, value) pair from the dict. """ - existing_values = self.popall(key) - existing_values.remove(value) + key_lower = key.lower() - for value in existing_values: - self.add(key, value) + to_remove = None + + for k, v in self._container[key_lower]: + if (key == k or key == key_lower) and v == value: + to_remove = (k, v) + break + + if to_remove: + self._container[key_lower].remove(to_remove) + if not self._container[key_lower]: + del self._container[key_lower] + + +class HTTPHeadersDict(BaseMultiDict): + """ + Headers are case-insensitive and multiple values are supported + through the `add()` API. 
+ """ class RequestJSONDataDict(OrderedDict): diff --git a/httpie/client.py b/httpie/client.py index a1da284a7c..1ed3f2fe96 100644 --- a/httpie/client.py +++ b/httpie/client.py @@ -1,16 +1,22 @@ import argparse -import http.client import json import sys -from contextlib import contextmanager +import typing from time import monotonic from typing import Any, Dict, Callable, Iterable from urllib.parse import urlparse, urlunparse -import requests -# noinspection PyPackageRequirements -import urllib3 -from urllib3.util import SKIP_HEADER, SKIPPABLE_HEADERS +import niquests +from niquests._compat import HAS_LEGACY_URLLIB3 + +if not HAS_LEGACY_URLLIB3: + # noinspection PyPackageRequirements + import urllib3 + from urllib3.util import SKIP_HEADER, SKIPPABLE_HEADERS, parse_url +else: + # noinspection PyPackageRequirements + import urllib3_future as urllib3 + from urllib3_future.util import SKIP_HEADER, SKIPPABLE_HEADERS, parse_url from . import __version__ from .adapters import HTTPieHTTPAdapter @@ -44,6 +50,7 @@ def collect_messages( env: Environment, args: argparse.Namespace, request_body_read_callback: Callable[[bytes], None] = None, + prepared_request_readiness: Callable[[niquests.PreparedRequest], None] = None, ) -> Iterable[RequestsMessage]: httpie_session = None httpie_session_headers = None @@ -65,12 +72,33 @@ def collect_messages( ) send_kwargs = make_send_kwargs(args) send_kwargs_mergeable_from_env = make_send_kwargs_mergeable_from_env(args) + + source_address = None + + if args.interface: + source_address = (args.interface, 0) + + print(args) + requests_session = build_requests_session( ssl_version=args.ssl_version, ciphers=args.ciphers, - verify=bool(send_kwargs_mergeable_from_env['verify']) + verify=bool(send_kwargs_mergeable_from_env['verify']), + disable_http2=args.disable_http2, + disable_http3=args.disable_http3, + resolver=args.resolver or None, + disable_ipv6=args.ipv4, + disable_ipv4=args.ipv6, + source_address=source_address, ) + if args.disable_http3 
is False and args.force_http3 is True: + url = parse_url(args.url) + requests_session.quic_cache_layer.add_domain( + url.host, + url.port or 443, + ) + if httpie_session: httpie_session.update_headers(request_kwargs['headers']) requests_session.cookies = httpie_session.cookies @@ -88,7 +116,12 @@ def collect_messages( # TODO: reflect the split between request and send kwargs. dump_request(request_kwargs) - request = requests.Request(**request_kwargs) + hooks = None + + if prepared_request_readiness: + hooks = {"pre_send": [prepared_request_readiness]} + + request = niquests.Request(**request_kwargs, hooks=hooks) prepared_request = requests_session.prepare_request(request) transform_headers(request, prepared_request) if args.path_as_is: @@ -110,12 +143,13 @@ def collect_messages( url=prepared_request.url, **send_kwargs_mergeable_from_env, ) - with max_headers(args.max_headers): - response = requests_session.send( - request=prepared_request, - **send_kwargs_merged, - **send_kwargs, - ) + response = requests_session.send( + request=prepared_request, + **send_kwargs_merged, + **send_kwargs, + ) + if args.max_headers and len(response.headers) > args.max_headers: + raise niquests.ConnectionError(f"got more than {args.max_headers} headers") response._httpie_headers_parsed_at = monotonic() expired_cookies += get_expired_cookies( response.headers.get('Set-Cookie', '') @@ -124,7 +158,7 @@ def collect_messages( response_count += 1 if response.next: if args.max_redirects and response_count == args.max_redirects: - raise requests.TooManyRedirects + raise niquests.TooManyRedirects if args.follow: prepared_request = response.next if args.all: @@ -140,28 +174,26 @@ def collect_messages( httpie_session.save() -# noinspection PyProtectedMember -@contextmanager -def max_headers(limit): - # - # noinspection PyUnresolvedReferences - orig = http.client._MAXHEADERS - http.client._MAXHEADERS = limit or float('Inf') - try: - yield - finally: - http.client._MAXHEADERS = orig - - def 
build_requests_session( verify: bool, ssl_version: str = None, ciphers: str = None, -) -> requests.Session: - requests_session = requests.Session() + disable_http2: bool = False, + disable_http3: bool = False, + resolver: typing.List[str] = None, + disable_ipv4: bool = False, + disable_ipv6: bool = False, + source_address: typing.Tuple[str, int] = None, +) -> niquests.Session: + requests_session = niquests.Session() # Install our adapter. - http_adapter = HTTPieHTTPAdapter() + http_adapter = HTTPieHTTPAdapter( + resolver=resolver, + disable_ipv4=disable_ipv4, + disable_ipv6=disable_ipv6, + source_address=source_address, + ) https_adapter = HTTPieHTTPSAdapter( ciphers=ciphers, verify=verify, @@ -169,6 +201,13 @@ def build_requests_session( AVAILABLE_SSL_VERSION_ARG_MAPPING[ssl_version] if ssl_version else None ), + disable_http2=disable_http2, + disable_http3=disable_http3, + resolver=resolver, + disable_ipv4=disable_ipv4, + disable_ipv6=disable_ipv6, + source_address=source_address, + quic_cache_layer=requests_session.quic_cache_layer, ) requests_session.mount('http://', http_adapter) requests_session.mount('https://', https_adapter) @@ -186,7 +225,7 @@ def build_requests_session( def dump_request(kwargs: dict): sys.stderr.write( - f'\n>>> requests.request(**{repr_dict(kwargs)})\n\n') + f'\n>>> niquests.request(**{repr_dict(kwargs)})\n\n') def finalize_headers(headers: HTTPHeadersDict) -> HTTPHeadersDict: @@ -210,13 +249,13 @@ def finalize_headers(headers: HTTPHeadersDict) -> HTTPHeadersDict: def transform_headers( - request: requests.Request, - prepared_request: requests.PreparedRequest + request: niquests.Request, + prepared_request: niquests.PreparedRequest ) -> None: """Apply various transformations on top of the `prepared_requests`'s headers to change the request prepreation behavior.""" - # Remove 'Content-Length' when it is misplaced by requests. + # Remove 'Content-Length' when it is misplaced by niquests. 
if ( prepared_request.method in IGNORE_CONTENT_LENGTH_METHODS and prepared_request.headers.get('Content-Length') == '0' @@ -232,7 +271,7 @@ def transform_headers( def apply_missing_repeated_headers( original_headers: HTTPHeadersDict, - prepared_request: requests.PreparedRequest + prepared_request: niquests.PreparedRequest ) -> None: """Update the given `prepared_request`'s headers with the original ones. This allows the requests to be prepared as usual, and then later @@ -290,12 +329,6 @@ def make_send_kwargs_mergeable_from_env(args: argparse.Namespace) -> dict: if args.cert: cert = args.cert if args.cert_key: - # Having a client certificate key passphrase is not supported - # by requests. So we are using our own transportation structure - # which is compatible with their format (a tuple of minimum two - # items). - # - # See: https://github.com/psf/requests/issues/2519 cert = HTTPieCertificate(cert, args.cert_key, args.cert_key_pass.value) return { @@ -329,7 +362,7 @@ def make_request_kwargs( request_body_read_callback=lambda chunk: chunk ) -> dict: """ - Translate our `args` into `requests.Request` keyword arguments. + Translate our `args` into `niquests.Request` keyword arguments. """ files = args.files diff --git a/httpie/context.py b/httpie/context.py index 2a54f46916..b853339963 100644 --- a/httpie/context.py +++ b/httpie/context.py @@ -99,8 +99,9 @@ def __init__(self, devnull=None, **kwargs): assert all(hasattr(type(self), attr) for attr in kwargs.keys()) self.__dict__.update(**kwargs) - # The original STDERR unaffected by --quiet’ing. + # The original STDERR/STDOUT unaffected by --quiet’ing. 
self._orig_stderr = self.stderr + self._orig_stdout = self.stdout self._devnull = devnull # Keyword arguments > stream.encoding > default UTF-8 diff --git a/httpie/core.py b/httpie/core.py index d0c26dcbcc..9505185fef 100644 --- a/httpie/core.py +++ b/httpie/core.py @@ -5,9 +5,9 @@ import socket from typing import List, Optional, Union, Callable -import requests +import niquests from pygments import __version__ as pygments_version -from requests import __version__ as requests_version +from niquests import __version__ as requests_version from . import __version__ as httpie_version from .cli.constants import OUT_REQ_BODY @@ -112,16 +112,16 @@ def handle_generic_error(e, annotation=None): if include_traceback: raise exit_status = ExitStatus.ERROR - except requests.Timeout: + except niquests.Timeout: exit_status = ExitStatus.ERROR_TIMEOUT env.log_error(f'Request timed out ({parsed_args.timeout}s).') - except requests.TooManyRedirects: + except niquests.TooManyRedirects: exit_status = ExitStatus.ERROR_TOO_MANY_REDIRECTS env.log_error( f'Too many redirects' f' (--max-redirects={parsed_args.max_redirects}).' ) - except requests.exceptions.ConnectionError as exc: + except niquests.exceptions.ConnectionError as exc: annotation = None original_exc = unwrap_context(exc) if isinstance(original_exc, socket.gaierror): @@ -175,8 +175,8 @@ def program(args: argparse.Namespace, env: Environment) -> ExitStatus: # TODO: Refactor and drastically simplify, especially so that the separator logic is elsewhere. exit_status = ExitStatus.SUCCESS downloader = None - initial_request: Optional[requests.PreparedRequest] = None - final_response: Optional[requests.Response] = None + initial_request: Optional[niquests.PreparedRequest] = None + final_response: Optional[niquests.Response] = None processing_options = ProcessingOptions.from_raw_args(args) def separate(): @@ -204,8 +204,35 @@ def request_body_read_callback(chunk: bytes): args.follow = True # --download implies --follow. 
downloader = Downloader(env, output_file=args.output_file, resume=args.download_resume) downloader.pre_request(args.headers) - messages = collect_messages(env, args=args, - request_body_read_callback=request_body_read_callback) + + def prepared_request_readiness(pr): + + oo = OutputOptions.from_message( + pr, + args.output_options + ) + + oo = oo._replace( + body=isinstance(pr.body, (str, bytes)) and (args.verbose or oo.body) + ) + + write_message( + requests_message=pr, + env=env, + output_options=oo, + processing_options=processing_options + ) + + if oo.body > 1: + separate() + + messages = collect_messages( + env, + args=args, + request_body_read_callback=request_body_read_callback, + prepared_request_readiness=prepared_request_readiness + ) + force_separator = False prev_with_body = False @@ -225,6 +252,9 @@ def request_body_read_callback(chunk: bytes): is_streamed_upload = not isinstance(message.body, (str, bytes)) do_write_body = not is_streamed_upload force_separator = is_streamed_upload and env.stdout_isatty + if message.conn_info is None and not args.offline: + prev_with_body = output_options.body + continue else: final_response = message if args.check_status or downloader: @@ -261,6 +291,11 @@ def request_body_read_callback(chunk: bytes): return exit_status finally: + if args.data and hasattr(args.data, "close"): + args.data.close() + if args.files and hasattr(args.files, "items"): + for fd in args.files.items(): + fd[1][1].close() if downloader and not downloader.finished: downloader.failed() if args.output_file and args.output_file_specified: @@ -270,7 +305,7 @@ def request_body_read_callback(chunk: bytes): def print_debug_info(env: Environment): env.stderr.writelines([ f'HTTPie {httpie_version}\n', - f'Requests {requests_version}\n', + f'Niquests {requests_version}\n', f'Pygments {pygments_version}\n', f'Python {sys.version}\n{sys.executable}\n', f'{platform.system()} {platform.release()}', diff --git a/httpie/downloads.py b/httpie/downloads.py index 
9c4b895e6f..7a86486334 100644 --- a/httpie/downloads.py +++ b/httpie/downloads.py @@ -10,7 +10,7 @@ from typing import IO, Optional, Tuple from urllib.parse import urlsplit -import requests +import niquests from .models import HTTPResponse, OutputOptions from .output.streams import RawStream @@ -202,7 +202,7 @@ def pre_request(self, request_headers: dict): def start( self, initial_url: str, - final_response: requests.Response + final_response: niquests.Response ) -> Tuple[RawStream, IO]: """ Initiate and return a stream for `response` body with progress @@ -288,7 +288,7 @@ def chunk_downloaded(self, chunk: bytes): @staticmethod def _get_output_file_from_response( initial_url: str, - final_response: requests.Response, + final_response: niquests.Response, ) -> IO: # Output file not specified. Pick a name that doesn't exist yet. filename = None diff --git a/httpie/internal/encoder.py b/httpie/internal/encoder.py new file mode 100644 index 0000000000..5da4dd8119 --- /dev/null +++ b/httpie/internal/encoder.py @@ -0,0 +1,472 @@ +""" +This program is part of the requests_toolbelt package. + +Copyright 2014 Ian Cordasco, Cory Benfield + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" + +import contextlib +import io +import os +from uuid import uuid4 + +from niquests._compat import HAS_LEGACY_URLLIB3 + +if HAS_LEGACY_URLLIB3: + from urllib3_future.fields import RequestField +else: + from urllib3.fields import RequestField + + +class MultipartEncoder(object): + """ + The ``MultipartEncoder`` object is a generic interface to the engine that + will create a ``multipart/form-data`` body for you. + + The basic usage is: + + .. code-block:: python + + import requests + from requests_toolbelt import MultipartEncoder + + encoder = MultipartEncoder({'field': 'value', + 'other_field': 'other_value'}) + r = requests.post('https://httpbin.org/post', data=encoder, + headers={'Content-Type': encoder.content_type}) + + If you do not need to take advantage of streaming the post body, you can + also do: + + .. code-block:: python + + r = requests.post('https://httpbin.org/post', + data=encoder.to_string(), + headers={'Content-Type': encoder.content_type}) + + If you want the encoder to use a specific order, you can use an + OrderedDict or more simply, a list of tuples: + + .. code-block:: python + + encoder = MultipartEncoder([('field', 'value'), + ('other_field', 'other_value')]) + + .. versionchanged:: 0.4.0 + + You can also provide tuples as part values as you would provide them to + requests' ``files`` parameter. + + .. code-block:: python + + encoder = MultipartEncoder({ + 'field': ('file_name', b'{"a": "b"}', 'application/json', + {'X-My-Header': 'my-value'}) + }) + + .. warning:: + + This object will end up directly in :mod:`httplib`. Currently, + :mod:`httplib` has a hard-coded read size of **8192 bytes**. This + means that it will loop until the file has been read and your upload + could take a while. This is **not** a bug in requests. A feature is + being considered for this object to allow you, the user, to specify + what size should be returned on a read. If you have opinions on this, + please weigh in on `this issue`_. + + ..
_this issue: + https://github.com/requests/toolbelt/issues/75 + + """ + + def __init__(self, fields, boundary=None, encoding='utf-8'): + #: Boundary value either passed in by the user or created + self.boundary_value = boundary or uuid4().hex + + # Computed boundary + self.boundary = '--{}'.format(self.boundary_value) + + #: Encoding of the data being passed in + self.encoding = encoding + + # Pre-encoded boundary + self._encoded_boundary = b''.join([ + self.boundary.encode(self.encoding), + '\r\n'.encode(self.encoding) + ]) + + #: Fields provided by the user + self.fields = fields + + #: Whether or not the encoder is finished + self.finished = False + + #: Pre-computed parts of the upload + self.parts = [] + + # Pre-computed parts iterator + self._iter_parts = iter([]) + + # The part we're currently working with + self._current_part = None + + # Cached computation of the body's length + self._len = None + + # Our buffer + self._buffer = CustomBytesIO(encoding=encoding) + + # Pre-compute each part's headers + self._prepare_parts() + + # Load boundary into buffer + self._write_boundary() + + @property + def len(self): + """Length of the multipart/form-data body. + + requests will first attempt to get the length of the body by calling + ``len(body)`` and then by checking for the ``len`` attribute. + + On 32-bit systems, the ``__len__`` method cannot return anything + larger than an integer (in C) can hold. If the total size of the body + is even slightly larger than 4GB users will see an OverflowError. This + manifested itself in `bug #80`_. + + As such, we now calculate the length lazily as a property. + + .. _bug #80: + https://github.com/requests/toolbelt/issues/80 + """ + # If _len isn't already calculated, calculate, return, and set it + return self._len or self._calculate_length() + + def __repr__(self): + return '<MultipartEncoder: {!r}>'.format(self.fields) + + def _calculate_length(self): + """ + This uses the parts to calculate the length of the body.
+ + This returns the calculated length so __len__ can be lazy. + """ + boundary_len = len(self.boundary) # Length of --{boundary} + # boundary length + header length + body length + len('\r\n') * 2 + + self._len = sum( + (boundary_len + total_len(p) + 4) for p in self.parts + ) + boundary_len + 4 + + return self._len + + def _calculate_load_amount(self, read_size): + """This calculates how many bytes need to be added to the buffer. + + When a consumer read's ``x`` from the buffer, there are two cases to + satisfy: + + 1. Enough data in the buffer to return the requested amount + 2. Not enough data + + This function uses the amount of unread bytes in the buffer and + determines how much the Encoder has to load before it can return the + requested amount of bytes. + + :param int read_size: the number of bytes the consumer requests + :returns: int -- the number of bytes that must be loaded into the + buffer before the read can be satisfied. This will be strictly + non-negative + """ + amount = read_size - total_len(self._buffer) + return amount if amount > 0 else 0 + + def _load(self, amount): + """Load ``amount`` number of bytes into the buffer.""" + self._buffer.smart_truncate() + part = self._current_part or self._next_part() + while amount == -1 or amount > 0: + written = 0 + if part and not part.bytes_left_to_write(): + written += self._write(b'\r\n') + written += self._write_boundary() + part = self._next_part() + + if not part: + written += self._write_closing_boundary() + self.finished = True + break + + written += part.write_to(self._buffer, amount) + + if amount != -1: + amount -= written + + def _next_part(self): + try: + p = self._current_part = next(self._iter_parts) + except StopIteration: + p = None + return p + + def _iter_fields(self): + _fields = self.fields + if hasattr(self.fields, 'items'): + _fields = list(self.fields.items()) + for k, v in _fields: + file_name = None + file_type = None + file_headers = None + if isinstance(v, (list, tuple)): + 
if len(v) == 2: + file_name, file_pointer = v + elif len(v) == 3: + file_name, file_pointer, file_type = v + else: + file_name, file_pointer, file_type, file_headers = v + else: + file_pointer = v + + field = RequestField( + name=k, + data=file_pointer, + filename=file_name, + headers=file_headers + ) + + field.make_multipart(content_type=file_type) + yield field + + def _prepare_parts(self): + """This uses the fields provided by the user and creates Part objects. + + It populates the `parts` attribute and uses that to create a + generator for iteration. + """ + enc = self.encoding + self.parts = [Part.from_field(f, enc) for f in self._iter_fields()] + self._iter_parts = iter(self.parts) + + def _write(self, bytes_to_write): + """Write the bytes to the end of the buffer. + + :param bytes bytes_to_write: byte-string (or bytearray) to append to + the buffer + :returns: int -- the number of bytes written + """ + return self._buffer.append(bytes_to_write) + + def _write_boundary(self): + """Write the boundary to the end of the buffer.""" + return self._write(self._encoded_boundary) + + def _write_closing_boundary(self): + """Write the bytes necessary to finish a multipart/form-data body.""" + with reset(self._buffer): + self._buffer.seek(-2, 2) + self._buffer.write(b'--\r\n') + return 2 + + def _write_headers(self, headers): + """Write the current part's headers to the buffer.""" + return self._write(headers.encode(self.encoding) if isinstance(headers, str) else headers) + + @property + def content_type(self): + return str( + 'multipart/form-data; boundary={}'.format(self.boundary_value) + ) + + def to_string(self): + """Return the entirety of the data in the encoder. + + .. note:: + + This simply reads all of the data it can. If you have started + streaming or reading data from the encoder, this method will only + return whatever data is left in the encoder. + + .. note:: + + This method affects the internal state of the encoder. 
Calling + this method will exhaust the encoder. + + :returns: the multipart message + :rtype: bytes + """ + + return self.read() + + def read(self, size=-1): + """Read data from the streaming encoder. + + :param int size: (optional), If provided, ``read`` will return exactly + that many bytes. If it is not provided, it will return the + remaining bytes. + :returns: bytes + """ + if self.finished: + return self._buffer.read(size) + + bytes_to_load = size + if bytes_to_load != -1 and bytes_to_load is not None: + bytes_to_load = self._calculate_load_amount(int(size)) + + self._load(bytes_to_load) + return self._buffer.read(size) + + +class Part(object): + def __init__(self, headers, body): + self.headers = headers + self.body = body + self.headers_unread = True + self.len = len(self.headers) + total_len(self.body) + + @classmethod + def from_field(cls, field, encoding): + """Create a part from a Request Field generated by urllib3.""" + headers = field.render_headers().encode(encoding) + body = coerce_data(field.data, encoding) + return cls(headers, body) + + def bytes_left_to_write(self): + """Determine if there are bytes left to write. + + :returns: bool -- ``True`` if there are bytes left to write, otherwise + ``False`` + """ + to_read = 0 + if self.headers_unread: + to_read += len(self.headers) + + return (to_read + total_len(self.body)) > 0 + + def write_to(self, buffer, size): + """Write the requested amount of bytes to the buffer provided. + + The number of bytes written may exceed size on the first read since we + load the headers ambitiously. 
+ + :param CustomBytesIO buffer: buffer we want to write bytes to + :param int size: number of bytes requested to be written to the buffer + :returns: int -- number of bytes actually written + """ + written = 0 + if self.headers_unread: + written += buffer.append(self.headers) + self.headers_unread = False + + while total_len(self.body) > 0 and (size == -1 or written < size): + amount_to_read = size + if size != -1: + amount_to_read = size - written + written += buffer.append(self.body.read(amount_to_read)) + + return written + + +class CustomBytesIO(io.BytesIO): + def __init__(self, buffer=None, encoding='utf-8'): + buffer = buffer.encode(encoding) if buffer else b"" + super(CustomBytesIO, self).__init__(buffer) + + def _get_end(self): + current_pos = self.tell() + self.seek(0, 2) + length = self.tell() + self.seek(current_pos, 0) + return length + + @property + def len(self): + length = self._get_end() + return length - self.tell() + + def append(self, bytes): + with reset(self): + written = self.write(bytes) + return written + + def smart_truncate(self): + to_be_read = total_len(self) + already_read = self._get_end() - to_be_read + + if already_read >= to_be_read: + old_bytes = self.read() + self.seek(0, 0) + self.truncate() + self.write(old_bytes) + self.seek(0, 0) # We want to be at the beginning + + +class FileWrapper(object): + def __init__(self, file_object): + self.fd = file_object + + @property + def len(self): + return total_len(self.fd) - self.fd.tell() + + def read(self, length=-1): + return self.fd.read(length) + + +@contextlib.contextmanager +def reset(buffer): + """Keep track of the buffer's current position and write to the end. + + This is a context manager meant to be used when adding data to the buffer. + It eliminates the need for every function to be concerned with the + position of the cursor in the buffer. 
+ """ + original_position = buffer.tell() + buffer.seek(0, 2) + yield + buffer.seek(original_position, 0) + + +def coerce_data(data, encoding): + """Ensure that every object's __len__ behaves uniformly.""" + if not isinstance(data, CustomBytesIO): + if hasattr(data, 'getvalue'): + return CustomBytesIO(data.getvalue(), encoding) + + if hasattr(data, 'fileno'): + return FileWrapper(data) + + if not hasattr(data, 'read'): + return CustomBytesIO(data, encoding) + + return data + + +def total_len(o): + if hasattr(o, '__len__'): + return len(o) + + if hasattr(o, 'len'): + return o.len + + if hasattr(o, 'fileno'): + try: + fileno = o.fileno() + except io.UnsupportedOperation: + pass + else: + return os.fstat(fileno).st_size + + if hasattr(o, 'getvalue'): + # e.g. BytesIO, cStringIO.StringIO + return len(o.getvalue()) diff --git a/httpie/internal/update_warnings.py b/httpie/internal/update_warnings.py index a4b80d46b5..c684bb80ad 100644 --- a/httpie/internal/update_warnings.py +++ b/httpie/internal/update_warnings.py @@ -4,7 +4,7 @@ from pathlib import Path from typing import Any, Optional, Callable -import requests +import niquests import httpie from httpie.context import Environment, LogLevel @@ -41,7 +41,7 @@ def _fetch_updates(env: Environment) -> str: file = env.config.version_info_file data = _read_data_error_free(file) - response = requests.get(PACKAGE_INDEX_LINK, verify=False) + response = niquests.get(PACKAGE_INDEX_LINK, verify=False) response.raise_for_status() data.setdefault('last_warned_date', None) diff --git a/httpie/models.py b/httpie/models.py index a0a68c8ddc..142fd69710 100644 --- a/httpie/models.py +++ b/httpie/models.py @@ -1,7 +1,17 @@ from time import monotonic -import requests -from urllib3.util import SKIP_HEADER, SKIPPABLE_HEADERS +import niquests + +from niquests._compat import HAS_LEGACY_URLLIB3 + +if not HAS_LEGACY_URLLIB3: + from urllib3 import ConnectionInfo + from urllib3.util import SKIP_HEADER, SKIPPABLE_HEADERS +else: + from 
urllib3_future import ConnectionInfo + from urllib3_future.util import SKIP_HEADER, SKIPPABLE_HEADERS + +from kiss_headers.utils import prettify_header_name from enum import Enum, auto from typing import Iterable, Union, NamedTuple @@ -18,6 +28,10 @@ from .utils import split_cookies, parse_content_type_header ELAPSED_TIME_LABEL = 'Elapsed time' +ELAPSED_DNS_RESOLUTION_LABEL = 'Elapsed DNS' +ELAPSED_TLS_HANDSHAKE = 'Elapsed TLS handshake' +ELAPSED_REQUEST_SEND = 'Elapsed emitting request' +ELAPSED_ESTABLISH_CONN = 'Elapsed established connection' class HTTPMessage: @@ -59,7 +73,7 @@ def content_type(self) -> str: class HTTPResponse(HTTPMessage): - """A :class:`requests.models.Response` wrapper.""" + """A :class:`niquests.models.Response` wrapper.""" def iter_body(self, chunk_size=1): return self._orig.iter_content(chunk_size=chunk_size) @@ -70,18 +84,19 @@ def iter_lines(self, chunk_size): @property def headers(self): original = self._orig + http_headers = original.raw.headers if original.raw and hasattr(original.raw, "headers") else original.headers status_line = f'HTTP/{self.version} {original.status_code} {original.reason}' headers = [status_line] headers.extend( - ': '.join(header) - for header in original.headers.items() - if header[0] != 'Set-Cookie' + ': '.join([prettify_header_name(header), value]) + for header, value in http_headers.items() + if header.lower() != 'set-cookie' ) headers.extend( f'Set-Cookie: {cookie}' - for header, value in original.headers.items() + for header, value in http_headers.items() for cookie in split_cookies(value) - if header == 'Set-Cookie' + if header.lower() == 'set-cookie' ) return '\r\n'.join(headers) @@ -89,12 +104,23 @@ def headers(self): def metadata(self) -> str: data = {} time_to_parse_headers = self._orig.elapsed.total_seconds() + # noinspection PyProtectedMember time_since_headers_parsed = monotonic() - self._orig._httpie_headers_parsed_at time_elapsed = time_to_parse_headers + time_since_headers_parsed - # 
data['Headers time'] = str(round(time_to_parse_headers, 5)) + 's' - # data['Body time'] = str(round(time_since_headers_parsed, 5)) + 's' + + if hasattr(self._orig, "conn_info") and self._orig.conn_info: + if self._orig.conn_info.resolution_latency: + data[ELAPSED_DNS_RESOLUTION_LABEL] = str(round(self._orig.conn_info.resolution_latency.total_seconds(), 10)) + 's' + if self._orig.conn_info.established_latency: + data[ELAPSED_ESTABLISH_CONN] = str(round(self._orig.conn_info.established_latency.total_seconds(), 10)) + 's' + if self._orig.conn_info.tls_handshake_latency: + data[ELAPSED_TLS_HANDSHAKE] = str(round(self._orig.conn_info.tls_handshake_latency.total_seconds(), 10)) + 's' + if self._orig.conn_info.request_sent_latency: + data[ELAPSED_REQUEST_SEND] = str(round(self._orig.conn_info.request_sent_latency.total_seconds(), 10)) + 's' + data[ELAPSED_TIME_LABEL] = str(round(time_elapsed, 10)) + 's' + return '\n'.join( f'{key}: {value}' for key, value in data.items() @@ -108,27 +134,11 @@ def version(self) -> str: Assume HTTP/1.1 if version is not available. 
""" - mapping = { - 9: '0.9', - 10: '1.0', - 11: '1.1', - 20: '2.0', - } - fallback = 11 - version = None - try: - raw = self._orig.raw - if getattr(raw, '_original_response', None): - version = raw._original_response.version - else: - version = raw.version - except AttributeError: - pass - return mapping[version or fallback] + return self._orig.conn_info.http_version.value.replace("HTTP/", "").replace(".0", "") if self._orig.conn_info and self._orig.conn_info.http_version else "1.1" class HTTPRequest(HTTPMessage): - """A :class:`requests.models.Request` wrapper.""" + """A :class:`niquests.models.Request` wrapper.""" def iter_body(self, chunk_size): yield self.body @@ -136,14 +146,69 @@ def iter_body(self, chunk_size): def iter_lines(self, chunk_size): yield self.body, b'' + @property + def metadata(self) -> str: + conn_info: ConnectionInfo = self._orig.conn_info + + metadatum = f"Connected to: {conn_info.destination_address[0]} port {conn_info.destination_address[1]}\n" + + if conn_info.certificate_dict: + metadatum += ( + f"Connection secured using: {conn_info.tls_version.name.replace('_', '.')} with {conn_info.cipher.replace('TLS_', '').replace('_', '-')}\n" + f"Server certificate: " + ) + + for entry in conn_info.certificate_dict['subject']: + if len(entry) == 2: + rdns, value = entry + elif len(entry) == 1: + rdns, value = entry[0] + else: + continue + + metadatum += f'{rdns}="{value}"; ' + + if "subjectAltName" in conn_info.certificate_dict: + for entry in conn_info.certificate_dict['subjectAltName']: + if len(entry) == 2: + rdns, value = entry + metadatum += f'{rdns}="{value}"; ' + + metadatum = metadatum[:-2] + "\n" + + metadatum += f'Certificate validity: "{conn_info.certificate_dict["notBefore"]}" to "{conn_info.certificate_dict["notAfter"]}"\n' + + if "issuer" in conn_info.certificate_dict: + metadatum += "Issuer: " + + for entry in conn_info.certificate_dict['issuer']: + if len(entry) == 2: + rdns, value = entry + elif len(entry) == 1: + rdns, value = 
entry[0] + else: + continue + + metadatum += f'{rdns}="{value}"; ' + + metadatum = metadatum[:-2] + "\n" + + if self._orig.ocsp_verified is None: + metadatum += "Revocation status: Unverified\n" + elif self._orig.ocsp_verified is True: + metadatum += "Revocation status: Good\n" + + return metadatum[:-1] + @property def headers(self): url = urlsplit(self._orig.url) - request_line = '{method} {path}{query} HTTP/1.1'.format( + request_line = '{method} {path}{query} {http_version}'.format( method=self._orig.method, path=url.path or '/', - query=f'?{url.query}' if url.query else '' + query=f'?{url.query}' if url.query else '', + http_version=self._orig.conn_info.http_version.value.replace(".0", "") if self._orig.conn_info and self._orig.conn_info.http_version else "HTTP/1.1" ) headers = self._orig.headers.copy() @@ -158,6 +223,7 @@ def headers(self): headers.insert(0, request_line) headers = '\r\n'.join(headers).strip() + return headers @property @@ -169,7 +235,7 @@ def body(self): return body or b'' -RequestsMessage = Union[requests.PreparedRequest, requests.Response] +RequestsMessage = Union[niquests.PreparedRequest, niquests.Response] class RequestsMessageKind(Enum): @@ -178,9 +244,9 @@ class RequestsMessageKind(Enum): def infer_requests_message_kind(message: RequestsMessage) -> RequestsMessageKind: - if isinstance(message, requests.PreparedRequest): + if isinstance(message, niquests.PreparedRequest): return RequestsMessageKind.REQUEST - elif isinstance(message, requests.Response): + elif isinstance(message, niquests.Response): return RequestsMessageKind.RESPONSE else: raise TypeError(f"Unexpected message type: {type(message).__name__}") @@ -190,6 +256,7 @@ def infer_requests_message_kind(message: RequestsMessage) -> RequestsMessageKind RequestsMessageKind.REQUEST: { 'headers': OUT_REQ_HEAD, 'body': OUT_REQ_BODY, + 'meta': OUT_RESP_META }, RequestsMessageKind.RESPONSE: { 'headers': OUT_RESP_HEAD, diff --git a/httpie/output/lexers/metadata.py 
b/httpie/output/lexers/metadata.py index fa68e45762..7f5c77f54d 100644 --- a/httpie/output/lexers/metadata.py +++ b/httpie/output/lexers/metadata.py @@ -1,6 +1,6 @@ import pygments -from httpie.models import ELAPSED_TIME_LABEL +from httpie.models import ELAPSED_TIME_LABEL, ELAPSED_DNS_RESOLUTION_LABEL, ELAPSED_TLS_HANDSHAKE, ELAPSED_REQUEST_SEND, ELAPSED_ESTABLISH_CONN from httpie.output.lexers.common import precise SPEED_TOKENS = { @@ -36,7 +36,7 @@ class MetadataLexer(pygments.lexer.RegexLexer): tokens = { 'root': [ ( - fr'({ELAPSED_TIME_LABEL})( *)(:)( *)(\d+\.\d+)(s)', pygments.lexer.bygroups( + fr'({ELAPSED_TIME_LABEL}|{ELAPSED_DNS_RESOLUTION_LABEL}|{ELAPSED_REQUEST_SEND}|{ELAPSED_TLS_HANDSHAKE}|{ELAPSED_ESTABLISH_CONN})( *)(:)( *)(\d+\.[\de\-]+)(s)', pygments.lexer.bygroups( pygments.token.Name.Decorator, # Name pygments.token.Text, pygments.token.Operator, # Colon diff --git a/httpie/output/streams.py b/httpie/output/streams.py index 811093808a..eb83e4aeea 100644 --- a/httpie/output/streams.py +++ b/httpie/output/streams.py @@ -5,7 +5,7 @@ from .processing import Conversion, Formatting from ..context import Environment from ..encoding import smart_decode, smart_encode, UTF8 -from ..models import HTTPMessage, OutputOptions +from ..models import HTTPMessage, OutputOptions, RequestsMessageKind from ..utils import parse_content_type_header @@ -62,6 +62,10 @@ def iter_body(self) -> Iterable[bytes]: def __iter__(self) -> Iterable[bytes]: """Return an iterator over `self.msg`.""" + if self.output_options.meta and self.output_options.kind is RequestsMessageKind.REQUEST: + yield self.get_metadata() + yield b'\n\n' + if self.output_options.headers: yield self.get_headers() yield b'\r\n\r\n' @@ -77,12 +81,11 @@ def __iter__(self) -> Iterable[bytes]: yield b'\n' yield e.message - if self.output_options.meta: + if self.output_options.meta and self.output_options.kind is RequestsMessageKind.RESPONSE: if self.output_options.body: yield b'\n\n' yield self.get_metadata() - 
yield b'\n\n' class RawStream(BaseStream): diff --git a/httpie/output/writer.py b/httpie/output/writer.py index 4a2949bce2..4e4071cd83 100644 --- a/httpie/output/writer.py +++ b/httpie/output/writer.py @@ -1,5 +1,5 @@ import errno -import requests +import niquests from typing import Any, Dict, IO, Optional, TextIO, Tuple, Type, Union from ..cli.dicts import HTTPHeadersDict @@ -105,7 +105,7 @@ def write_raw_data( headers: Optional[HTTPHeadersDict] = None, stream_kwargs: Optional[Dict[str, Any]] = None ): - msg = requests.PreparedRequest() + msg = niquests.PreparedRequest() msg.is_body_upload_chunk = True msg.body = data msg.headers = headers or HTTPHeadersDict() diff --git a/httpie/plugins/base.py b/httpie/plugins/base.py index 1b44e5aec5..4e26242bc7 100644 --- a/httpie/plugins/base.py +++ b/httpie/plugins/base.py @@ -63,7 +63,7 @@ def get_auth(self, username: str = None, password: str = None): Use `self.raw_auth` to access the raw value passed through `--auth, -a`. - Return a ``requests.auth.AuthBase`` subclass instance. + Return a ``niquests.auth.AuthBase`` subclass instance. """ raise NotImplementedError() @@ -73,7 +73,7 @@ class TransportPlugin(BasePlugin): """ Requests transport adapter docs: - + See httpie-unixsocket for an example transport plugin: @@ -86,7 +86,7 @@ class TransportPlugin(BasePlugin): def get_adapter(self): """ - Return a ``requests.adapters.BaseAdapter`` subclass instance to be + Return a ``niquests.adapters.BaseAdapter`` subclass instance to be mounted to ``self.prefix``. 
""" diff --git a/httpie/plugins/builtin.py b/httpie/plugins/builtin.py index 860aebf7f9..ad79d0a53f 100644 --- a/httpie/plugins/builtin.py +++ b/httpie/plugins/builtin.py @@ -1,6 +1,6 @@ from base64 import b64encode -import requests.auth +import niquests.auth from .base import AuthPlugin @@ -10,12 +10,12 @@ class BuiltinAuthPlugin(AuthPlugin): package_name = '(builtin)' -class HTTPBasicAuth(requests.auth.HTTPBasicAuth): +class HTTPBasicAuth(niquests.auth.HTTPBasicAuth): def __call__( self, - request: requests.PreparedRequest - ) -> requests.PreparedRequest: + request: niquests.PreparedRequest + ) -> niquests.PreparedRequest: """ Override username/password serialization to allow unicode. @@ -34,12 +34,12 @@ def make_header(username: str, password: str) -> str: return f'Basic {token}' -class HTTPBearerAuth(requests.auth.AuthBase): +class HTTPBearerAuth(niquests.auth.AuthBase): def __init__(self, token: str) -> None: self.token = token - def __call__(self, request: requests.PreparedRequest) -> requests.PreparedRequest: + def __call__(self, request: niquests.PreparedRequest) -> niquests.PreparedRequest: request.headers['Authorization'] = f'Bearer {self.token}' return request @@ -64,8 +64,8 @@ def get_auth( self, username: str, password: str - ) -> requests.auth.HTTPDigestAuth: - return requests.auth.HTTPDigestAuth(username, password) + ) -> niquests.auth.HTTPDigestAuth: + return niquests.auth.HTTPDigestAuth(username, password) class BearerAuthPlugin(BuiltinAuthPlugin): @@ -75,5 +75,5 @@ class BearerAuthPlugin(BuiltinAuthPlugin): auth_parse = False # noinspection PyMethodOverriding - def get_auth(self, **kwargs) -> requests.auth.HTTPDigestAuth: + def get_auth(self, **kwargs) -> niquests.auth.HTTPDigestAuth: return HTTPBearerAuth(self.raw_auth) diff --git a/httpie/sessions.py b/httpie/sessions.py index 99dcdba92e..5351959a9b 100644 --- a/httpie/sessions.py +++ b/httpie/sessions.py @@ -10,8 +10,8 @@ from pathlib import Path from typing import Any, Dict, List, Optional, 
Union -from requests.auth import AuthBase -from requests.cookies import RequestsCookieJar, remove_cookie_by_name +from niquests.auth import AuthBase +from niquests.cookies import RequestsCookieJar, remove_cookie_by_name from .context import Environment, LogLevel from .cookies import HTTPieCookiePolicy diff --git a/httpie/ssl_.py b/httpie/ssl_.py index af5ca548db..fa6dbfa4e4 100644 --- a/httpie/ssl_.py +++ b/httpie/ssl_.py @@ -1,6 +1,10 @@ import ssl -from typing import NamedTuple, Optional +from typing import NamedTuple, Optional, Tuple, MutableMapping +import json +import os.path +from os import makedirs +from httpie.config import DEFAULT_CONFIG_DIR from httpie.adapters import HTTPAdapter # noinspection PyPackageRequirements from urllib3.util.ssl_ import ( @@ -10,10 +14,6 @@ SSL_VERSION_ARG_MAPPING = { - 'ssl2.3': 'PROTOCOL_SSLv23', - 'ssl3': 'PROTOCOL_SSLv3', - 'tls1': 'PROTOCOL_TLSv1', - 'tls1.1': 'PROTOCOL_TLSv1_1', 'tls1.2': 'PROTOCOL_TLSv1_2', 'tls1.3': 'PROTOCOL_TLSv1_3', } @@ -24,6 +24,50 @@ } +class QuicCapabilityCache( + MutableMapping[Tuple[str, int], Optional[Tuple[str, int]]] +): + + def __init__(self): + self._cache = {} + if not os.path.exists(DEFAULT_CONFIG_DIR): + makedirs(DEFAULT_CONFIG_DIR, exist_ok=True) + if os.path.exists(os.path.join(DEFAULT_CONFIG_DIR, "quic.json")): + with open(os.path.join(DEFAULT_CONFIG_DIR, "quic.json"), "r") as fp: + self._cache = json.load(fp) + + def save(self): + with open(os.path.join(DEFAULT_CONFIG_DIR, "quic.json"), "w+") as fp: + json.dump(self._cache, fp) + + def __contains__(self, item: Tuple[str, int]): + return f"QUIC_{item[0]}_{item[1]}" in self._cache + + def __setitem__(self, key: Tuple[str, int], value: Optional[Tuple[str, int]]): + self._cache[f"QUIC_{key[0]}_{key[1]}"] = f"{value[0]}:{value[1]}" + self.save() + + def __getitem__(self, item: Tuple[str, int]): + key: str = f"QUIC_{item[0]}_{item[1]}" + if key in self._cache: + host, port = self._cache[key].split(":") + return host, int(port) + + return 
None + + def __delitem__(self, key: Tuple[str, int]): + key: str = f"QUIC_{key[0]}_{key[1]}" + if key in self._cache: + del self._cache[key] + self.save() + + def __len__(self): + return len(self._cache) + + def __iter__(self): + yield from self._cache.items() + + class HTTPieCertificate(NamedTuple): cert_file: Optional[str] = None key_file: Optional[str] = None @@ -32,7 +76,9 @@ class HTTPieCertificate(NamedTuple): def to_raw_cert(self): """Synthesize a requests-compatible (2-item tuple of cert and key file) object from HTTPie's internal representation of a certificate.""" - return (self.cert_file, self.key_file) + if self.key_password: + return self.cert_file, self.key_file, self.key_password + return self.cert_file, self.key_file class HTTPieHTTPSAdapter(HTTPAdapter): @@ -48,6 +94,7 @@ def __init__( ssl_version=ssl_version, ciphers=ciphers, ) + kwargs.setdefault("quic_cache_layer", QuicCapabilityCache()) super().__init__(**kwargs) def init_poolmanager(self, *args, **kwargs): @@ -60,7 +107,6 @@ def proxy_manager_for(self, *args, **kwargs): def cert_verify(self, conn, url, verify, cert): if isinstance(cert, HTTPieCertificate): - conn.key_password = cert.key_password cert = cert.to_raw_cert() return super().cert_verify(conn, url, verify, cert) diff --git a/httpie/uploads.py b/httpie/uploads.py index 4a993b3a25..3de4fd3716 100644 --- a/httpie/uploads.py +++ b/httpie/uploads.py @@ -3,18 +3,16 @@ import zlib import functools import threading -from typing import Any, Callable, IO, Iterable, Optional, Tuple, Union, TYPE_CHECKING +from typing import Any, Callable, IO, Iterable, Optional, Tuple, Union from urllib.parse import urlencode -import requests -from requests.utils import super_len - -if TYPE_CHECKING: - from requests_toolbelt import MultipartEncoder +import niquests +from niquests.utils import super_len from .context import Environment from .cli.dicts import MultipartRequestDataDict, RequestDataDict from .compat import is_windows +from .internal.encoder import 
MultipartEncoder class ChunkedStream: @@ -172,7 +170,6 @@ def _prepare_file_for_upload( ) if chunked: - from requests_toolbelt import MultipartEncoder if isinstance(file, MultipartEncoder): return ChunkedMultipartUploadStream( encoder=file, @@ -232,7 +229,6 @@ def get_multipart_data_and_content_type( boundary: str = None, content_type: str = None, ) -> Tuple['MultipartEncoder', str]: - from requests_toolbelt import MultipartEncoder encoder = MultipartEncoder( fields=data.items(), @@ -250,7 +246,7 @@ def get_multipart_data_and_content_type( def compress_request( - request: requests.PreparedRequest, + request: niquests.PreparedRequest, always: bool, ): deflater = zlib.compressobj() diff --git a/httpie/utils.py b/httpie/utils.py index 4735b2be5d..33d8158568 100644 --- a/httpie/utils.py +++ b/httpie/utils.py @@ -16,7 +16,7 @@ from urllib.parse import urlsplit from typing import Any, List, Optional, Tuple, Generator, Callable, Iterable, IO, TypeVar -import requests.auth +import niquests.auth RE_COOKIE_SPLIT = re.compile(r', (?=[^ ;]+=)') Item = Tuple[str, Any] @@ -121,7 +121,7 @@ def humanize_bytes(n, precision=2): return f'{n / factor:.{precision}f} {suffix}' -class ExplicitNullAuth(requests.auth.AuthBase): +class ExplicitNullAuth(niquests.auth.AuthBase): """Forces requests to ignore the ``.netrc``. 
""" @@ -201,7 +201,7 @@ def _max_age_to_expires(cookies, now): def parse_content_type_header(header): - """Borrowed from requests.""" + """Borrowed from niquests.""" tokens = header.split(';') content_type, params = tokens[0].strip(), tokens[1:] params_dict = {} diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index ced65979b1..0000000000 --- a/pytest.ini +++ /dev/null @@ -1,7 +0,0 @@ -[pytest] -markers = - # If you want to run tests without a full HTTPie installation - # we advise you to disable the markers below, e.g: - # pytest -m 'not requires_installation and not requires_external_processes' - requires_installation - requires_external_processes diff --git a/setup.cfg b/setup.cfg index 86c41ff308..67b12ad0f2 100644 --- a/setup.cfg +++ b/setup.cfg @@ -11,7 +11,16 @@ testpaths = httpie tests norecursedirs = tests/fixtures addopts = --tb=native --doctest-modules --verbose xfail_strict = True - +markers = + # If you want to run tests without a full HTTPie installation + # we advise you to disable the markers below, e.g: + # pytest -m 'not requires_installation and not requires_external_processes' + requires_installation + requires_external_processes +filterwarnings = + default + ignore:Passing msg=\.\. 
is deprecated:DeprecationWarning + ignore:Unverified HTTPS request is being made to host:urllib3.exceptions.InsecureRequestWarning [flake8] # diff --git a/setup.py b/setup.py index 93bdb8f957..7b833d272e 100644 --- a/setup.py +++ b/setup.py @@ -14,7 +14,8 @@ 'pytest-lazy-fixture>=0.0.6', 'responses', 'pytest-mock', - 'werkzeug<2.1.0' + 'werkzeug<2.1.0', + 'flaky', ] dev_require = [ *tests_require, @@ -23,7 +24,6 @@ 'flake8-deprecated', 'flake8-mutable', 'flake8-tuple', - 'pyopenssl', 'pytest-cov', 'pyyaml', 'twine', @@ -34,13 +34,11 @@ 'pip', 'charset_normalizer>=2.0.0', 'defusedxml>=0.6.0', - 'requests[socks]>=2.22.0', + 'niquests[socks]>=3.4.0,<4', 'Pygments>=2.5.2', - 'requests-toolbelt>=0.9.1', - 'multidict>=4.7.0', 'setuptools', 'importlib-metadata>=1.4.0; python_version < "3.8"', - 'rich>=9.10.0' + 'rich>=9.10.0', ] install_requires_win_only = [ 'colorama>=0.2.4', diff --git a/tests/conftest.py b/tests/conftest.py index 7ca172a867..fa8642edd9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -7,7 +7,6 @@ HTTPBIN_WITH_CHUNKED_SUPPORT_DOMAIN, HTTPBIN_WITH_CHUNKED_SUPPORT, REMOTE_HTTPBIN_DOMAIN, - IS_PYOPENSSL, mock_env ) from .utils.plugins_cli import ( # noqa @@ -20,6 +19,17 @@ ) from .utils.http_server import http_server, localhost_http_server # noqa +from sys import modules + +import niquests +import urllib3 + +# the mock utility 'response' only works with 'requests' +modules["requests"] = niquests +modules["requests.adapters"] = niquests.adapters +modules["requests.exceptions"] = niquests.exceptions +modules["requests.packages.urllib3"] = urllib3 + @pytest.fixture(scope='function', autouse=True) def httpbin_add_ca_bundle(monkeypatch): @@ -73,19 +83,3 @@ def remote_httpbin(_remote_httpbin_available): if _remote_httpbin_available: return 'http://' + REMOTE_HTTPBIN_DOMAIN pytest.skip(f'{REMOTE_HTTPBIN_DOMAIN} not resolvable') - - -@pytest.fixture(autouse=True, scope='session') -def pyopenssl_inject(): - """ - Injects `pyOpenSSL` module to make sure 
`requests` will use it. - - """ - if IS_PYOPENSSL: - try: - import urllib3.contrib.pyopenssl - urllib3.contrib.pyopenssl.inject_into_urllib3() - except ModuleNotFoundError: - pytest.fail('Missing "pyopenssl" module.') - - yield diff --git a/tests/test_auth.py b/tests/test_auth.py index 696fb22826..3f9b742cd7 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -93,8 +93,8 @@ def test_missing_auth(httpbin): def test_netrc(httpbin_both): # This one gets handled by requests (no --auth, --auth-type present), - # that’s why we patch inside `requests.sessions`. - with mock.patch('requests.sessions.get_netrc_auth') as get_netrc_auth: + # that’s why we patch inside `niquests.sessions`. + with mock.patch('niquests.sessions.get_netrc_auth') as get_netrc_auth: get_netrc_auth.return_value = ('httpie', 'password') r = http(httpbin_both + '/basic-auth/httpie/password') assert get_netrc_auth.call_count == 1 @@ -106,7 +106,7 @@ def test_ignore_netrc(httpbin_both): get_netrc_auth.return_value = ('httpie', 'password') r = http('--ignore-netrc', httpbin_both + '/basic-auth/httpie/password') assert get_netrc_auth.call_count == 0 - assert 'HTTP/1.1 401 UNAUTHORIZED' in r + assert 'HTTP/1.1 401 Unauthorized' in r def test_ignore_netrc_together_with_auth(): diff --git a/tests/test_binary.py b/tests/test_binary.py index ca51aa1686..9e5747ad22 100644 --- a/tests/test_binary.py +++ b/tests/test_binary.py @@ -1,5 +1,5 @@ """Tests for dealing with binary request and response data.""" -import requests +import niquests from .fixtures import BIN_FILE_PATH, BIN_FILE_CONTENT, BIN_FILE_PATH_ARG from httpie.output.streams import BINARY_SUPPRESSED_NOTICE @@ -46,5 +46,5 @@ def test_binary_included_and_correct_when_suitable(self, httpbin): env = MockEnvironment(stdin_isatty=True, stdout_isatty=False) url = httpbin + '/bytes/1024?seed=1' r = http('GET', url, env=env) - expected = requests.get(url).content + expected = niquests.get(url).content assert r == expected diff --git a/tests/test_cli.py 
b/tests/test_cli.py index 6504c8a980..865eeb41c0 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -2,7 +2,7 @@ import argparse import pytest -from requests.exceptions import InvalidSchema +from niquests.exceptions import InvalidSchema import httpie.cli.argparser from httpie.cli import constants diff --git a/tests/test_cookie.py b/tests/test_cookie.py index c2a9746509..9499119fa9 100644 --- a/tests/test_cookie.py +++ b/tests/test_cookie.py @@ -16,9 +16,19 @@ def setup_mock_server(self, handler): # Start running mock server in a separate thread. # Daemon threads automatically shut down when the main process exits. self.mock_server_thread = Thread(target=self.mock_server.serve_forever) - self.mock_server_thread.setDaemon(True) + self.mock_server_thread.daemon = True self.mock_server_thread.start() + def shutdown_mock_server(self): + if self.mock_server is None: + return + self.mock_server.socket.close() + self.mock_server.shutdown() + self.mock_server_thread.join() + + self.mock_server = None + self.mock_server_port = None + def test_cookie_parser(self): """Not directly testing HTTPie but `requests` to ensure their cookies handling is still as expected by `get_expired_cookies()`. 
@@ -28,7 +38,7 @@ class MockServerRequestHandler(BaseHTTPRequestHandler): """"HTTP request handler.""" def do_GET(self): """Handle GET requests.""" # Craft multiple cookies cookie = SimpleCookie() cookie['hello'] = 'world' @@ -45,3 +55,4 @@ def do_GET(self): response = http(f'http://localhost:{self.mock_server_port}/') assert 'Set-Cookie: hello=world; Path=/' in response assert 'Set-Cookie: oatmeal_raisin="is the best"; Path=/' in response + self.shutdown_mock_server() diff --git a/tests/test_downloads.py b/tests/test_downloads.py index d6e98867bc..180e702d43 100644 --- a/tests/test_downloads.py +++ b/tests/test_downloads.py @@ -1,12 +1,12 @@ import os import tempfile import time -import requests +import niquests from unittest import mock from urllib.request import urlopen import pytest -from requests.structures import CaseInsensitiveDict +from niquests.structures import CaseInsensitiveDict from httpie.downloads import ( parse_content_range, filename_from_content_disposition, filename_from_url, @@ -15,7 +15,7 @@ from .utils import http, MockEnvironment -class Response(requests.Response): +class Response(niquests.Response): # noinspection PyDefaultArgument def __init__(self, url, headers={}, status_code=200): self.url = url diff --git a/tests/test_encoding.py b/tests/test_encoding.py index e9f50dc9bb..b16de3c846 100644 --- a/tests/test_encoding.py +++ b/tests/test_encoding.py @@ -168,7 +168,7 @@ def test_terminal_output_response_content_type_charset_with_stream(charset, text method=responses.GET, url=DUMMY_URL, body=f'\n{text}'.encode(charset), - stream=True, + # stream=True, content_type=f'text/xml; charset={charset.upper()}', ) r = http('--pretty', pretty, '--stream', DUMMY_URL) diff --git a/tests/test_errors.py b/tests/test_errors.py index fca48fff15..fb9f030dcf 100644 --- a/tests/test_errors.py +++ b/tests/test_errors.py @@ -3,7 +3,7 @@ from unittest import mock from pytest import raises from requests import Request -from 
requests.exceptions import ConnectionError +from niquests.exceptions import ConnectionError from httpie.status import ExitStatus from .utils import HTTP_OK, http diff --git a/tests/test_exit_status.py b/tests/test_exit_status.py index 4438d3485c..97f071da54 100644 --- a/tests/test_exit_status.py +++ b/tests/test_exit_status.py @@ -26,7 +26,7 @@ def test_ok_response_exits_0(httpbin): def test_error_response_exits_0_without_check_status(httpbin): r = http('GET', httpbin.url + '/status/500') - assert '500 INTERNAL SERVER ERROR' in r + assert '500 Internal Server Error' in r assert r.exit_status == ExitStatus.SUCCESS assert not r.stderr @@ -44,7 +44,7 @@ def test_3xx_check_status_exits_3_and_stderr_when_stdout_redirected( r = http('--check-status', '--headers', 'GET', httpbin.url + '/status/301', env=env, tolerate_error_exit_status=True) - assert '301 MOVED PERMANENTLY' in r + assert '301 Moved Permanently' in r assert r.exit_status == ExitStatus.ERROR_HTTP_3XX assert '301 moved permanently' in r.stderr.lower() @@ -61,7 +61,7 @@ def test_3xx_check_status_redirects_allowed_exits_0(httpbin): def test_4xx_check_status_exits_4(httpbin): r = http('--check-status', 'GET', httpbin.url + '/status/401', tolerate_error_exit_status=True) - assert '401 UNAUTHORIZED' in r + assert '401 Unauthorized' in r assert r.exit_status == ExitStatus.ERROR_HTTP_4XX # Also stderr should be empty since stdout isn't redirected. 
assert not r.stderr @@ -70,5 +70,5 @@ def test_4xx_check_status_exits_4(httpbin): def test_5xx_check_status_exits_5(httpbin): r = http('--check-status', 'GET', httpbin.url + '/status/500', tolerate_error_exit_status=True) - assert '500 INTERNAL SERVER ERROR' in r + assert '500 Internal Server Error' in r assert r.exit_status == ExitStatus.ERROR_HTTP_5XX diff --git a/tests/test_json.py b/tests/test_json.py index e758ebe7f4..bf1b3857e9 100644 --- a/tests/test_json.py +++ b/tests/test_json.py @@ -338,13 +338,14 @@ def test_complex_json_arguments_with_non_json(httpbin, request_type, value): [ r'foo\[key\]:=1', r'bar\[1\]:=2', - r'baz\[\]:3', + r'baz\[\]:=3', r'quux[key\[escape\]]:=4', r'quux[key 2][\\][\\\\][\\\[\]\\\]\\\[\n\\]:=5', ], { 'foo[key]': 1, 'bar[1]': 2, + 'baz[]': 3, 'quux': { 'key[escape]': 4, 'key 2': {'\\': {'\\\\': {'\\[]\\]\\[\\n\\': 5}}}, diff --git a/tests/test_output.py b/tests/test_output.py index f85f38fa72..ac07bba7fc 100644 --- a/tests/test_output.py +++ b/tests/test_output.py @@ -9,7 +9,7 @@ from urllib.request import urlopen import pytest -import requests +import niquests import responses from httpie.cli.argtypes import ( @@ -97,18 +97,22 @@ def test_quiet_quiet_with_check_status_non_zero_pipe(self, httpbin): (['-q'], 1), (['-qq'], 0), ]) - # Might fail on Windows due to interference from other warnings. 
- @pytest.mark.xfail def test_quiet_on_python_warnings(self, test_patch, httpbin, flags, expected_warnings): def warn_and_run(*args, **kwargs): warnings.warn('warning!!') return ExitStatus.SUCCESS test_patch.side_effect = warn_and_run - with pytest.warns(None) as record: - http(*flags, httpbin + '/get') - assert len(record) == expected_warnings + if expected_warnings == 0: + with warnings.catch_warnings(): + warnings.simplefilter("error") + http(*flags, httpbin + '/get') + else: + with pytest.warns(Warning) as record: + http(*flags, httpbin + '/get') + + assert len(record) >= expected_warnings def test_double_quiet_on_error(self, httpbin): r = http( @@ -116,7 +120,7 @@ def test_double_quiet_on_error(self, httpbin): tolerate_error_exit_status=True, ) assert not r - assert 'Couldn’t resolve the given hostname' in r.stderr + assert 'Couldn’t resolve the given hostname' in r.stderr or 'Name or service not known' in r.stderr @pytest.mark.parametrize('quiet_flags', QUIET_SCENARIOS) @mock.patch('httpie.cli.argtypes.AuthCredentials._getpass', @@ -160,7 +164,7 @@ def test_quiet_with_output_redirection(self, tmp_path, httpbin, quiet_flags, wit output_path = Path('output.txt') env = MockEnvironment() orig_cwd = os.getcwd() - output = requests.get(url).text + output = niquests.get(url).text extra_args = ['--download'] if with_download else [] os.chdir(tmp_path) try: @@ -214,7 +218,7 @@ def test_verbose_json(self, httpbin): def test_verbose_implies_all(self, httpbin): r = http('--verbose', '--follow', httpbin + '/redirect/1') assert 'GET /redirect/1 HTTP/1.1' in r - assert 'HTTP/1.1 302 FOUND' in r + assert 'HTTP/1.1 302 Found' in r assert 'GET /get HTTP/1.1' in r assert HTTP_OK in r @@ -281,8 +285,14 @@ def test_ensure_status_code_is_shown_on_all_themes(http_server, style, msg): http_server + '/status/msg', '--raw', msg, env=env) + # Custom reason phrase are most likely to disappear, + # due to HTTP/2+ protocols. 
urllib3.future replace them anyway in HTTP/1.1 + # for uniformity across protocols. + if 'CUSTOM' in msg: + msg = ' OK' + # Trailing space is stripped away. - assert 'HTTP/1.0 200' + msg.rstrip() in strip_colors(r) + assert 'HTTP/1.1 200' + msg.rstrip() in strip_colors(r) class TestPrettyOptions: diff --git a/tests/test_redirects.py b/tests/test_redirects.py index a761fa2571..692bb8ef68 100644 --- a/tests/test_redirects.py +++ b/tests/test_redirects.py @@ -15,7 +15,7 @@ def test_follow_all_redirects_shown(httpbin): r = http('--follow', '--all', httpbin.url + '/redirect/2') assert r.count('HTTP/1.1') == 3 - assert r.count('HTTP/1.1 302 FOUND', 2) + assert r.count('HTTP/1.1 302 Found', 2) assert HTTP_OK in r diff --git a/tests/test_regressions.py b/tests/test_regressions.py index 07d60a583b..7d7f3e66d5 100644 --- a/tests/test_regressions.py +++ b/tests/test_regressions.py @@ -30,7 +30,6 @@ def test_output_devnull(httpbin): def test_verbose_redirected_stdout_separator(httpbin): """ - """ r = http( diff --git a/tests/test_ssl.py b/tests/test_ssl.py index 6fb983785a..6a6ba5c86f 100644 --- a/tests/test_ssl.py +++ b/tests/test_ssl.py @@ -2,7 +2,7 @@ import pytest import pytest_httpbin.certs -import requests.exceptions +import niquests.exceptions import urllib3 from unittest import mock @@ -10,23 +10,11 @@ from httpie.ssl_ import AVAILABLE_SSL_VERSION_ARG_MAPPING, DEFAULT_SSL_CIPHERS_STRING from httpie.status import ExitStatus -from .utils import HTTP_OK, TESTS_ROOT, IS_PYOPENSSL, http +from .utils import HTTP_OK, TESTS_ROOT, http - -try: - # Handle OpenSSL errors, if installed. 
- # See - # noinspection PyUnresolvedReferences - import OpenSSL.SSL - ssl_errors = ( - requests.exceptions.SSLError, - OpenSSL.SSL.Error, - ValueError, # TODO: Remove with OSS-65 - ) -except ImportError: - ssl_errors = ( - requests.exceptions.SSLError, - ) +ssl_errors = ( + niquests.exceptions.SSLError, +) CERTS_ROOT = TESTS_ROOT / 'client_certs' CLIENT_CERT = str(CERTS_ROOT / 'client.crt') @@ -59,10 +47,7 @@ def test_ssl_version(httpbin_secure, ssl_version): ) assert HTTP_OK in r except ssl_errors as e: - if ssl_version == 'ssl3': - # pytest-httpbin doesn't support ssl3 - pass - elif e.__context__ is not None: # Check if root cause was an unsupported TLS version + if e.__context__ is not None: # Check if root cause was an unsupported TLS version root = e.__context__ while root.__context__ is not None: root = root.__context__ @@ -151,7 +136,6 @@ def test_ciphers(httpbin_secure): assert HTTP_OK in r -@pytest.mark.skipif(IS_PYOPENSSL, reason='pyOpenSSL uses a different message format.') def test_ciphers_none_can_be_selected(httpbin_secure): r = http( httpbin_secure.url + '/get', @@ -168,15 +152,6 @@ def test_ciphers_none_can_be_selected(httpbin_secure): assert 'cipher' in r.stderr -def test_pyopenssl_presence(): - if not IS_PYOPENSSL: - assert not urllib3.util.ssl_.IS_PYOPENSSL - assert not urllib3.util.IS_PYOPENSSL - else: - assert urllib3.util.ssl_.IS_PYOPENSSL - assert urllib3.util.IS_PYOPENSSL - - @mock.patch('httpie.cli.argtypes.SSLCredentials._prompt_password', new=lambda self, prompt: PWD_CLIENT_PASS) def test_password_protected_cert_prompt(httpbin_secure): diff --git a/tests/test_stream.py b/tests/test_stream.py index 45b8e4dd32..c3b50a758d 100644 --- a/tests/test_stream.py +++ b/tests/test_stream.py @@ -72,7 +72,7 @@ def test_pretty_options_with_and_without_stream_with_converter(pretty, stream): body = b'\x00{"foo":42,\n"bar":"baz"}' responses.add(responses.GET, DUMMY_URL, body=body, - stream=True, content_type='json/bytes') + content_type='json/bytes') 
args = ['--pretty=' + pretty, 'GET', DUMMY_URL] if stream: diff --git a/tests/test_tokens.py b/tests/test_tokens.py index 655445ce49..7001510074 100644 --- a/tests/test_tokens.py +++ b/tests/test_tokens.py @@ -92,10 +92,10 @@ def test_redirected_headers_multipart_no_separator(): def test_verbose_chunked(httpbin_with_chunked_support): - r = http('--verbose', '--chunked', httpbin_with_chunked_support + '/post', 'hello=world') + r = http('-vv', '--chunked', httpbin_with_chunked_support + '/post', 'hello=world') assert HTTP_OK in r assert 'Transfer-Encoding: chunked' in r - assert_output_matches(r, ExpectSequence.TERMINAL_EXCHANGE) + assert_output_matches(r, ExpectSequence.TERMINAL_EXCHANGE_META) def test_request_headers_response_body(httpbin): @@ -115,4 +115,4 @@ def test_request_double_verbose(httpbin): def test_request_meta(httpbin): r = http('--meta', httpbin + '/get') - assert_output_matches(r, [Expect.RESPONSE_META]) + assert_output_matches(r, [Expect.REQUEST_META, Expect.RESPONSE_META]) diff --git a/tests/test_transport_plugin.py b/tests/test_transport_plugin.py index b71592df8d..5f04ec6203 100644 --- a/tests/test_transport_plugin.py +++ b/tests/test_transport_plugin.py @@ -1,8 +1,8 @@ from io import BytesIO -from requests.adapters import BaseAdapter -from requests.models import Response -from requests.utils import get_encoding_from_headers +from niquests.adapters import BaseAdapter +from niquests.models import Response +from niquests.utils import get_encoding_from_headers from httpie.plugins import TransportPlugin from httpie.plugins.registry import plugin_manager diff --git a/tests/test_update_warnings.py b/tests/test_update_warnings.py index b2c24c36de..36e2596355 100644 --- a/tests/test_update_warnings.py +++ b/tests/test_update_warnings.py @@ -213,7 +213,7 @@ def fetch_update_mock(mocker): @pytest.fixture def static_fetch_data(mocker): - mock_get = mocker.patch('requests.get') + mock_get = mocker.patch('niquests.get') mock_get.return_value.status_code = 200 
mock_get.return_value.json.return_value = { BUILD_CHANNEL: HIGHEST_VERSION, diff --git a/tests/test_uploads.py b/tests/test_uploads.py index d0156063d4..1f6c79465e 100644 --- a/tests/test_uploads.py +++ b/tests/test_uploads.py @@ -4,6 +4,9 @@ import subprocess import time import contextlib + +from flaky import flaky + import httpie.__main__ as main import pytest @@ -125,6 +128,7 @@ def stdin_processes(httpbin, *args, warn_threshold=0.1): @pytest.mark.parametrize("wait", (True, False)) @pytest.mark.requires_external_processes @pytest.mark.skipif(is_windows, reason="Windows doesn't support select() calls into files") +@flaky(max_runs=6) def test_reading_from_stdin(httpbin, wait): with stdin_processes(httpbin) as (process_1, process_2): process_1.communicate(timeout=0.1, input=b"bleh") @@ -143,6 +147,7 @@ def test_reading_from_stdin(httpbin, wait): @pytest.mark.requires_external_processes @pytest.mark.skipif(is_windows, reason="Windows doesn't support select() calls into files") +@flaky(max_runs=6) def test_stdin_read_warning(httpbin): with stdin_processes(httpbin) as (process_1, process_2): # Wait before sending any data @@ -154,11 +159,12 @@ def test_stdin_read_warning(httpbin): except subprocess.TimeoutExpired: errs = b'' - assert b'> warning: no stdin data read in 0.1s' in errs + assert b'> warning: no stdin data read in' in errs @pytest.mark.requires_external_processes @pytest.mark.skipif(is_windows, reason="Windows doesn't support select() calls into files") +@flaky(max_runs=6) def test_stdin_read_warning_with_quiet(httpbin): with stdin_processes(httpbin, "-qq") as (process_1, process_2): # Wait before sending any data @@ -175,6 +181,7 @@ def test_stdin_read_warning_with_quiet(httpbin): @pytest.mark.requires_external_processes @pytest.mark.skipif(is_windows, reason="Windows doesn't support select() calls into files") +@flaky(max_runs=6) def test_stdin_read_warning_blocking_exit(httpbin): # Use a very large number. 
with stdin_processes(httpbin, warn_threshold=999) as (process_1, process_2): @@ -284,7 +291,7 @@ def test_multipart_custom_content_type_boundary_added(self, httpbin): assert r.count(boundary) == 4 def test_multipart_custom_content_type_boundary_preserved(self, httpbin): # Allow explicit nonsense requests. boundary_in_header = 'HEADER_BOUNDARY' boundary_in_body = 'BODY_BOUNDARY' r = http( diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py index ada0905ff2..072c223834 100644 --- a/tests/utils/__init__.py +++ b/tests/utils/__init__.py @@ -1,17 +1,16 @@ """Utilities for HTTPie test suite.""" import re import shlex -import os import sys import time import json import tempfile -import warnings import pytest from contextlib import suppress from io import BytesIO from pathlib import Path from typing import Any, Optional, Union, List, Iterable +from shutil import rmtree import httpie.core as core import httpie.manager.__main__ as manager @@ -31,8 +30,6 @@ HTTPBIN_WITH_CHUNKED_SUPPORT_DOMAIN = 'pie.dev' HTTPBIN_WITH_CHUNKED_SUPPORT = 'http://' + HTTPBIN_WITH_CHUNKED_SUPPORT_DOMAIN -IS_PYOPENSSL = os.getenv('HTTPIE_TEST_WITH_PYOPENSSL', '0') == '1' - TESTS_ROOT = Path(__file__).parent.parent CRLF = '\r\n' COLOR = '\x1b[' @@ -139,7 +136,7 @@ def __init__(self, create_temp_config_dir=True, **kwargs): if 'stdout' not in kwargs: kwargs['stdout'] = tempfile.NamedTemporaryFile( mode='w+t', - prefix='httpie_stderr', + prefix='httpie_stdout', newline='', encoding=UTF8, ) @@ -170,10 +167,15 @@ def cleanup(self): self.devnull.close() self.stdout.close() self.stderr.close() - warnings.resetwarnings() + if self._orig_stdout and self._orig_stdout != self.stdout: + self._orig_stdout.close() + if self._orig_stderr and self.stderr != self._orig_stderr: + self._orig_stderr.close() + self.devnull.close() + # calling resetwarnings() here breaks pytest's filterwarnings configuration + # warnings.resetwarnings() if self._delete_config_dir: assert self._temp_dir in 
self.config_dir.parents - from shutil import rmtree rmtree(self.config_dir, ignore_errors=True) def __del__(self): @@ -210,7 +212,7 @@ class BaseCLIResponse: complete_args: List[str] = [] @property - def command(self): + def command(self): # noqa: F811 cmd = ' '.join(shlex.quote(arg) for arg in ['http', *self.args]) # pytest-httpbin to real httpbin. return re.sub(r'127\.0\.0\.1:\d+', 'httpbin.org', cmd) diff --git a/tests/utils/http_server.py b/tests/utils/http_server.py index 86cc069c57..728946f555 100644 --- a/tests/utils/http_server.py +++ b/tests/utils/http_server.py @@ -135,7 +135,8 @@ def _http_server(): thread = threading.Thread(target=server.serve_forever) thread.start() yield server - server.shutdown() + server.socket.close() + server.shutdown() # shutdown seems only to stop the thread, not closing the socket. thread.join() diff --git a/tests/utils/matching/parsing.py b/tests/utils/matching/parsing.py index e502d76bc8..b574aa2395 100644 --- a/tests/utils/matching/parsing.py +++ b/tests/utils/matching/parsing.py @@ -8,6 +8,7 @@ SEPARATOR_RE = re.compile(f'^{MESSAGE_SEPARATOR}') KEY_VALUE_RE = re.compile(r'[\n]*((.*?):(.+)[\n]?)+[\n]*') +KEY_VALUE_RE_NO_LF = re.compile(r'((.*?):(.+)(\n))+(\n)') def make_headers_re(message_type: Expect): @@ -18,7 +19,7 @@ def make_headers_re(message_type: Expect): non_crlf = rf'[^{CRLF}]' # language=RegExp - http_version = r'HTTP/\d+\.\d+' + http_version = r'HTTP/((\d+\.\d+)|\d+)' if message_type is Expect.REQUEST_HEADERS: # POST /post HTTP/1.1 start_line_re = fr'{non_crlf}*{http_version}{crlf}' @@ -42,6 +43,7 @@ def make_headers_re(message_type: Expect): CRLF, # Not really but useful for testing (just remember not to include it in a body). 
] TOKEN_REGEX_MAP = { + Expect.REQUEST_META: KEY_VALUE_RE_NO_LF, Expect.REQUEST_HEADERS: make_headers_re(Expect.REQUEST_HEADERS), Expect.RESPONSE_HEADERS: make_headers_re(Expect.RESPONSE_HEADERS), Expect.RESPONSE_META: KEY_VALUE_RE, @@ -56,6 +58,7 @@ class OutputMatchingError(ValueError): def expect_tokens(tokens: Iterable[Expect], s: str): for token in tokens: s = expect_token(token, s) + # print(token, "OK") if s: raise OutputMatchingError(f'Unmatched remaining output for {tokens} in {s!r}') diff --git a/tests/utils/matching/tokens.py b/tests/utils/matching/tokens.py index c82dafedc2..1dfe7d0c57 100644 --- a/tests/utils/matching/tokens.py +++ b/tests/utils/matching/tokens.py @@ -6,6 +6,7 @@ class Expect(Enum): Predefined token types we can expect in the output. """ + REQUEST_META = auto() REQUEST_HEADERS = auto() RESPONSE_HEADERS = auto() RESPONSE_META = auto() @@ -47,6 +48,7 @@ class ExpectSequence: *TERMINAL_RESPONSE, ] TERMINAL_EXCHANGE_META = [ + Expect.REQUEST_META, *TERMINAL_EXCHANGE, Expect.RESPONSE_META ]