Expose requests interface on the top package #468

Open · wants to merge 2 commits into base: main
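In practical terms, this PR lets callers use the package root directly instead of going through the `requests` submodule. A minimal before/after sketch of the two import styles (the URL is only a placeholder; the old path is not removed by this diff, so both forms should keep working):

```python
# Before: the helpers are reached through the requests submodule.
from curl_cffi import requests

r = requests.get("https://example.com", impersonate="chrome")

# After this PR: the same helpers are re-exported at the package root.
import curl_cffi

r = curl_cffi.get("https://example.com", impersonate="chrome")
print(r.status_code)
```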
24 changes: 12 additions & 12 deletions README.md
@@ -96,10 +96,10 @@ To install unstable version from GitHub:
### requests-like

```python
-from curl_cffi import requests
+import curl_cffi

# Notice the impersonate parameter
-r = requests.get("https://tools.scrapfly.io/api/fp/ja3", impersonate="chrome")
+r = curl_cffi.get("https://tools.scrapfly.io/api/fp/ja3", impersonate="chrome")

print(r.json())
# output: {..., "ja3n_hash": "aa56c057ad164ec4fdcb7a5a283be9fc", ...}
@@ -108,27 +108,27 @@ print(r.json())
# To keep using the latest browser version as `curl_cffi` updates,
# simply set impersonate="chrome" without specifying a version.
# Other similar values are: "safari" and "safari_ios"
-r = requests.get("https://tools.scrapfly.io/api/fp/ja3", impersonate="chrome")
+r = curl_cffi.get("https://tools.scrapfly.io/api/fp/ja3", impersonate="chrome")

# To pin a specific version, specify the version number as well.
-r = requests.get("https://tools.scrapfly.io/api/fp/ja3", impersonate="chrome124")
+r = curl_cffi.get("https://tools.scrapfly.io/api/fp/ja3", impersonate="chrome124")

# To impersonate clients other than browsers, bring your own ja3/akamai strings
# See examples directory for details.
r = requests.get("https://tls.browserleaks.com/json", ja3=..., akamai=...)
r = curl_cffi.get("https://tls.browserleaks.com/json", ja3=..., akamai=...)

# http/socks proxies are supported
proxies = {"https": "http://localhost:3128"}
-r = requests.get("https://tools.scrapfly.io/api/fp/ja3", impersonate="chrome", proxies=proxies)
+r = curl_cffi.get("https://tools.scrapfly.io/api/fp/ja3", impersonate="chrome", proxies=proxies)

proxies = {"https": "socks://localhost:3128"}
-r = requests.get("https://tools.scrapfly.io/api/fp/ja3", impersonate="chrome", proxies=proxies)
+r = curl_cffi.get("https://tools.scrapfly.io/api/fp/ja3", impersonate="chrome", proxies=proxies)
```

### Sessions

```python
-s = requests.Session()
+s = curl_cffi.Session()

# httpbin is an HTTP test service; this endpoint makes the server set cookies
s.get("https://httpbin.org/cookies/set/foo/bar")
@@ -183,7 +183,7 @@ Notes:
### asyncio

```python
-from curl_cffi.requests import AsyncSession
+from curl_cffi import AsyncSession

async with AsyncSession() as s:
r = await s.get("https://example.com")
@@ -193,7 +193,7 @@ More concurrency:

```python
import asyncio
-from curl_cffi.requests import AsyncSession
+from curl_cffi import AsyncSession

urls = [
"https://google.com/",
@@ -212,7 +212,7 @@ async with AsyncSession() as s:
### WebSockets

```python
-from curl_cffi.requests import WebSocket
+from curl_cffi import WebSocket

def on_message(ws: WebSocket, message: str | bytes):
print(message)
@@ -228,7 +228,7 @@ For low-level APIs, Scrapy integration and other advanced topics, see the

```python
import asyncio
-from curl_cffi.requests import AsyncSession
+from curl_cffi import AsyncSession

async with AsyncSession() as s:
ws = await s.ws_connect("wss://echo.websocket.org")
56 changes: 56 additions & 0 deletions curl_cffi/__init__.py
@@ -12,6 +12,33 @@
"CurlWsFlag",
"ffi",
"lib",
"Session",
"AsyncSession",
"BrowserType",
"BrowserTypeLiteral",
"request",
"head",
"get",
"post",
"put",
"patch",
"delete",
"options",
"Cookies",
"Headers",
"Request",
"Response",
"AsyncWebSocket",
"WebSocket",
"WebSocketError",
"WebSocketClosed",
"WebSocketTimeout",
"WsCloseCode",
"ExtraFingerprints",
"CookieTypes",
"HeaderTypes",
"ProxySpec",
"exceptions",
]

import _cffi_backend # noqa: F401 # required by _wrapper
@@ -31,3 +58,32 @@
CurlWsFlag,
)
from .curl import Curl, CurlError, CurlMime
from .requests import (
AsyncSession,
AsyncWebSocket,
BrowserType,
BrowserTypeLiteral,
Cookies,
CookieTypes,
ExtraFingerprints,
Headers,
HeaderTypes,
ProxySpec,
Request,
Response,
Session,
WebSocket,
WebSocketClosed,
WebSocketError,
WebSocketTimeout,
WsCloseCode,
delete,
exceptions,
get,
head,
options,
patch,
post,
put,
request,
)
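As a quick sanity check of the new surface, something along these lines should pass once the re-exports above are in place (the names come straight from the `__all__` additions; the check itself is only an illustration, not part of the PR):

```python
import curl_cffi

# Each name newly added to __all__ should now resolve on the top-level package.
for name in ("Session", "AsyncSession", "get", "post", "Response", "WebSocket", "exceptions"):
    assert hasattr(curl_cffi, name), f"missing top-level export: {name}"

# The low-level curl API that was already exported is still there.
assert hasattr(curl_cffi, "Curl")
print("all checked top-level exports are present")
```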
2 changes: 1 addition & 1 deletion curl_cffi/requests/exceptions.py
@@ -5,7 +5,7 @@
import json
from typing import Literal, Union

-from .. import CurlError
+from ..curl import CurlError
from ..const import CurlECode


2 changes: 1 addition & 1 deletion curl_cffi/requests/models.py
@@ -4,7 +4,7 @@
from concurrent.futures import Future
from typing import Any, Awaitable, Callable, Dict, List, Optional, Union

-from .. import Curl
+from ..curl import Curl
from .cookies import Cookies
from .exceptions import HTTPError, RequestException
from .headers import Headers
6 changes: 3 additions & 3 deletions docs/cookies.rst
@@ -14,7 +14,7 @@ Using pickle:
# example from: https://github.com/encode/httpx/issues/895
import pickle
# import httpx
-from curl_cffi import requests
+import curl_cffi

def save_cookies(client):
with open("cookies.pk", "wb") as f:
@@ -27,11 +27,11 @@ Using pickle:
return pickle.load(f)

# client = httpx.Client(cookies=load_cookies())
-client = requests.Session()
+client = curl_cffi.Session()
client.get("https://httpbin.org/cookies/set/foo/bar")
save_cookies(client)

-client = requests.Session()
+client = curl_cffi.Session()
client.cookies.jar._cookies.update(load_cookies())
print(client.cookies.get("foo"))

14 changes: 7 additions & 7 deletions docs/faq.rst
@@ -50,7 +50,7 @@ The simplest way is to turn off cert verification by ``verify=False``:

.. code-block:: python

r = requests.get("https://example.com", verify=False)
r = curl_cffi.get("https://example.com", verify=False)


ErrCode: 77, Reason: error setting certificate verify locations
@@ -89,9 +89,9 @@ To force curl to use http 1.1 only.

.. code-block:: python

-from curl_cffi import requests, CurlHttpVersion
+import curl_cffi

r = requests.get("https://postman-echo.com", http_version=CurlHttpVersion.V1_1)
r = curl_cffi.get("https://postman-echo.com", http_version=curl_cffi.CurlHttpVersion.V1_1)

Related issues:

@@ -136,9 +136,9 @@ You can use the ``proxy`` parameter:

.. code-block:: python

-from curl_cffi import requests
+import curl_cffi

-requests.get(url, proxy="http://user:[email protected]:3128")
+curl_cffi.get(url, proxy="http://user:[email protected]:3128")

You can also use the ``http_proxy``, ``https_proxy``, ``ws_proxy``, and ``wss_proxy`` environment variables.
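A minimal sketch of the environment-variable route (an editor illustration, not part of this diff; the proxy address is a placeholder, and libcurl is assumed to honor these variables as the paragraph above states):

```python
import os

import curl_cffi

# Route HTTPS traffic through a local proxy via the environment
# instead of the proxy= argument.
os.environ["https_proxy"] = "http://localhost:3128"

r = curl_cffi.get("https://tools.scrapfly.io/api/fp/ja3", impersonate="chrome")
print(r.status_code)
```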
@@ -165,8 +165,8 @@ Use ``chardet`` or ``cchardet``

.. code-block::

->>> from curl_cffi import requests
->>> r = requests.get("https://example.com/messy_codec.html")
+>>> import curl_cffi
+>>> r = curl_cffi.get("https://example.com/messy_codec.html")
>>> import chardet
>>> chardet.detect(r.content)
{'encoding': 'GB2312', 'confidence': 0.99, 'language': 'Chinese'}
11 changes: 6 additions & 5 deletions docs/impersonate.rst
@@ -53,9 +53,9 @@ browser versions, you can simply use ``chrome``, ``safari`` and ``safari_ios``.

.. code-block:: python

-from curl_cffi import requests
+import curl_cffi

-requests.get(url, impersonate="chrome")
+curl_cffi.get(url, impersonate="chrome")

iOS has restrictions on WebView and TLS libs, so ``safari_x_ios`` should work for most apps.
If you encounter an Android app with custom fingerprints, you can try the ``safari_ios``
@@ -112,7 +112,7 @@ You can retrieve the JA3 and Akamai strings using tools like WireShark or from T
}


-r = requests.get(
+r = curl_cffi.get(
url, ja3=okhttp4_android10_ja3, akamai=okhttp4_android10_akamai, extra_fp=extra_fp
)
print(r.json())
@@ -123,13 +123,14 @@ To modify them, use ``curl.setopt(CurlOpt, value)``, for example:

.. code-block:: python

-from curl_cffi import Curl, CurlOpt, requests
+import curl_cffi
+from curl_cffi import Curl, CurlOpt

c = Curl()
c.setopt(CurlOpt.HTTP2_PSEUDO_HEADERS_ORDER, "masp")

# or
-requests.get(url, curl_options={CurlOpt.HTTP2_PSEUDO_HEADERS_ORDER, "masp"})
+curl_cffi.get(url, curl_options={CurlOpt.HTTP2_PSEUDO_HEADERS_ORDER: "masp"})
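For clarity, ``curl_options`` takes a mapping from option to value, so the call above passes a dict. A standalone version of the same call (editor sketch; the URL is a placeholder and the header-order string comes from the doc above):

```python
import curl_cffi
from curl_cffi import CurlOpt

# Low-level curl options are passed as {option: value}.
r = curl_cffi.get(
    "https://tools.scrapfly.io/api/fp/ja3",
    curl_options={CurlOpt.HTTP2_PSEUDO_HEADERS_ORDER: "masp"},
)
print(r.status_code)
```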

Here is a list of options:

16 changes: 8 additions & 8 deletions docs/index.rst
@@ -128,12 +128,12 @@ requests-like

.. code-block:: python

-from curl_cffi import requests
+import curl_cffi

url = "https://tools.scrapfly.io/api/fp/ja3"

# Notice the impersonate parameter
-r = requests.get("https://tools.scrapfly.io/api/fp/ja3", impersonate="chrome110")
+r = curl_cffi.get("https://tools.scrapfly.io/api/fp/ja3", impersonate="chrome110")

print(r.json())
# output: {..., "ja3n_hash": "aa56c057ad164ec4fdcb7a5a283be9fc", ...}
@@ -142,14 +142,14 @@
# To keep using the latest browser version as `curl_cffi` updates,
# simply set impersonate="chrome" without specifying a version.
# Other similar values are: "safari" and "safari_ios"
-r = requests.get("https://tools.scrapfly.io/api/fp/ja3", impersonate="chrome")
+r = curl_cffi.get("https://tools.scrapfly.io/api/fp/ja3", impersonate="chrome")

# http/socks proxies are supported
proxies = {"https": "http://localhost:3128"}
-r = requests.get("https://tools.scrapfly.io/api/fp/ja3", impersonate="chrome110", proxies=proxies)
+r = curl_cffi.get("https://tools.scrapfly.io/api/fp/ja3", impersonate="chrome110", proxies=proxies)

proxies = {"https": "socks://localhost:3128"}
-r = requests.get("https://tools.scrapfly.io/api/fp/ja3", impersonate="chrome110", proxies=proxies)
+r = curl_cffi.get("https://tools.scrapfly.io/api/fp/ja3", impersonate="chrome110", proxies=proxies)

Sessions
~~~~~~~~
@@ -173,7 +173,7 @@ asyncio

.. code-block:: python

-from curl_cffi.requests import AsyncSession
+from curl_cffi import AsyncSession

async with AsyncSession() as s:
r = await s.get("https://example.com")
@@ -183,7 +183,7 @@ More concurrency:
.. code-block:: python

import asyncio
-from curl_cffi.requests import AsyncSession
+from curl_cffi import AsyncSession

urls = [
"https://google.com/",
@@ -203,7 +203,7 @@ WebSockets

.. code-block:: python

-from curl_cffi.requests import Session, WebSocket
+from curl_cffi import Session, WebSocket

def on_message(ws: WebSocket, message):
print(message)
4 changes: 2 additions & 2 deletions examples/async_session.py
@@ -1,10 +1,10 @@
import asyncio

-from curl_cffi import requests
+import curl_cffi


async def main():
-async with requests.AsyncSession() as s:
+async with curl_cffi.AsyncSession() as s:
r = await s.get("https://httpbin.org/headers")
print(r.text)

10 changes: 5 additions & 5 deletions examples/custom_response_class.py
@@ -1,10 +1,10 @@
-from curl_cffi import requests
-from curl_cffi.curl import Curl, CurlInfo
+import curl_cffi
+from curl_cffi import Curl, CurlInfo
from typing import cast


-class CustomResponse(requests.Response):
-def __init__(self, curl: Curl | None = None, request: requests.Request | None = None):
+class CustomResponse(curl_cffi.Response):
+def __init__(self, curl: Curl | None = None, request: curl_cffi.Request | None = None):
super().__init__(curl, request)
self.local_port = cast(int, curl.getinfo(CurlInfo.LOCAL_PORT))
self.connect_time = cast(float, curl.getinfo(CurlInfo.CONNECT_TIME))
@@ -17,7 +17,7 @@ def custom_method(self):
return "this is a custom method"


-session = requests.Session(response_class=CustomResponse)
+session = curl_cffi.Session(response_class=CustomResponse)
response: CustomResponse = session.get("http://example.com")
print(f"{response.status=}")
print(response.custom_method())
4 changes: 2 additions & 2 deletions examples/impersonate.py
@@ -1,4 +1,4 @@
-from curl_cffi import requests
+import curl_cffi

# OKHTTP impersonation examples
# credits: https://github.com/bogdanfinn/tls-client/blob/master/profiles/contributed_custom_profiles.go
@@ -41,5 +41,5 @@
}


-r = requests.get(url, ja3=okhttp4_android10_ja3, akamai=okhttp4_android10_akamai, extra_fp=extra_fp)
+r = curl_cffi.get(url, ja3=okhttp4_android10_ja3, akamai=okhttp4_android10_akamai, extra_fp=extra_fp)
print(r.json())