Merged
Commits
21 commits
7490585
feat: make attachment downloading chunkable
hzhreal Oct 15, 2025
87edc40
style(pre-commit): auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Oct 15, 2025
fc2a70e
chore: update changelog
hzhreal Oct 16, 2025
2c8485a
Merge branch 'master' into feat/attachment-chunking
hzhreal Oct 17, 2025
379b499
Update discord/message.py
Paillat-dev Oct 20, 2025
16f32dc
Merge branch 'master' into feat/attachment-chunking
Lulalaby Oct 20, 2025
5793021
chore: change naming
hzhreal Oct 20, 2025
4daab6e
Update CHANGELOG.md
Paillat-dev Oct 20, 2025
4462af3
Update discord/http.py
Paillat-dev Oct 20, 2025
9692f8b
style(pre-commit): auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Oct 20, 2025
be95ec2
Merge branch 'master' into feat/attachment-chunking
Lulalaby Oct 21, 2025
f6aeef3
Merge branch 'master' into feat/attachment-chunking
Paillat-dev Oct 22, 2025
46e6582
Merge branch 'master' into feat/attachment-chunking
Lulalaby Oct 22, 2025
d434ede
chore: fix changelog position
Lulalaby Oct 22, 2025
8fa9468
Update discord/message.py
hzhreal Oct 22, 2025
aab8b11
Update discord/message.py
hzhreal Oct 22, 2025
51a670e
style(pre-commit): auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Oct 22, 2025
5bb3d6b
chore: refactor
hzhreal Oct 22, 2025
ebabe24
style(pre-commit): auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Oct 22, 2025
270f395
Apply suggestion from @Lulalaby
Lulalaby Oct 22, 2025
34511d9
Merge branch 'master' into feat/attachment-chunking
Paillat-dev Nov 4, 2025
4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -12,6 +12,10 @@ These changes are available on the `master` branch, but have not yet been released

### Added

- Added `Attachment.read_chunked` and an optional `chunksize` argument to
  `Attachment.save` for retrieving attachments in chunks.
([#2956](https://github.com/Pycord-Development/pycord/pull/2956))

### Changed

### Fixed
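As a rough usage sketch of the API described in this changelog entry (the helper name and the 1 MiB chunk size are illustrative assumptions, not part of the change), a large attachment can be processed incrementally without buffering it fully in memory:

```python
import hashlib

import discord


async def hash_attachment(attachment: discord.Attachment) -> str:
    """Hash a potentially large attachment without loading it all at once."""
    digest = hashlib.sha256()
    # read_chunked yields the attachment body as bytes objects of at most
    # `chunksize` bytes each; 1 MiB here is an arbitrary example value.
    async for chunk in attachment.read_chunked(1024 * 1024):
        digest.update(chunk)
    return digest.hexdigest()
```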
25 changes: 24 additions & 1 deletion discord/http.py
@@ -29,7 +29,15 @@
import logging
import sys
import weakref
from typing import TYPE_CHECKING, Any, Coroutine, Iterable, Sequence, TypeVar
from typing import (
TYPE_CHECKING,
Any,
AsyncGenerator,
Coroutine,
Iterable,
Sequence,
TypeVar,
)
from urllib.parse import quote as _uriquote

import aiohttp
@@ -406,6 +414,21 @@ async def get_from_cdn(self, url: str) -> bytes:
else:
raise HTTPException(resp, "failed to get asset")

async def stream_from_cdn(self, url: str, chunksize: int) -> AsyncGenerator[bytes]:
if not isinstance(chunksize, int) or chunksize < 1:
raise InvalidArgument("The chunksize must be a positive integer.")

async with self.__session.get(url) as resp:
if resp.status == 200:
async for chunk in resp.content.iter_chunked(chunksize):
yield chunk
elif resp.status == 404:
raise NotFound(resp, "asset not found")
elif resp.status == 403:
raise Forbidden(resp, "cannot retrieve asset")
else:
raise HTTPException(resp, "failed to get asset")

# state management

async def close(self) -> None:
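For context, the new `stream_from_cdn` relies on aiohttp's `StreamReader.iter_chunked`, which yields the response body in pieces of at most `chunksize` bytes. A minimal standalone sketch of that pattern (the URL handling, session lifetime, and 64 KiB default are assumptions for illustration, not pycord code):

```python
import aiohttp


async def stream_url(url: str, chunksize: int = 64 * 1024):
    # Open the response and yield it piece by piece instead of awaiting
    # resp.read(), which would buffer the entire body in memory.
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            resp.raise_for_status()
            async for chunk in resp.content.iter_chunked(chunksize):
                yield chunk
```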
66 changes: 63 additions & 3 deletions discord/message.py
@@ -32,6 +32,7 @@
from typing import (
TYPE_CHECKING,
Any,
AsyncGenerator,
Callable,
ClassVar,
Sequence,
@@ -290,6 +291,7 @@ async def save(
*,
seek_begin: bool = True,
use_cached: bool = False,
chunksize: int | None = None,
) -> int:
"""|coro|

@@ -311,6 +313,8 @@
after the message is deleted. Note that this can still fail to download
deleted attachments if too much time has passed, and it does not work
on some types of attachments.
chunksize: Optional[:class:`int`]
The maximum size, in bytes, of each chunk to read. If ``None``, the attachment is read in a single request.

Returns
-------
@@ -323,16 +327,33 @@
Saving the attachment failed.
NotFound
The attachment was deleted.
InvalidArgument
Argument `chunksize` is not a positive integer.
"""
data = await self.read(use_cached=use_cached)
if chunksize is not None:
data = self.read_chunked(use_cached=use_cached, chunksize=chunksize)
else:
data = await self.read(use_cached=use_cached)

if isinstance(fp, io.BufferedIOBase):
written = fp.write(data)
if chunksize:
written = 0
async for chunk in data:
written += fp.write(chunk)
else:
written = fp.write(data)
if seek_begin:
fp.seek(0)
return written
else:
with open(fp, "wb") as f:
return f.write(data)
if chunksize:
written = 0
async for chunk in data:
written += f.write(chunk)
return written
else:
return f.write(data)

async def read(self, *, use_cached: bool = False) -> bytes:
"""|coro|
@@ -369,6 +390,45 @@ async def read(self, *, use_cached: bool = False) -> bytes:
data = await self._http.get_from_cdn(url)
return data

async def read_chunked(
self, chunksize: int, *, use_cached: bool = False
) -> AsyncGenerator[bytes]:
"""|coro|

Retrieves the content of this attachment in chunks, as an :class:`AsyncGenerator` yielding :class:`bytes`.

Parameters
----------
chunksize: :class:`int`
The maximum size, in bytes, of each chunk to yield.
use_cached: :class:`bool`
Whether to use :attr:`proxy_url` rather than :attr:`url` when downloading
the attachment. This will allow attachments to be saved after deletion
more often, compared to the regular URL which is generally deleted right
after the message is deleted. Note that this can still fail to download
deleted attachments if too much time has passed, and it does not work
on some types of attachments.

Yields
------
:class:`bytes`
A chunk of the file.

Raises
------
HTTPException
Downloading the attachment failed.
Forbidden
You do not have permissions to access this attachment.
NotFound
The attachment was deleted.
InvalidArgument
Argument `chunksize` is not a positive integer.
"""
url = self.proxy_url if use_cached else self.url
async for chunk in self._http.stream_from_cdn(url, chunksize):
yield chunk

async def to_file(self, *, use_cached: bool = False, spoiler: bool = False) -> File:
"""|coro|

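A hedged sketch of the extended `save` path (the target filename, buffer use, and 256 KiB chunk size are illustrative assumptions): passing `chunksize` switches `save` to the chunked download above, so the data is written incrementally instead of after one full read.

```python
import io

import discord


async def save_examples(attachment: discord.Attachment) -> None:
    # Write straight to disk, 256 KiB at a time.
    written = await attachment.save("download.bin", chunksize=256 * 1024)
    print(f"wrote {written} bytes")

    # BufferedIOBase targets work the same way; seek_begin rewinds the
    # buffer afterwards so it can be read back immediately.
    buffer = io.BytesIO()
    await attachment.save(buffer, seek_begin=True, chunksize=256 * 1024)
```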