This repository has been archived by the owner on Apr 26, 2024. It is now read-only.

Commit

Remove types from comments.
clokep committed Nov 10, 2022
1 parent b2c2b03 commit a442e6b
Showing 25 changed files with 70 additions and 82 deletions.
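Every hunk below follows the same pattern: parameter (and return) types are dropped from the Google-style docstrings because the functions already carry those types as annotations in their signatures. A minimal illustrative sketch of the before/after — not code from this commit:

```python
# Old style -- the type is repeated in the docstring:
#
#     def set_timeout(delay, callback):
#         """Args:
#             delay (int): delay in milliseconds
#             callback (func): function to run on expiry
#         """

# New style -- the type lives only in the signature:
from typing import Callable


def set_timeout(delay: int, callback: Callable[[], None]) -> None:
    """Schedule `callback` to run after `delay`.

    Args:
        delay: delay in milliseconds
        callback: function to run on expiry
    """
```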
5 changes: 2 additions & 3 deletions synapse/config/logger.py
@@ -317,10 +317,9 @@ def setup_logging(
Set up the logging subsystem.
Args:
config (LoggingConfig | synapse.config.worker.WorkerConfig):
configuration data
config: configuration data
use_worker_options (bool): True to use the 'worker_log_config' option
use_worker_options: True to use the 'worker_log_config' option
instead of 'log_config'.
logBeginner: The Twisted logBeginner to use.
11 changes: 5 additions & 6 deletions synapse/federation/transport/client.py
@@ -280,12 +280,11 @@ async def make_membership_event(
Note that this does not append any events to any graphs.
Args:
destination (str): address of remote homeserver
room_id (str): room to join/leave
user_id (str): user to be joined/left
membership (str): one of join/leave
params (dict[str, str|Iterable[str]]): Query parameters to include in the
request.
destination: address of remote homeserver
room_id: room to join/leave
user_id: user to be joined/left
membership: one of join/leave
params: Query parameters to include in the request.
Returns:
Succeeds when we get a 2xx HTTP response. The result
4 changes: 2 additions & 2 deletions synapse/federation/transport/server/_base.py
@@ -224,10 +224,10 @@ class BaseFederationServlet:
With arguments:
origin (unicode|None): The authenticated server_name of the calling server,
origin (str|None): The authenticated server_name of the calling server,
unless REQUIRE_AUTH is set to False and authentication failed.
content (unicode|None): decoded json body of the request. None if the
content (str|None): decoded json body of the request. None if the
request was a GET.
query (dict[bytes, list[bytes]]): Query params from the request. url-decoded
2 changes: 1 addition & 1 deletion synapse/handlers/e2e_keys.py
@@ -870,7 +870,7 @@ async def _process_self_signatures(
- signatures of the user's master key by the user's devices.
Args:
user_id (string): the user uploading the keys
user_id: the user uploading the keys
signatures (dict[string, dict]): map of devices to signed keys
Returns:
4 changes: 2 additions & 2 deletions synapse/handlers/federation.py
@@ -1596,8 +1596,8 @@ async def get_room_complexity(
Fetch the complexity of a remote room over federation.
Args:
remote_room_hosts (list[str]): The remote servers to ask.
room_id (str): The room ID to ask about.
remote_room_hosts: The remote servers to ask.
room_id: The room ID to ask about.
Returns:
Dict contains the complexity
2 changes: 1 addition & 1 deletion synapse/handlers/identity.py
@@ -711,7 +711,7 @@ async def ask_id_server_for_third_party_invite(
inviter_display_name: The current display name of the
inviter.
inviter_avatar_url: The URL of the inviter's avatar.
id_access_token (str): The access token to authenticate to the identity
id_access_token: The access token to authenticate to the identity
server with
Returns:
3 changes: 1 addition & 2 deletions synapse/http/additional_resource.py
@@ -45,8 +45,7 @@ def __init__(
Args:
hs: homeserver
handler ((twisted.web.server.Request) -> twisted.internet.defer.Deferred):
function to be called to handle the request.
handler: function to be called to handle the request.
"""
super().__init__()
self._handler = handler
2 changes: 1 addition & 1 deletion synapse/http/server.py
@@ -267,7 +267,7 @@ def register_paths(
request. The first argument will be the request object and
subsequent arguments will be any matched groups from the regex.
This should return either a tuple of (code, response), or None.
servlet_classname (str): The name of the handler to be used in prometheus
servlet_classname: The name of the handler to be used in prometheus
and opentracing logs.
"""

2 changes: 1 addition & 1 deletion synapse/http/site.py
@@ -400,7 +400,7 @@ def _started_processing(self, servlet_name: str) -> None:
be sure to call finished_processing.
Args:
servlet_name (str): the name of the servlet which will be
servlet_name: the name of the servlet which will be
processing this request. This is used in the metrics.
It is possible to update this afterwards by updating
22 changes: 10 additions & 12 deletions synapse/logging/context.py
@@ -117,8 +117,7 @@ def __init__(self, copy_from: "Optional[ContextResourceUsage]" = None) -> None:
"""Create a new ContextResourceUsage
Args:
copy_from (ContextResourceUsage|None): if not None, an object to
copy stats from
copy_from: if not None, an object to copy stats from
"""
if copy_from is None:
self.reset()
@@ -162,7 +161,7 @@ def __iadd__(self, other: "ContextResourceUsage") -> "ContextResourceUsage":
"""Add another ContextResourceUsage's stats to this one's.
Args:
other (ContextResourceUsage): the other resource usage object
other: the other resource usage object
"""
self.ru_utime += other.ru_utime
self.ru_stime += other.ru_stime
@@ -898,13 +897,12 @@ def defer_to_thread(
on it.
Args:
reactor (twisted.internet.base.ReactorBase): The reactor in whose main thread
the Deferred will be invoked, and whose threadpool we should use for the
function.
reactor: The reactor in whose main thread the Deferred will be invoked,
and whose threadpool we should use for the function.
Normally this will be hs.get_reactor().
f (callable): The function to call.
f: The function to call.
args: positional arguments to pass to f.
@@ -939,13 +937,13 @@ def defer_to_threadpool(
on it.
Args:
reactor (twisted.internet.base.ReactorBase): The reactor in whose main thread
the Deferred will be invoked. Normally this will be hs.get_reactor().
reactor: The reactor in whose main thread the Deferred will be invoked.
Normally this will be hs.get_reactor().
threadpool (twisted.python.threadpool.ThreadPool): The threadpool to use for
running `f`. Normally this will be hs.get_reactor().getThreadPool().
threadpool: The threadpool to use for running `f`. Normally this will be
hs.get_reactor().getThreadPool().
f (callable): The function to call.
f: The function to call.
args: positional arguments to pass to f.
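For context, a rough usage sketch of `defer_to_thread` as described by the docstring above. The helper and its import path come from this file; the call shape and the surrounding names (`hs`, `_read_file`, `read_file_off_thread`) are assumptions for illustration only:

```python
from synapse.logging.context import defer_to_thread


def _read_file(path: str) -> bytes:
    # Blocking I/O that must not run on the reactor thread.
    with open(path, "rb") as f:
        return f.read()


async def read_file_off_thread(hs, path: str) -> bytes:
    # Runs _read_file on the reactor's threadpool; the resulting Deferred
    # fires back on the reactor's main thread, as the docstring describes.
    return await defer_to_thread(hs.get_reactor(), _read_file, path)
```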
2 changes: 1 addition & 1 deletion synapse/logging/opentracing.py
@@ -721,7 +721,7 @@ def inject_header_dict(
destination: address of entity receiving the span context. Must be given unless
check_destination is False. The context will only be injected if the
destination matches the opentracing whitelist
check_destination (bool): If false, destination will be ignored and the context
check_destination: If false, destination will be ignored and the context
will always be injected.
Note:
4 changes: 2 additions & 2 deletions synapse/rest/media/v1/media_repository.py
@@ -344,8 +344,8 @@ async def _get_remote_media_impl(
download from remote server.
Args:
server_name (str): Remote server_name where the media originated.
media_id (str): The media ID of the content (as defined by the
server_name: Remote server_name where the media originated.
media_id: The media ID of the content (as defined by the
remote server).
Returns:
5 changes: 2 additions & 3 deletions synapse/server_notices/consent_server_notices.py
@@ -113,9 +113,8 @@ def copy_with_str_subst(x: Any, substitutions: Any) -> Any:
"""Deep-copy a structure, carrying out string substitutions on any strings
Args:
x (object): structure to be copied
substitutions (object): substitutions to be made - passed into the
string '%' operator
x: structure to be copied
substitutions: substitutions to be made - passed into the string '%' operator
Returns:
copy of x
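The behaviour described by the `copy_with_str_subst` docstring amounts to roughly the following (an illustrative re-implementation, not the actual Synapse code):

```python
from typing import Any


def copy_with_str_subst(x: Any, substitutions: Any) -> Any:
    # Deep-copy dicts/lists, applying '%' substitutions to any strings found.
    if isinstance(x, str):
        return x % substitutions
    if isinstance(x, dict):
        return {k: copy_with_str_subst(v, substitutions) for k, v in x.items()}
    if isinstance(x, (list, tuple)):
        return [copy_with_str_subst(e, substitutions) for e in x]
    return x  # anything else is returned unchanged


# copy_with_str_subst({"body": "Hello %(user)s"}, {"user": "alice"})
# -> {"body": "Hello alice"}
```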
5 changes: 2 additions & 3 deletions synapse/storage/controllers/persist_events.py
@@ -204,9 +204,8 @@ async def add_to_queue(
process to do so, calling the per_item_callback for each item.
Args:
room_id (str):
task (_EventPersistQueueTask): A _PersistEventsTask or
_UpdateCurrentStateTask to process.
room_id:
task: A _PersistEventsTask or _UpdateCurrentStateTask to process.
Returns:
the result returned by the `_per_item_callback` passed to
22 changes: 10 additions & 12 deletions synapse/storage/databases/main/events.py
@@ -1282,9 +1282,10 @@ def _filter_events_and_contexts_for_duplicates(
Pick the earliest non-outlier if there is one, else the earliest one.
Args:
events_and_contexts (list[(EventBase, EventContext)]):
events_and_contexts:
Returns:
list[(EventBase, EventContext)]: filtered list
filtered list
"""
new_events_and_contexts: OrderedDict[
str, Tuple[EventBase, EventContext]
@@ -1310,9 +1311,8 @@ def _update_room_depths_txn(
"""Update min_depth for each room
Args:
txn (twisted.enterprise.adbapi.Connection): db connection
events_and_contexts (list[(EventBase, EventContext)]): events
we are persisting
txn: db connection
events_and_contexts: events we are persisting
"""
depth_updates: Dict[str, int] = {}
for event, context in events_and_contexts:
@@ -1583,13 +1583,11 @@ def _update_metadata_tables_txn(
"""Update all the miscellaneous tables for new events
Args:
txn (twisted.enterprise.adbapi.Connection): db connection
events_and_contexts (list[(EventBase, EventContext)]): events
we are persisting
all_events_and_contexts (list[(EventBase, EventContext)]): all
events that we were going to persist. This includes events
we've already persisted, etc, that wouldn't appear in
events_and_context.
txn: db connection
events_and_contexts: events we are persisting
all_events_and_contexts: all events that we were going to persist.
This includes events we've already persisted, etc, that wouldn't
appear in events_and_context.
inhibit_local_membership_updates: Stop the local_current_membership
from being updated by these events. This should be set to True
for backfilled events because backfilled events in the past do
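The de-duplication rule documented for `_filter_events_and_contexts_for_duplicates` above ("pick the earliest non-outlier if there is one, else the earliest one") can be sketched roughly as follows (illustrative only, not the real implementation):

```python
from collections import OrderedDict


def filter_duplicates(events_and_contexts):
    # Keep one (event, context) pair per event ID, preferring the earliest
    # non-outlier occurrence and falling back to the earliest occurrence.
    picked = OrderedDict()
    for event, context in events_and_contexts:
        prev = picked.get(event.event_id)
        if prev is None:
            picked[event.event_id] = (event, context)
        elif prev[0].internal_metadata.is_outlier() and not event.internal_metadata.is_outlier():
            # A later non-outlier replaces an earlier outlier.
            picked[event.event_id] = (event, context)
    return list(picked.values())
```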
6 changes: 3 additions & 3 deletions synapse/storage/databases/main/monthly_active_users.py
@@ -217,7 +217,7 @@ async def reap_monthly_active_users(self) -> None:
def _reap_users(txn: LoggingTransaction, reserved_users: List[str]) -> None:
"""
Args:
reserved_users (tuple): reserved users to preserve
reserved_users: reserved users to preserve
"""

thirty_days_ago = int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30)
@@ -370,8 +370,8 @@ def upsert_monthly_active_user_txn(
should not appear in the MAU stats).
Args:
txn (cursor):
user_id (str): user to add/update
txn:
user_id: user to add/update
"""
assert (
self._update_on_this_worker
6 changes: 3 additions & 3 deletions synapse/storage/databases/main/registration.py
@@ -953,7 +953,7 @@ def get_user_id_by_threepid_txn(
"""Returns user id from threepid
Args:
txn (cursor):
txn:
medium: threepid medium e.g. email
address: threepid address e.g. [email protected]
@@ -1283,8 +1283,8 @@ def set_expiration_date_for_user_txn(
"""Sets an expiration date to the account with the given user ID.
Args:
user_id (str): User ID to set an expiration date for.
use_delta (bool): If set to False, the expiration date for the user will be
user_id: User ID to set an expiration date for.
use_delta: If set to False, the expiration date for the user will be
now + validity period. If set to True, this expiration date will be a
random value in the [now + period - d ; now + period] range, d being a
delta equal to 10% of the validity period.
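The expiration-date rule described in the `set_expiration_date_for_user_txn` docstring works out to roughly this (a sketch of the arithmetic only; the real method also upserts the value inside the database transaction):

```python
import random


def compute_expiration_ts(now_ms: int, validity_period_ms: int, use_delta: bool) -> int:
    expiration_ts = now_ms + validity_period_ms
    if use_delta:
        # Pick a random point in [now + period - d, now + period], with d equal
        # to 10% of the validity period, so accounts don't all expire at once.
        delta = int(validity_period_ms * 0.1)
        expiration_ts = random.randrange(expiration_ts - delta, expiration_ts + 1)
    return expiration_ts
```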
4 changes: 2 additions & 2 deletions synapse/types.py
@@ -143,8 +143,8 @@ def deserialize(
Requester.
Args:
store (DataStore): Used to convert AS ID to AS object
input (dict): A dict produced by `serialize`
store: Used to convert AS ID to AS object
input: A dict produced by `serialize`
Returns:
Requester
2 changes: 1 addition & 1 deletion synapse/util/caches/deferred_cache.py
@@ -153,7 +153,7 @@ def get(
Args:
key:
callback: Gets called when the entry in the cache is invalidated
update_metrics (bool): whether to update the cache hit rate metrics
update_metrics: whether to update the cache hit rate metrics
Returns:
A Deferred which completes with the result. Note that this may later fail
6 changes: 3 additions & 3 deletions synapse/util/caches/lrucache.py
@@ -389,11 +389,11 @@ def __init__(
cache_name: The name of this cache, for the prometheus metrics. If unset,
no metrics will be reported on this cache.
cache_type (type):
cache_type:
type of underlying cache to be used. Typically one of dict
or TreeCache.
size_callback (func(V) -> int | None):
size_callback:
metrics_collection_callback:
metrics collection callback. This is called early in the metrics
@@ -403,7 +403,7 @@ def __init__(
Ignored if cache_name is None.
apply_cache_factor_from_config (bool): If true, `max_size` will be
apply_cache_factor_from_config: If true, `max_size` will be
multiplied by a cache factor derived from the homeserver config
clock:
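A hedged usage sketch for the `LruCache` parameters documented above; the keyword names are taken from the docstring, but the exact constructor signature should be checked against `synapse/util/caches/lrucache.py`:

```python
from synapse.util.caches.lrucache import LruCache

cache: LruCache = LruCache(
    max_size=1000,
    cache_name="example_cache",  # named caches report prometheus metrics
    cache_type=dict,             # plain dict; TreeCache allows prefix invalidation
    size_callback=len,           # size entries by len() instead of counting 1 each
)
cache["some_key"] = ["a", "b", "c"]
```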
2 changes: 1 addition & 1 deletion synapse/util/ratelimitutils.py
@@ -183,7 +183,7 @@ def ratelimit(self, host: str) -> "_GeneratorContextManager[defer.Deferred[None]
# Handle request ...
Args:
host (str): Origin of incoming request.
host: Origin of incoming request.
Returns:
context manager which returns a deferred.
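Based on the docstring above, `ratelimit` is an ordinary context manager that yields a Deferred to wait on before handling the request; a rough usage sketch (the `rate_limiter` object and the handler itself are hypothetical):

```python
async def handle_incoming_request(rate_limiter, origin: str) -> None:
    with rate_limiter.ratelimit(origin) as wait_deferred:
        # The Deferred resolves once this host is allowed to proceed.
        await wait_deferred
        # Handle request ...
```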
2 changes: 1 addition & 1 deletion synapse/util/wheel_timer.py
@@ -90,7 +90,7 @@ def fetch(self, now: int) -> List[T]:
"""Fetch any objects that have timed out
Args:
now (ms): Current time in msec
now: Current time in msec
Returns:
list: List of objects that have timed out
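A rough usage sketch of the `WheelTimer` touched here; only `fetch(now)` appears in the hunk above, so the constructor and `insert` call shapes are assumptions to be checked against `synapse/util/wheel_timer.py`:

```python
from synapse.util.wheel_timer import WheelTimer

timer: WheelTimer[str] = WheelTimer(bucket_size=5000)  # 5s buckets, times in ms
timer.insert(now=1_000, obj="user_a", then=11_000)     # due at roughly t=11s
print(timer.fetch(now=30_000))                         # -> ["user_a"]
```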
2 changes: 0 additions & 2 deletions tests/replication/test_multi_media_repo.py
@@ -246,8 +246,6 @@ def _build_test_server(connection_creator):
Args:
connection_creator (IOpenSSLServerConnectionCreator): thing to build
SSL connections
sanlist (list[bytes]): list of the SAN entries for the cert returned
by the server
Returns:
TLSMemoryBIOProtocol
9 changes: 5 additions & 4 deletions tests/server_notices/test_resource_limits_server_notices.py
@@ -11,6 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Tuple
from unittest.mock import Mock

from twisted.test.proto_helpers import MemoryReactor
@@ -350,14 +351,14 @@ def test_invite_with_notice(self):

self.assertTrue(notice_in_room, "No server notice in room")

def _trigger_notice_and_join(self):
def _trigger_notice_and_join(self) -> Tuple[str, str, str]:
"""Creates enough active users to hit the MAU limit and trigger a system notice
about it, then joins the system notices room with one of the users created.
Returns:
user_id (str): The ID of the user that joined the room.
tok (str): The access token of the user that joined the room.
room_id (str): The ID of the room that's been joined.
user_id: The ID of the user that joined the room.
tok: The access token of the user that joined the room.
room_id: The ID of the room that's been joined.
"""
user_id = None
tok = None