
[pyupgrade] synapse/ (#10348)
This PR is tantamount to running 
```
pyupgrade --py36-plus --keep-percent-format `find synapse/ -type f -name "*.py"`
```

Part of #9744
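
For orientation, nearly all of the churn below comes from a handful of mechanical rewrites that `pyupgrade --py36-plus` applies: dropping redundant parentheses and explicit `"r"` open modes, turning `str.format` calls into f-strings, folding `IOError` into its Python 3 alias `OSError`, and converting functional `TypedDict(...)` declarations to class syntax. A minimal before-and-after sketch (the `Config`/`read_config` names are invented for illustration, not Synapse code):

```
from typing import Optional, TypedDict


# TypedDict("Config", {"name": str, "token": Optional[str]}) is rewritten
# to the class-based declaration:
class Config(TypedDict):
    name: str
    token: Optional[str]


def read_config(path: str) -> Config:
    # open(path, "r") -> open(path): "r" is already the default mode
    try:
        with open(path) as f:
            data = f.read().strip()
    # except IOError -> except OSError: IOError is an alias of OSError on Python 3
    except OSError:
        data = ""
    # "{}: {} bytes".format(path, len(data)) -> an f-string
    print(f"{path}: {len(data)} bytes")
    return {"name": path, "token": data or None}


print(read_config("/etc/hostname"))
```

Because of `--keep-percent-format`, `%`-style formatting (for example in the `synapse/handlers/sync.py` hunk below) is deliberately left alone; only `str.format` calls become f-strings.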
ShadowJonathan authored Jul 19, 2021
1 parent 7387d6f commit 95e47b2
Showing 29 changed files with 86 additions and 102 deletions.
1 change: 1 addition & 0 deletions changelog.d/10348.misc
@@ -0,0 +1 @@
+Run `pyupgrade` on the codebase.

6 changes: 2 additions & 4 deletions synapse/app/generic_worker.py
@@ -395,10 +395,8 @@ def start_listening(self):
             elif listener.type == "metrics":
                 if not self.config.enable_metrics:
                     logger.warning(
-                        (
-                            "Metrics listener configured, but "
-                            "enable_metrics is not True!"
-                        )
+                        "Metrics listener configured, but "
+                        "enable_metrics is not True!"
                     )
                 else:
                     _base.listen_metrics(listener.bind_addresses, listener.port)

6 changes: 2 additions & 4 deletions synapse/app/homeserver.py
@@ -305,10 +305,8 @@ def start_listening(self):
             elif listener.type == "metrics":
                 if not self.config.enable_metrics:
                     logger.warning(
-                        (
-                            "Metrics listener configured, but "
-                            "enable_metrics is not True!"
-                        )
+                        "Metrics listener configured, but "
+                        "enable_metrics is not True!"
                     )
                 else:
                     _base.listen_metrics(listener.bind_addresses, listener.port)

2 changes: 1 addition & 1 deletion synapse/config/appservice.py
@@ -64,7 +64,7 @@ def load_appservices(hostname, config_files):

     for config_file in config_files:
         try:
-            with open(config_file, "r") as f:
+            with open(config_file) as f:
                 appservice = _load_appservice(hostname, yaml.safe_load(f), config_file)
                 if appservice.id in seen_ids:
                     raise ConfigError(

6 changes: 2 additions & 4 deletions synapse/config/tls.py
@@ -66,10 +66,8 @@ def read_config(self, config: dict, config_dir_path: str, **kwargs):
         if self.federation_client_minimum_tls_version == "1.3":
             if getattr(SSL, "OP_NO_TLSv1_3", None) is None:
                 raise ConfigError(
-                    (
-                        "federation_client_minimum_tls_version cannot be 1.3, "
-                        "your OpenSSL does not support it"
-                    )
+                    "federation_client_minimum_tls_version cannot be 1.3, "
+                    "your OpenSSL does not support it"
                 )

         # Whitelist of domains to not verify certificates for

2 changes: 1 addition & 1 deletion synapse/handlers/cas.py
@@ -40,7 +40,7 @@ def __init__(self, error, error_description=None):

     def __str__(self):
         if self.error_description:
-            return "{}: {}".format(self.error, self.error_description)
+            return f"{self.error}: {self.error_description}"
         return self.error


2 changes: 1 addition & 1 deletion synapse/handlers/federation.py
@@ -735,7 +735,7 @@ async def _get_state_after_missing_prev_event(
         # we need to make sure we re-load from the database to get the rejected
         # state correct.
         fetched_events.update(
-            (await self.store.get_events(missing_desired_events, allow_rejected=True))
+            await self.store.get_events(missing_desired_events, allow_rejected=True)
         )

         # check for events which were in the wrong room.

4 changes: 2 additions & 2 deletions synapse/handlers/identity.py
@@ -302,7 +302,7 @@ async def try_unbind_threepid_with_id_server(
             )

         url = "https://%s/_matrix/identity/api/v1/3pid/unbind" % (id_server,)
-        url_bytes = "/_matrix/identity/api/v1/3pid/unbind".encode("ascii")
+        url_bytes = b"/_matrix/identity/api/v1/3pid/unbind"

         content = {
             "mxid": mxid,
@@ -695,7 +695,7 @@ async def _lookup_3pid_v1(
             return data["mxid"]
         except RequestTimedOutError:
             raise SynapseError(500, "Timed out contacting identity server")
-        except IOError as e:
+        except OSError as e:
             logger.warning("Error from v1 identity server lookup: %s" % (e,))

         return None

38 changes: 20 additions & 18 deletions synapse/handlers/oidc.py
@@ -72,26 +72,26 @@
     (b"oidc_session_no_samesite", b"HttpOnly"),
 ]

+
 #: A token exchanged from the token endpoint, as per RFC6749 sec 5.1. and
 #: OpenID.Core sec 3.1.3.3.
-Token = TypedDict(
-    "Token",
-    {
-        "access_token": str,
-        "token_type": str,
-        "id_token": Optional[str],
-        "refresh_token": Optional[str],
-        "expires_in": int,
-        "scope": Optional[str],
-    },
-)
+class Token(TypedDict):
+    access_token: str
+    token_type: str
+    id_token: Optional[str]
+    refresh_token: Optional[str]
+    expires_in: int
+    scope: Optional[str]
+

 #: A JWK, as per RFC7517 sec 4. The type could be more precise than that, but
 #: there is no real point of doing this in our case.
 JWK = Dict[str, str]

+
 #: A JWK Set, as per RFC7517 sec 5.
-JWKS = TypedDict("JWKS", {"keys": List[JWK]})
+class JWKS(TypedDict):
+    keys: List[JWK]


 class OidcHandler:
@@ -255,7 +255,7 @@ def __init__(self, error, error_description=None):

     def __str__(self):
         if self.error_description:
-            return "{}: {}".format(self.error, self.error_description)
+            return f"{self.error}: {self.error_description}"
         return self.error


@@ -639,7 +639,7 @@ async def _exchange_code(self, code: str) -> Token:
             )
             logger.warning(description)
             # Body was still valid JSON. Might be useful to log it for debugging.
-            logger.warning("Code exchange response: {resp!r}".format(resp=resp))
+            logger.warning("Code exchange response: %r", resp)
             raise OidcError("server_error", description)

         return resp
@@ -1217,10 +1217,12 @@ class OidcSessionData:
     ui_auth_session_id = attr.ib(type=str)


-UserAttributeDict = TypedDict(
-    "UserAttributeDict",
-    {"localpart": Optional[str], "display_name": Optional[str], "emails": List[str]},
-)
+class UserAttributeDict(TypedDict):
+    localpart: Optional[str]
+    display_name: Optional[str]
+    emails: List[str]
+

 C = TypeVar("C")

+
15 changes: 6 additions & 9 deletions synapse/handlers/register.py
@@ -55,15 +55,12 @@
     ["guest", "auth_provider"],
 )

-LoginDict = TypedDict(
-    "LoginDict",
-    {
-        "device_id": str,
-        "access_token": str,
-        "valid_until_ms": Optional[int],
-        "refresh_token": Optional[str],
-    },
-)
+
+class LoginDict(TypedDict):
+    device_id: str
+    access_token: str
+    valid_until_ms: Optional[int]
+    refresh_token: Optional[str]


 class RegistrationHandler(BaseHandler):

2 changes: 1 addition & 1 deletion synapse/handlers/saml.py
@@ -372,7 +372,7 @@ def expire_sessions(self):


 DOT_REPLACE_PATTERN = re.compile(
-    ("[^%s]" % (re.escape("".join(mxid_localpart_allowed_characters)),))
+    "[^%s]" % (re.escape("".join(mxid_localpart_allowed_characters)),)
 )


2 changes: 1 addition & 1 deletion synapse/handlers/sync.py
@@ -1601,7 +1601,7 @@ async def _get_rooms_changed(
             logger.debug(
                 "Membership changes in %s: [%s]",
                 room_id,
-                ", ".join(("%s (%s)" % (e.event_id, e.membership) for e in events)),
+                ", ".join("%s (%s)" % (e.event_id, e.membership) for e in events),
             )

             non_joins = [e for e in events if e.membership != Membership.JOIN]

2 changes: 1 addition & 1 deletion synapse/http/proxyagent.py
@@ -172,7 +172,7 @@ def request(self, method, uri, headers=None, bodyProducer=None):
         """
         uri = uri.strip()
         if not _VALID_URI.match(uri):
-            raise ValueError("Invalid URI {!r}".format(uri))
+            raise ValueError(f"Invalid URI {uri!r}")

         parsed_uri = URI.fromBytes(uri)
         pool_key = (parsed_uri.scheme, parsed_uri.host, parsed_uri.port)

2 changes: 1 addition & 1 deletion synapse/http/site.py
@@ -384,7 +384,7 @@ def _finished_processing(self):
         # authenticated (e.g. and admin is puppetting a user) then we log both.
         requester, authenticated_entity = self.get_authenticated_entity()
         if authenticated_entity:
-            requester = "{}.{}".format(authenticated_entity, requester)
+            requester = f"{authenticated_entity}.{requester}"

         self.site.access_logger.log(
             log_level,

2 changes: 1 addition & 1 deletion synapse/logging/opentracing.py
@@ -374,7 +374,7 @@ def init_tracer(hs: "HomeServer"):

     config = JaegerConfig(
         config=hs.config.jaeger_config,
-        service_name="{} {}".format(hs.config.server_name, hs.get_instance_name()),
+        service_name=f"{hs.config.server_name} {hs.get_instance_name()}",
         scope_manager=LogContextScopeManager(hs.config),
         metrics_factory=PrometheusMetricsFactory(),
     )

26 changes: 12 additions & 14 deletions synapse/metrics/_exposition.py
@@ -34,7 +34,7 @@

 from synapse.util import caches

-CONTENT_TYPE_LATEST = str("text/plain; version=0.0.4; charset=utf-8")
+CONTENT_TYPE_LATEST = "text/plain; version=0.0.4; charset=utf-8"


 INF = float("inf")
@@ -55,8 +55,8 @@ def floatToGoString(d):
     # Go switches to exponents sooner than Python.
     # We only need to care about positive values for le/quantile.
     if d > 0 and dot > 6:
-        mantissa = "{0}.{1}{2}".format(s[0], s[1:dot], s[dot + 1 :]).rstrip("0.")
-        return "{0}e+0{1}".format(mantissa, dot - 1)
+        mantissa = f"{s[0]}.{s[1:dot]}{s[dot + 1 :]}".rstrip("0.")
+        return f"{mantissa}e+0{dot - 1}"
     return s


@@ -65,7 +65,7 @@ def sample_line(line, name):
         labelstr = "{{{0}}}".format(
             ",".join(
                 [
-                    '{0}="{1}"'.format(
+                    '{}="{}"'.format(
                         k,
                         v.replace("\\", r"\\").replace("\n", r"\n").replace('"', r"\""),
                     )
@@ -78,10 +78,8 @@
     timestamp = ""
     if line.timestamp is not None:
         # Convert to milliseconds.
-        timestamp = " {0:d}".format(int(float(line.timestamp) * 1000))
-    return "{0}{1} {2}{3}\n".format(
-        name, labelstr, floatToGoString(line.value), timestamp
-    )
+        timestamp = f" {int(float(line.timestamp) * 1000):d}"
+    return "{}{} {}{}\n".format(name, labelstr, floatToGoString(line.value), timestamp)


 def generate_latest(registry, emit_help=False):
@@ -118,12 +116,12 @@ def generate_latest(registry, emit_help=False):
             # Output in the old format for compatibility.
             if emit_help:
                 output.append(
-                    "# HELP {0} {1}\n".format(
+                    "# HELP {} {}\n".format(
                         mname,
                         metric.documentation.replace("\\", r"\\").replace("\n", r"\n"),
                     )
                 )
-            output.append("# TYPE {0} {1}\n".format(mname, mtype))
+            output.append(f"# TYPE {mname} {mtype}\n")

             om_samples: Dict[str, List[str]] = {}
             for s in metric.samples:
@@ -143,13 +141,13 @@
             for suffix, lines in sorted(om_samples.items()):
                 if emit_help:
                     output.append(
-                        "# HELP {0}{1} {2}\n".format(
+                        "# HELP {}{} {}\n".format(
                             metric.name,
                             suffix,
                             metric.documentation.replace("\\", r"\\").replace("\n", r"\n"),
                         )
                     )
-                output.append("# TYPE {0}{1} gauge\n".format(metric.name, suffix))
+                output.append(f"# TYPE {metric.name}{suffix} gauge\n")
                 output.extend(lines)

             # Get rid of the weird colon things while we're at it
@@ -163,12 +161,12 @@
             # Also output in the new format, if it's different.
             if emit_help:
                 output.append(
-                    "# HELP {0} {1}\n".format(
+                    "# HELP {} {}\n".format(
                         mnewname,
                         metric.documentation.replace("\\", r"\\").replace("\n", r"\n"),
                     )
                 )
-            output.append("# TYPE {0} {1}\n".format(mnewname, mtype))
+            output.append(f"# TYPE {mnewname} {mtype}\n")

             for s in metric.samples:
                 # Get rid of the OpenMetrics specific samples (we should already have

3 changes: 1 addition & 2 deletions synapse/metrics/background_process_metrics.py
@@ -137,8 +137,7 @@ def collect(self):
             _background_process_db_txn_duration,
             _background_process_db_sched_duration,
         ):
-            for r in m.collect():
-                yield r
+            yield from m.collect()


 REGISTRY.register(_Collector())

25 changes: 9 additions & 16 deletions synapse/rest/client/v1/login.py
@@ -44,19 +44,14 @@
 logger = logging.getLogger(__name__)


-LoginResponse = TypedDict(
-    "LoginResponse",
-    {
-        "user_id": str,
-        "access_token": str,
-        "home_server": str,
-        "expires_in_ms": Optional[int],
-        "refresh_token": Optional[str],
-        "device_id": str,
-        "well_known": Optional[Dict[str, Any]],
-    },
-    total=False,
-)
+class LoginResponse(TypedDict, total=False):
+    user_id: str
+    access_token: str
+    home_server: str
+    expires_in_ms: Optional[int]
+    refresh_token: Optional[str]
+    device_id: str
+    well_known: Optional[Dict[str, Any]]


 class LoginRestServlet(RestServlet):
@@ -150,9 +145,7 @@ def on_GET(self, request: SynapseRequest):
             # login flow types returned.
             flows.append({"type": LoginRestServlet.TOKEN_TYPE})

-        flows.extend(
-            ({"type": t} for t in self.auth_handler.get_supported_login_types())
-        )
+        flows.extend({"type": t} for t in self.auth_handler.get_supported_login_types())

         flows.append({"type": LoginRestServlet.APPSERVICE_TYPE})

4 changes: 2 additions & 2 deletions synapse/rest/media/v1/__init__.py
@@ -17,7 +17,7 @@
 # check for JPEG support.
 try:
     PIL.Image._getdecoder("rgb", "jpeg", None)
-except IOError as e:
+except OSError as e:
     if str(e).startswith("decoder jpeg not available"):
         raise Exception(
             "FATAL: jpeg codec not supported. Install pillow correctly! "
@@ -32,7 +32,7 @@
 # check for PNG support.
 try:
     PIL.Image._getdecoder("rgb", "zip", None)
-except IOError as e:
+except OSError as e:
     if str(e).startswith("decoder zip not available"):
         raise Exception(
             "FATAL: zip codec not supported. Install pillow correctly! "

2 changes: 1 addition & 1 deletion synapse/storage/database.py
@@ -907,7 +907,7 @@ def simple_insert_many_txn(
         # The sort is to ensure that we don't rely on dictionary iteration
         # order.
         keys, vals = zip(
-            *[zip(*(sorted(i.items(), key=lambda kv: kv[0]))) for i in values if i]
+            *(zip(*(sorted(i.items(), key=lambda kv: kv[0]))) for i in values if i)
         )

         for k in keys: