Commit b00bdd4
[SPARK-25253][PYSPARK] Refactor local connection & auth code
This eliminates some duplication in the code to connect to a server on localhost to talk directly to the JVM. It also gives consistent IPv6 and error handling. Two other incidental changes that shouldn't matter:

1) Python barrier tasks perform authentication immediately (rather than waiting for the BARRIER_FUNCTION indicator).

2) For `rdd._load_from_socket`, the timeout is only increased after authentication.

Closes apache#22247 from squito/py_connection_refactor.

Authored-by: Imran Rashid <[email protected]>
Signed-off-by: hyukjinkwon <[email protected]>
(cherry picked from commit 38391c9)
1 parent db9c041 · commit b00bdd4
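All three call sites in this commit now funnel through the new helper. A minimal usage sketch (the port and secret values below are hypothetical placeholders for illustration, not part of the patch):

    from pyspark.java_gateway import local_connect_and_auth

    # Hypothetical values; in practice the JVM supplies these, e.g. via the
    # PYTHON_WORKER_FACTORY_PORT / PYTHON_WORKER_FACTORY_SECRET environment variables.
    port, auth_secret = 56789, "shared-secret"

    # Connect (IPv4 or IPv6) with a 15s timeout and authenticate in one step.
    sockfile, sock = local_connect_and_auth(port, auth_secret)

    # Long-running readers such as rdd._load_from_socket relax the timeout
    # only after authentication has succeeded.
    sock.settimeout(None)
    data = sockfile.read(4)  # then read whatever the server writes back (illustrative)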

3 files changed: +35 −28 lines changed

python/pyspark/java_gateway.py

Lines changed: 31 additions & 1 deletion
@@ -134,7 +134,7 @@ def killChild():
     return gateway


-def do_server_auth(conn, auth_secret):
+def _do_server_auth(conn, auth_secret):
     """
     Performs the authentication protocol defined by the SocketAuthHelper class on the given
     file-like object 'conn'.
@@ -145,3 +145,33 @@ def do_server_auth(conn, auth_secret):
     if reply != "ok":
         conn.close()
         raise Exception("Unexpected reply from iterator server.")
+
+
+def local_connect_and_auth(port, auth_secret):
+    """
+    Connect to local host, authenticate with it, and return a (sockfile, sock) for that
+    connection. Handles IPv4 & IPv6 and does some error handling.
+
+    :param port
+    :param auth_secret
+    :return: a tuple with (sockfile, sock)
+    """
+    sock = None
+    errors = []
+    # Support for both IPv4 and IPv6.
+    # On most IPv6-ready systems, IPv6 will take precedence.
+    for res in socket.getaddrinfo("127.0.0.1", port, socket.AF_UNSPEC, socket.SOCK_STREAM):
+        af, socktype, proto, _, sa = res
+        try:
+            sock = socket.socket(af, socktype, proto)
+            sock.settimeout(15)
+            sock.connect(sa)
+            sockfile = sock.makefile("rwb", 65536)
+            _do_server_auth(sockfile, auth_secret)
+            return (sockfile, sock)
+        except socket.error as e:
+            emsg = _exception_message(e)
+            errors.append("tried to connect to %s, but an error occurred: %s" % (sa, emsg))
+            sock.close()
+            sock = None
+    else:
+        raise Exception("could not open socket: %s" % errors)
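For context, the hunk above truncates the body of _do_server_auth; only its tail is visible. A minimal sketch of the client half of the SocketAuthHelper handshake it performs, assuming the length-prefixed UTF-8 framing provided by pyspark.serializers (this body is a reconstruction for illustration, not part of the diff):

    from pyspark.serializers import UTF8Deserializer, write_with_length

    def _do_server_auth(conn, auth_secret):
        # Send the shared secret, length-prefixed, then expect the reply "ok".
        write_with_length(auth_secret.encode("utf-8"), conn)
        conn.flush()
        reply = UTF8Deserializer().loads(conn)
        if reply != "ok":
            conn.close()
            raise Exception("Unexpected reply from iterator server.")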

python/pyspark/rdd.py

Lines changed: 2 additions & 22 deletions
@@ -39,7 +39,7 @@
 else:
     from itertools import imap as map, ifilter as filter

-from pyspark.java_gateway import do_server_auth
+from pyspark.java_gateway import local_connect_and_auth
 from pyspark.serializers import NoOpSerializer, CartesianDeserializer, \
     BatchedSerializer, CloudPickleSerializer, PairDeserializer, \
     PickleSerializer, pack_long, AutoBatchedSerializer, write_with_length, \
@@ -139,30 +139,10 @@ def _parse_memory(s):


 def _load_from_socket(sock_info, serializer):
-    port, auth_secret = sock_info
-    sock = None
-    # Support for both IPv4 and IPv6.
-    # On most of IPv6-ready systems, IPv6 will take precedence.
-    for res in socket.getaddrinfo("localhost", port, socket.AF_UNSPEC, socket.SOCK_STREAM):
-        af, socktype, proto, canonname, sa = res
-        sock = socket.socket(af, socktype, proto)
-        try:
-            sock.settimeout(15)
-            sock.connect(sa)
-        except socket.error:
-            sock.close()
-            sock = None
-            continue
-        break
-    if not sock:
-        raise Exception("could not open socket")
+    (sockfile, sock) = local_connect_and_auth(*sock_info)
     # The RDD materialization time is unpredictable; if we set a timeout for the socket read
     # operation, it will very possibly fail. See SPARK-18281.
     sock.settimeout(None)
-
-    sockfile = sock.makefile("rwb", 65536)
-    do_server_auth(sockfile, auth_secret)
-
     # The socket will be automatically closed when garbage-collected.
     return serializer.load_stream(sockfile)
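Note the ordering change called out in the commit message: the 15-second timeout set inside local_connect_and_auth now covers both the TCP connect and the auth handshake, and _load_from_socket relaxes it to None only afterwards. A dead server therefore still fails fast, while a slow RDD materialization can no longer trip a read timeout (SPARK-18281).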

python/pyspark/worker.py

Lines changed: 2 additions & 5 deletions
@@ -27,7 +27,7 @@

 from pyspark.accumulators import _accumulatorRegistry
 from pyspark.broadcast import Broadcast, _broadcastRegistry
-from pyspark.java_gateway import do_server_auth
+from pyspark.java_gateway import local_connect_and_auth
 from pyspark.taskcontext import TaskContext
 from pyspark.files import SparkFiles
 from pyspark.rdd import PythonEvalType
@@ -269,8 +269,5 @@ def process():
     # Read information about how to connect back to the JVM from the environment.
     java_port = int(os.environ["PYTHON_WORKER_FACTORY_PORT"])
     auth_secret = os.environ["PYTHON_WORKER_FACTORY_SECRET"]
-    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-    sock.connect(("127.0.0.1", java_port))
-    sock_file = sock.makefile("rwb", 65536)
-    do_server_auth(sock_file, auth_secret)
+    (sock_file, _) = local_connect_and_auth(java_port, auth_secret)
     main(sock_file, sock_file)
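With this change the worker's connect-back path goes through the same helper as the driver-side readers, inheriting the IPv4/IPv6 fallback and the 15-second connect timeout instead of hard-coding an AF_INET socket to 127.0.0.1. The socket object itself is discarded because the worker only needs the buffered file handle.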
