-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathapp.py
49 lines (39 loc) · 1.39 KB
/
app.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
# -*- coding: utf-8 -*-
import json
import logging
import subprocess
from collections import OrderedDict
from celery import Celery
from kafka import KafkaProducer
import config
# Celery application: broker is a Redis instance whose host comes from the
# project-local `config` module (standard Redis port 6379, default DB).
app = Celery("app", broker="redis://{}:6379".format(config.REDIS_HOST))
@app.task
def submit_flink_sql(definition):
    """Write a Flink SQL job definition to disk, submit it via the Flink
    SQL client, and publish a success/failure feedback message to Kafka.

    Parameters
    ----------
    definition : str
        JSON-encoded object; the visible code reads the keys "id",
        "projectId", "pipelineId" and "queries" (the SQL text).

    Returns
    -------
    tuple[str, str]
        The decoded (stdout, stderr) of the ``sql-client.sh`` process.
    """
    logging.info(definition)
    definition = json.loads(definition)
    # BUG FIX: the original line was missing the closing parenthesis of
    # .format(...), which made this module fail to import (SyntaxError).
    filename = "./jobs/{}.{}.{}.sql".format(
        definition["id"], definition["projectId"], definition["pipelineId"]
    )
    # Context manager guarantees the file is closed even if write() raises
    # (the original leaked the handle on error); also avoids shadowing the
    # builtin name `file`.
    with open(filename, "w") as sql_file:
        sql_file.write(definition["queries"])
    process = subprocess.Popen(
        ["./bin/sql-client.sh", "embedded", "-l", "sql-jars", "-f", filename],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    stdout, stderr = process.communicate()
    stdout, stderr = stdout.decode("utf-8"), stderr.decode("utf-8")
    # Success is defined as "the client wrote nothing to stderr".
    feedback = {"id": definition["id"], "success": not bool(stderr)}
    logging.info(stdout)
    if stderr:
        feedback["error"] = stderr
        logging.error(stderr)
    else:
        # On success the client prints "... JobID <id>"; take the trailing part.
        feedback["jobId"] = stdout.strip().split("JobID ")[1]
    producer = KafkaProducer(bootstrap_servers=config.BOOTSTRAP_SERVERS)
    try:
        future = producer.send(
            "{}.{}.feedback".format(
                definition["projectId"], definition["pipelineId"]),
            json.dumps(feedback).encode("utf-8"),
        )
        # Block until the broker acknowledges the record (or fail after 10 s).
        result = future.get(timeout=10)
    finally:
        # close() flushes pending records and releases network resources;
        # the original leaked the producer connection on every task run.
        producer.close()
    return (stdout, stderr)