
Commit

Fixing bug where tail-task-logs would fail if log stream doesn't exist and task fails to start
adenot committed Jul 18, 2020
1 parent 821dc27 commit 31120a6
Showing 2 changed files with 40 additions and 37 deletions.

src/run-task.sh: 7 changes (4 additions & 3 deletions)

@@ -44,7 +44,7 @@ do
   sleep 1
 done
 
-echo "---> Task ID $TASK_ID"
+echo "---> Task ARN $TASK_ID"
 
 ./tail-task-logs.py $TASK_ID
 
@@ -54,14 +54,15 @@
 CONTAINER_EXIT_CODE=$(aws ecs describe-tasks \
   --cluster $CLUSTER_NAME \
   --query="tasks[0].containers[0].exitCode" \
   --output=text)
-echo "---> Task Exit Code $CONTAINER_EXIT_CODE"
+echo "---> Task Exit Code: $CONTAINER_EXIT_CODE"
 RET=$CONTAINER_EXIT_CODE
 
 
-if [ $RET -eq 0 ]; then
+if [ "$RET" = "0" ]; then
   echo "---> TaskStatus completed!"
 else
   echo "---> ERROR: TaskStatus FAILED!"
+  RET=1
 fi
 
 exit $RET
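
A plausible reading of the exit-code changes (my inference, not stated in the commit): when a task fails to start, tasks[0].containers[0].exitCode is absent, and the AWS CLI renders the missing value as the literal string "None" under --output=text. That breaks the old numeric -eq test and would hand exit a non-numeric argument. The sketch below reproduces this with a hard-coded "None"; it is an illustration, not part of the commit.

    #!/bin/sh
    # Simulate what describe-tasks yields when the container never ran:
    # the CLI prints "None" for the missing exitCode field.
    RET="None"

    if [ $RET -eq 0 ]; then            # old test: errors (e.g. "[: None:
        echo "ok"                      # integer expression expected") on
    fi                                 # stderr and evaluates false

    if [ "$RET" = "0" ]; then          # new test: plain string comparison,
        echo "---> TaskStatus completed!"   # never errors
    else
        echo "---> ERROR: TaskStatus FAILED!"
        RET=1                          # normalize so `exit` gets a valid status
    fi
    exit $RET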

src/tail-task-logs.py: 70 changes (36 additions & 34 deletions)

@@ -12,20 +12,9 @@
 task_id=task_arn.split(":task/",1)[1] #get the task number id
 last_event = None
 log_group_name='/ecs/'+cluster_name+'/'+app_name
+log_stream_prefix = None
 
-log_streams = logs.describe_log_streams(logGroupName=log_group_name, orderBy='LastEventTime', descending=True, limit=1)
-
-if len(log_streams['logStreams']) == 0:
-    print("No log streams found for log group %s" % log_group_name)
-    exit(1)
-
-log_stream_prefix='/'.join(log_streams['logStreams'][0]['logStreamName'].split('/')[:-1])
-
-extra_args = {
-    'logGroupName': log_group_name,
-    'logStreamName': log_stream_prefix+'/'+task_id,
-    'startFromHead': True
-}
+print(" Waiting for logs...")
 
 while True:
     try:
@@ -34,28 +23,41 @@
             tasks=[task_arn])
         task_status = response['tasks'][0]['lastStatus']
 
-        log_stream_events = logs.get_log_events(**extra_args)
-
-        for event in log_stream_events['events']:
-            print("%s" % (event['message']))
-
-        if 'nextToken' in extra_args and log_stream_events['nextForwardToken'] == extra_args['nextToken']:
-            if task_status == "STOPPED":
-                print("======== TASK STOPPED ========")
-                print("Task ID: %s" % task_id)
-                print("Task ARN: %s" % task_arn)
-                print("Service Name: %s" % app_name)
-                print("Cluster Name: %s" % cluster_name)
-                print("Stopped at: %s" % response['tasks'][0]['stoppedAt'])
-                print("Stopped Reason: %s" % response['tasks'][0]['stoppedReason'])
-                if 'stopCode' in response['tasks'][0]:
-                    print("Stop Code: %s" % response['tasks'][0]['stopCode'])
-                print("")
-                break
-        else:
-            extra_args['nextToken'] = log_stream_events['nextForwardToken']
-
+        if log_stream_prefix is None:
+            log_streams = logs.describe_log_streams(logGroupName=log_group_name, orderBy='LastEventTime', descending=True, limit=1)
+
+            if len(log_streams['logStreams']) != 0:
+                log_stream_prefix='/'.join(log_streams['logStreams'][0]['logStreamName'].split('/')[:-1])
+                extra_args = {
+                    'logGroupName': log_group_name,
+                    'logStreamName': log_stream_prefix+'/'+task_id,
+                    'startFromHead': True
+                }
+
+        else:
+            log_stream_events = logs.get_log_events(**extra_args)
+
+            for event in log_stream_events['events']:
+                print("%s" % (event['message']))
+
+            if 'nextToken' not in extra_args or log_stream_events['nextForwardToken'] != extra_args['nextToken']:
+                extra_args['nextToken'] = log_stream_events['nextForwardToken']
+
+        if task_status == "STOPPED":
+            print("======== TASK STOPPED ========")
+            print("Task ID: %s" % task_id)
+            print("Task ARN: %s" % task_arn)
+            print("Service Name: %s" % app_name)
+            print("Cluster Name: %s" % cluster_name)
+            if 'startedAt' in response['tasks'][0]:
+                print("Started at: %s" % response['tasks'][0]['startedAt'])
+            print("Stopped at: %s" % response['tasks'][0]['stoppedAt'])
+            print("Stopped Reason: %s" % response['tasks'][0]['stoppedReason'])
+            if 'stopCode' in response['tasks'][0]:
+                print("Stop Code: %s" % response['tasks'][0]['stopCode'])
+            print("")
+            break
 
         time.sleep(1)
 
     except logs.exceptions.ResourceNotFoundException as e:
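
For readers skimming the diff, the new control flow amounts to the condensed sketch below. It is reconstructed from the added lines above, with hypothetical placeholder inputs, and the task-summary prints and ResourceNotFoundException handling omitted; the real script derives these values from its arguments.

    import time
    import boto3

    ecs = boto3.client('ecs')
    logs = boto3.client('logs')

    # Hypothetical inputs for the sketch; the real script builds them
    # from its command line and environment.
    cluster_name = 'example-cluster'
    app_name = 'example-app'
    task_arn = 'arn:aws:ecs:us-east-1:123456789012:task/0123456789abcdef'
    task_id = task_arn.split(':task/', 1)[1]
    log_group_name = '/ecs/' + cluster_name + '/' + app_name

    log_stream_prefix = None
    extra_args = None

    while True:
        task = ecs.describe_tasks(cluster=cluster_name,
                                  tasks=[task_arn])['tasks'][0]

        if log_stream_prefix is None:
            # The stream may not exist yet (or ever, if the task fails to
            # start), so keep looking each pass instead of exiting up front.
            streams = logs.describe_log_streams(
                logGroupName=log_group_name, orderBy='LastEventTime',
                descending=True, limit=1)['logStreams']
            if streams:
                log_stream_prefix = '/'.join(
                    streams[0]['logStreamName'].split('/')[:-1])
                extra_args = {'logGroupName': log_group_name,
                              'logStreamName': log_stream_prefix + '/' + task_id,
                              'startFromHead': True}
        else:
            # Stream found: drain any new events and remember the cursor.
            page = logs.get_log_events(**extra_args)
            for event in page['events']:
                print(event['message'])
            extra_args['nextToken'] = page['nextForwardToken']

        if task['lastStatus'] == 'STOPPED':
            break            # stops cleanly even if no stream ever appeared

        time.sleep(1)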
