
Commit

Merge pull request #1385 from HubSpot/logfetch_start_end_times
(logfetch) Use start/end time metadata if available
ssalinas authored Dec 23, 2016
2 parents 222986e + 2940b6c commit 9f8d64f
Showing 4 changed files with 18 additions and 7 deletions.
2 changes: 1 addition & 1 deletion scripts/logfetch/live_logs.py
@@ -50,7 +50,7 @@ def download_live_logs(args):
elif args.logtype:
logfetch_base.log(colored('Excluding log {0}, doesn\'t match {1}'.format(log_file, args.logtype), 'magenta') + '\n', args, True)
tasks_check_progress += 1
- logfetch_base.update_progress_bar(tasks_check_progress, tasks_check_goal, 'Log Finder', args.silent)
+ logfetch_base.update_progress_bar(tasks_check_progress, tasks_check_goal, 'Log Finder', args.silent or args.verbose)

if async_requests:
logfetch_base.log(colored('\nStarting {0} live logs downloads\n'.format(len(async_requests)), 'cyan'), args, False)
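
Note on the change above: the progress bar in this file (and in s3_logs.py below) now receives args.silent or args.verbose instead of just args.silent, so the bar is suppressed whenever either flag is set and cannot interleave with per-log verbose output. The body of update_progress_bar is not part of this diff; the snippet below is only a minimal sketch of the guard the new argument implies, with the rendering details assumed:

import sys

def update_progress_bar(progress, goal, name, suppress_output):
  # Assumed behavior: skip drawing entirely when the caller passes True,
  # which after this change covers both --silent and --verbose runs.
  if suppress_output or goal == 0:
    return
  bar_length = 30
  filled = int(round(bar_length * float(progress) / goal))
  bar = '=' * filled + ' ' * (bar_length - filled)
  sys.stderr.write('\r{0}: [{1}] {2}/{3}'.format(name, bar, progress, goal))
  sys.stderr.flush()
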
4 changes: 2 additions & 2 deletions scripts/logfetch/logfetch_base.py
@@ -50,7 +50,7 @@ def all_tasks_for_request(args, request):
elif len(active_tasks) == 0:
return historical_tasks
else:
- return active_tasks + [h for h in historical_tasks if is_task_in_date_range(args, int(str(h['updatedAt'])[0:-3]), int(str(h['taskId']['startedAt'])[0:-3]))]
+ return active_tasks + [h for h in historical_tasks if date_range_overlaps(args, int(str(h['updatedAt'])[0:-3]), int(str(h['taskId']['startedAt'])[0:-3]))]
else:
return active_tasks

@@ -72,7 +72,7 @@ def is_in_date_range(args, timestamp):
else:
return False if timstamp_datetime < args.start else True

- def is_task_in_date_range(args, start, end):
+ def date_range_overlaps(args, start, end):
start_datetime = datetime.utcfromtimestamp(start)
end_datetime = datetime.utcfromtimestamp(end)
if args.end:
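
Note on the rename above: date_range_overlaps makes explicit that the check compares a task's whole (start, end) interval against the requested --start/--end window, rather than testing a single timestamp as is_in_date_range does. Only the first lines of the function appear in this hunk; the snippet below is a sketch of the overlap test the name implies, with everything past the shown lines assumed rather than copied from the repository:

from datetime import datetime

def date_range_overlaps(args, start, end):
  # start and end arrive as unix timestamps in seconds (callers strip the
  # millisecond digits with [0:-3]); args.start and args.end are datetimes.
  start_datetime = datetime.utcfromtimestamp(start)
  end_datetime = datetime.utcfromtimestamp(end)
  if args.end:
    # Assumed overlap rule: the interval starts before the window closes
    # and ends after the window opens.
    return start_datetime <= args.end and end_datetime >= args.start
  else:
    # No explicit end to the window: keep anything still active after args.start.
    return end_datetime >= args.start
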
17 changes: 14 additions & 3 deletions scripts/logfetch/s3_logs.py
@@ -23,8 +23,8 @@ def download_s3_logs(args):
async_requests = []
all_logs = []
for log_file in logs:
- filename = log_file['key'].rsplit("/", 1)[1]
- if logfetch_base.is_in_date_range(args, int(str(log_file['lastModified'])[0:-3])):
+ if log_file_in_date_range(args, log_file):
+   filename = log_file['key'].rsplit("/", 1)[1]
if not args.logtype or log_matches(args, filename):
logfetch_base.log(colored('Including log {0}'.format(filename), 'blue') + '\n', args, True)
if not already_downloaded(args.dest, filename):
@@ -48,6 +48,17 @@ def download_s3_logs(args):
all_logs = modify_download_list(all_logs)
return all_logs

+ def log_file_in_date_range(args, log_file):
+   if 'startTime' in log_file:
+     if 'endTime' in log_file:
+       return logfetch_base.date_range_overlaps(args, int(str(log_file['startTime'])[0:-3]), int(str(log_file['endTime'])[0:-3]))
+     else:
+       return logfetch_base.date_range_overlaps(args, int(str(log_file['startTime'])[0:-3]), int(str(log_file['lastModified'])[0:-3]))
+   elif 'endTime' in log_file:
+     return logfetch_base.is_in_date_range(args, int(str(log_file['endTime'])[0:-3]))
+   else:
+     return logfetch_base.is_in_date_range(args, int(str(log_file['lastModified'])[0:-3]))

def modify_download_list(all_logs):
for index, log in enumerate(all_logs):
if log.endswith('.gz') and not os.path.isfile(log) and os.path.isfile(log[:-3]):
@@ -71,7 +82,7 @@ def logs_for_all_requests(args):
s3_logs = logfetch_base.get_json_response(s3_task_logs_uri(args, task), args, s3_params)
logs = logs + s3_logs if s3_logs else logs
tasks_progress += 1
- logfetch_base.update_progress_bar(tasks_progress, tasks_goal, 'S3 Log Finder', args.silent)
+ logfetch_base.update_progress_bar(tasks_progress, tasks_goal, 'S3 Log Finder', args.silent or args.verbose)
logfetch_base.log(colored('\nAlso searching s3 history...\n', 'cyan'), args, False)
for request in logfetch_base.all_requests(args):
s3_logs = logfetch_base.get_json_response(s3_request_logs_uri(args, request), args, s3_params)
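
Note on the new helper above: log_file_in_date_range prefers the startTime/endTime metadata that Singularity attaches to S3 log objects when it is available, and only falls back to the object's lastModified time otherwise, which is what the commit title refers to. A small illustration of how the three cases resolve, using invented log_file dictionaries (the keys match the diff, the millisecond timestamp values are hypothetical):

# Hypothetical metadata, not taken from a real Singularity response.
with_both  = {'key': 'logs/task-1/service.log.gz', 'startTime': 1482105600000, 'endTime': 1482192000000, 'lastModified': 1482278400000}
start_only = {'key': 'logs/task-2/service.log.gz', 'startTime': 1482105600000, 'lastModified': 1482278400000}
neither    = {'key': 'logs/task-3/service.log.gz', 'lastModified': 1482278400000}

# with_both  -> date_range_overlaps(args, startTime, endTime)
# start_only -> date_range_overlaps(args, startTime, lastModified)
# neither    -> is_in_date_range(args, lastModified)
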
2 changes: 1 addition & 1 deletion scripts/setup.py
@@ -11,7 +11,7 @@

setup(
name='singularity-logfetch',
- version='0.28.0',
+ version='0.29.0',
description='Singularity log fetching and searching',
author="HubSpot",
author_email='singularity-users@googlegroups.com',
