
Commit b663660

Merge pull request #580 from HubSpot/logfetch_headers
allow addition of custom headers on logfetch requests
2 parents d48688a + 92d19e3 commit b663660

6 files changed (+43, -18 lines)

scripts/logfetch/entrypoint.py
Lines changed: 20 additions & 0 deletions

@@ -91,6 +91,7 @@ def fetch():
   conf_dir = args.conf_folder if args.conf_folder else DEFAULT_CONF_DIR
   conf_file = os.path.expanduser(conf_dir + '/' + args.conf_file) if args.conf_file else os.path.expanduser(conf_dir + '/' + DEFAULT_CONF_FILE)
   config = ConfigParser.SafeConfigParser()
+  config.optionxform = str

   defaults = {
     "num_parallel_fetches" : DEFAULT_PARALLEL_FETCHES,
@@ -136,6 +137,11 @@ def fetch():
   args.end_days = convert_to_date(args, args.end_days)

   args.dest = os.path.expanduser(args.dest)
+  try:
+    setattr(args, 'headers', dict(config.items("Request Headers")))
+  except:
+    sys.stderr.write('No additional request headers found\n')
+    setattr(args, 'headers', {})

   fetch_logs(args)

@@ -147,6 +153,7 @@ def cat():
   conf_dir = args.conf_folder if args.conf_folder else DEFAULT_CONF_DIR
   conf_file = os.path.expanduser(conf_dir + '/' + args.conf_file) if args.conf_file else os.path.expanduser(conf_dir + '/' + DEFAULT_CONF_FILE)
   config = ConfigParser.SafeConfigParser()
+  config.optionxform = str

   defaults = {
     "num_parallel_fetches" : DEFAULT_PARALLEL_FETCHES,
@@ -191,6 +198,12 @@ def cat():
   args.end_days = convert_to_date(args, args.end_days)

   args.dest = os.path.expanduser(args.dest)
+  try:
+    setattr(args, 'headers', dict(config.items("Request Headers")))
+  except:
+    sys.stderr.write('No additional request headers found\n')
+    setattr(args, 'headers', {})
+

   cat_logs(args)

@@ -202,6 +215,7 @@ def tail():
   conf_dir = args.conf_folder if args.conf_folder else DEFAULT_CONF_DIR
   conf_file = os.path.expanduser(conf_dir + '/' + args.conf_file) if args.conf_file else os.path.expanduser(conf_dir + '/' + DEFAULT_CONF_FILE)
   config = ConfigParser.SafeConfigParser()
+  config.optionxform = str

   defaults = {'verbose': False}

@@ -230,5 +244,11 @@ def tail():
   check_args(args)

   args.dest = os.path.expanduser(args.dest)
+  try:
+    setattr(args, 'headers', dict(config.items("Request Headers")))
+  except:
+    sys.stderr.write('No additional request headers found\n')
+    setattr(args, 'headers', {})
+

   tail_logs(args)
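
All three entry points now read an optional [Request Headers] section from the logfetch config file, and config.optionxform = str keeps the header names case-sensitive (ConfigParser lowercases option names by default). Below is a minimal sketch of how such a section is parsed, mirroring the Python 2 ConfigParser usage in the diff; the file contents and header names are invented for illustration.

# Sketch only: sample config text and header names are hypothetical.
import ConfigParser
import StringIO
import sys

SAMPLE_CONF = """
[Request Headers]
X-Example-Auth: some-token
X-Another-Header: some-value
"""

config = ConfigParser.SafeConfigParser()
config.optionxform = str  # preserve header-name case; the default transform would lowercase them
config.readfp(StringIO.StringIO(SAMPLE_CONF))

try:
    headers = dict(config.items("Request Headers"))
except ConfigParser.NoSectionError:
    # section is optional; fall back to no extra headers
    sys.stderr.write('No additional request headers found\n')
    headers = {}

print(headers)  # e.g. {'X-Example-Auth': 'some-token', 'X-Another-Header': 'some-value'}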

scripts/logfetch/live_logs.py
Lines changed: 6 additions & 4 deletions

@@ -27,7 +27,8 @@ def download_live_logs(args):
         async_requests.append(
           grequests.AsyncRequest('GET',uri ,
             callback=generate_callback(uri, args.dest, logfile_name, args.chunk_size, args.verbose),
-            params={'path' : '{0}/{1}/{2}'.format(metadata['fullPathToRoot'], metadata['currentDirectory'], log_file)}
+            params={'path' : '{0}/{1}/{2}'.format(metadata['fullPathToRoot'], metadata['currentDirectory'], log_file)},
+            headers=args.headers
           )
         )
         if logfile_name.endswith('.gz'):
@@ -45,7 +46,8 @@ def download_live_logs(args):
         async_requests.append(
           grequests.AsyncRequest('GET',uri ,
             callback=generate_callback(uri, args.dest, logfile_name, args.chunk_size, args.verbose),
-            params={'path' : '{0}/{1}/logs/{2}'.format(metadata['fullPathToRoot'], metadata['currentDirectory'], log_file)}
+            params={'path' : '{0}/{1}/logs/{2}'.format(metadata['fullPathToRoot'], metadata['currentDirectory'], log_file)},
+            headers=args.headers
           )
         )
         if logfile_name.endswith('.gz'):
@@ -71,11 +73,11 @@ def tasks_to_check(args):

 def files_json(args, task):
   uri = BROWSE_FOLDER_FORMAT.format(logfetch_base.base_uri(args), task)
-  return get_json_response(uri)
+  return get_json_response(uri, args)

 def logs_folder_files(args, task):
   uri = BROWSE_FOLDER_FORMAT.format(logfetch_base.base_uri(args), task)
-  files_json = get_json_response(uri, {'path' : '{0}/logs'.format(task)})
+  files_json = get_json_response(uri, args, {'path' : '{0}/logs'.format(task)})
   if 'files' in files_json:
     files = files_json['files']
     return [f['name'] for f in files if logfetch_base.is_in_date_range(args, f['mtime'])]
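
For context, a hedged sketch of how the custom headers ride along on each asynchronous download: grequests.AsyncRequest and grequests.map are real grequests APIs (as used in the diff), but the URI and header values below are placeholders rather than Singularity's actual endpoints.

import grequests

def build_download_requests(uris, headers):
    # one GET per log file, each carrying the same custom headers
    return [grequests.AsyncRequest('GET', uri, headers=headers) for uri in uris]

reqs = build_download_requests(['http://example.com/sandbox/read'], {'X-Example-Auth': 'some-token'})
responses = grequests.map(reqs)  # sends the batch concurrently via gevent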

scripts/logfetch/logfetch_base.py
Lines changed: 7 additions & 4 deletions

@@ -2,7 +2,7 @@
 import sys
 import gzip
 import fnmatch
-from datetime import datetime
+from datetime import datetime, timedelta
 from termcolor import colored
 from singularity_request import get_json_response

@@ -54,17 +54,20 @@ def tasks_for_requests(args):
     tasks = [task["taskId"]["id"] for task in all_tasks_for_request(args, request)]
     tasks = tasks[0:args.task_count] if hasattr(args, 'task_count') else tasks
     all_tasks = all_tasks + tasks
+  if not all_tasks:
+    sys.stderr.write(colored('No tasks found, check that the request/task you are searching for exists...', 'red'))
+    exit(1)
   return all_tasks

 def log_matches(inputString, pattern):
   return fnmatch.fnmatch(inputString, pattern) or fnmatch.fnmatch(inputString, pattern + '*.gz')

 def all_tasks_for_request(args, request):
   uri = '{0}{1}'.format(base_uri(args), ACTIVE_TASKS_FORMAT.format(request))
-  active_tasks = get_json_response(uri)
+  active_tasks = get_json_response(uri, args)
   if hasattr(args, 'start_days'):
     uri = '{0}{1}'.format(base_uri(args), REQUEST_TASKS_FORMAT.format(request))
-    historical_tasks = get_json_response(uri)
+    historical_tasks = get_json_response(uri, args)
     if len(historical_tasks) == 0:
       return active_tasks
     elif len(active_tasks) == 0:
@@ -76,7 +79,7 @@ def all_tasks_for_request(args, request):

 def all_requests(args):
   uri = '{0}{1}'.format(base_uri(args), ALL_REQUESTS)
-  requests = get_json_response(uri)
+  requests = get_json_response(uri, args)
   included_requests = []
   for request in requests:
     if fnmatch.fnmatch(request['request']['id'], args.requestId):

scripts/logfetch/s3_logs.py
Lines changed: 4 additions & 4 deletions

@@ -24,7 +24,7 @@ def download_s3_logs(args):
     if not args.logtype or log_matches(args, filename):
       if not already_downloaded(args.dest, filename):
         async_requests.append(
-          grequests.AsyncRequest('GET', log_file['getUrl'], callback=generate_callback(log_file['getUrl'], args.dest, filename, args.chunk_size, args.verbose))
+          grequests.AsyncRequest('GET', log_file['getUrl'], callback=generate_callback(log_file['getUrl'], args.dest, filename, args.chunk_size, args.verbose), headers=args.headers)
         )
       else:
         if args.verbose:
@@ -52,16 +52,16 @@ def already_downloaded(dest, filename):

 def logs_for_all_requests(args):
   if args.taskId:
-    return get_json_response(s3_task_logs_uri(args, args.taskId))
+    return get_json_response(s3_task_logs_uri(args, args.taskId), args)
   else:
     tasks = logfetch_base.tasks_for_requests(args)
     logs = []
     for task in tasks:
-      s3_logs = get_json_response(s3_task_logs_uri(args, task))
+      s3_logs = get_json_response(s3_task_logs_uri(args, task), args)
       logs = logs + s3_logs if s3_logs else logs
     sys.stderr.write(colored('Also searching s3 history...\n', 'cyan'))
     for request in logfetch_base.all_requests(args):
-      s3_logs = get_json_response(s3_request_logs_uri(args, request))
+      s3_logs = get_json_response(s3_request_logs_uri(args, request), args)
       logs = logs + s3_logs if s3_logs else logs
     return [dict(t) for t in set([tuple(l.items()) for l in logs])] # remove any duplicates

scripts/logfetch/singularity_request.py
Lines changed: 2 additions & 2 deletions

@@ -4,8 +4,8 @@

 ERROR_STATUS_FORMAT = 'Singularity responded with an invalid status code ({0})'

-def get_json_response(uri, params={}):
-  singularity_response = requests.get(uri, params=params)
+def get_json_response(uri, args, params={}):
+  singularity_response = requests.get(uri, params=params, headers=args.headers)
   if singularity_response.status_code < 199 or singularity_response.status_code > 299:
     sys.stderr.write('{0} params:{1}\n'.format(uri, str(params)))
     sys.stderr.write(colored(ERROR_STATUS_FORMAT.format(singularity_response.status_code), 'red') + '\n')
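
Because the helper's signature changed, every caller across the other files now passes the parsed args object so args.headers can be forwarded. A small sketch of the new call shape; the header value and path below are placeholders.

import argparse

# args would normally come from the logfetch argument parser
args = argparse.Namespace(headers={'X-Example-Auth': 'some-token'})

# before: get_json_response(uri)
# after:  get_json_response(uri, args)
# with query params: get_json_response(uri, args, {'path': 'some-task-id/logs'})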

scripts/logfetch/tail.py
Lines changed: 4 additions & 4 deletions

@@ -51,7 +51,7 @@ def stream_log_for_task(self, args, task):
     path = '{0}/{1}'.format(task, args.logfile)
     keep_trying = True
     try:
-      offset = self.get_initial_offset(uri, path)
+      offset = self.get_initial_offset(uri, path, args)
     except ValueError:
       sys.stderr.write(colored('Could not tail logs for task {0}, check that the task is still active and that the slave it runs on has not been decommissioned\n'.format(task), 'red'))
       keep_trying = False
@@ -63,16 +63,16 @@ def stream_log_for_task(self, args, task):
         sys.stderr.write(colored('Could not tail logs for task {0}, check that the task is still active and that the slave it runs on has not been decommissioned\n'.format(task), 'red'))
         keep_trying = False

-  def get_initial_offset(self, uri, path):
+  def get_initial_offset(self, uri, path, args):
     params = {"path" : path}
-    return long(requests.get(uri, params=params).json()['offset'])
+    return long(requests.get(uri, params=params, headers=args.headers).json()['offset'])

   def fetch_new_log_data(self, uri, path, offset, args, task):
     params = {
       "path" : path,
       "offset" : offset
     }
-    response = requests.get(uri, params=params).json()
+    response = requests.get(uri, params=params, headers=args.headers).json()
     prefix = '({0}) =>\n'.format(task) if args.verbose else ''
     if len(response['data'].encode('utf-8')) > 0:
       if args.grep:
