@@ -182,22 +182,20 @@ def join_bulk_responses(bulk_responses)
     def bulk_send(body_stream, batch_actions)
       params = compression_level? ? {:headers => {"Content-Encoding" => "gzip"}} : {}
 
-      response = @pool.post(@bulk_path, params, body_stream.string)
-
-      @bulk_response_metrics.increment(response.code.to_s)
-
-      case response.code
-      when 200 # OK
-        LogStash::Json.load(response.body)
-      when 413 # Payload Too Large
+      begin
+        response = @pool.post(@bulk_path, params, body_stream.string)
+        @bulk_response_metrics.increment(response.code.to_s)
+      rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
+        @bulk_response_metrics.increment(e.response_code.to_s)
+        raise e unless e.response_code == 413
+        # special handling for 413, treat it as a document level issue
         logger.warn("Bulk request rejected: `413 Payload Too Large`", :action_count => batch_actions.size, :content_length => body_stream.size)
-        emulate_batch_error_response(batch_actions, response.code, 'payload_too_large')
-      else
-        url = ::LogStash::Util::SafeURI.new(response.final_url)
-        raise ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError.new(
-          response.code, url, body_stream.to_s, response.body
-        )
+        return emulate_batch_error_response(batch_actions, 413, 'payload_too_large')
+      rescue => e # it may be a network issue instead, re-raise
+        raise e
       end
+
+      LogStash::Json.load(response.body)
     end
 
     def emulate_batch_error_response(actions, http_code, reason)
@@ -411,6 +409,9 @@ def host_to_url(h)
     def exists?(path, use_get = false)
       response = use_get ? @pool.get(path) : @pool.head(path)
       response.code >= 200 && response.code <= 299
+    rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
+      return false if e.response_code == 404
+      raise e
     end
 
     def template_exists?(template_endpoint, name)
@@ -420,7 +421,10 @@ def template_exists?(template_endpoint, name)
     def template_put(template_endpoint, name, template)
       path = "#{template_endpoint}/#{name}"
       logger.info("Installing Elasticsearch template", name: name)
-      @pool.put(path, nil, LogStash::Json.dump(template))
+      response = @pool.put(path, nil, LogStash::Json.dump(template))
+    rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
+      return response if e.response_code == 404
+      raise e
     end
 
     # ILM methods
@@ -432,17 +436,15 @@ def rollover_alias_exists?(name)
 
     # Create a new rollover alias
     def rollover_alias_put(alias_name, alias_definition)
-      begin
-        @pool.put(CGI::escape(alias_name), nil, LogStash::Json.dump(alias_definition))
-        logger.info("Created rollover alias", name: alias_name)
-        # If the rollover alias already exists, ignore the error that comes back from Elasticsearch
-      rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
-        if e.response_code == 400
-          logger.info("Rollover alias already exists, skipping", name: alias_name)
-          return
-        end
-        raise e
+      @pool.put(CGI::escape(alias_name), nil, LogStash::Json.dump(alias_definition))
+      logger.info("Created rollover alias", name: alias_name)
+      # If the rollover alias already exists, ignore the error that comes back from Elasticsearch
+    rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
+      if e.response_code == 400
+        logger.info("Rollover alias already exists, skipping", name: alias_name)
+        return
       end
+      raise e
     end
 
     def get_xpack_info
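
For illustration, a minimal, self-contained sketch of the control flow the bulk_send hunk introduces: the connection pool raises an error object carrying the HTTP status code, and a 413 is converted into a per-document error structure instead of failing the whole batch. The names below (StubBadResponseCodeError, StubPool, send_bulk) are stand-ins rather than the plugin's real classes, and the emulated response shape is an assumption modeled on emulate_batch_error_response.

# Stand-in for Pool::BadResponseCodeError: only carries the HTTP status code.
class StubBadResponseCodeError < StandardError
  attr_reader :response_code

  def initialize(response_code)
    @response_code = response_code
    super("Got response code '#{response_code}'")
  end
end

# Stand-in pool that always rejects the bulk body as too large.
class StubPool
  def post(_path, _params, _body)
    raise StubBadResponseCodeError.new(413)
  end
end

# Simplified version of the rescue-based flow in bulk_send above.
def send_bulk(pool, batch_actions)
  response = pool.post("_bulk", {}, batch_actions.join("\n"))
  response # with a real pool, the parsed bulk response would be returned here
rescue StubBadResponseCodeError => e
  raise e unless e.response_code == 413
  # Emulate a per-document error response so the caller's retry/DLQ logic can
  # treat the oversized payload like any other document-level failure.
  items = batch_actions.map do |_action|
    { "index" => { "status" => 413, "error" => { "type" => "payload_too_large" } } }
  end
  { "errors" => true, "items" => items }
end

p send_bulk(StubPool.new, ["action-1", "action-2"])
# => {"errors"=>true, "items"=>[{"index"=>{"status"=>413, ...}}, {"index"=>{"status"=>413, ...}}]}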