 # Copyright (c) IBM Corporation 2019, 2020
 # Apache License, Version 2.0 (see https://opensource.org/licenses/Apache-2.0)
 
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
 
 __metaclass__ = type
 
@@ -23,21 +23,23 @@
 def _update_result(result, src, dest, ds_type="USS", is_binary=False):
     """ Helper function to update output result with the provided values """
     data_set_types = {
-        'PS': "Sequential",
-        'PO': "Partitioned",
-        'PDSE': "Partitioned Extended",
-        'PE': "Partitioned Extended",
-        'VSAM': "VSAM",
-        'USS': "USS"
+        "PS": "Sequential",
+        "PO": "Partitioned",
+        "PDSE": "Partitioned Extended",
+        "PE": "Partitioned Extended",
+        "VSAM": "VSAM",
+        "USS": "USS",
     }
-    file_or_ds = "file" if ds_type == 'USS' else "data set"
+    file_or_ds = "file" if ds_type == "USS" else "data set"
     updated_result = dict((k, v) for k, v in result.items())
-    updated_result.update({
-        'file': src,
-        'dest': dest,
-        'data_set_type': data_set_types[ds_type],
-        'is_binary': is_binary
-    })
+    updated_result.update(
+        {
+            "file": src,
+            "dest": dest,
+            "data_set_type": data_set_types[ds_type],
+            "is_binary": is_binary,
+        }
+    )
     return updated_result
 
 
@@ -59,7 +61,7 @@ def _get_file_checksum(src):
     blksize = 64 * 1024
     hash_digest = sha256()
     try:
-        with open(to_bytes(src, errors='surrogate_or_strict'), 'rb') as infile:
+        with open(to_bytes(src, errors="surrogate_or_strict"), "rb") as infile:
             block = infile.read(blksize)
             while block:
                 hash_digest.update(block)
@@ -101,7 +103,7 @@ def run(self, tmp=None, task_vars=None):
             self._task.args.get('ignore_sftp_stderr'), default=False
         )
         validate_checksum = _process_boolean(
-            self._task.args.get('validate_checksum'), default=True
+            self._task.args.get("validate_checksum"), default=True
         )
 
         # ********************************************************** #
@@ -112,14 +114,10 @@ def run(self, tmp=None, task_vars=None):
         if src is None or dest is None:
             msg = "Source and destination are required"
         elif not isinstance(src, string_types):
-            msg = (
-                "Invalid type supplied for 'source' option, "
-                "it must be a string"
-            )
+            msg = "Invalid type supplied for 'source' option, " "it must be a string"
         elif not isinstance(dest, string_types):
             msg = (
-                "Invalid type supplied for 'destination' option, "
-                "it must be a string"
+                "Invalid type supplied for 'destination' option, " "it must be a string"
             )
         elif len(src) < 1 or len(dest) < 1:
             msg = "Source and destination parameters must not be empty"
@@ -128,12 +126,12 @@ def run(self, tmp=None, task_vars=None):
             msg = "Invalid port provided for SFTP. Expected an integer between 0 to 65535."
 
         if msg:
-            result['msg'] = msg
-            result['failed'] = True
+            result["msg"] = msg
+            result["failed"] = True
             return result
 
         ds_type = None
-        fetch_member = '(' in src and src.endswith(')')
+        fetch_member = "(" in src and src.endswith(")")
         if fetch_member:
             member_name = src[src.find('(') + 1:src.find(')')]
             src = self._connection._shell.join_path(src)
@@ -155,24 +153,23 @@ def run(self, tmp=None, task_vars=None):
         # and dest is: /tmp/, then updated dest would be /tmp/DATA #
         # ********************************************************** #
 
-        if os.path.sep not in self._connection._shell.join_path('a', ''):
+        if os.path.sep not in self._connection._shell.join_path("a", ""):
             src = self._connection._shell._unquote(src)
-            source_local = src.replace('\\', '/')
+            source_local = src.replace("\\", "/")
         else:
             source_local = src
 
         dest = os.path.expanduser(dest)
         if flat:
-            if (
-                os.path.isdir(to_bytes(dest, errors='surrogate_or_strict')) and
-                not dest.endswith(os.sep)
-            ):
-                result['msg'] = (
+            if os.path.isdir(
+                to_bytes(dest, errors="surrogate_or_strict")
+            ) and not dest.endswith(os.sep):
+                result["msg"] = (
                     "dest is an existing directory, append a forward "
                     "slash to the dest if you want to fetch src into "
                     "that directory"
                 )
-                result['failed'] = True
+                result["failed"] = True
                 return result
             if dest.endswith(os.sep):
                 if fetch_member:
@@ -184,22 +181,22 @@ def run(self, tmp=None, task_vars=None):
             if not dest.startswith("/"):
                 dest = self._loader.path_dwim(dest)
         else:
-            if 'inventory_hostname' in task_vars:
-                target_name = task_vars['inventory_hostname']
+            if "inventory_hostname" in task_vars:
+                target_name = task_vars["inventory_hostname"]
             else:
                 target_name = self._play_context.remote_addr
             suffix = member_name if fetch_member else source_local
             dest = "{0}/{1}/{2}".format(
-                self._loader.path_dwim(dest),
-                target_name,
-                suffix
+                self._loader.path_dwim(dest), target_name, suffix
             )
         try:
             dirname = os.path.dirname(dest).replace("//", "/")
             if not os.path.exists(dirname):
                 os.makedirs(dirname)
         except OSError as err:
-            result["msg"] = "Unable to create destination directory {0}".format(dirname)
+            result["msg"] = "Unable to create destination directory {0}".format(
+                dirname
+            )
             result["stderr"] = str(err)
             result["stderr_lines"] = str(err).splitlines()
             result["failed"] = True
@@ -216,57 +213,63 @@ def run(self, tmp=None, task_vars=None):
             fetch_res = self._execute_module(
                 module_name='zos_fetch',
                 module_args=self._task.args,
-                task_vars=task_vars
+                task_vars=task_vars,
             )
-            ds_type = fetch_res.get('ds_type')
-            src = fetch_res.get('file')
-            remote_path = fetch_res.get('remote_path')
-
-            if fetch_res.get('msg'):
-                result['msg'] = fetch_res.get('msg')
-                result['stdout'] = fetch_res.get('stdout') or fetch_res.get("module_stdout")
-                result['stderr'] = fetch_res.get('stderr') or fetch_res.get("module_stderr")
-                result['stdout_lines'] = fetch_res.get('stdout_lines')
-                result['stderr_lines'] = fetch_res.get('stderr_lines')
+            ds_type = fetch_res.get("ds_type")
+            src = fetch_res.get("file")
+            remote_path = fetch_res.get("remote_path")
+
+            if fetch_res.get("msg"):
+                result["msg"] = fetch_res.get("msg")
+                result["stdout"] = fetch_res.get("stdout") or fetch_res.get(
+                    "module_stdout"
+                )
+                result["stderr"] = fetch_res.get("stderr") or fetch_res.get(
+                    "module_stderr"
+                )
+                result["stdout_lines"] = fetch_res.get("stdout_lines")
+                result["stderr_lines"] = fetch_res.get("stderr_lines")
                 result["rc"] = fetch_res.get("rc")
-                result['failed'] = True
+                result["failed"] = True
                 return result
 
-            elif fetch_res.get('note'):
-                result['note'] = fetch_res.get('note')
+            elif fetch_res.get("note"):
+                result["note"] = fetch_res.get("note")
                 return result
 
             if ds_type in SUPPORTED_DS_TYPES:
                 if ds_type == "PO" and os.path.isfile(dest) and not fetch_member:
-                    result["msg"] = "Destination must be a directory to fetch a partitioned data set"
+                    result[
+                        "msg"
+                    ] = "Destination must be a directory to fetch a partitioned data set"
                     result["failed"] = True
                     return result
 
                 fetch_content = self._transfer_remote_content(
                     dest, remote_path, ds_type, sftp_port, ignore_stderr=ignore_sftp_stderr
                 )
-                if fetch_content.get('msg'):
+                if fetch_content.get("msg"):
                     return fetch_content
 
                 if validate_checksum and ds_type != "PO" and not is_binary:
                     new_checksum = _get_file_checksum(dest)
-                    result['changed'] = local_checksum != new_checksum
-                    result['checksum'] = new_checksum
+                    result["changed"] = local_checksum != new_checksum
+                    result["checksum"] = new_checksum
                 else:
-                    result['changed'] = True
+                    result["changed"] = True
 
             else:
-                result['msg'] = (
+                result["msg"] = (
                     "The data set type '{0}' is not"
                     " currently supported".format(ds_type)
                 )
-                result['failed'] = True
+                result["failed"] = True
                 return result
         except Exception as err:
-            result['msg'] = "Failure during module execution"
-            result['stderr'] = str(err)
-            result['stderr_lines'] = str(err).splitlines()
-            result['failed'] = True
+            result["msg"] = "Failure during module execution"
+            result["stderr"] = str(err)
+            result["stderr_lines"] = str(err).splitlines()
+            result["failed"] = True
             return result
 
         # ********************************************************** #
@@ -292,10 +295,7 @@ def _transfer_remote_content(self, dest, remote_path, src_type, port, ignore_std
             stdin = stdin.replace(" -r", "")
 
         transfer_pds = subprocess.Popen(
-            cmd,
-            stdin=subprocess.PIPE,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE
+            cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE
         )
         out, err = transfer_pds.communicate(to_bytes(stdin))
         err = _detect_sftp_errors(err)
@@ -305,8 +305,8 @@ def _transfer_remote_content(self, dest, remote_path, src_type, port, ignore_std
             result['msg'] = "Error transferring remote data from z/OS system"
             result['rc'] = transfer_pds.returncode
         if result.get("msg"):
-            result['stderr'] = err
-            result['failed'] = True
+            result["stderr"] = err
+            result["failed"] = True
             return result
 
     def _remote_cleanup(self, remote_path, src_type, encoding):