Commit 1f7a27e

indentation error
1 parent ba39a0b commit 1f7a27e

File tree

1 file changed: +86 -86 lines changed


operators/salesforce_to_s3_operator.py

Lines changed: 86 additions & 86 deletions
@@ -58,92 +58,92 @@ def execute(self, context):

The hunk re-indents lines 61-146 (the class docstring and __init__ of SalesforceToS3Operator); the content of those lines is otherwise unchanged. The block now reads:

class SalesforceToS3Operator(BaseOperator):
    """
    Salesforce to S3 Operator

    Makes a query against Salesforce and writes the resulting data to a file.

    :param sf_conn_id:          Name of the Airflow connection that has
                                the following information:
                                    - username
                                    - password
                                    - security_token
    :type sf_conn_id:           string
    :param sf_obj:              Name of the relevant Salesforce object
    :param s3_conn_id:          The destination s3 connection id.
    :type s3_conn_id:           string
    :param s3_bucket:           The destination s3 bucket.
    :type s3_bucket:            string
    :param s3_key:              The destination s3 key.
    :type s3_key:               string
    :param sf_fields:           *(optional)* List of fields that you want
                                to get from the object.
                                If *None*, then this will get all fields
                                for the object.
    :type sf_fields:            list
    :param fmt:                 *(optional)* Format that the data written to
                                the s3_key should be in. Possible values include:
                                    - csv
                                    - json
                                    - ndjson
                                *Default: csv*
    :type fmt:                  string
    :param query:               *(optional)* A specific query to run for
                                the given object. This will override
                                default query creation.
                                *Default: None*
    :type query:                string
    :param relationship_object: *(optional)* Some queries require
                                relationship objects to work, and
                                these are not the same names as
                                the SF object. Specify that
                                relationship object here.
                                *Default: None*
    :type relationship_object:  string
    :param record_time_added:   *(optional)* True if you want to add a
                                Unix timestamp field to the resulting data
                                that marks when the data was
                                fetched from Salesforce.
                                *Default: False*
    :type record_time_added:    boolean
    :param coerce_to_timestamp: *(optional)* True if you want to convert
                                all fields with dates and datetimes
                                into Unix timestamps (UTC).
                                *Default: False*
    :type coerce_to_timestamp:  boolean
    """
    template_fields = ("s3_key",
                       "query")

    @apply_defaults
    def __init__(self,
                 sf_conn_id,
                 sf_obj,
                 s3_conn_id,
                 s3_bucket,
                 s3_key,
                 sf_fields=None,
                 fmt="csv",
                 query=None,
                 relationship_object=None,
                 record_time_added=False,
                 coerce_to_timestamp=False,
                 *args,
                 **kwargs):

        super(SalesforceToS3Operator, self).__init__(*args, **kwargs)

        self.sf_conn_id = sf_conn_id
        self.object = sf_obj
        self.fields = sf_fields
        self.s3_conn_id = s3_conn_id
        self.s3_bucket = s3_bucket
        self.s3_key = s3_key
        self.fmt = fmt.lower()
        self.query = query
        self.relationship_object = relationship_object
        self.record_time_added = record_time_added
        self.coerce_to_timestamp = coerce_to_timestamp

    def special_query(self, query, sf_hook, relationship_object=None):
        if not query:
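For context on how the constructor shown in this hunk is consumed, here is a minimal usage sketch, assuming an Airflow 1.x-style DAG file and that the operator is importable from operators.salesforce_to_s3_operator as the file path suggests. The DAG name, connection IDs, bucket, key, and field list below are placeholders for illustration, not values taken from this commit.

# Hypothetical DAG wiring for SalesforceToS3Operator; all IDs and paths are
# placeholders, not values from this repository.
from datetime import datetime

from airflow import DAG

from operators.salesforce_to_s3_operator import SalesforceToS3Operator

dag = DAG(
    dag_id="salesforce_to_s3_example",            # hypothetical DAG name
    start_date=datetime(2018, 1, 1),
    schedule_interval="@daily",
)

upload_accounts = SalesforceToS3Operator(
    task_id="salesforce_accounts_to_s3",
    sf_conn_id="salesforce_default",              # assumed Airflow connection holding
                                                  # username/password/security_token
    sf_obj="Account",                             # Salesforce object to query
    sf_fields=["Id", "Name", "CreatedDate"],      # omit to pull every field
    fmt="ndjson",                                 # csv (default), json, or ndjson
    s3_conn_id="aws_default",                     # assumed S3 connection id
    s3_bucket="my-data-lake",                     # placeholder bucket
    s3_key="salesforce/account/{{ ds }}.ndjson",  # templated, since s3_key is in template_fields
    record_time_added=True,
    coerce_to_timestamp=True,
    dag=dag,
)

Because "s3_key" and "query" are declared in template_fields, values like the {{ ds }} macro above are rendered by Airflow at runtime rather than taken literally.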
