Commit 19aaff9

Preparing for release 0.3.2
1 parent aef7001 commit 19aaff9

File tree

7 files changed: +17 -42 lines changed

awswrangler/pandas.py

Lines changed: 8 additions & 3 deletions
@@ -935,7 +935,9 @@ def _data_to_s3_object_writer(dataframe: pd.DataFrame,
     if file_format == "parquet":
         outfile: str = f"{guid}{compression_extension}.parquet"
     elif file_format == "csv":
-        filename: Optional[str] = extra_args.get("filename")
+        filename: Optional[str] = None
+        if extra_args is not None:
+            filename = extra_args.get("filename")
         if filename is None:
             outfile = f"{guid}{compression_extension}.csv"
         else:
@@ -989,8 +991,11 @@ def _write_csv_dataframe(dataframe, path, preserve_index, compression, fs, extra
             csv_extra_args["quoting"] = csv.QUOTE_NONE
             csv_extra_args["escapechar"] = "\\"
         csv_buffer: bytes = bytes(
-            dataframe.to_csv(None, header=extra_args.get("header"), index=preserve_index, compression=compression, **csv_extra_args),
-            "utf-8")
+            dataframe.to_csv(None,
+                             header=extra_args.get("header"),
+                             index=preserve_index,
+                             compression=compression,
+                             **csv_extra_args), "utf-8")
         Pandas._write_csv_to_s3_retrying(fs=fs, path=path, buffer=csv_buffer)

     @staticmethod
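
The substantive fix is the guard in the first hunk: calling .get() on extra_args raised AttributeError whenever extra_args was None, since None has no .get(). A minimal sketch of the failure mode and the fix (pick_filename is a hypothetical helper for illustration, not the library's API):

    from typing import Any, Dict, Optional

    def pick_filename(extra_args: Optional[Dict[str, Any]] = None) -> Optional[str]:
        # extra_args.get("filename") would raise AttributeError when extra_args
        # is None, so default to None and only call .get() on a real dict.
        filename: Optional[str] = None
        if extra_args is not None:
            filename = extra_args.get("filename")
        return filename

    assert pick_filename() is None                                # falls back to a generated object name
    assert pick_filename({"filename": "file.csv"}) == "file.csv"  # user-supplied name wins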

awswrangler/s3.py

Lines changed: 1 addition & 1 deletion
@@ -325,7 +325,7 @@ def list_objects(self, path: str) -> List[str]:
                 after=tenacity.after_log(logger, INFO))
 def head_object_with_retry(client_s3: client, bucket: str, key: str) -> Dict[str, Any]:
     """
-    Executes the Boto3 head_object() function with an extra layer of random exponential back-off.
+    Execute the Boto3 head_object() function with an extra layer of random exponential back-off.

     https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3.html#S3.Client.head_object
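
For context, head_object_with_retry wraps Boto3's head_object call in a tenacity retry decorator, as the after=tenacity.after_log(logger, INFO) context line shows. A minimal sketch of that pattern under assumed settings (the exception filter, wait multiplier, and attempt cap below are illustrative guesses, not the library's exact configuration):

    import logging
    from typing import Any, Dict

    import botocore.exceptions
    import tenacity

    logger = logging.getLogger(__name__)

    @tenacity.retry(retry=tenacity.retry_if_exception_type(botocore.exceptions.ClientError),
                    wait=tenacity.wait_random_exponential(multiplier=0.5),  # random exponential back-off
                    stop=tenacity.stop_after_attempt(5),
                    after=tenacity.after_log(logger, logging.INFO))
    def head_object_with_retry(client_s3, bucket: str, key: str) -> Dict[str, Any]:
        # Failed calls are retried with growing, jittered sleeps before the
        # last exception is re-raised.
        return client_s3.head_object(Bucket=bucket, Key=key)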

docs/source/_templates/globaltoc.html

Whitespace-only changes.

docs/source/index.rst

Lines changed: 2 additions & 2 deletions
@@ -12,9 +12,9 @@
    :maxdepth: 4

    install
-   examples
    tutorials
+   examples
    divingdeep
-   contributing
    api/awswrangler
+   contributing
    license

testing/test_awswrangler/test_pandas.py

Lines changed: 4 additions & 28 deletions
@@ -2519,20 +2519,8 @@ def test_sequential_overwrite(bucket):
     path = f"s3://{bucket}/test_sequential_overwrite/"
     df = pd.DataFrame({"col": [1]})
     df2 = pd.DataFrame({"col": [2]})
-    wr.pandas.to_parquet(
-        dataframe=df,
-        path=path,
-        preserve_index=False,
-        mode="overwrite",
-        procs_cpu_bound=1
-    )
-    wr.pandas.to_parquet(
-        dataframe=df2,
-        path=path,
-        preserve_index=False,
-        mode="overwrite",
-        procs_cpu_bound=1
-    )
+    wr.pandas.to_parquet(dataframe=df, path=path, preserve_index=False, mode="overwrite", procs_cpu_bound=1)
+    wr.pandas.to_parquet(dataframe=df2, path=path, preserve_index=False, mode="overwrite", procs_cpu_bound=1)
     df3 = wr.pandas.read_parquet(path=path)
     assert len(df3.index) == 1
     assert df3.col[0] == 2
@@ -2541,13 +2529,7 @@ def test_sequential_overwrite(bucket):
 def test_read_parquet_int_na(bucket):
     path = f"s3://{bucket}/test_read_parquet_int_na/"
     df = pd.DataFrame({"col": [1] + [pd.NA for _ in range(10_000)]}, dtype="Int64")
-    wr.pandas.to_parquet(
-        dataframe=df,
-        path=path,
-        preserve_index=False,
-        mode="overwrite",
-        procs_cpu_bound=4
-    )
+    wr.pandas.to_parquet(dataframe=df, path=path, preserve_index=False, mode="overwrite", procs_cpu_bound=4)
     df2 = wr.pandas.read_parquet(path=path)
     assert len(df2.index) == 10_001
     assert len(df2.columns) == 1
@@ -2557,13 +2539,7 @@ def test_read_parquet_int_na(bucket):
 def test_to_csv_header_filename(bucket):
     path = f"s3://{bucket}/test_to_csv_header_filename/"
     df = pd.DataFrame({"col1": [1, 2], "col2": ["foo", "boo"]})
-    paths = wr.pandas.to_csv(
-        dataframe=df,
-        path=path,
-        filename="file.csv",
-        header=True,
-        preserve_index=False
-    )
+    paths = wr.pandas.to_csv(dataframe=df, path=path, filename="file.csv", header=True, preserve_index=False)
     assert len(paths) == 1
     assert paths[0].endswith("/test_to_csv_header_filename/file.csv")
     df2 = wr.pandas.read_csv(path=paths[0])
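
The test_read_parquet_int_na hunk exercises pandas' nullable Int64 extension dtype, which keeps integer semantics while representing missing values as pd.NA (something plain int64 cannot hold; missing values would otherwise force a float column). A local sketch of the same round-trip, writing to a plain file instead of S3 (the path is illustrative; to_parquet needs pyarrow or fastparquet installed):

    import pandas as pd

    # One real value plus three missing ones, kept as integers via the
    # nullable Int64 dtype rather than upcast to float64.
    df = pd.DataFrame({"col": [1] + [pd.NA for _ in range(3)]}, dtype="Int64")

    df.to_parquet("int_na.parquet", index=False)
    df2 = pd.read_parquet("int_na.parquet")

    assert len(df2.index) == 4
    assert df2["col"].isna().sum() == 3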

testing/test_awswrangler/test_redshift.py

Lines changed: 1 addition & 3 deletions
@@ -933,9 +933,7 @@ def test_to_redshift_int_na(bucket, redshift_parameters):
         iam_role=redshift_parameters.get("RedshiftRole"),
         mode="overwrite",
         preserve_index=False,
-        cast_columns={
-            "col4": "INT8"
-        })
+        cast_columns={"col4": "INT8"})
     conn = wr.glue.get_connection("aws-data-wrangler-redshift")
     with conn.cursor() as cursor:
         cursor.execute("SELECT * FROM public.test_to_redshift_int_na")

testing/test_awswrangler/test_s3.py

Lines changed: 1 addition & 5 deletions
@@ -236,9 +236,5 @@ def test_wait_object_exists(bucket):
 def test_head_object_with_retry(bucket):
     key = "test_head_object_with_retry"
     boto3.resource("s3").Object(bucket, key).put(Body=str("Hello!"))
-    res = wr.s3.head_object_with_retry(
-        client_s3=boto3.client("s3"),
-        bucket=bucket,
-        key=key
-    )
+    res = wr.s3.head_object_with_retry(client_s3=boto3.client("s3"), bucket=bucket, key=key)
     assert res["ResponseMetadata"]["HTTPHeaders"]["content-length"] == "6"
