Commit 5f2fb9e

Fixed issues with package basedosdados update (#15)

1 parent: 0db8139
3 files changed: +51, -48 lines

poetry.lock

23 additions, 22 deletions (generated lockfile; diff not rendered)

pyproject.toml

3 additions, 2 deletions

@@ -17,7 +17,7 @@ lint = "scripts.lint:main"
 
 [tool.poetry.dependencies]
 python = ">=3.9,<4"
-basedosdados = {version = "2.0.0-beta.26", extras = ["all"]}
+basedosdados = {version = "2.0.0-beta.27", extras = ["all"]}
 pandas = "^2.2.2"
 numpy = "^1.26.4"
 requests = "^2.32.3"
@@ -26,14 +26,15 @@ ruamel-yaml = "^0.18.6"
 
 [tool.poetry.group.dev.dependencies]
 pytest = "^8.2.2"
-ruff = "^0.4.8"
+ruff = "^0.9.4"
 
 [build-system]
 requires = ["poetry-core"]
 build-backend = "poetry.core.masonry.api"
 
 [tool.ruff]
 line-length = 80
+exclude = ["misc"]
 
 [tool.pytest.ini_options]
 pythonpath = "src"
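The dependency bump itself is resolved by Poetry, which is why poetry.lock changes alongside pyproject.toml. A quick sanity check, assuming it is run inside the project's virtual environment after reinstalling; the expected values come from the pins above:

    # Minimal check that the environment picked up the bumped pins.
    # importlib.metadata reports the installed distribution versions.
    from importlib.metadata import version

    print(version("basedosdados"))  # expected: 2.0.0-beta.27 (PEP 440 form: 2.0.0b27)
    print(version("ruff"))          # expected: a 0.9.x release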

src/databasers_utils/upload_columns.py

25 additions, 24 deletions
@@ -33,14 +33,16 @@ def get_directory_column_id(
     """
 
     variables = {"column_name": directory_column_name}
-    response = backend._execute_query(query=query, variables=variables)
-    response = backend._simplify_response(response)["allColumn"]
+    response = backend._execute_query(query=query, variables=variables)[
+        "allColumn"
+    ]["items"]
     df = pd.json_normalize(response)
 
-    colunas_de_diretorio = df["table.dataset.fullSlug"].str.contains(
-        "diretorios"
-    )
-    for _, coluna in df[colunas_de_diretorio].iterrows():
+    colunas_de_diretorio = df[
+        df["table.dataset.fullSlug"].str.contains("diretorios") == True  # noqa: E712
+    ]
+
+    for _, coluna in colunas_de_diretorio.iterrows():
         if coluna["table.slug"] == directory_table_name:
             if verbose:
                 print(
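The same change repeats in every hunk below: the separate backend._simplify_response call is dropped and the _execute_query result is indexed directly with ["allColumn"]["items"] (or the matching key). A minimal sketch of that pattern, assuming the basedosdados 2.0.0-beta.27 Backend already returns the simplified payload; fetch_items is a hypothetical wrapper, not part of the library or this module:

    # Hypothetical wrapper illustrating the access pattern used throughout this
    # commit. Assumes (as the diff implies) that Backend._execute_query in
    # basedosdados 2.0.0-beta.27 returns the simplified payload directly.
    def fetch_items(backend, query, key, variables=None):
        # Pass variables only when given, to match both call styles in this file.
        if variables is None:
            response = backend._execute_query(query=query)
        else:
            response = backend._execute_query(query=query, variables=variables)
        return response[key]["items"]

    # e.g. rows = fetch_items(backend, query, "allColumn",
    #                         variables={"column_name": directory_column_name})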
@@ -121,9 +123,9 @@ def get_column_id(
         }}
     }}"""
 
-    data = backend._execute_query(query=query)
-    data = backend._simplify_response(response=data)["allColumn"]
-    if data:
+    data = backend._execute_query(query=query)["allColumn"]["items"]
+
+    if len(data) > 0:
         return data[0]["_id"]
     else:
         return None
@@ -142,8 +144,7 @@ def get_n_columns(table_id, backend: b.Backend):
         }}
     }}"""
 
-    data = backend._execute_query(query=query)
-    data = backend._simplify_response(response=data)["allTable"]
+    data = backend._execute_query(query=query)["allTable"]["items"]
 
     return data[0]["columns"]["edgeCount"]
 
@@ -162,10 +163,7 @@ def get_bqtype_dict(backend: b.Backend):
     }"""
 
     # Execute the GraphQL query to retrieve the data
-    data = backend._execute_query(query=query)
-
-    # Simplify the GraphQL response to extract the relevant data
-    data = backend._simplify_response(response=data)["allBigquerytype"]
+    data = backend._execute_query(query=query)["allBigquerytype"]["items"]
 
     # Create a dictionary where the 'name' part is the key and the '_id' is the value
     bqtype_dict = {item["name"]: item["_id"] for item in data}
@@ -207,17 +205,16 @@ def get_all_columns_id(table_id: str, backend: b.Backend):
         }}
     }}"""
 
-    data = backend._execute_query(query=query)
-    columns_json = backend._simplify_response(response=data)["allColumn"]
+    data = backend._execute_query(query=query)["allColumn"]["items"]
 
     if data:
-        columns_list = [col["_id"] for col in columns_json]
+        columns_list = [col["_id"] for col in data]
         return columns_list
     else:
         print("There is no column in this table to be deleted")
 
 
-def delete_column_by_id(column_id: str, backend: b.Backend):
+def delete_column_by_id(column_id: str, backend: b.Backend) -> bool:
     mutation = """
     mutation($input: UUID!) {
         DeleteColumn(id: $input) {
@@ -243,11 +240,12 @@ def delete_column_by_id(column_id: str, backend: b.Backend):
     return True
 
 
-def delete_all_columns(table_id: str, backend: b.Backend):
+def delete_all_columns(table_id: str, backend: b.Backend) -> None:
     columns = get_all_columns_id(table_id, backend)
 
-    for col in columns:
-        delete_column_by_id(col, backend)
+    if columns is not None:
+        for col in columns:
+            delete_column_by_id(col, backend)
 
 
 def upload_columns_from_architecture(
@@ -258,7 +256,7 @@ def upload_columns_from_architecture(
     if_column_exists: str = "pass",
     replace_all_schema: bool = True,
     verbose: bool = False,
-):
+) -> None:
     """
     Uploads columns from an architecture table to the specified dataset and table in platform.
 
@@ -322,9 +320,12 @@ def upload_columns_from_architecture(
                 directory_column_name, directory_table_slug, backend, verbose
             )
 
+        row_bq_type = row["bigquery_type"].strip().upper()
+        bigquery_type = "BOOLEAN" if row_bq_type == "BOOL" else row_bq_type
+
         mutation_parameters = {
             "table": table_slug,
-            "bigqueryType": bqtype_dict[row["bigquery_type"].upper()],
+            "bigqueryType": bqtype_dict[bigquery_type],
             "name": row["name"],
             "description": row["description"],
             "coveredByDictionary": row["covered_by_dictionary"] == "yes",
