Skip to content

Commit

Permalink
fix backend response (#934)
Browse files Browse the repository at this point in the history
* fix backend response

* more fixes

* update basedosdados to b27
  • Loading branch information
aspeddro authored Jan 28, 2025
1 parent ad1656b commit 72ce7a4
Show file tree
Hide file tree
Showing 5 changed files with 17 additions and 19 deletions.
6 changes: 2 additions & 4 deletions pipelines/datasets/cross_update/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,8 +74,7 @@ def find_closed_tables(backend: Backend):
}
}"""

response = backend._execute_query(query=query)
response = backend._simplify_response(response)["allCoverage"]
response = backend._execute_query(query=query)["allCoverage"]["items"]
data = json_normalize(response)
open_tables = data["table._id"].tolist()

Expand Down Expand Up @@ -104,8 +103,7 @@ def find_closed_tables(backend: Backend):
}
}"""

response = backend._execute_query(query=query)
response = backend._simplify_response(response)["allCoverage"]
response = backend._execute_query(query=query)["allCoverage"]["items"]
data = json_normalize(response)
closed_tables = data["table._id"].tolist()

Expand Down
6 changes: 3 additions & 3 deletions pipelines/utils/dump_to_gcs/tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,11 +115,11 @@ def download_data_to_gcs( # pylint: disable=R0912,R0913,R0914,R0915
"""
log(query)
data = b._execute_query(query_graphql, {'table_id' : table_id})
nodes = data['allTable']['edges']
if nodes == []:
nodes = data['allTable']['items']
if len(nodes) == 0:
return None

num_bytes = nodes[0]['node']['uncompressedFileSize']
num_bytes = nodes[0]['uncompressedFileSize']

url_path = get_credentials_from_secret('url_download_data')
secret_path_url_free = url_path['URL_DOWNLOAD_OPEN']
Expand Down
14 changes: 7 additions & 7 deletions pipelines/utils/metadata/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -146,15 +146,15 @@ def get_id(
variables = dict(zip(keys, values))

response = backend._execute_query(query, variables=variables)
nodes = response[query_class]['edges']
nodes = response[query_class]['items']

if len(nodes)>1:
        raise ValueError(f'More than 1 node was found in this query. Please give query parameters that retrieve only one object. \nQuery:\n\t{query}\nVariables:{variables} \nNodes found:{nodes}')

if len(nodes) == 0:
return response, None

id = nodes[0]['node']['_id']
id = nodes[0]['_id']

return response, id

Expand All @@ -171,14 +171,14 @@ def get_table_status(table_id:str, backend: bd.Backend) -> str:
}
} """

response = backend._execute_query(query,{"table_id": table_id})
response = backend._execute_query(query, {"table_id": table_id})

nodes = response['allTable']['edges']
nodes = response['allTable']['items']

if nodes == []:
if len(nodes) == 0:
return None

return nodes[0]['node']['status']['slug']
return nodes[0]['status']['slug']

def extract_last_date_from_bq(
dataset_id: str,
Expand Down Expand Up @@ -708,7 +708,7 @@ def get_api_last_update_date(dataset_id: str, table_id: str, backend: bd.Backend
"""
variables = {"table_Id": django_table_id}
response = backend._execute_query(query, variables)
clean_response = response['allUpdate']['edges'][0]['node']['latest']
clean_response = response['allUpdate']['items'][0]['latest']
date_result = (datetime.strptime(clean_response[:10],"%Y-%m-%d")).date()
return date_result

Expand Down
8 changes: 4 additions & 4 deletions poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ Jinja2 = "3.0.3"
MarkupSafe = "2.0.1"
PyYAML = "6.0"
Unidecode = "^1.3.4"
basedosdados = {version = "2.0.0b26", extras = ["all"]}
basedosdados = {version = "2.0.0b27", extras = ["all"]}
beautifulsoup4 = "4.11.1"
cachetools = "4.2.4"
certifi = "2021.10.8"
Expand Down

0 comments on commit 72ce7a4

Please sign in to comment.