conftest.py
# Copyright (c) 2017 pandas-gbq Authors All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.

"""Shared pytest fixtures for `tests/system` and `samples/tests` tests."""

import os
import os.path
import uuid

import google.oauth2.service_account
import pytest


@pytest.fixture(scope="session")
def project_id():
    return os.environ.get("GBQ_PROJECT_ID") or os.environ.get(
        "GOOGLE_CLOUD_PROJECT"
    )  # noqa


@pytest.fixture(scope="session")
def private_key_path():
    path = os.path.join(
        "ci", "service_account.json"
    )  # Written by the 'ci/config_auth.sh' script.
    if "GBQ_GOOGLE_APPLICATION_CREDENTIALS" in os.environ:
        path = os.environ["GBQ_GOOGLE_APPLICATION_CREDENTIALS"]
    elif "GOOGLE_APPLICATION_CREDENTIALS" in os.environ:
        path = os.environ["GOOGLE_APPLICATION_CREDENTIALS"]
    if not os.path.isfile(path):
        pytest.skip(
            "Cannot run integration tests when there is "
            "no file at the private key json file path"
        )
        return None
    return path


@pytest.fixture(scope="session")
def private_key_contents(private_key_path):
    if private_key_path is None:
        return None

    with open(private_key_path) as f:
        return f.read()


@pytest.fixture(scope="module")
def bigquery_client(project_id, private_key_path):
    from google.cloud import bigquery

    return bigquery.Client.from_service_account_json(
        private_key_path, project=project_id
    )


@pytest.fixture()
def random_dataset_id(bigquery_client):
    import google.api_core.exceptions
    from google.cloud import bigquery

    dataset_id = "".join(["pandas_gbq_", str(uuid.uuid4()).replace("-", "_")])
    dataset_ref = bigquery.DatasetReference(
        bigquery_client.project, dataset_id
    )
    yield dataset_id

    try:
        bigquery_client.delete_dataset(dataset_ref, delete_contents=True)
    except google.api_core.exceptions.NotFound:
        pass  # Not all tests actually create a dataset


@pytest.fixture()
def credentials(private_key_path):
    return google.oauth2.service_account.Credentials.from_service_account_file(
        private_key_path
    )
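

# --- Usage sketch (illustrative only; not part of the original conftest.py) ---
# Tests under `tests/system` and `samples/tests` consume the fixtures above by
# naming them as parameters; pytest injects the values. A minimal system test
# might look like the function below, as it would appear in a test module
# (the test name and query are assumptions, not code from this repository).
def test_bigquery_client_runs_query(bigquery_client, random_dataset_id):
    # `bigquery_client` is authenticated from the service-account JSON file
    # resolved by `private_key_path`, and `random_dataset_id` is deleted
    # automatically after the test finishes.
    rows = list(bigquery_client.query("SELECT 1 AS x").result())
    assert rows[0].x == 1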