import pandas

# Query: first 100 names from the public USA names dataset, Texas only.
sql = """
SELECT name
FROM `bigquery-public-data.usa_names.usa_1910_current`
WHERE state = 'TX'
LIMIT 100
"""

# Run a Standard SQL query using the environment's default project.
# NOTE(review): pandas.read_gbq is deprecated since pandas 2.2 (removed in 3.0);
# prefer pandas_gbq.read_gbq with the same arguments.
df = pandas.read_gbq(sql, dialect="standard")

# Run a Standard SQL query with the project set explicitly.
project_id = "your-project-id"
df = pandas.read_gbq(sql, project_id=project_id, dialect="standard")

# For large result sets, pass use_bqstorage_api=True to read_gbq: downloading
# through the BigQuery Storage API can be roughly 15-30x faster.
# (This line was previously bare non-comment text, which made the file a SyntaxError.)
from google.cloud import bigquery

# Same query as the pandas example: 100 Texas rows from the public
# USA-names dataset.
sql = """
SELECT name
FROM `bigquery-public-data.usa_names.usa_1910_current`
WHERE state = 'TX'
LIMIT 100
"""

# Client bound to the environment's default credentials and project.
client = bigquery.Client()

# Execute under the client's default project and materialize the result
# as a pandas DataFrame.
df = client.query(sql).to_dataframe()

# Execute again, this time pinning the project explicitly on the query call.
project_id = "your-project-id"
df = client.query(sql, project=project_id).to_dataframe()