python-common-code/example/example_bigquery.py
2025-10-26 17:10:27 +09:00

110 lines
3.9 KiB
Python

import sys
import os
# Make the sibling "src" directory importable so the project-local modules
# below (lib.custom_logger, providers.*) resolve when this example is run
# directly as a script. Must run BEFORE the project imports below.
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),"..", "src")))
from lib.custom_logger import get_logger
# level=10 — presumably logging.DEBUG; confirm against lib.custom_logger.
logger = get_logger(level=10)
from providers.google_cloud_bigquery_provider import GoogleCloudBigQueryProvider
def example_bigquery():
    """Demonstrate basic usage of GoogleCloudBigQueryProvider.

    Lists the datasets visible to the service account, then creates an
    example dataset and table if they do not already exist. The
    commented-out sections below show further operations (insert, query,
    update, delete) that can be enabled one at a time.

    Errors are caught at this top-level boundary and logged; nothing is
    re-raised.
    """
    try:
        # Create the GoogleCloudBigQueryProvider instance.
        # NOTE(review): "accout" looks like a typo, but the string must
        # match the key file actually on disk — confirm before renaming.
        provider = GoogleCloudBigQueryProvider(
            cred_path="keys/google_service_accout.json",
        )

        # List every dataset visible to these credentials.
        for ds in provider.get_datasets():
            logger.info(f"Dataset ID: {ds.dataset_id}")

        # Create the dataset unless it already exists.
        dataset_id = "example_dataset"
        if provider.is_exists_dataset(dataset_id):
            logger.info(f"Dataset {dataset_id} already exists.")
        else:
            dataset = provider.create_dataset(dataset_id)
            logger.info(f"Dataset {dataset_id} created at {dataset.created}.")

        # Create the table unless it already exists.
        table_id = provider.full_table_id(dataset_id, "example_table")
        if provider.is_exists_table(table_id):
            logger.info(f"Table {table_id} already exists.")
        else:
            logger.info(f"Creating table {table_id}...")
            schema = [
                provider.addSchemaField("device_code", "string", "REQUIRED", description="Device code"),
                provider.addSchemaField("time_stamp", "timestamp", "REQUIRED", description="Timestamp"),
            ]
            provider.create_table(
                table_id=table_id,
                schema=schema,
            )

        # tables = provider.get_tables(dataset_id)
        # for table in tables:
        #     logger.info(f"Table ID: {table.table_id}")

        # Insert rows into the table.
        # provider.insert_rows(
        #     table_id=table_id,
        #     rows=[
        #         {"device_code": "device_001", "time_stamp": "2025-01-01 12:00:00"},
        #         {"device_code": "device_002", "time_stamp": "2025-01-01 12:05:00"},
        #         {"device_code": "device_003", "time_stamp": "2025-01-01 12:10:00"},
        #     ]
        # )

        # Inspect table rows (runs a job; issues raw SQL).
        # job_qyuery = provider.excute_query(
        #     query=f"SELECT * FROM `{table_id}` where device_code='device_001'",
        # )
        # results = job_qyuery.result()
        # for row in results:
        #     logger.info(f"Row: {row}")

        # Inspect table rows (runs a job).
        # rows = provider.list_rows(
        #     table_id=table_id,
        # )
        # for row in rows:
        #     logger.info(f"Row: {row}")

        # Check the streaming-buffer status.
        # buffer_status = provider.get_streaming_buffer_info(table_id=table_id)
        # logger.info(f"Streaming Buffer Info: {buffer_status}")

        # Update table rows.
        # query_job = provider.update_query(
        #     table_id=table_id,
        #     values={
        #         "device_code": "'device_999'"
        #     },
        #     were_clause="device_code='device_002'"
        # )
        # query_job.result()  # wait for the job to complete

        # Inspect table rows (runs a job).
        # job = provider.select_query(
        #     table_id=table_id,
        # )
        # results = job.result()
        # for row in results:
        #     logger.info(f"Row: {row}")

        # Delete records from the table.
        # provider.delete_query(
        #     table_id=table_id,
        #     were_clause="device_code='device_012'"
        # )

        # Delete the table.
        # provider.delete_table(table_id=table_id)
    except Exception as e:
        # Top-level boundary: log and swallow so the example exits cleanly.
        logger.error(f"Error in example_bigquery: {e}")
example_bigquery()