Compare commits

...

6 Commits

SHA1 Message Date
460f097d1e Update src/utils/custom_logger.py
Some checks failed: Python Test / python-test (push) failing after 12s
2025-12-11 22:27:47 +09:00
b9ef4422fb Add Cloud Function
Some checks failed: Python Test / python-test (push) failing after 39s
2025-12-06 05:38:21 +09:00
f16a505d24 Generate deploy CI/CD files 2025-12-06 04:56:12 +09:00
9a3ee9efe3 Fix Cloud Function
Some checks failed: Python Test / python-test (push) failing after 8s
2025-12-06 04:49:07 +09:00
4857e68f93 Basic tf files for deployment 2025-12-06 04:19:20 +09:00
df4c0cfdd7 Update 2025-12-06 03:40:39 +09:00
17 changed files with 483 additions and 116 deletions

.github/workflows/deploy_to_gcp.yml (new file, 62 lines)

@@ -0,0 +1,62 @@
name: Gitea Deploy to GCP

on:
  workflow_dispatch:
  pull_request:
    branches:
      - deploy-prd
      - deploy-dev

jobs:
  gcp-deploy:
    name: Deploy to GCP
    runs-on: gcloud-tf
    env:
      GCP_PROJECT_ID: ${{ secrets.GCP_PROJECT_ID }}
      GCP_SA_KEY: ${{ secrets.GCP_SA_KEY }}
      REPO_NAME: ${{ github.repository }}
      COMPONENT_NAME: ${{ vars.COMPONENT_NAME }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v3
      - name: Check Deploy Tools
        run: |
          ls -la
          echo "Checking gcloud and terraform versions..."
          gcloud --version
          terraform --version
      - name: Check Gcloud auth
        run: |
          echo "HOME: ${HOME}"
          printf '%s' "$GCP_SA_KEY" > "$HOME/sa.json"
          export GOOGLE_APPLICATION_CREDENTIALS="$HOME/sa.json"
          gcloud auth activate-service-account --key-file="$GOOGLE_APPLICATION_CREDENTIALS"
          gcloud config set project "$GCP_PROJECT_ID"
          echo "Check gcloud"
          gcloud config list
          gcloud --version
      - name: Exec Terraform init shell
        run: |
          export GOOGLE_APPLICATION_CREDENTIALS="$HOME/sa.json"
          ./scripts/deploy/init_terraform.sh
      - name: Exec Terraform plan shell
        run: |
          export GOOGLE_APPLICATION_CREDENTIALS="$HOME/sa.json"
          ./scripts/deploy/plan_terraform.sh
      - name: Exec Terraform apply shell
        run: |
          export GOOGLE_APPLICATION_CREDENTIALS="$HOME/sa.json"
          ./scripts/deploy/apply_terraform.sh
      - name: Clean up Gcloud auth file
        run: |
          rm -f "$HOME/sa.json"
          echo "Cleaned up Gcloud auth file."
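The same sequence the workflow runs can be reproduced on a workstation that has gcloud and terraform installed. A minimal sketch, assuming a service-account key file is available and the repository root is the working directory; the project ID, repository name, and key path are example values, not part of the repository:

```sh
# Authenticate the same way the workflow step does
export GOOGLE_APPLICATION_CREDENTIALS="$HOME/sa.json"   # example key path
gcloud auth activate-service-account --key-file="$GOOGLE_APPLICATION_CREDENTIALS"
gcloud config set project my-gcp-project                 # example project ID

# Variables read by the deploy scripts
export ENV=dev REPO_NAME=my-org/my-repo

./scripts/deploy/init_terraform.sh
./scripts/deploy/plan_terraform.sh
./scripts/deploy/apply_terraform.sh   # only applies when BRANCH_NAME matches the condition inside the script
```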

.gitignore (8 lines added)

@@ -171,3 +171,11 @@ cython_debug/
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
# terraform.tfstate files
_*.tfvars
.terraform/
.terraform.lock.hcl
*.tfstate
*.tfstate.backup
*deploy.env

readme/deploy.md (new file, 57 lines)

@@ -0,0 +1,57 @@
# How to deploy
## Installation
On macOS:
```sh
brew tap hashicorp/tap
brew install hashicorp/tap/terraform
# Verify the install
terraform -version
```
## Environment
* terraform
* Google Cloud
* Cloud Functions
## Running locally
To run locally, the backend configuration has to be adjusted:
comment out the following line in `provider.tf`.
```tf
terraform {
  # backend "gcs" {}
}
```
```sh
# Run the initialization
cd terraform
# Initialize Terraform
terraform init
# Review the components that will be deployed
terraform plan -var-file=dev.tfvars
# Run the deployment
terraform apply \
  -var-file=dev.tfvars \
  -auto-approve
```
To try a local build:
```sh
# Default build
docker build -t cloud-run-job-base .
# Multi-architecture build (including arm64)
docker buildx build --platform linux/amd64,linux/arm64 -t cloud-run-job-base .
# Run the container (--rm removes the container instance after a single run)
docker run --rm cloud-run-job-base:latest
```
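For CI, the GCS backend in `provider.tf` has to be active again so that `scripts/deploy/init_terraform.sh` can pass the bucket and prefix at init time. A sketch of the CI-side initialization, assuming the `backend "gcs" {}` line is uncommented and the default state bucket from `init_terraform.sh` exists; the repository and environment values are examples:

```sh
# Restore the remote backend (uncomment `backend "gcs" {}` in terraform/provider.tf), then:
cd terraform
terraform init \
  -backend-config="bucket=cicd-tfstate-bucket-20250906" \
  -backend-config="prefix=my-org/my-repo/dev"   # REPO_NAME/ENV; example values
```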

scripts/deploy/apply_terraform.sh (new file)

@@ -0,0 +1,26 @@
#!/bin/bash
# Safe mode (exit immediately on error or unset variable)
set -euo pipefail

# Variable settings (defaults, overridable via environment)
TF_DIR=${TF_DIR:-terraform}
ENV=${ENV:-dev}

cd "$TF_DIR"

# --- Deploy condition ---
if [[ "${BRANCH_NAME:-}" =~ ^.*deploy$ ]]; then
  echo "Start terraform apply (ENV=${ENV}, DIR=${TF_DIR}) ..."
else
  echo "Skip terraform apply (branch=${BRANCH_NAME:-})"
  exit 0
fi

# --- Check that a plan result exists ---
if [[ ! -f tfplan ]]; then
  echo "ERROR: tfplan not found in $(pwd). Run the plan step first." >&2
  exit 1
fi

terraform apply -auto-approve tfplan
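The branch check makes the script a no-op unless the branch name ends with `deploy`, and the apply itself only proceeds when `terraform/tfplan` already exists from the plan step. A quick illustration with hypothetical branch names:

```sh
# A branch that does not match the pattern is skipped
BRANCH_NAME=feature/logging ./scripts/deploy/apply_terraform.sh
# A branch name ending in "deploy" proceeds to apply (requires terraform/tfplan)
BRANCH_NAME=release-deploy ./scripts/deploy/apply_terraform.sh
```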

scripts/deploy/init_terraform.sh (new file)

@@ -0,0 +1,21 @@
#!/bin/bash
# Safe mode (exit immediately on error or unset variable)
set -euo pipefail

TF_DIR=${TF_DIR:-terraform}
# Remote state is stored in GCS (S3 or similar would also work)
TF_STATE_BUCKET=${TF_STATE_BUCKET:-cicd-tfstate-bucket-20250906}
ENV=${ENV:-dev}
REPO_NAME=${REPO_NAME:-unknown}

cd "$TF_DIR"
echo "$REPO_NAME"

# --- Run terraform init ---
terraform init \
  -backend-config="bucket=${TF_STATE_BUCKET}" \
  -backend-config="prefix=${REPO_NAME}/${ENV}"
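With this backend configuration, the state for each repository and environment is written under `REPO_NAME/ENV` in the bucket. A quick way to confirm the layout, assuming the default bucket name and that `gsutil` is installed; the repository and environment segments are examples:

```sh
# List the state objects written by terraform init/apply
gsutil ls gs://cicd-tfstate-bucket-20250906/my-org/my-repo/dev/
```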

scripts/deploy/plan_terraform.sh (new file)

@@ -0,0 +1,21 @@
#!/bin/bash
# Safe mode (exit immediately on error or unset variable)
set -euo pipefail

# Variable settings (defaults, overridable via environment)
TF_DIR=${TF_DIR:-terraform}
ENV=${ENV:-dev}

cd "$TF_DIR"

if [ -f "${ENV}.tfvars" ]; then
  terraform plan \
    -out=tfplan \
    -var-file="${ENV}.tfvars"
else
  # Raise an error if the tfvars file for the environment is missing
  echo "ERROR: ${ENV}.tfvars not found in $(pwd)" >&2
  exit 1
fi
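Because the plan is saved to `tfplan`, it can be reviewed before the apply step runs; a short sketch using standard Terraform commands:

```sh
cd terraform
terraform show tfplan                    # human-readable view of the saved plan
terraform show -json tfplan > plan.json  # machine-readable form, e.g. for review tooling
```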

src/main.py (modified)

@@ -1,10 +1,6 @@
from flask import Request
import functions_framework
import os
os.environ["ENV"]="dev" # For testing purposes
from utils.custom_logger import get_logger
logger = get_logger(__name__)
@@ -18,6 +14,7 @@ def main(request: Request):
    # Get the request headers
    ua = request.headers.get("User-Agent", "Unknown")
    logger.info(f"User-Agent: {ua}")
    logger.debug(f"Request Method: {request.method}")
    if request.method == "GET":
        logger.info("Processing GET request")

src/requirements.txt (1 line added)

@@ -1 +1,2 @@
functions-framework==3.*
functions-framework==3.*
google-cloud-logging
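`functions-framework` ships a local development server, so the function can be exercised before deploying. A minimal sketch, assuming the dependencies are installed and the entry point is `main` in the function source directory:

```sh
cd src
pip install -r requirements.txt
# ENV=dev selects the Cloud-style JSON logger; serve the function locally
ENV=dev functions-framework --target=main --port=8080
# In another terminal, exercise the GET branch:
curl -H "User-Agent: local-test" http://localhost:8080/
```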

src/utils/custom_logger.py (modified; the pre-change version is shown first, followed by the updated version)

@@ -1,111 +1,113 @@
import os
import logging
import json
import functools

from .singleton import Singleton


class CoogelCustomLogger():
    """Simple custom logger for Google Cloud Functions"""

    def __init__(self, name="main"):
        self.logger = logging.getLogger(name)
        self.logger.setLevel(logging.INFO)
        handler = logging.StreamHandler()
        handler.setLevel(logging.INFO)
        # Message only (no formatting)
        formatter = logging.Formatter("%(message)s")
        handler.setFormatter(formatter)
        if not self.logger.handlers:
            self.logger.addHandler(handler)

    def _log(self, message, level="INFO", **fields):
        payload = {
            "serverity": level,
            "message": f"{message}",
            **fields
        }
        self.logger.info(json.dumps(payload, ensure_ascii=False))

    def info(self, message, **fields):
        self._log(message, level="INFO", **fields)

    def warning(self, message, **fields):
        self._log(message, level="WARNING", **fields)

    def error(self, message, **fields):
        self._log(message, level="ERROR", **fields)

    def exception(self, message, **fields):
        payload = {
            "serverity": "ERROR",
            "message": f"{message}",
            **fields
        }
        self.logger.info(
            json.dumps(payload, ensure_ascii=False),
            exc_info=True
        )

    def debug(self, message, **fields):
        self._log(message, level="DEBUG", **fields)


class CustomLogger(Singleton):
    """
    Singleton logger class that initializes a logger with a specified name
    and log file. It provides a method to log entry and exit of functions.
    """

    def __init__(self, name="main", log_file=None, level=logging.INFO):
        if hasattr(self, "_initialized") and self._initialized:
            return  # Do nothing if already initialized
        if os.getenv("ENV", "local") == "local":
            self.logger = logging.getLogger(name)
            self.logger.setLevel(level)
            self.logger.propagate = False
            formatter = logging.Formatter(
                "%(asctime)s %(levelname)s "
                "[%(filename)s:%(lineno)3d]: %(message)s"
            )
            # Console handler
            ch = logging.StreamHandler()
            ch.setFormatter(formatter)
            self.logger.addHandler(ch)
            # File handler
            if log_file:
                fh = logging.FileHandler(log_file, encoding="utf-8")
                fh.setFormatter(formatter)
                self.logger.addHandler(fh)
            self._initialized = True
        elif os.getenv("ENV") in ["dev", "prd"]:
            self.logger = CoogelCustomLogger(name)
            self._initialized = True

    def get_logger(self):
        return self.logger

    def log_entry_exit(self, func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            self.logger.info(f"Enter: {func.__qualname__}")
            result = func(*args, **kwargs)
            self.logger.info(f"Exit: {func.__qualname__}")
            return result
        return wrapper


def get_logger(name="main", log_file=None, level=logging.INFO):
    custom_logger = CustomLogger(name, log_file, level)
    return custom_logger.get_logger()
import os
import logging
import json
import functools

from .singleton import Singleton


class CoogelCustomLogger():
    """Simple custom logger for Google Cloud Functions"""

    def __init__(self, name="main"):
        self.logger = logging.getLogger(name)
        self.logger.setLevel(logging.INFO)
        if not self.logger.handlers:
            handler = logging.StreamHandler()
            handler.setLevel(logging.INFO)
            # Message only (no formatting)
            formatter = logging.Formatter("%(message)s")
            handler.setFormatter(formatter)
            self.logger.addHandler(handler)
        # Stop propagation to the parent (root) logger to prevent duplicate output
        self.logger.propagate = False

    def _log(self, message, level="INFO", **fields):
        payload = {
            "severity": level,
            "message": f"{message}",
            **fields
        }
        self.logger.info(json.dumps(payload, ensure_ascii=False))

    def info(self, message, **fields):
        self._log(message, level="INFO", **fields)

    def warning(self, message, **fields):
        self._log(message, level="WARNING", **fields)

    def error(self, message, **fields):
        self._log(message, level="ERROR", **fields)

    def exception(self, message, **fields):
        payload = {
            "severity": "ERROR",
            "message": f"{message}",
            **fields
        }
        self.logger.info(
            json.dumps(payload, ensure_ascii=False),
            exc_info=True
        )

    def debug(self, message, **fields):
        self._log(message, level="DEBUG", **fields)


class CustomLogger(Singleton):
    """
    Singleton logger class that initializes a logger with a specified name
    and log file. It provides a method to log entry and exit of functions.
    """

    def __init__(self, name="main", log_file=None, level=logging.INFO):
        if hasattr(self, "_initialized") and self._initialized:
            return  # Do nothing if already initialized
        if os.getenv("ENV", "local") == "local":
            self.logger = logging.getLogger(name)
            self.logger.setLevel(level)
            self.logger.propagate = False
            formatter = logging.Formatter(
                "%(asctime)s %(levelname)s "
                "[%(filename)s:%(lineno)3d]: %(message)s"
            )
            # Console handler
            ch = logging.StreamHandler()
            ch.setFormatter(formatter)
            self.logger.addHandler(ch)
            # File handler
            if log_file:
                fh = logging.FileHandler(log_file, encoding="utf-8")
                fh.setFormatter(formatter)
                self.logger.addHandler(fh)
            self._initialized = True
        elif os.getenv("ENV") in ["dev", "prd"]:
            self.logger = CoogelCustomLogger(name)
            self._initialized = True

    def get_logger(self):
        return self.logger

    def log_entry_exit(self, func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            self.logger.info(f"Enter: {func.__qualname__}")
            result = func(*args, **kwargs)
            self.logger.info(f"Exit: {func.__qualname__}")
            return result
        return wrapper


def get_logger(name="main", log_file=None, level=logging.INFO):
    custom_logger = CustomLogger(name, log_file, level)
    return custom_logger.get_logger()
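A quick way to see the structured output the updated logger produces when `ENV` is `dev` or `prd`; a sketch, assuming it is run from the function source directory so that `utils` is importable as a package, with an example logger name and field:

```sh
cd src
ENV=dev python -c "from utils.custom_logger import get_logger; get_logger('demo').info('hello', user='alice')"
# Expected: a single JSON line such as {"severity": "INFO", "message": "hello", "user": "alice"}
```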

terraform/dev.tfvars (new file, 6 lines)

@@ -0,0 +1,6 @@
project_id = "gcp-devel-project"
region = "asia-northeast1"
env_name = "dev"
component_name = "base"

terraform/function.tf (new file, 38 lines)

@@ -0,0 +1,38 @@
# Cloud Function resource
# This uses 2nd gen Cloud Functions (Cloud Functions 2nd gen):
# https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/cloudfunctions2_function
# For 1st gen Cloud Functions (Cloud Functions 1st gen), see:
# https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/cloudfunctions_function
resource "google_cloudfunctions2_function" "function" {
  name        = "cf-${var.env_name}-${var.component_name}"
  location    = var.region
  description = "Cloud Function for ${var.component_name}"

  build_config {
    runtime     = var.runtime
    entry_point = var.entry_point
    source {
      storage_source {
        bucket = google_storage_bucket.bucket.name
        object = google_storage_bucket_object.source.name
      }
    }
    environment_variables = {
      ENV = var.env_name
    }
  }

  service_config {
    max_instance_count    = var.max_instance_count
    min_instance_count    = var.min_instance_count
    timeout_seconds       = var.timeout_seconds
    available_memory      = var.available_memory
    service_account_email = google_service_account.account.email
  }
}
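Once applied with `dev.tfvars`, the function name resolves to `cf-dev-base`. A couple of gcloud commands to verify the deployment, assuming the same project and region as in the tfvars:

```sh
gcloud functions describe cf-dev-base --gen2 --region=asia-northeast1
# The HTTPS endpoint of the underlying Cloud Run service:
gcloud functions describe cf-dev-base --gen2 --region=asia-northeast1 --format='value(serviceConfig.uri)'
```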

terraform/platform.tf (new file, 9 lines)

@@ -0,0 +1,9 @@
# Enable the required Google Cloud APIs
resource "google_project_service" "services" {
  for_each = toset([
    "run.googleapis.com",
    "cloudfunctions.googleapis.com",
  ])
  service = each.key
}
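The same APIs can be checked or enabled manually with gcloud; 2nd gen functions may additionally need the Cloud Build and Artifact Registry APIs for the build step, which is an assumption worth verifying in the target project:

```sh
# Check which of the required APIs are already enabled
gcloud services list --enabled | grep -E "run.googleapis.com|cloudfunctions.googleapis.com"
# Enable them manually if needed
gcloud services enable run.googleapis.com cloudfunctions.googleapis.com
```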

terraform/provider.tf (new file, 9 lines)

@@ -0,0 +1,9 @@
terraform {
  # backend "gcs" {}
}

# Google provider configuration
provider "google" {
  project = var.project_id
  region  = var.region
}

terraform/sa.tf (new file, 24 lines)

@@ -0,0 +1,24 @@
resource "google_service_account" "account" {
  account_id   = "sa-${var.env_name}-${var.component_name}"
  display_name = "Service account for ${var.component_name} in the ${var.env_name} environment"
  description  = "Service account used by the ${var.component_name} Cloud Function in the ${var.env_name} environment"
  project      = var.project_id
}

# IAM binding for the Cloud Function
resource "google_cloudfunctions2_function_iam_member" "invoker" {
  project        = google_cloudfunctions2_function.function.project
  location       = google_cloudfunctions2_function.function.location
  cloud_function = google_cloudfunctions2_function.function.name
  role           = "roles/cloudfunctions.invoker"
  member         = "serviceAccount:${google_service_account.account.email}"
}

# IAM binding for the underlying Cloud Run service
resource "google_cloud_run_service_iam_member" "cloud_run_invoker" {
  project  = google_cloudfunctions2_function.function.project
  location = google_cloudfunctions2_function.function.location
  service  = google_cloudfunctions2_function.function.name
  role     = "roles/run.invoker"
  member   = "serviceAccount:${google_service_account.account.email}"
}
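These bindings grant the function's own service account invoker rights on both the function and its Cloud Run service; any principal with `roles/run.invoker` can then call the endpoint with an identity token. A sketch using the currently authenticated gcloud account (which itself needs invoker rights), reusing the describe command from above:

```sh
FUNCTION_URL=$(gcloud functions describe cf-dev-base --gen2 --region=asia-northeast1 --format='value(serviceConfig.uri)')
curl -H "Authorization: Bearer $(gcloud auth print-identity-token)" "$FUNCTION_URL"
```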

terraform/sample.tfvars (new file, 6 lines)

@@ -0,0 +1,6 @@
project_id     = "specify your project ID"
region         = "asia-northeast1"
env_name       = "dev"
component_name = "specify the component (job) name"

terraform/storage.tf (new file, 18 lines)

@@ -0,0 +1,18 @@
resource "google_storage_bucket" "bucket" {
  provider                    = google
  name                        = "${var.component_name}-gcf-source" # Every bucket name must be globally unique
  location                    = var.region
  uniform_bucket_level_access = true
}

data "archive_file" "default" {
  type        = "zip"
  output_path = "/tmp/function-source.zip"
  source_dir  = "../src/"
}

resource "google_storage_bucket_object" "source" {
  name   = "function-source.zip"
  bucket = google_storage_bucket.bucket.name
  source = data.archive_file.default.output_path
}
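`archive_file` zips `../src/` into `/tmp/function-source.zip` whenever Terraform evaluates the data source, so the packaged contents can be inspected after a plan; a quick check:

```sh
cd terraform
terraform plan -var-file=dev.tfvars >/dev/null   # evaluates the archive_file data source
unzip -l /tmp/function-source.zip                # should list the function source, e.g. main.py and utils/
```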

terraform/variables.tf (new file, 62 lines)

@@ -0,0 +1,62 @@
# GCP project ID and region
variable "project_id" {
  description = "The ID of the GCP project to deploy resources into."
  type        = string
}

variable "region" {
  description = "The GCP region to deploy resources into."
  type        = string
  default     = "asia-northeast1"
}

variable "env_name" {
  description = "The environment name for the deployment."
  type        = string
  default     = "dev"
  validation {
    condition     = contains(["dev", "staging", "prd"], var.env_name)
    error_message = "env_name must be one of: dev, staging, prd."
  }
}

variable "component_name" {
  description = "The name of the Cloud Function."
  type        = string
}

# Cloud Function settings
variable "runtime" {
  description = "The runtime environment for the Cloud Function."
  type        = string
  default     = "python312"
}

variable "entry_point" {
  description = "The entry point function for the Cloud Function."
  type        = string
  default     = "main"
}

variable "max_instance_count" {
  description = "The maximum number of instances for the Cloud Function."
  type        = number
  default     = 3
}

variable "min_instance_count" {
  description = "The minimum number of instances for the Cloud Function."
  type        = number
  default     = 0
}

variable "timeout_seconds" {
  description = "The timeout duration for the Cloud Function in seconds."
  type        = number
  default     = 60
}

variable "available_memory" {
  description = "The amount of memory available to the Cloud Function."
  type        = string
  default     = "256M"
}