Compare commits

...

No commits in common. "main" and "coverage-report" have entirely different histories.

47 changed files with 29 additions and 1613 deletions

View File

@@ -1,69 +0,0 @@
name: Gitea Deploy to GCP
on:
workflow_dispatch:
pull_request:
branches:
- deploy-prd
- deploy-dev
jobs:
gcp-deploy:
name: Deploy to GCP
runs-on: gcloud-tf
env:
GCP_PROJECT_ID: ${{ secrets.GCP_PROJECT_ID }}
GCP_SA_KEY: ${{ secrets.GCP_SA_KEY }}
REPO_NAME: ${{ github.repository }}
HASH_SUFFIX: ${{ github.sha }}
JOB_NAME: ${{ vars.JOB_NAME }}
BRANCH_NAME: ${{ github.ref_name }}
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Check Deploy Tools
run: |
ls -la
echo "Checking gcloud and terraform versions..."
gcloud --version
terraform --version
- name: Check Gcloud auth
run: |
echo "HOME: ${HOME}"
printf '%s' "$GCP_SA_KEY" > "$HOME/sa.json"
export GOOGLE_APPLICATION_CREDENTIALS="$HOME/sa.json"
gcloud auth activate-service-account --key-file="$GOOGLE_APPLICATION_CREDENTIALS"
gcloud config set project "$GCP_PROJECT_ID"
echo "Check gcloud"
gcloud config list
gcloud --version
- name: Exec Terraform init shell
run: |
export GOOGLE_APPLICATION_CREDENTIALS="$HOME/sa.json"
./scripts/deploy/init_terraform.sh
- name: Exec Container Image Push to Artifact Registry
run: |
export GOOGLE_APPLICATION_CREDENTIALS="$HOME/sa.json"
./scripts/deploy/build_image_to_gar.sh
- name: Exec Terraform plan shell
run: |
export GOOGLE_APPLICATION_CREDENTIALS="$HOME/sa.json"
./scripts/deploy/plan_terraform.sh
- name: Exec Terraform apply shell
run: |
export GOOGLE_APPLICATION_CREDENTIALS="$HOME/sa.json"
./scripts/deploy/apply_terraform.sh
- name: Clean up Gcloud auth file
run: |
rm -f "$HOME/sa.json"
echo "Cleaned up Gcloud auth file."

View File

@@ -1,66 +0,0 @@
name: Python Lint with Ruff
on:
workflow_dispatch:
pull_request:
branches:
- main
- develop
paths:
- 'src/**'
- 'tests/**'
- 'pyproject.toml'
- 'ruff.toml'
- 'requirements.txt'
- 'requirements-dev.txt'
jobs:
python-lint:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: "3.12"
- name: Install dependencies
id: installDependencies
run: |
pip install -r requirements.txt
pip install -r requirements-dev.txt
- name: Check Linter
id: checkLinter
run: |
echo "Running Ruff Lint Check..."
python -m ruff check . --exit-zero --no-cache --output-format json --output-file ruff-report.json
echo "Ruff Lint Check completed. ruff-report.json"
- name: Generate Linter Report
id: generateLinterReport
run: |
python scripts/generate_linter.py
- name: pull_request message with Ruff Lint results
id: prMessageRuffLint
run: |
# If the event is a pull request, add a comment to the PR
if [ "${{ github.event_name }}" = "pull_request" ]; then
echo "Posting Ruff Lint results to Pull Request..."
curl -v -X POST \
-H "Content-Type: application/json" \
-H "Authorization: token ${{ secrets.GITEA_TOKEN }}" \
-d @lint-result.json \
${{ gitea.server_url }}/api/v1/repos/${{ gitea.repository }}/issues/${{ github.event.pull_request.number }}/comments
else
echo "Not a pull request event."
echo "Ruff Lint results:"
echo "-------------------"
cat lint-result.md
echo "-------------------"
echo "No PR detected. Skipping API comment."
fi

View File

@@ -1,99 +0,0 @@
name: Python Test
on:
workflow_dispatch:
push:
branches:
- main
# - develop
paths:
- 'src/**'
- 'tests/**'
- '.github/workflows/pytest.yml'
- 'requirements.txt'
- 'requirements-dev.txt'
jobs:
python-test:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: "3.12"
- name: Install dependencies
id: installDependencies
run: |
pip install -r requirements.txt
pip install -r requirements-dev.txt
- name: Run Python Test
id: runPyTest
run: |
# pipefail so a pytest failure is not masked by the pipe into tee
set -o pipefail
pytest --junitxml=pytest.xml --cov-report term-missing --cov=src tests/ | tee pytest-coverage.txt
- name: Coverage Report
id: CoverageReport
if: success() # run only if the tests pass
run: |
coverage-badge -o .coverage.svg
python - <<EOF
from scripts.generate_coverage import GenerateCoverage
generate_coverage = GenerateCoverage()
generate_coverage.save_table()
EOF
- name: Generate coverage-report Branch AND README.md
id: generateCoverageReportBranch
if: success() # run only if the tests pass
run: |
# Create the coverage-report branch if it does not exist; otherwise check it out
if git ls-remote --exit-code origin coverage-report; then
echo "coverage-report branch exists"
git fetch origin coverage-report:coverage-report
git checkout -B coverage-report origin/coverage-report
else
echo "coverage-report branch does not exist"
git checkout --orphan coverage-report
git rm -rf . # remove all files
fi
ls -l
- name: Update Readme
id: updateReadme
run: |
ls -l
mv .coverage.svg coverage.svg
echo "# Pytest Report" > README.md
echo "" >> README.md
echo "![test](coverage.svg)" >> README.md
echo "" >> README.md
cat coverage_table.md >> README.md
cat README.md
- name: Check files before upload
id: checkFiles
run: ls -l README.md coverage.svg
- name: Commit Test Report To coverage-report Branch
id: commitTestReport
if: success() # run only if the tests pass
run: |
git config --global user.name "github-actions[bot]"
git config --global user.email "github-actions[bot]@users.noreply.github.com"
git add README.md coverage.svg
# Check whether there are any staged changes
if git diff --cached --quiet; then
echo "No changes to commit"
else
git commit -m "Update coverage report"
git push https://actions-bot:${{ secrets.CICD_GITEA_TOKEN }}@gitea.pglikers.com/data-science/cloud-run-job-base.git coverage-report --force
fi

.gitignore vendored
View File

@@ -1,181 +0,0 @@
# ---> Python
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
ruff-report.*
lint-result.md
lint-result.json
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
.ruff_cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# UV
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
#uv.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
# terraform.tfstate files
_*.tfvars
.terraform/
.terraform.lock.hcl
*.tfstate
*.tfstate.backup
*deploy.env

View File

@@ -1 +0,0 @@
3.12

View File

@@ -1,6 +0,0 @@
{
"recommendations": [
"charliermarsh.ruff",
"littlefoxteam.vscode-python-test-adapter"
]
}

View File

@@ -1,7 +0,0 @@
{
"python.testing.pytestArgs": [
"tests"
],
"python.testing.unittestEnabled": false,
"python.testing.pytestEnabled": true
}

View File

@@ -1,44 +0,0 @@
# AGENT Guidelines
This document is the **instruction set for AI agents (Cursor, ChatGPT, etc.) working in this repository**.
Always follow it when proposing code, applying automatic fixes, or generating documentation.
---
## 1. Project Overview
- Project name: `TODO: fill in the project name`
- Main purpose: `TODO: describe the goal of this project in 1-3 lines`
- Intended users:
- `e.g. personal template / small API / batch scripts`
## 2. Tech Stack & Environment
- Language: Python 3.x (`TODO: specify the exact version, e.g. 3.12`)
- Virtual environment: `.venv` (`python -m venv .venv`)
- Package management: `pip` (via `requirements.txt`)
- Linter / Formatter:
- **Ruff** (used for both linting and formatting)
- Tests: `pytest` (planned, or already in place)
### Setting Up the Environment
```bash
python -m venv .venv
source .venv/bin/activate  # Windows: .venv\Scripts\activate
pip install -r requirements.txt
```
### Folder Layout
.
├── src/ # main application code
├── tests/ # test code
├── requirements.txt
├── ruff.toml
└── AGENT.md # this file
As a rule, agents should add and modify code under src/.
For configuration files and CI/CD, follow the existing setup.

View File

@@ -1,19 +0,0 @@
FROM python:3.12-slim
# Install required OS packages
RUN apt-get update && apt-get install -y \
curl
# Install the Python packages
COPY requirements.txt .
RUN pip install --upgrade pip
RUN pip install --no-cache-dir -r requirements.txt
# Set the working directory
WORKDIR /app
# Copy the application code
COPY ./src /app
# Command executed when the container starts
CMD ["python", "main.py"]

View File

@@ -1,9 +0,0 @@
MIT License
Copyright (c) 2025 templates
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@@ -1,53 +1,10 @@
Old version (main):

# python-template
Python Template
## Tool
### Test
Install the test libraries:
```sh
pip install -r requirements-dev.txt
```
Run the tests:
```sh
pytest tests/
# To show log output:
# pytest -s tests/
```
### Lint
```sh
pip install -r requirements-dev.txt
```
**Running lint**
```sh
ruff check .
```
For example, an unused variable is reported like this:
```log
F841 Local variable `x` is assigned to but never used
--> src/main.py:7:5
|
6 | def func_wrong():
7 | x = 1
| ^
|
help: Remove assignment to unused variable `x`
```
**To also apply automatic fixes**
```sh
ruff check . --fix
```

New version (coverage-report):

# Pytest Report
![test](coverage.svg)
| File | Statements | Missed | Coverage | Missing Lines |
|------|------------|--------|----------|---------------|
| src/main.py | 7 | 1 | 86% | 12 |
| src/utils/__init__.py | 0 | 0 | 100% | - |
| src/utils/custom_logger.py | 34 | 10 | 71% | 35-37, 45-52 |
| src/utils/singleton.py | 10 | 0 | 100% | - |

coverage.svg Normal file
View File

@@ -0,0 +1,21 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg xmlns="http://www.w3.org/2000/svg" width="99" height="20">
<linearGradient id="b" x2="0" y2="100%">
<stop offset="0" stop-color="#bbb" stop-opacity=".1"/>
<stop offset="1" stop-opacity=".1"/>
</linearGradient>
<mask id="a">
<rect width="99" height="20" rx="3" fill="#fff"/>
</mask>
<g mask="url(#a)">
<path fill="#555" d="M0 0h63v20H0z"/>
<path fill="#a4a61d" d="M63 0h36v20H63z"/>
<path fill="url(#b)" d="M0 0h99v20H0z"/>
</g>
<g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="11">
<text x="31.5" y="15" fill="#010101" fill-opacity=".3">coverage</text>
<text x="31.5" y="14">coverage</text>
<text x="80" y="15" fill="#010101" fill-opacity=".3">78%</text>
<text x="80" y="14">78%</text>
</g>
</svg>


View File

@@ -1,20 +0,0 @@
# Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR = .
BUILDDIR = build
# Put it first so that "make" without argument is like "make help".
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

View File

@@ -1,35 +0,0 @@
# Configuration file for the Sphinx documentation builder.
#
# For the full list of built-in configuration values, see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Project information -----------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
import os
import sys
sys.path.insert(0, os.path.abspath('../src'))
project = 'Set the project name'
copyright = '2025, Set the author name'
author = 'Set the author name'
release = '1.0.0'
# -- General configuration ---------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.napoleon', # support for Google- and NumPy-style docstrings
'sphinx.ext.viewcode', # add links to the source code
'sphinx_rtd_theme']
templates_path = ['_templates']
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
language = 'ja'
# -- Options for HTML output -------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
html_theme = 'sphinx_rtd_theme'
html_static_path = ['_static']

View File

@@ -1,20 +0,0 @@
.. <Project Name> documentation master file, created by
sphinx-quickstart on Fri Dec 5 01:02:07 2025.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
<Project Name> documentation
==============================================
Add your content using ``reStructuredText`` syntax. See the
`reStructuredText <https://www.sphinx-doc.org/en/master/usage/restructuredtext/index.html>`_
documentation for details.
.. toctree::
:maxdepth: 2
:caption: Contents:
modules

View File

@@ -1,7 +0,0 @@
main module
===========
.. automodule:: main
:members:
:show-inheritance:
:undoc-members:

View File

@@ -1,35 +0,0 @@
@ECHO OFF
pushd %~dp0
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=build
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.https://www.sphinx-doc.org/
exit /b 1
)
if "%1" == "" goto help
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end
:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
:end
popd

View File

@@ -1,9 +0,0 @@
src
===
.. toctree::
:maxdepth: 4
main
utils

View File

@@ -1,29 +0,0 @@
utils package
=============
Submodules
----------
utils.custom\_logger module
---------------------------
.. automodule:: utils.custom_logger
:members:
:show-inheritance:
:undoc-members:
utils.singleton module
----------------------
.. automodule:: utils.singleton
:members:
:show-inheritance:
:undoc-members:
Module contents
---------------
.. automodule:: utils
:members:
:show-inheritance:
:undoc-members:

View File

@@ -1,19 +0,0 @@
import sys
import os
sys.path.append(
os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "src"))
)
from utils.custom_logger import get_logger
logger = get_logger(__name__)
def example():
logger.info("Application started")
print("Hello, World!")
example()

View File

@@ -1,7 +0,0 @@
[project]
name = "set-your-project-name"
version = "0.1.0"
description = "Describe the project"
readme = "README.md"
requires-python = ">=3.12"
dependencies = []

View File

@@ -1,61 +0,0 @@
# Purpose for the Agent
Review the `terraform` folder and create a .drawio file (XML format) in the
`readme/components_design` folder.
## Role
You are an expert in designing system architectures.
## Conventions
[Requirements]
* Name the output file `system_components.drawio`.
* Unless the request says otherwise, prefer the `_dev.tfvars` environment variables.
* Service accounts, roles, and the like need not be shown.
* **Important**: do not use components that do not exist in the terraform configuration.
* Separate the component diagram for end users from the one for developers.
[Layout requirements]
- Draw the Region and VPC as large frames
- Region: the outermost frame
- VPC: placed inside the Region
- Stagger components so that connection lines do not overlap
- Keep at least 100px of spacing between components
- Place the user outside the VPC (assumed to access over the internet)
- Lay out components so data flows left to right (User → Frontend → Backend → Database)
[Style requirements]
**Frame styles:**
- VPC: `fillColor=#D5E8D4;strokeColor=#82b366;dashed=1;verticalAlign=top;fontStyle=1;fontSize=14`
- Region: `fillColor=#E1F5FE;strokeColor=#01579B;dashed=1;verticalAlign=top;fontStyle=1;fontSize=14`
- Place frame labels at the top left (`align=left;spacingLeft=10;spacingTop=5`)
**Connection lines:**
- Bidirectional: `endArrow=classic;startArrow=classic;strokeWidth=2`
- Unidirectional: `endArrow=classic;strokeWidth=2`
- HTTPS access: `strokeColor=#4285F4` (blue)
- Database connections: `strokeColor=#DB4437` (red)
- Storage access: `strokeColor=#34A853` (green)
- Label the connection lines (e.g. "HTTPS", "API", "SQL")
[Coordinate and size guidelines]
- Region frame: 800-1000px wide, 500-700px tall
- VPC frame: roughly 50px of margin inside the Region, 700-900px wide, 400-600px tall
- Component icons: 78x78 or 80x80
- Horizontal spacing between components: 150-200px
- Vertical spacing between components: 100-150px
**Icons:**
- User/client icon
- `shape=mxgraph.aws4.resourceIcon;resIcon=mxgraph.aws4.user` (usable everywhere)
- Refer to the map below for component icons
- When using these icons, always use the corresponding mxCell template (see the sketch after this list)
- The id / x / y / width / height / parent attributes may be adjusted as needed
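For illustration, a hedged sketch of one such mxCell template, using the user-icon style given above; the `id` and `mxGeometry` values are placeholders:

```xml
<!-- Hypothetical template: id and coordinates are illustrative -->
<mxCell id="user-1" value="User"
        style="shape=mxgraph.aws4.resourceIcon;resIcon=mxgraph.aws4.user;"
        vertex="1" parent="1">
  <mxGeometry x="40" y="300" width="78" height="78" as="geometry"/>
</mxCell>
```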

View File

@@ -1,76 +0,0 @@
# How to Deploy
## Installation
On macOS:
```sh
brew tap hashicorp/tap
brew install hashicorp/tap/terraform
# verify
terraform -version
```
## Environment
* terraform
* google cloud
* Cloud Run Job
## Running a Deployment
```sh
# Initialize
cd terraform
# Initialize Terraform
terraform init
# Create the artifact repository and buckets first
terraform apply \
-var-file=_dev.tfvars \
-auto-approve \
-target="google_artifact_registry_repository.repo"
# To build the Dockerfile and push it to GAR
cd ../
# 1. Configure authentication for Artifact Registry (first run only)
source deploy.env
gcloud auth configure-docker "${AR_REGION}-docker.pkg.dev"
# arm64: build remotely with Cloud Build
source deploy.env
gcloud builds submit --tag "${IMAGE_URI}" .
echo "${IMAGE_URI}"
# Check which components will be deployed
cd terraform
terraform plan \
-var-file=_dev.tfvars \
-var="hash_suffix=${HASH_SUFFIX}"
# Run the deployment
terraform apply \
-var-file=_dev.tfvars \
-var="hash_suffix=${HASH_SUFFIX}" \
-auto-approve
```
To try building locally:
```sh
# Default build
docker build -t cloud-run-job-base .
# To build for arm64 as well
docker buildx build --platform linux/amd64,linux/arm64 -t cloud-run-job-base .
# Run the container (run once, then remove the container instance)
docker run --rm cloud-run-job-base:latest
```
### Deploying via CI/CD
**Running with GitHub (Gitea) Actions**
**Running with Cloud Build**
### Triggering the job when data arrives in BigQuery
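As a hedged starting point until this is automated, the job can at least be executed on demand; the job name below assumes the `${job_name}-${env_name}-job` pattern from `terraform/run_job.tf` with the `_dev.tfvars` values:

```sh
# Hypothetical manual trigger; an event-driven setup (e.g. Eventarc on
# BigQuery audit logs) is not implemented in this repository.
gcloud run jobs execute base-dev-job --region asia-northeast1
```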

View File

@@ -1,107 +0,0 @@
# Python Project
## Setting Up a Virtual Environment
### Using venv
```sh
python3 -m venv .venv
# Linux
source .venv/bin/activate
# Windows
# .venv/Scripts/activate
pip install -r requirements.txt
# For development (tests, docs)
pip install -r requirements-dev.txt
```
### Using uv
To create a project:
```bash
uv init <ProjectName>
# A version can be given with --python or -p
# e.g. uv init <ProjectName> -p 3.10 results in ">=3.10,<3.11"
```
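For reference, a minimal sketch of the `pyproject.toml` that `uv init <ProjectName> -p 3.10` would generate; fields other than `requires-python` are illustrative:

```toml
[project]
name = "projectname"              # illustrative
version = "0.1.0"
requires-python = ">=3.10,<3.11"  # pinned by -p 3.10, as noted above
dependencies = []
```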
To sync dependencies:
```sh
uv sync
```
## Linter
### Using the ruff library
Ruff supports many of the Flake8 rules.
It can be configured in ruff.toml:
```toml
select = ["E", "W"]
line-length = 88
```
| Category | Code | Description |
| ---- | ------ | ----------------------------------- |
| W | W291 | trailing whitespace |
| | W293 | whitespace on blank lines |
| E | E303 | too many blank lines |
| | E501 | line too long |
To run the linter:
```sh
# Check all Python files in the project
ruff check .
# Check only the source code
ruff check src
```
**Generating a report**
For a simple report, run `generate_linter.sh`:
```sh
sh scripts/generate_linter.sh
```
To output in a specific format:
```sh
ruff check . --output-format json --output-file ruff-report.json
```
`--output-format` accepts various other formats such as `github`.
A way to post the results as comments still needs to be worked out.
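As one hedged option for that comment step, ruff can also emit annotations directly in GitHub's workflow-command format, which compatible runners surface inline on the diff:

```sh
# Emit findings as ::error ...-style workflow annotations
ruff check . --output-format github
```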
## Doc
Run the initial setup:
```sh
mkdir docs
cd docs
sphinx-quickstart
```
Generate the rst files automatically:
```sh
cd docs
sphinx-apidoc -o . ../src
```
Build the docs:
```sh
cd docs
make html
```

View File

@@ -1,11 +0,0 @@
# testing tools
pytest
pytest-cov
coverage-badge
# Linting tool
ruff==0.14.7
# Docs
sphinx
sphinx-rtd-theme
autodoc

View File

View File

@@ -1,12 +0,0 @@
line-length = 79
# Trailing-whitespace and blank-line checks come from the E and W rule families
# E7xx/E9xx (possible syntax/runtime errors)
# W1xx/W5xx (style and formatting)
# Dxxx (docstring rules)
# F (errors such as unused imports)
# Bxx (likely bugs)
[lint]
select = ["F", "E", "W", "D101", "B"]
ignore = []

View File

@@ -1,26 +0,0 @@
#!/bin/bash
# Safe mode (exit the script on any error)
set -euo pipefail
# Variable settings
TF_DIR=${TF_DIR:-terraform}
ENV=${ENV:-dev}
cd "$TF_DIR"
# --- Deploy condition ---
if [[ "${BRANCH_NAME:-}" =~ ^.*deploy$ ]]; then
echo "Start terraform apply (ENV=${ENV}, DIR=${TF_DIR}) ..."
else
echo "Skip terraform apply (branch=${BRANCH_NAME:-})"
exit 0
fi
# --- Make sure the plan output exists ---
if [[ ! -f tfplan ]]; then
echo "ERROR: tfplan not found in $(pwd). Run plan step first." >&2
exit 1
fi
terraform apply -auto-approve tfplan

View File

@@ -1,29 +0,0 @@
#!/bin/bash
# Build a Docker image and push it to Artifact Registry
set -euo pipefail
# Environment variables
REGION=${REGION:-asia-northeast1}
ENV=${ENV:-dev}
JOB_NAME=${JOB_NAME:?JOB_NAME is required}
AR_REPO_NAME="cicd-repo-${ENV}"
# For local runs, fall back to epoch seconds as the tag
HASH_SUFFIX=${HASH_SUFFIX:-$(date +%s)}
# Build IMAGE_URI
IMAGE_URI="${REGION}-docker.pkg.dev/${GCP_PROJECT_ID}/${AR_REPO_NAME}/run-job-${JOB_NAME}-image:${HASH_SUFFIX}"
echo "REGION : ${REGION}"
echo "ENV : ${ENV}"
echo "JOB_NAME : ${JOB_NAME}"
echo "HASH_SUFFIX : ${HASH_SUFFIX}"
echo "IMAGE_URI : ${IMAGE_URI}"
# Configure authentication for Artifact Registry
gcloud auth configure-docker "${REGION}-docker.pkg.dev"
# Build the Docker image and push it to GAR
gcloud builds submit --tag "${IMAGE_URI}" .

View File

@@ -1,21 +0,0 @@
#!/bin/bash
# Safe mode (exit the script on any error)
set -euo pipefail
TF_DIR=${TF_DIR:-terraform}
# State is stored in GCS (S3, etc. would also work)
TF_STATE_BUCKET=${TF_STATE_BUCKET:-cicd-tfstate-bucket-20250906}
ENV=${ENV:-dev}
REPO_NAME=${REPO_NAME:-unknown}
cd "$TF_DIR"
echo "$REPO_NAME"
# --- Run terraform init ---
terraform init \
-backend-config="bucket=${TF_STATE_BUCKET}" \
-backend-config="prefix=${REPO_NAME}/${ENV}"

View File

@@ -1,23 +0,0 @@
#!/bin/bash
# Safe mode (exit the script on any error)
set -euo pipefail
# Variable settings (HASH_SUFFIX is expected from the CI environment)
TF_DIR=${TF_DIR:-terraform}
ENV=${ENV:-dev}
HASH_SUFFIX=${HASH_SUFFIX:?HASH_SUFFIX is required}
cd "$TF_DIR"
if [ -f "${ENV}.tfvars" ]; then
terraform plan \
-out=tfplan \
-var-file="${ENV}.tfvars" \
-var="hash_suffix=${HASH_SUFFIX}"
else
# raise an error
echo "ERROR: ${ENV}.tfvars not found in $(pwd)" >&2
exit 1
fi

View File

@@ -1,123 +0,0 @@
import re
class GenerateCoverage:
"""Parse coverage results and generate a Markdown table."""
def __init__(
self,
coverage_file="pytest-coverage.txt",
output_file="coverage_table.md",
):
"""
Initialize.
:param coverage_file: text file containing the coverage results
:param output_file: Markdown file to write
"""
self.coverage_file = coverage_file
self.output_file = output_file
self.coverage_data = []
self.markdown_table = None
def parse_coverage(self):
"""
Parse the coverage text and extract per-file coverage information.
"""
with open(self.coverage_file, "r") as f:
lines = f.readlines()
coverage_info = []
in_coverage_section = False
for line in lines:
# Detect the start of the coverage section
if "Name" in line and "Stmts" in line and "Miss" in line:
in_coverage_section = True
continue
# Skip the separator line
if in_coverage_section and line.strip().startswith("---"):
continue
# Detect the end of the coverage section (the TOTAL line or an "=" ruler)
if in_coverage_section and (
line.strip().startswith("TOTAL")
or line.strip().startswith("=")
):
break
# Extract the coverage data
if in_coverage_section:
match = re.match(
r"(.+?)\s+(\d+)\s+(\d+)\s+(\d+%)\s*(.*)", line
)
if match:
filename = match.group(1).strip()
statements = match.group(2).strip()
missed = match.group(3).strip()
coverage = match.group(4).strip()
missing_lines = (
match.group(5).strip() if match.group(5) else "-"
)
coverage_info.append(
{
"filename": filename,
"statements": statements,
"missed": missed,
"coverage": coverage,
"missing_lines": missing_lines,
}
)
self.coverage_data = coverage_info
def generate_table(self):
"""
Generate the Markdown table.
"""
if not self.coverage_data:
self.parse_coverage()
print("Parsed coverage data.")
# Markdown table header
table_header = (
"| File | Statements | Missed | Coverage | Missing Lines |\n"
)
table_header += (
"|------|------------|--------|----------|---------------|\n"
)
# Generate the table rows
table_rows = [
(
f"| {data['filename']} | {data['statements']} | "
f"{data['missed']} | {data['coverage']} | "
f"{data['missing_lines']} |"
)
for data in self.coverage_data
]
self.markdown_table = table_header + "\n".join(table_rows)
def print_table(self):
"""
Print the table to the console.
"""
if self.markdown_table is None:
self.generate_table()
print(self.markdown_table)
def save_table(self):
"""
Save the table to a file.
"""
if self.markdown_table is None:
self.generate_table()
with open(self.output_file, "w", encoding="utf-8") as f:
print(f"Markdown table has been saved to {self.output_file}")
f.write(self.markdown_table)
if __name__ == "__main__":
generator = GenerateCoverage()
generator.generate_table()
generator.save_table()
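For reference, `parse_coverage` expects the table that `pytest --cov=src --cov-report term-missing` prints (and which the workflow tees into `pytest-coverage.txt`); a sample input, with values matching the coverage table shown earlier:

```log
Name                          Stmts   Miss  Cover   Missing
-----------------------------------------------------------
src/main.py                       7      1    86%   12
src/utils/__init__.py             0      0   100%
src/utils/custom_logger.py       34     10    71%   35-37, 45-52
src/utils/singleton.py           10      0   100%
-----------------------------------------------------------
TOTAL                            51     11    78%
```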

View File

@@ -1,123 +0,0 @@
import json
from pathlib import Path
PROJECT_NAME = Path(".").resolve().name
print(f"Project Name: {PROJECT_NAME}")
CODE_MAP = {
"W291": {"message": "Trailing whitespace.", "severity": "🟢 Low"},
"W292": {
"message": "No newline at end of file.",
"severity": "🟢 Low",
},
"E501": {
"message": "Line too long; keep lines within 79 characters.",
"severity": "🟢 Low",
},
"D101": {
"message": "Missing docstring in public class.",
"severity": "⚪️ Harmless",
},
}
def get_relative_path(absolute_path: str) -> str:
"""
Convert an absolute path to a path relative to the project root.
"""
try:
index = absolute_path.index(PROJECT_NAME)
return absolute_path[index + len(PROJECT_NAME) + 1 :]
except ValueError:
return absolute_path
class GenerateLinter:
"""Generates the linter report."""
def __init__(
self, json_file="ruff-report.json", output_file="lint-result"
):
"""
Initialize.
"""
self.json_file = json_file
self.output_file = output_file
def _generate_lint_report(self, data: list) -> str:
_str = ""
if not data:
_str += "## Linter: no issues found\n\n"
_str += "Excellent code! 🎉\n"
return _str
_str += "## Linter review\n\n"
_str += "The following issues were found. Please review the code.\n\n"
_str += f"Total: {len(data)}\n"
_str += "### Issues\n"
_str += "|Code|Severity|Message|File|Line|Auto-fixable|\n"
_str += "|---|---|---|---|---|---|\n"
for issue in data:
code = issue.get("code", "-")
severity = (
CODE_MAP.get(code, {}).get("severity", "❓ Unknown")
if code != "-"
else "-"
)
message = CODE_MAP.get(code, {}).get(
"message", issue.get("message", "-")
)
filename = get_relative_path(issue.get("filename", "-"))
file_link = f"./{filename}"
line = ""
if issue.get("location") and issue["location"].get("row"):
line = f"line {issue['location']['row']}"
if issue["location"].get("column"):
line += f", col {issue['location']['column']}"
if issue.get("end_location"):
if issue["end_location"].get("row"):
line += f" to line {issue['end_location']['row']}"
if issue["end_location"].get("column"):
line += f", col {issue['end_location']['column']}"
auto_fix = "✅" if issue.get("fix") else ""
_str += f"|{code}|{severity}|{message}|"
_str += f"[{filename}]({file_link})|{line}|{auto_fix}|\n"
_str += "\n\n"
_str += "### Auto-fix command\n"
_str += (
"For the issues marked as auto-fixable, "
"you can attempt an automatic fix with the following command.\n\n"
)
_str += "```bash\n"
_str += "ruff check --fix .\n"
_str += "```\n\n"
return _str
def generate_lint_report_json(self):
with open(self.json_file, "r") as f:
data = json.load(f)
with open(f"{self.output_file}.md", "w") as f:
report_body = self._generate_lint_report(data)
f.write(report_body)
with open(f"{self.output_file}.json", "w") as f:
report = {"body": self._generate_lint_report(data)}
json.dump(report, f, ensure_ascii=False, indent=4)
print(
f"Linter report generated: {self.output_file}.md"
f" and {self.output_file}.json"
)
if __name__ == "__main__":
generator = GenerateLinter()
generator.generate_lint_report_json()

View File

@@ -1,12 +0,0 @@
from utils.custom_logger import get_logger
logger = get_logger(__name__)
def main():
logger.info("Application started")
print("Hello, World!")
if __name__ == "__main__":
main()

View File

View File

@@ -1,57 +0,0 @@
import os
import logging
import functools
from .singleton import Singleton
class CustomLogger(Singleton):
"""
Singleton logger class that initializes a logger with a specified name
and log file. It provides a method to log entry and exit of functions.
"""
def __init__(self, name="main", log_file=None, level=logging.INFO):
if hasattr(self, "_initialized") and self._initialized:
return # do nothing if already initialized
if os.getenv("ENV", "local"): # always truthy with this default; kept for future env gating
self.logger = logging.getLogger(name)
self.logger.setLevel(level)
self.logger.propagate = False
formatter = logging.Formatter(
"%(asctime)s %(levelname)s "
"[%(filename)s:%(lineno)3d]: %(message)s"
)
# Console handler
ch = logging.StreamHandler()
ch.setFormatter(formatter)
self.logger.addHandler(ch)
# File handler
if log_file:
fh = logging.FileHandler(log_file, encoding="utf-8")
fh.setFormatter(formatter)
self.logger.addHandler(fh)
self._initialized = True
def get_logger(self):
return self.logger
def log_entry_exit(self, func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
self.logger.info(f"Enter: {func.__qualname__}")
result = func(*args, **kwargs)
self.logger.info(f"Exit: {func.__qualname__}")
return result
return wrapper
def get_logger(name="main", log_file=None, level=logging.INFO):
custom_logger = CustomLogger(name, log_file, level)
return custom_logger.get_logger()
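A minimal usage sketch for this module; the `work` function is illustrative, and the import path assumes `src` is on `sys.path` as in the tests:

```python
from utils.custom_logger import CustomLogger, get_logger

logger = get_logger("main", log_file="app.log")  # console + optional file handler

# CustomLogger is a singleton, so this returns the already-initialized
# instance whose logger get_logger() handed back above.
custom = CustomLogger()

@custom.log_entry_exit
def work():
    logger.info("doing work")

work()  # logs "Enter: work", "doing work", "Exit: work"
```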

View File

@@ -1,23 +0,0 @@
"""Singleton pattern implementation in Python.
This implementation is thread-safe and
ensures that only one instance of the class is created.
What Singleton provides is a mechanism for returning the same instance:
* __init__() is still called on every instantiation (behavior many people do not expect)
* guarding the second __init__() call with an _initialized flag is left to the caller
"""
import threading
class Singleton(object):
"""Base class for the singleton pattern."""
_instances = {}
_lock = threading.Lock()
def __new__(cls, *args, **kwargs):
if cls not in cls._instances:
with cls._lock:
if cls not in cls._instances: # double-checked locking
cls._instances[cls] = super(Singleton, cls).__new__(cls)
return cls._instances[cls]
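A minimal sketch of the `_initialized` guard the docstring calls for; the `Config` subclass is illustrative (`CustomLogger` above uses the same pattern):

```python
from utils.singleton import Singleton

class Config(Singleton):
    """Illustrative subclass holding settings, initialized only once."""

    def __init__(self, env="local"):
        # __new__ always returns the same instance, but __init__ still
        # runs on every call, so the subclass must guard it itself.
        if getattr(self, "_initialized", False):
            return
        self.env = env
        self._initialized = True

a = Config(env="dev")
b = Config(env="prd")  # same instance; env remains "dev"
assert a is b and a.env == "dev"
```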

View File

@@ -1,5 +0,0 @@
resource "google_artifact_registry_repository" "repo" {
location = var.region
repository_id = "cicd-repo-${var.env_name}"
format = "DOCKER"
}

View File

@@ -1,12 +0,0 @@
project_id = "gcp-devel-project"
region = "asia-northeast1"
env_name = "dev"
job_name = "base"
# Container image settings (the tag is expected to be passed from CI/CD)
cpu_limit = "1"
memory_limit = "512Mi"
timeout = "1800s"

View File

@@ -1,10 +0,0 @@
# Enable the required Google Cloud APIs
resource "google_project_service" "services" {
for_each = toset([
"run.googleapis.com",
"artifactregistry.googleapis.com",
"cloudbuild.googleapis.com",
])
service = each.key
}

View File

@@ -1,9 +0,0 @@
terraform {
backend "gcs" {}
}
# Google provider configuration
provider "google" {
project = var.project_id
region = var.region
}

View File

@@ -1,29 +0,0 @@
# Cloud Run Job resource
# https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/cloud_run_v2_job
resource "google_cloud_run_v2_job" "job" {
name = "${var.job_name}-${var.env_name}-job"
location = var.region
template {
template {
#
service_account = google_service_account.job_sa.email
containers {
image = "${var.region}-docker.pkg.dev/${var.project_id}/cicd-repo-${var.env_name}/run-job-${var.job_name}-image:${var.hash_suffix}"
resources {
limits = {
cpu = var.cpu_limit
memory = var.memory_limit
}
}
}
timeout = var.timeout
}
}
}

View File

@@ -1,14 +0,0 @@
resource "google_service_account" "job_sa" {
account_id = "sa-${var.job_name}-${var.env_name}"
display_name = "Cloud Run Job Service Account for ${var.job_name} in ${var.env_name} environment"
description = "Cloud Run Job Service Account for ${var.job_name} in ${var.env_name} environment"
project = var.project_id
}
# IAM role assignment
# Grant the permissions required to run the Cloud Run Job
resource "google_project_iam_member" "run_job_invoker" {
project = var.project_id
role = "roles/run.invoker"
member = "serviceAccount:${google_service_account.job_sa.email}"
}

View File

@@ -1,10 +0,0 @@
project_id = "set-your-project-id"
region = "asia-northeast1"
env_name = "dev"
job_name = "set-your-job-name"
# Container image settings (the tag is expected to be passed from CI/CD)
cpu_limit = "1"
memory_limit = "512Mi"
timeout = "1800s"

View File

@@ -1,54 +0,0 @@
# GCP project ID and region
variable "project_id" {
description = "The ID of the GCP project to deploy resources into."
type = string
}
variable "region" {
description = "The GCP region to deploy resources into."
type = string
default = "asia-northeast1" #
}
variable "env_name" {
description = "The environment name for the deployment."
type = string
default = "dev"
validation {
condition = contains(["dev", "staging", "prd"], var.env_name)
error_message = "env_name must be one of: dev, staging, prd."
}
}
variable "job_name" {
description = "The name of the Cloud Run Job."
type = string
default = "get-news-ai"
}
# (expected to be passed from CI/CD)
variable "hash_suffix" {
description = "The image tag suffix (e.g. commit SHA) for the Cloud Run Job container image."
type = string
default = null
}
# Cloud Run Job configuration variables
variable "cpu_limit" {
description = "The CPU limit for the Cloud Run Job container."
type = string
default = "1"
}
variable "memory_limit" {
description = "The memory limit for the Cloud Run Job container."
type = string
default = "512Mi"
}
variable "timeout" {
description = "The task timeout in seconds for the Cloud Run Job."
type = string
default = "1800s"
}

View File

@@ -1,6 +0,0 @@
import sys
from pathlib import Path
# Add the src directory to sys.path
src_path = Path(__file__).parent.parent / "src"
src_path = Path(__file__).parent.parent / "src"
sys.path.insert(0, str(src_path))

View File

@@ -1,27 +0,0 @@
import pytest
from utils.custom_logger import get_logger
from main import main
def test_main(capsys):
"""Check that main runs correctly."""
main()
captured = capsys.readouterr()
assert "Hello, World!" in captured.out
def test_main_no_exception():
"""Check that main does not raise an exception."""
try:
main()
except Exception as e:
pytest.fail(f"main() raised an exception: {e}")
def test_logger_initialization():
"""Check that the logger is initialized correctly."""
logger = get_logger()
logger.info("This is a test log message.")
assert logger is not None
assert logger.name == "main"