initial setting at 20-Aug-2025
22 .env.sql Normal file
@@ -0,0 +1,22 @@
POSTGRES_USER=admin
POSTGRES_PASS=admin123456
POSTGRES_DBNAME=rogdb
DATABASE=postgres
PG_HOST=172.31.25.76
PG_PORT=5432
GS_VERSION=2.20.0
GEOSERVER_PORT=8600
GEOSERVER_DATA_DIR=/opt/geoserver/data_dir
GEOWEBCACHE_CACHE_DIR=/opt/geoserver/data_dir/gwc
GEOSERVER_ADMIN_PASSWORD=geoserver
GEOSERVER_ADMIN_USER=admin
INITIAL_MEMORY=2G
MAXIMUM_MEMORY=3G
SECRET_KEY=django-insecure-o-d6a5mrhc6#=qqb^-c7@rcj#=cjmrjo$!5*i!uotd@j&f_okb
DEBUG=True
ALLOWED_HOSTS=rogaining.sumasen.net
S3_REGION="us-west-2"
S3_BUCKET_NAME="sumasenrogaining"
S3_PREFIX="#{location}/scoreboard/"
AWS_ACCESS_KEY="AKIA6LVMTADSVEB5LZ2H"
AWS_SECRET_ACCESS_KEY="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"
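For reference, the Django side of this commit (config/settings.py.bck further down) reads these variables through django-environ. Note that the settings file looks for a file named .env, while the file added here is .env.sql, so it is presumably copied or renamed at deploy time. A minimal sketch of that loading pattern, with illustrative paths:

```python
# Sketch of how config/settings.py.bck (later in this commit) consumes these
# variables via django-environ. Paths are illustrative.
import os
from pathlib import Path

import environ

BASE_DIR = Path(__file__).resolve().parent.parent

# DEBUG defaults to False when the variable is absent.
env = environ.Env(DEBUG=(bool, False))
environ.Env.read_env(env_file=os.path.join(BASE_DIR, ".env"))

SECRET_KEY = env("SECRET_KEY")
DEBUG = env("DEBUG")
ALLOWED_HOSTS = env("ALLOWED_HOSTS").split(" ")

DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.postgresql",
        "NAME": env("POSTGRES_DBNAME"),
        "USER": env("POSTGRES_USER"),
        "PASSWORD": env("POSTGRES_PASS"),
        "HOST": env("PG_HOST"),
        "PORT": env("PG_PORT"),
    }
}
```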
@@ -39,6 +39,13 @@ RUN apt-get install -y python3
 RUN apt-get update && apt-get install -y \
     python3-pip
 
+# Upgrade libpq  (Added by Akira 2025-5-13)
+RUN apt-get update && apt-get install -y \
+    postgresql-client \
+    libpq-dev \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/*
+
 # Update the base image and install packages
 RUN apt-get update && \
     apt-get install -y \
@@ -690,7 +690,7 @@ def removeQueueMemory
   rescue => e
     p e
     @pgconn.disconnect
-    return "delete error"
+    return "200 OK"
   end
 
 end
@@ -862,7 +862,15 @@ Thread.new do
   begin
     item = JSON.parse($queue.pop)
     p "queue pop : #{item}"
-    makeScoreboard(item["zekken_number"],item["event_code"],to_boolean(item["reprintF"]),false)
+    pdf_path = makeScoreboard(item["zekken_number"],item["event_code"],to_boolean(item["reprintF"]),false)
+
+    # Print confirmation handling
+    if pdf_path && File.exist?(pdf_path)
+      print_command = "lpr -P scoreboard_printer #{pdf_path}"
+      result = system(print_command)
+      puts "印刷結果: #{result ? '成功' : '失敗'}"
+    end
+
     result = removeQueueMemory()
     if result == "delete error" then
       timestamp = Time.now.strftime("%Y-%m-%d %H:%M:%S")
419 MObServer_仕様書.md Normal file
@@ -0,0 +1,419 @@
岐阜ロゲ (GifuTabi) Server API Specification

This document explains the API endpoints of MobServer_gifuroge and what they do. The system provides a server-side API for managing rogaining events.

Table of contents

1. Authentication APIs
2. Team / User Management APIs
3. Checkpoint APIs
4. Route / Location APIs
5. Ranking APIs
6. Report / Scoreboard APIs
7. Administration APIs
8. Other APIs

# Authentication APIs

## /callback_gifuroge (POST)

Function: Webhook for the LINE Bot; handles messages from users.

Usage: Called automatically by the LINE Platform.

## /check_event_code (GET)

Parameters:
zekken_number: bib number
pw: password

Returns: the event code, or error information

Function: Verifies that the combination of bib number and password is correct and returns the event code.
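As an illustration only, a client call to this endpoint might look like the sketch below; the base URL and the exact shape of the JSON response are assumptions, not part of this specification.

```python
# Hypothetical client call to /check_event_code.
# BASE_URL and the response fields are assumptions for illustration only.
import requests

BASE_URL = "https://rogaining.example.net"  # placeholder host

resp = requests.get(
    f"{BASE_URL}/check_event_code",
    params={"zekken_number": "101", "pw": "secret"},
    timeout=10,
)
resp.raise_for_status()
print(resp.json())  # expected to contain the event code or an error message
```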

# Team / User Management APIs

## /get_team_list (GET)

Parameters:
event_code: event code (optional)

Returns: team list

Function: Retrieves the team list for the specified event, or for all events.

## /get_zekken_list (GET)

Parameters:
event: event code

Returns: list of bib numbers

Function: Retrieves all bib numbers for the specified event.

## /register_team (POST)

Parameters:
zekken_number: bib number
event_code: event code
team_name: team name
class_name: class name
password: password

Returns: registration result

Function: Registers a new team.

## /update_team_name (POST)

Parameters:
zekken_number: bib number
new_team_name: new team name
event_code: event code

Returns: update result

Function: Updates a team's name.

## /teamClassChanger (GET)

Parameters:
zekken: bib number
event: event code
new_class: new class name

Returns: change result

Function: Changes a team's class.

## /teamRegister (GET)

Parameters:
event: event code
class: class name
zekken: bib number
team: team name
pass: password

Returns: registration result

Function: Registers a team (for administrators).

## /zekkenMaxNum (GET)

Parameters:
event: event code

Returns: maximum bib number

Function: Retrieves the largest bib number in use for the specified event.

## /zekkenDoubleCheck (GET)

Parameters:
zekken: bib number
event: event code

Returns: duplicate-check result

Function: Checks whether the specified bib number is already in use.

## /get_chatlog (GET)

Parameters:
event: event code
zekken: bib number

Returns: chat log

Function: Retrieves the specified team's chat log with the LINE Bot.

# Checkpoint APIs

## /input_cp (POST)

Parameters:
zekken_number: bib number
event_code: event code
cp_number: checkpoint number
image_address: image address

Returns: processing result

Function: Registers checkpoint passage information.

## /getCheckpointList (GET)

Parameters:
event: event code

Returns: checkpoint list

Function: Retrieves all checkpoint information for the specified event.

## /start_from_rogapp (POST)

Parameters:
event_code: event code
team_name: team name

Returns: processing result

Function: Executes the start procedure from the app.

## /checkin_from_rogapp (POST)

Parameters:
event_code: event code
team_name: team name
cp_number: checkpoint number
image: image URL

Returns: processing result

Function: Executes the check-in procedure from the app.

## /goal_from_rogapp (POST)

Parameters:
event_code: event code
team_name: team name
image: image URL
goal_time: finish time

Returns: processing result and scoreboard URL

Function: Executes the finish procedure from the app and generates the scoreboard.
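The three app-facing endpoints above (/start_from_rogapp, /checkin_from_rogapp, /goal_from_rogapp) form the start, check-in, and finish flow. The sketch below walks through that flow from a hypothetical test script; the base URL, the form encoding, and the goal_time format are assumptions.

```python
# Hypothetical walk-through of the app-facing flow:
# /start_from_rogapp -> /checkin_from_rogapp -> /goal_from_rogapp.
# Base URL, payload encoding, and field formats are assumptions.
import requests

BASE_URL = "https://rogaining.example.net"  # placeholder host
team = {"event_code": "sample_event", "team_name": "sample_team"}

# Start the team.
requests.post(f"{BASE_URL}/start_from_rogapp", data=team, timeout=10)

# Check in at checkpoint 12 with a photo URL.
requests.post(
    f"{BASE_URL}/checkin_from_rogapp",
    data={**team, "cp_number": "12", "image": "https://example.com/cp12.jpg"},
    timeout=10,
)

# Finish; per the spec the response includes the scoreboard URL.
goal = requests.post(
    f"{BASE_URL}/goal_from_rogapp",
    data={**team, "image": "https://example.com/goal.jpg",
          "goal_time": "2025-08-20 13:45:00"},  # time format is an assumption
    timeout=10,
)
print(goal.text)
```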

## /remove_checkin_from_rogapp (POST)

Parameters:
event_code: event code
team_name: team name
cp_number: checkpoint number

Returns: processing result

Function: Deletes a check-in record from the app.

## /startCheckin (GET)

Parameters:
event: event code
zekken: bib number

Returns: processing result

Function: Executes the start procedure from the admin console.

## /addCheckin (GET)

Parameters:
event: event code
zekken: bib number
list: comma-separated list of checkpoint numbers

Returns: processing result

Function: Registers multiple checkpoints at once from the admin console.
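Note that the list parameter of /addCheckin is a single comma-separated string of checkpoint numbers rather than a repeated parameter. A hypothetical admin-console call (base URL assumed):

```python
# Hypothetical call to /addCheckin; "list" is one comma-separated string.
import requests

BASE_URL = "https://rogaining.example.net"  # placeholder host

resp = requests.get(
    f"{BASE_URL}/addCheckin",
    params={"event": "sample_event", "zekken": "101", "list": "3,7,15,22"},
    timeout=10,
)
print(resp.text)  # processing result
```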

## /deleteCheckin (GET)

Parameters:
zekken: bib number
event_code: event code
sn: serial number

Returns: processing result

Function: Deletes a check-in record.

## /moveCheckin (GET)

Parameters:
zekken: bib number
event_code: event code
old_sn: source serial number
new_sn: destination serial number

Returns: processing result

Function: Moves a check-in record (changes its order).

## /goalCheckin (GET)

Parameters:
event: event code
zekken: bib number
goal_time: finish time

Returns: processing result

Function: Executes the finish procedure from the admin console.

## /changeGoalTimeCheckin (GET)

Parameters:
event: event code
zekken: bib number
goal_time: new finish time

Returns: processing result

Function: Changes a team's finish time.

## /getCheckinList (GET)

Parameters:
zekken: bib number
event: event code

Returns: list of check-in records

Function: Retrieves the specified team's check-in records.

## /serviceCheckTrue, /serviceCheckFalse (GET)

Parameters:
event: event code
zekken: bib number
sn: serial number

Returns: processing result

Function: Sets the service-check flag to True / False.

## /getYetCheckSeeviceList (GET)

Parameters:
event: event code

Returns: list of unchecked services

Function: Retrieves the list of service checkpoints that have not yet been checked.

# Route / Location APIs

## /get_waypoint_datas_from_rogapp (POST)

Parameters:
team_name: team name
event_code: event code
waypoints: array of waypoint data

Returns: processing result

Function: Receives waypoint data from the app and stores it.

## /getRoute (GET)

Parameters:
team: team name
event_code: event code

Returns: route data

Function: Retrieves the specified team's route information.

## /fetchUserLocations (GET)

Parameters:
zekken_number: bib number
event_code: event code

Returns: location data

Function: Retrieves a user's location history.

## /getAllRoutes (GET)

Parameters:
event_code: event code
class_name: class name (optional)

Returns: route data for all teams

Function: Retrieves route information for every team in the specified event.

## /getStartPoint (GET)

Parameters:
event: event code

Returns: start point information

Function: Retrieves the event's start point information.

## /analyze_point (GET)

Parameters:
lat: latitude
lng: longitude
team_name: team name
event_code: event code

Returns: analysis result

Function: Analyzes the specified point (speed, movement type, and so on).

## /top_users_routes (GET)

Parameters:
event_code: event code
class_name: class name

Returns: routes of the top users

Function: Retrieves route information for the top competitors in the specified class.

## /generate_route_image (GET)

Parameters:
event_code: event code
zekken_number: bib number

Returns: URL of the generated image

Function: Generates an image that visualizes the team's route.

## /realtimeMonitor, /realtimeMonitor_zekken_narrow (GET)

Parameters:
event_code: event code
class: class name (optional)
zekken: bib number (narrow version only)

Returns: real-time monitoring data

Function: Retrieves real-time team location information.

# Ranking APIs

## /get_ranking (GET)

Parameters:
class: class name
event: event code

Returns: ranking data

Function: Retrieves the ranking for the specified class.

## /all_ranking_top3 (GET)

Parameters:
event: event code

Returns: top-3 rankings for all classes

Function: Retrieves the top-3 rankings for every class in the specified event.

## /all_ranking_top3_for_fcgifu (GET)

Parameters: none

Returns: top-3 rankings for FC岐阜

Function: Retrieves the top-3 rankings and route information for all classes of the FC岐阜 event.

## /all_ranking_for_fcgifu (GET)

Parameters: none

Returns: full rankings for FC岐阜

Function: Retrieves the full rankings and route information for the FC岐阜 event.

# Report / Scoreboard APIs

## /get_photo_list, /get_photo_list_prod (GET)

Parameters:
zekken: bib number
pw: password (prod version only)
event: event code

Returns: photo list and report URL

Function: Retrieves the team's photos and report URL.

## /getScoreboard (GET)

Parameters:
z_num: bib number
event: event code

Returns: scoreboard Excel file

Function: Downloads the team's scoreboard.

## /download_scoreboard (GET)

Parameters:
event_code: event code
zekken_number: bib number

Returns: scoreboard PDF file

Function: Downloads the team's scoreboard PDF.
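A hypothetical download of the PDF returned by /download_scoreboard, saving the binary response to disk; the base URL and output file name are assumptions.

```python
# Hypothetical download of a team's scoreboard PDF via /download_scoreboard.
import requests

BASE_URL = "https://rogaining.example.net"  # placeholder host

resp = requests.get(
    f"{BASE_URL}/download_scoreboard",
    params={"event_code": "sample_event", "zekken_number": "101"},
    timeout=30,
)
resp.raise_for_status()
with open("scoreboard_101.pdf", "wb") as f:
    f.write(resp.content)  # per the spec, the response body is the PDF itself
```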

## /reprint (GET)

Parameters:
event: event code
zekken: bib number

Returns: processing result

Function: Regenerates the scoreboard.

## /makeAllScoreboard (GET)

Parameters:
event: event code

Returns: processing result

Function: Generates scoreboards for every team in the specified event in one batch.

## /makeCpListSheet (POST)

Parameters:
event: event code
cp_csv: checkpoint CSV file
sponsor_csv: sponsor CSV file

Returns: CP list sheet Excel file

Function: Generates the checkpoint list sheet.

# Administration APIs

## /rogainingSimulator (GET)

Parameters:
event_code: event code
course_time: course time
pause_time_free: stop time at free CPs
pause_time_paid: stop time at paid CPs
spare_time: spare time
target_velocity: target speed
free_node_to_visit: free nodes to visit
paid_node_to_visit: paid nodes to visit

Returns: simulation result

Function: Runs a rogaining route simulation.
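Because this endpoint takes many query parameters, a hypothetical call is sketched below; every value (and the units of the time and speed parameters) is an illustrative assumption, not a server default.

```python
# Hypothetical call to /rogainingSimulator; all values and units are assumed.
import requests

BASE_URL = "https://rogaining.example.net"  # placeholder host

params = {
    "event_code": "sample_event",
    "course_time": 180,            # course time (unit assumed to be minutes)
    "pause_time_free": 2,          # stop time at free CPs
    "pause_time_paid": 5,          # stop time at paid CPs
    "spare_time": 10,
    "target_velocity": 8,          # target speed (unit assumed to be km/h)
    "free_node_to_visit": "3,7,15",
    "paid_node_to_visit": "21,22",
}
resp = requests.get(f"{BASE_URL}/rogainingSimulator", params=params, timeout=60)
print(resp.json())  # simulation result (responses are JSON per the closing note)
```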

# Other APIs

## /test_gifuroge (GET)

Function: Endpoint for checking that the server is running.

## /practice (GET)

Function: Practice endpoint.

That is the API specification of the 岐阜ロゲ server. Each API performs a specific function and returns its response as JSON or as a file. Most of these APIs are designed as back-end functions for event administrators, but some of them can also be used from the rogaining app.


Migration:

remove all migration files
drop the database and tables
create database rogdb
python manage.py makemigrations
python manage.py migrate
restore the DB from backup


Tests:

# Run all tests
docker compose exec app python manage.py test

# Run only the rog application's tests
docker compose exec app python manage.py test rog.tests

# Run tests with verbose output (to see error details)
docker compose exec app python manage.py test rog.tests --verbosity=2

# Run a specific test class only
docker compose exec app python manage.py test rog.tests.TestLocationModel

# Run a specific test method only
docker compose exec app python manage.py test rog.tests.TestLocationModel.test_create_location

# Install coverage (first time only)
docker compose exec app pip install coverage

# Run the tests under coverage
docker compose exec app coverage run --source='.' manage.py test rog

# Show the coverage report
docker compose exec app coverage report


docker compose run app python manage.py import_event_data <CSV file path> <event code>

docker compose run app python manage.py import_event_data /app/rog/data/参加者システムテスト.csv 中津川
8130 MobServer_gifuroge.rb Normal file
File diff suppressed because it is too large.
65 README Normal file
@@ -0,0 +1,65 @@
2025-1-25  Issues

- Two databases are still left over.
    PGPASSWORD=admin123456 psql -h localhost -U admin -p 5432 -d gifuroge
    \c rogdb
  The coordination with gifuroge looks wrong.
  Is the rogdb side actually working?
- Automatic printing
- Editing of passed checkpoints
- Real-time monitor


2025-5-13  The DB merge has been put on hold for now.
- gps_information was to be migrated into GpsLog, but the field handling needs to be fixed first.


2025-5-13  Starting functional checks of the existing system.
Test items:
- App simulation
- Shopping-point verification
- Goal => automatic printing
- Score correction
- Ranking display
- Route display


Printer setup:
lpstat -p

If the printer is not listed, check the state of CUPS with
sudo systemctl status cups

Add the printer with
sudo lpadmin -p scoreboard_printer -E -v socket://192.168.100.50:9100 -m raw


# Show the queue of a specific printer
lpq -P scoreboard_printer

# Show all jobs
lpstat -o

# Show the printer's detailed status
lpstat -v scoreboard_printer

# Check connectivity to the printer
ping 192.168.100.50

# Test the connection to port 9100
telnet 192.168.100.50 9100
# (once connected, exit with Ctrl+])

# Cancel the current job
cancel scoreboard_printer-1

# Cancel all jobs
cancel -a scoreboard_printer

# Restart the printer
cupsdisable scoreboard_printer
cupsenable scoreboard_printer

# Check the error log (most important)
sudo tail -f /var/log/cups/error_log

# Restart the CUPS service
sudo systemctl restart cups
1329 Ruby-Django移行仕様書.md Normal file
File diff suppressed because it is too large.
BIN TempProject.zip Normal file
Binary file not shown.
292 aaa.aaa Normal file
@@ -0,0 +1,292 @@
45degrees 余語様

This is the Gifu AI Network.

yogomi@yahoo.co.jp was not registered with 岐阜ロゲ, so we created an account on your behalf and completed your entry for this event.
The temporary password is yogomi123. After logging in, please set your own password.

We look forward to seeing you tomorrow.

Miyata

----------------------------------------------------------
Nonprofit Organization Gifu AI Network
Chairman  Akira Miyata
Akira Miyata
Chairman
NPO Gifu AI Network
Web: https://www.gifuai.net/ <https://www.gifuai.net/>
----------------------------------------------------------



杉山 凌矢様

This is the Gifu AI Network.

Your address was not registered with 岐阜ロゲ, so we created an account on your behalf and completed your entry for this event.
The temporary password for ryoya3997@icloud.com is sugiya123. After logging in, please set your own password.

We look forward to seeing you tomorrow.

Miyata

----------------------------------------------------------
Nonprofit Organization Gifu AI Network
Chairman  Akira Miyata
Akira Miyata
Chairman
NPO Gifu AI Network
Web: https://www.gifuai.net/ <https://www.gifuai.net/>
----------------------------------------------------------



近藤 隆様

This is the Gifu AI Network.

Your address was not registered with 岐阜ロゲ, so we created an account on your behalf and completed your entry for this event.
The temporary password for kondo2000gt@na.commufa.jp is kondo123. After logging in, please set your own password.

We look forward to seeing you tomorrow.

Miyata

----------------------------------------------------------
Nonprofit Organization Gifu AI Network
Chairman  Akira Miyata
Akira Miyata
Chairman
NPO Gifu AI Network
Web: https://www.gifuai.net/ <https://www.gifuai.net/>
----------------------------------------------------------



マッパ 田中様

This is the Gifu AI Network.

Your address was not registered with 岐阜ロゲ, so we created an account on your behalf and completed your entry for this event.
The temporary password for rnfqp821@ma.medias.ne.jp is tanaka123. After logging in, please set your own password.

We look forward to seeing you tomorrow.

Miyata

----------------------------------------------------------
Nonprofit Organization Gifu AI Network
Chairman  Akira Miyata
Akira Miyata
Chairman
NPO Gifu AI Network
Web: https://www.gifuai.net/ <https://www.gifuai.net/>
----------------------------------------------------------



OLCルーパー/OLCふるはうす 本多様

This is the Gifu AI Network.

Your address was not registered with 岐阜ロゲ, so we created an account on your behalf and completed your entry for this event.
The temporary password for honda.nouken-t@outlook.jp is honda123. After logging in, please set your own password.

We look forward to seeing you tomorrow.

Miyata

----------------------------------------------------------
Nonprofit Organization Gifu AI Network
Chairman  Akira Miyata
Akira Miyata
Chairman
NPO Gifu AI Network
Web: https://www.gifuai.net/ <https://www.gifuai.net/>
----------------------------------------------------------



清水有希様

This is the Gifu AI Network.

Your address was not registered with 岐阜ロゲ, so we created an account on your behalf and completed your entry for this event.
The temporary password for wszbnhmjfx432@gmail.com is shimizu123. After logging in, please set your own password.

We look forward to seeing you tomorrow.

Miyata

----------------------------------------------------------
Nonprofit Organization Gifu AI Network
Chairman  Akira Miyata
Akira Miyata
Chairman
NPO Gifu AI Network
Web: https://www.gifuai.net/ <https://www.gifuai.net/>
----------------------------------------------------------



青波走行会 坂口 様

This is the Gifu AI Network.

Your address was not registered with 岐阜ロゲ, so we created an account on your behalf and completed your entry for this event.
The temporary password for bitter_smile107@yahoo.co.jp is sakagu123. After logging in, please set your own password.

We look forward to seeing you tomorrow.

Miyata

----------------------------------------------------------
Nonprofit Organization Gifu AI Network
Chairman  Akira Miyata
Akira Miyata
Chairman
NPO Gifu AI Network
Web: https://www.gifuai.net/ <https://www.gifuai.net/>
----------------------------------------------------------



庭野智美様

This is the Gifu AI Network.

Your address was not registered with 岐阜ロゲ, so we created an account on your behalf and completed your entry for this event.
The temporary password for niwasun0758@protonmail.com is niwano123. After logging in, please set your own password.

We look forward to seeing you tomorrow.

Miyata

----------------------------------------------------------
Nonprofit Organization Gifu AI Network
Chairman  Akira Miyata
Akira Miyata
Chairman
NPO Gifu AI Network
Web: https://www.gifuai.net/ <https://www.gifuai.net/>
----------------------------------------------------------



がんばるぞ 森様

This is the Gifu AI Network.

Your address was not registered with 岐阜ロゲ, so we created an account on your behalf and completed your entry for this event.
The temporary password for youkeymr.01@gmail.com is moriyu123. After logging in, please set your own password.

We look forward to seeing you tomorrow.

Miyata

----------------------------------------------------------
Nonprofit Organization Gifu AI Network
Chairman  Akira Miyata
Akira Miyata
Chairman
NPO Gifu AI Network
Web: https://www.gifuai.net/ <https://www.gifuai.net/>
----------------------------------------------------------



むらさきうさぎチーム 森様

This is the Gifu AI Network.

Your address was not registered with 岐阜ロゲ, so we created an account on your behalf and completed your entry for this event.
The temporary password for bosque.mk@gmail.com is morimi123. After logging in, please set your own password.

We look forward to seeing you tomorrow.

Miyata

----------------------------------------------------------
Nonprofit Organization Gifu AI Network
Chairman  Akira Miyata
Akira Miyata
Chairman
NPO Gifu AI Network
Web: https://www.gifuai.net/ <https://www.gifuai.net/>
----------------------------------------------------------



山附純一様

This is the Gifu AI Network.

Your address was not registered with 岐阜ロゲ, so we created an account on your behalf and completed your entry for this event.
The temporary password for sijuuhatutaki@gmail.com is yamazu123. After logging in, please set your own password.

We look forward to seeing you tomorrow.

Miyata

----------------------------------------------------------
Nonprofit Organization Gifu AI Network
Chairman  Akira Miyata
Akira Miyata
Chairman
NPO Gifu AI Network
Web: https://www.gifuai.net/ <https://www.gifuai.net/>
----------------------------------------------------------



松村覚司様

This is the Gifu AI Network.

Your address was not registered with 岐阜ロゲ, so we created an account on your behalf and completed your entry for this event.
The temporary password for happy.dreams.come.true923@gmail.com is matumu123. After logging in, please set your own password.

We look forward to seeing you tomorrow.

Miyata

----------------------------------------------------------
Nonprofit Organization Gifu AI Network
Chairman  Akira Miyata
Akira Miyata
Chairman
NPO Gifu AI Network
Web: https://www.gifuai.net/ <https://www.gifuai.net/>
----------------------------------------------------------



ナカムラカスモリ 高桑様

This is the Gifu AI Network.

Your address was not registered with 岐阜ロゲ, so we created an account on your behalf and completed your entry for this event.
The temporary password for kamigou07@gmail.com is takaku123. After logging in, please set your own password.

We look forward to seeing you tomorrow.

Miyata

----------------------------------------------------------
Nonprofit Organization Gifu AI Network
Chairman  Akira Miyata
Akira Miyata
Chairman
NPO Gifu AI Network
Web: https://www.gifuai.net/ <https://www.gifuai.net/>
----------------------------------------------------------
320 config/settings.py.bck Normal file
@@ -0,0 +1,320 @@
|
|||||||
|
"""
|
||||||
|
Django settings for config project.
|
||||||
|
|
||||||
|
Generated by 'django-admin startproject' using Django 3.2.9.
|
||||||
|
|
||||||
|
For more information on this file, see
|
||||||
|
https://docs.djangoproject.com/en/3.2/topics/settings/
|
||||||
|
|
||||||
|
For the full list of settings and their values, see
|
||||||
|
https://docs.djangoproject.com/en/3.2/ref/settings/
|
||||||
|
"""
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
import environ
|
||||||
|
import os
|
||||||
|
import dj_database_url
|
||||||
|
|
||||||
|
# Build paths inside the project like this: BASE_DIR / 'subdir'.
|
||||||
|
BASE_DIR = Path(__file__).resolve().parent.parent
|
||||||
|
|
||||||
|
env = environ.Env(DEBUG=(bool, False))
|
||||||
|
environ.Env.read_env(env_file=os.path.join(BASE_DIR, ".env"))
|
||||||
|
|
||||||
|
import os
|
||||||
|
print("="*50)
|
||||||
|
print("Current working directory:", os.getcwd())
|
||||||
|
print("Base directory:", BASE_DIR)
|
||||||
|
print("Environment file exists:", os.path.exists(os.path.join(BASE_DIR, ".env")))
|
||||||
|
print("Environment variables in .env file:")
|
||||||
|
if os.path.exists(os.path.join(BASE_DIR, ".env")):
|
||||||
|
with open(os.path.join(BASE_DIR, ".env"), "r") as f:
|
||||||
|
print(f.read())
|
||||||
|
print("="*50)
|
||||||
|
|
||||||
|
# Quick-start development settings - unsuitable for production
|
||||||
|
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
|
||||||
|
|
||||||
|
# SECURITY WARNING: keep the secret key used in production secret!
|
||||||
|
#SECRET_KEY = 'django-insecure-@!z!i#bheb)(o1-e2tss(i^dav-ql=cm4*+$unm^3=4)k_ttda'
|
||||||
|
SECRET_KEY = env("SECRET_KEY")
|
||||||
|
|
||||||
|
# SECURITY WARNING: don't run with debug turned on in production!
|
||||||
|
#DEBUG = True
|
||||||
|
DEBUG = env("DEBUG")
|
||||||
|
|
||||||
|
#ALLOWED_HOSTS = []
|
||||||
|
ALLOWED_HOSTS = env("ALLOWED_HOSTS").split(" ")
|
||||||
|
|
||||||
|
|
||||||
|
# Application definition
|
||||||
|
|
||||||
|
INSTALLED_APPS = [
|
||||||
|
'django.contrib.admin',
|
||||||
|
'django.contrib.auth',
|
||||||
|
'django.contrib.contenttypes',
|
||||||
|
'django.contrib.sessions',
|
||||||
|
'django.contrib.messages',
|
||||||
|
'django.contrib.staticfiles',
|
||||||
|
'django.contrib.gis',
|
||||||
|
'rest_framework',
|
||||||
|
'rest_framework_gis',
|
||||||
|
'knox',
|
||||||
|
'leaflet',
|
||||||
|
'leaflet_admin_list',
|
||||||
|
'rog.apps.RogConfig',
|
||||||
|
'corsheaders', # added
|
||||||
|
'django_filters'
|
||||||
|
]
|
||||||
|
|
||||||
|
MIDDLEWARE = [
|
||||||
|
'corsheaders.middleware.CorsMiddleware',  # keep as close to the top as possible
|
||||||
|
'django.middleware.common.CommonMiddleware',
|
||||||
|
|
||||||
|
'django.middleware.security.SecurityMiddleware',
|
||||||
|
'django.contrib.sessions.middleware.SessionMiddleware',
|
||||||
|
'django.middleware.common.CommonMiddleware',
|
||||||
|
'django.middleware.csrf.CsrfViewMiddleware',
|
||||||
|
'django.contrib.auth.middleware.AuthenticationMiddleware',
|
||||||
|
'django.contrib.messages.middleware.MessageMiddleware',
|
||||||
|
'django.middleware.clickjacking.XFrameOptionsMiddleware',
|
||||||
|
]
|
||||||
|
|
||||||
|
ROOT_URLCONF = 'config.urls'
|
||||||
|
|
||||||
|
CORS_ALLOW_ALL_ORIGINS = True  # development environment only
|
||||||
|
CORS_ALLOW_CREDENTIALS = True
|
||||||
|
|
||||||
|
CORS_ALLOWED_METHODS = [
|
||||||
|
'GET',
|
||||||
|
'POST',
|
||||||
|
'PUT',
|
||||||
|
'PATCH',
|
||||||
|
'DELETE',
|
||||||
|
'OPTIONS'
|
||||||
|
]
|
||||||
|
CORS_ALLOWED_HEADERS = [
|
||||||
|
'accept',
|
||||||
|
'accept-encoding',
|
||||||
|
'authorization',
|
||||||
|
'content-type',
|
||||||
|
'dnt',
|
||||||
|
'origin',
|
||||||
|
'user-agent',
|
||||||
|
'x-csrftoken',
|
||||||
|
'x-requested-with',
|
||||||
|
]
|
||||||
|
|
||||||
|
# In production, restrict the allowed origins as follows
|
||||||
|
CORS_ALLOWED_ORIGINS = [
|
||||||
|
"https://rogaining.sumasen.net",
|
||||||
|
"http://rogaining.sumasen.net",
|
||||||
|
]
|
||||||
|
|
||||||
|
# CSRF settings
|
||||||
|
CSRF_TRUSTED_ORIGINS = [
|
||||||
|
"http://rogaining.sumasen.net",
|
||||||
|
"https://rogaining.sumasen.net",
|
||||||
|
]
|
||||||
|
|
||||||
|
# Add the following settings to settings.py
|
||||||
|
# Report directory settings
|
||||||
|
REPORT_DIRECTORY = 'reports'
|
||||||
|
REPORT_BASE_URL = '/media/reports/'
|
||||||
|
|
||||||
|
|
||||||
|
TEMPLATES = [
|
||||||
|
{
|
||||||
|
'BACKEND': 'django.template.backends.django.DjangoTemplates',
|
||||||
|
'DIRS': [os.path.join(BASE_DIR, 'templates')],
|
||||||
|
'APP_DIRS': True,
|
||||||
|
'OPTIONS': {
|
||||||
|
'context_processors': [
|
||||||
|
'django.template.context_processors.debug',
|
||||||
|
'django.template.context_processors.request',
|
||||||
|
'django.contrib.auth.context_processors.auth',
|
||||||
|
'django.contrib.messages.context_processors.messages',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
WSGI_APPLICATION = 'config.wsgi.application'
|
||||||
|
|
||||||
|
|
||||||
|
# Database
|
||||||
|
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
|
||||||
|
|
||||||
|
|
||||||
|
DATABASES = {
|
||||||
|
'default': {
|
||||||
|
'ENGINE': 'django.db.backends.postgresql',
|
||||||
|
'NAME': env('POSTGRES_DBNAME'),
|
||||||
|
'USER': env('POSTGRES_USER'),
|
||||||
|
'PASSWORD': env('POSTGRES_PASS'),
|
||||||
|
'HOST': env('PG_HOST'),
|
||||||
|
'PORT': env('PG_PORT'),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# Password validation
|
||||||
|
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
|
||||||
|
|
||||||
|
AUTH_PASSWORD_VALIDATORS = [
|
||||||
|
{
|
||||||
|
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
# Internationalization
|
||||||
|
# https://docs.djangoproject.com/en/3.2/topics/i18n/
|
||||||
|
|
||||||
|
LANGUAGE_CODE = 'en-us'
|
||||||
|
|
||||||
|
TIME_ZONE = 'Asia/Tokyo'
|
||||||
|
|
||||||
|
USE_I18N = True
|
||||||
|
|
||||||
|
USE_L10N = True
|
||||||
|
|
||||||
|
USE_TZ = True
|
||||||
|
|
||||||
|
|
||||||
|
# Static files (CSS, JavaScript, Images)
|
||||||
|
# https://docs.djangoproject.com/en/3.2/howto/static-files/
|
||||||
|
|
||||||
|
STATIC_URL = '/static/'
|
||||||
|
|
||||||
|
#STATIC_URL = '/static2/'
|
||||||
|
#STATIC_ROOT = BASE_DIR / "static"
|
||||||
|
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
|
||||||
|
|
||||||
|
MEDIA_URL = '/media/'
|
||||||
|
#MEDIA_ROOT = BASE_DIR / "media/"
|
||||||
|
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
|
||||||
|
|
||||||
|
#STATICFILES_DIRS = (os.path.join(BASE_DIR, "static2"),os.path.join(BASE_DIR, "media"))
|
||||||
|
|
||||||
|
|
||||||
|
AUTHENTICATION_BACKENDS = ( 'django.contrib.auth.backends.ModelBackend' , 'rog.backend.EmailOrUsernameModelBackend', )
|
||||||
|
|
||||||
|
AUTH_USER_MODEL = 'rog.CustomUser'
|
||||||
|
|
||||||
|
|
||||||
|
# Default primary key field type
|
||||||
|
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
|
||||||
|
|
||||||
|
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
|
||||||
|
|
||||||
|
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
|
||||||
|
|
||||||
|
LEAFLET_CONFIG = {
|
||||||
|
'DEFAULT_CENTER': (35.41864442627996, 138.14094040951784),
|
||||||
|
'DEFAULT_ZOOM': 6,
|
||||||
|
'MIN_ZOOM': 3,
|
||||||
|
'MAX_ZOOM': 19,
|
||||||
|
'DEFAULT_PRECISION': 6,
|
||||||
|
'SCALE':"both",
|
||||||
|
'ATTRIBUTION_PREFIX':"ROGAINING API",
|
||||||
|
'TILES': [('Satellite', 'https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}', {'attribution': '© ESRI', 'maxZoom': 19}),
|
||||||
|
('Streets', 'http://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {'attribution': '© Contributors'})]
|
||||||
|
}
|
||||||
|
|
||||||
|
REST_FRAMEWORK = {
|
||||||
|
'DEFAULT_FILTER_BACKENDS': ['django_filters.rest_framework.DjangoFilterBackend'],
|
||||||
|
'DEFAULT_AUTHENTICATION_CLASSES': ('knox.auth.TokenAuthentication', ),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
#FRONTEND_URL = 'https://rogaining.intranet.sumasen.net'  # change to the front-end URL as appropriate
|
||||||
|
FRONTEND_URL = 'https://rogaining.sumasen.net'  # change to the front-end URL as appropriate
|
||||||
|
|
||||||
|
# With the console backend, emails are not actually sent but printed to the console; the SMTP backend below sends real mail.
|
||||||
|
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
|
||||||
|
|
||||||
|
EMAIL_HOST = 'smtp.outlook.com'
|
||||||
|
EMAIL_PORT = 587
|
||||||
|
EMAIL_USE_TLS = True
|
||||||
|
EMAIL_HOST_USER = 'rogaining@gifuai.net'
|
||||||
|
EMAIL_HOST_PASSWORD = 'ctcpy9823"x~'
|
||||||
|
DEFAULT_FROM_EMAIL = 'rogaining@gifuai.net'
|
||||||
|
|
||||||
|
APP_DOWNLOAD_LINK = 'https://apps.apple.com/jp/app/%E5%B2%90%E9%98%9C%E3%83%8A%E3%83%93/id6444221792'
|
||||||
|
ANDROID_DOWNLOAD_LINK = 'https://play.google.com/store/apps/details?id=com.dvox.gifunavi&hl=ja'
|
||||||
|
|
||||||
|
SERVICE_NAME = '岐阜ナビ(岐阜ロゲのアプリ)'
|
||||||
|
|
||||||
|
# settings.py
|
||||||
|
DEFAULT_CHARSET = 'utf-8'
|
||||||
|
|
||||||
|
#REST_FRAMEWORK = {
|
||||||
|
# 'DEFAULT_RENDERER_CLASSES': [
|
||||||
|
# 'rest_framework.renderers.JSONRenderer',
|
||||||
|
# ],
|
||||||
|
# 'JSON_UNICODE_ESCAPE': False,
|
||||||
|
#}
|
||||||
|
|
||||||
|
LOGGING = {
|
||||||
|
'version': 1,
|
||||||
|
'disable_existing_loggers': False,
|
||||||
|
'formatters': {
|
||||||
|
'verbose': {
|
||||||
|
'format': '{levelname} {asctime} {module} {message}',
|
||||||
|
'style': '{',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
'handlers': {
|
||||||
|
#'file': {
|
||||||
|
# 'level': 'DEBUG',
|
||||||
|
# 'class': 'logging.FileHandler',
|
||||||
|
# 'filename': os.path.join(BASE_DIR, 'logs/debug.log'),
|
||||||
|
# 'formatter': 'verbose',
|
||||||
|
#},
|
||||||
|
'console': {
|
||||||
|
'level': 'DEBUG',
|
||||||
|
'class': 'logging.StreamHandler',
|
||||||
|
'formatter': 'verbose',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
'root': {
|
||||||
|
'handlers': ['console'],
|
||||||
|
'level': 'DEBUG',
|
||||||
|
},
|
||||||
|
'loggers': {
|
||||||
|
'django': {
|
||||||
|
'handlers': ['console'],
|
||||||
|
'level': 'INFO',
|
||||||
|
'propagate': False,
|
||||||
|
},
|
||||||
|
'django.request': {
|
||||||
|
'handlers': ['console'],
|
||||||
|
'level': 'DEBUG',
|
||||||
|
},
|
||||||
|
'rog': {
|
||||||
|
#'handlers': ['file','console'],
|
||||||
|
'handlers': ['console'],
|
||||||
|
'level': 'DEBUG',
|
||||||
|
'propagate': True,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
PASSWORD_HASHERS = [
|
||||||
|
'django.contrib.auth.hashers.PBKDF2PasswordHasher',
|
||||||
|
'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
|
||||||
|
'django.contrib.auth.hashers.Argon2PasswordHasher',
|
||||||
|
'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
|
||||||
|
]
|
||||||
|
|
||||||
|
BLACKLISTED_IPS = ['44.230.58.114']  # add IP addresses you want to block to this list
|
||||||
|
|
||||||
780 custom-postgresql.conf.back Normal file
@@ -0,0 +1,780 @@
|
|||||||
|
# -----------------------------
|
||||||
|
# PostgreSQL configuration file
|
||||||
|
# -----------------------------
|
||||||
|
#
|
||||||
|
# This file consists of lines of the form:
|
||||||
|
#
|
||||||
|
# name = value
|
||||||
|
#
|
||||||
|
# (The "=" is optional.) Whitespace may be used. Comments are introduced with
|
||||||
|
# "#" anywhere on a line. The complete list of parameter names and allowed
|
||||||
|
# values can be found in the PostgreSQL documentation.
|
||||||
|
#
|
||||||
|
# The commented-out settings shown in this file represent the default values.
|
||||||
|
# Re-commenting a setting is NOT sufficient to revert it to the default value;
|
||||||
|
# you need to reload the server.
|
||||||
|
#
|
||||||
|
# This file is read on server startup and when the server receives a SIGHUP
|
||||||
|
# signal. If you edit the file on a running system, you have to SIGHUP the
|
||||||
|
# server for the changes to take effect, run "pg_ctl reload", or execute
|
||||||
|
# "SELECT pg_reload_conf()". Some parameters, which are marked below,
|
||||||
|
# require a server shutdown and restart to take effect.
|
||||||
|
#
|
||||||
|
# Any parameter can also be given as a command-line option to the server, e.g.,
|
||||||
|
# "postgres -c log_connections=on". Some parameters can be changed at run time
|
||||||
|
# with the "SET" SQL command.
|
||||||
|
#
|
||||||
|
# Memory units: kB = kilobytes Time units: ms = milliseconds
|
||||||
|
# MB = megabytes s = seconds
|
||||||
|
# GB = gigabytes min = minutes
|
||||||
|
# TB = terabytes h = hours
|
||||||
|
# d = days
|
||||||
|
|
||||||
|
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
# FILE LOCATIONS
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
# The default values of these variables are driven from the -D command-line
|
||||||
|
# option or PGDATA environment variable, represented here as ConfigDir.
|
||||||
|
|
||||||
|
data_directory = '/var/lib/postgresql/12/main' # use data in another directory
|
||||||
|
# (change requires restart)
|
||||||
|
hba_file = '/etc/postgresql/12/main/pg_hba.conf' # host-based authentication file
|
||||||
|
# (change requires restart)
|
||||||
|
ident_file = '/etc/postgresql/12/main/pg_ident.conf' # ident configuration file
|
||||||
|
# (change requires restart)
|
||||||
|
|
||||||
|
# If external_pid_file is not explicitly set, no extra PID file is written.
|
||||||
|
external_pid_file = '/var/run/postgresql/12-main.pid' # write an extra PID file
|
||||||
|
# (change requires restart)
|
||||||
|
|
||||||
|
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
# CONNECTIONS AND AUTHENTICATION
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
# - Connection Settings -
|
||||||
|
|
||||||
|
#listen_addresses = 'localhost' # what IP address(es) to listen on;
|
||||||
|
# comma-separated list of addresses;
|
||||||
|
# defaults to 'localhost'; use '*' for all
|
||||||
|
# (change requires restart)
|
||||||
|
port = 5432 # (change requires restart)
|
||||||
|
max_connections = 100 # (change requires restart)
|
||||||
|
#superuser_reserved_connections = 3 # (change requires restart)
|
||||||
|
unix_socket_directories = '/var/run/postgresql' # comma-separated list of directories
|
||||||
|
# (change requires restart)
|
||||||
|
#unix_socket_group = '' # (change requires restart)
|
||||||
|
#unix_socket_permissions = 0777 # begin with 0 to use octal notation
|
||||||
|
# (change requires restart)
|
||||||
|
#bonjour = off # advertise server via Bonjour
|
||||||
|
# (change requires restart)
|
||||||
|
#bonjour_name = '' # defaults to the computer name
|
||||||
|
# (change requires restart)
|
||||||
|
|
||||||
|
# - TCP settings -
|
||||||
|
# see "man 7 tcp" for details
|
||||||
|
|
||||||
|
#tcp_keepalives_idle = 0 # TCP_KEEPIDLE, in seconds;
|
||||||
|
# 0 selects the system default
|
||||||
|
#tcp_keepalives_interval = 0 # TCP_KEEPINTVL, in seconds;
|
||||||
|
# 0 selects the system default
|
||||||
|
#tcp_keepalives_count = 0 # TCP_KEEPCNT;
|
||||||
|
# 0 selects the system default
|
||||||
|
#tcp_user_timeout = 0 # TCP_USER_TIMEOUT, in milliseconds;
|
||||||
|
# 0 selects the system default
|
||||||
|
|
||||||
|
# - Authentication -
|
||||||
|
|
||||||
|
#authentication_timeout = 1min # 1s-600s
|
||||||
|
#password_encryption = md5 # md5 or scram-sha-256
|
||||||
|
#db_user_namespace = off
|
||||||
|
|
||||||
|
# GSSAPI using Kerberos
|
||||||
|
#krb_server_keyfile = ''
|
||||||
|
#krb_caseins_users = off
|
||||||
|
|
||||||
|
# - SSL -
|
||||||
|
|
||||||
|
ssl = on
|
||||||
|
#ssl_ca_file = ''
|
||||||
|
ssl_cert_file = '/etc/ssl/certs/ssl-cert-snakeoil.pem'
|
||||||
|
#ssl_crl_file = ''
|
||||||
|
ssl_key_file = '/etc/ssl/private/ssl-cert-snakeoil.key'
|
||||||
|
#ssl_ciphers = 'HIGH:MEDIUM:+3DES:!aNULL' # allowed SSL ciphers
|
||||||
|
#ssl_prefer_server_ciphers = on
|
||||||
|
#ssl_ecdh_curve = 'prime256v1'
|
||||||
|
#ssl_min_protocol_version = 'TLSv1'
|
||||||
|
#ssl_max_protocol_version = ''
|
||||||
|
#ssl_dh_params_file = ''
|
||||||
|
#ssl_passphrase_command = ''
|
||||||
|
#ssl_passphrase_command_supports_reload = off
|
||||||
|
|
||||||
|
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
# RESOURCE USAGE (except WAL)
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
# - Memory -
|
||||||
|
|
||||||
|
shared_buffers = 128MB # min 128kB
|
||||||
|
# (change requires restart)
|
||||||
|
#huge_pages = try # on, off, or try
|
||||||
|
# (change requires restart)
|
||||||
|
#temp_buffers = 8MB # min 800kB
|
||||||
|
#max_prepared_transactions = 0 # zero disables the feature
|
||||||
|
# (change requires restart)
|
||||||
|
# Caution: it is not advisable to set max_prepared_transactions nonzero unless
|
||||||
|
# you actively intend to use prepared transactions.
|
||||||
|
#work_mem = 4MB # min 64kB
|
||||||
|
#maintenance_work_mem = 64MB # min 1MB
|
||||||
|
#autovacuum_work_mem = -1 # min 1MB, or -1 to use maintenance_work_mem
|
||||||
|
#max_stack_depth = 2MB # min 100kB
|
||||||
|
#shared_memory_type = mmap # the default is the first option
|
||||||
|
# supported by the operating system:
|
||||||
|
# mmap
|
||||||
|
# sysv
|
||||||
|
# windows
|
||||||
|
# (change requires restart)
|
||||||
|
dynamic_shared_memory_type = posix # the default is the first option
|
||||||
|
# supported by the operating system:
|
||||||
|
# posix
|
||||||
|
# sysv
|
||||||
|
# windows
|
||||||
|
# mmap
|
||||||
|
# (change requires restart)
|
||||||
|
|
||||||
|
# - Disk -
|
||||||
|
|
||||||
|
#temp_file_limit = -1 # limits per-process temp file space
|
||||||
|
# in kB, or -1 for no limit
|
||||||
|
|
||||||
|
# - Kernel Resources -
|
||||||
|
|
||||||
|
#max_files_per_process = 1000 # min 25
|
||||||
|
# (change requires restart)
|
||||||
|
|
||||||
|
# - Cost-Based Vacuum Delay -
|
||||||
|
|
||||||
|
#vacuum_cost_delay = 0 # 0-100 milliseconds (0 disables)
|
||||||
|
#vacuum_cost_page_hit = 1 # 0-10000 credits
|
||||||
|
#vacuum_cost_page_miss = 10 # 0-10000 credits
|
||||||
|
#vacuum_cost_page_dirty = 20 # 0-10000 credits
|
||||||
|
#vacuum_cost_limit = 200 # 1-10000 credits
|
||||||
|
|
||||||
|
# - Background Writer -
|
||||||
|
|
||||||
|
#bgwriter_delay = 200ms # 10-10000ms between rounds
|
||||||
|
#bgwriter_lru_maxpages = 100 # max buffers written/round, 0 disables
|
||||||
|
#bgwriter_lru_multiplier = 2.0 # 0-10.0 multiplier on buffers scanned/round
|
||||||
|
#bgwriter_flush_after = 512kB # measured in pages, 0 disables
|
||||||
|
|
||||||
|
# - Asynchronous Behavior -
|
||||||
|
|
||||||
|
#effective_io_concurrency = 1 # 1-1000; 0 disables prefetching
|
||||||
|
#max_worker_processes = 8 # (change requires restart)
|
||||||
|
#max_parallel_maintenance_workers = 2 # taken from max_parallel_workers
|
||||||
|
#max_parallel_workers_per_gather = 2 # taken from max_parallel_workers
|
||||||
|
#parallel_leader_participation = on
|
||||||
|
#max_parallel_workers = 8 # maximum number of max_worker_processes that
|
||||||
|
# can be used in parallel operations
|
||||||
|
#old_snapshot_threshold = -1 # 1min-60d; -1 disables; 0 is immediate
|
||||||
|
# (change requires restart)
|
||||||
|
#backend_flush_after = 0 # measured in pages, 0 disables
|
||||||
|
|
||||||
|
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
# WRITE-AHEAD LOG
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
# - Settings -
|
||||||
|
|
||||||
|
#wal_level = replica # minimal, replica, or logical
|
||||||
|
# (change requires restart)
|
||||||
|
#fsync = on # flush data to disk for crash safety
|
||||||
|
# (turning this off can cause
|
||||||
|
# unrecoverable data corruption)
|
||||||
|
#synchronous_commit = on # synchronization level;
|
||||||
|
# off, local, remote_write, remote_apply, or on
|
||||||
|
#wal_sync_method = fsync # the default is the first option
|
||||||
|
# supported by the operating system:
|
||||||
|
# open_datasync
|
||||||
|
# fdatasync (default on Linux)
|
||||||
|
# fsync
|
||||||
|
# fsync_writethrough
|
||||||
|
# open_sync
|
||||||
|
#full_page_writes = on # recover from partial page writes
|
||||||
|
#wal_compression = off # enable compression of full-page writes
|
||||||
|
#wal_log_hints = off # also do full page writes of non-critical updates
|
||||||
|
# (change requires restart)
|
||||||
|
#wal_init_zero = on # zero-fill new WAL files
|
||||||
|
#wal_recycle = on # recycle WAL files
|
||||||
|
#wal_buffers = -1 # min 32kB, -1 sets based on shared_buffers
|
||||||
|
# (change requires restart)
|
||||||
|
#wal_writer_delay = 200ms # 1-10000 milliseconds
|
||||||
|
#wal_writer_flush_after = 1MB # measured in pages, 0 disables
|
||||||
|
|
||||||
|
#commit_delay = 0 # range 0-100000, in microseconds
|
||||||
|
#commit_siblings = 5 # range 1-1000
|
||||||
|
|
||||||
|
# - Checkpoints -
|
||||||
|
|
||||||
|
#checkpoint_timeout = 5min # range 30s-1d
|
||||||
|
max_wal_size = 1GB
|
||||||
|
min_wal_size = 80MB
|
||||||
|
#checkpoint_completion_target = 0.5 # checkpoint target duration, 0.0 - 1.0
|
||||||
|
#checkpoint_flush_after = 256kB # measured in pages, 0 disables
|
||||||
|
#checkpoint_warning = 30s # 0 disables
|
||||||
|
|
||||||
|
# - Archiving -
|
||||||
|
|
||||||
|
#archive_mode = off # enables archiving; off, on, or always
|
||||||
|
# (change requires restart)
|
||||||
|
#archive_command = '' # command to use to archive a logfile segment
|
||||||
|
# placeholders: %p = path of file to archive
|
||||||
|
# %f = file name only
|
||||||
|
# e.g. 'test ! -f /mnt/server/archivedir/%f && cp %p /mnt/server/archivedir/%f'
|
||||||
|
#archive_timeout = 0 # force a logfile segment switch after this
|
||||||
|
# number of seconds; 0 disables
|
||||||
|
|
||||||
|
# - Archive Recovery -
|
||||||
|
|
||||||
|
# These are only used in recovery mode.
|
||||||
|
|
||||||
|
#restore_command = '' # command to use to restore an archived logfile segment
|
||||||
|
# placeholders: %p = path of file to restore
|
||||||
|
# %f = file name only
|
||||||
|
# e.g. 'cp /mnt/server/archivedir/%f %p'
|
||||||
|
# (change requires restart)
|
||||||
|
#archive_cleanup_command = '' # command to execute at every restartpoint
|
||||||
|
#recovery_end_command = '' # command to execute at completion of recovery
|
||||||
|
|
||||||
|
# - Recovery Target -
|
||||||
|
|
||||||
|
# Set these only when performing a targeted recovery.
|
||||||
|
|
||||||
|
#recovery_target = '' # 'immediate' to end recovery as soon as a
|
||||||
|
# consistent state is reached
|
||||||
|
# (change requires restart)
|
||||||
|
#recovery_target_name = '' # the named restore point to which recovery will proceed
|
||||||
|
# (change requires restart)
|
||||||
|
#recovery_target_time = '' # the time stamp up to which recovery will proceed
|
||||||
|
# (change requires restart)
|
||||||
|
#recovery_target_xid = '' # the transaction ID up to which recovery will proceed
|
||||||
|
# (change requires restart)
|
||||||
|
#recovery_target_lsn = '' # the WAL LSN up to which recovery will proceed
|
||||||
|
# (change requires restart)
|
||||||
|
#recovery_target_inclusive = on # Specifies whether to stop:
|
||||||
|
# just after the specified recovery target (on)
|
||||||
|
# just before the recovery target (off)
|
||||||
|
# (change requires restart)
|
||||||
|
#recovery_target_timeline = 'latest' # 'current', 'latest', or timeline ID
|
||||||
|
# (change requires restart)
|
||||||
|
#recovery_target_action = 'pause' # 'pause', 'promote', 'shutdown'
|
||||||
|
# (change requires restart)
|
||||||
|
|
||||||
|
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
# REPLICATION
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
# - Sending Servers -
|
||||||
|
|
||||||
|
# Set these on the master and on any standby that will send replication data.
|
||||||
|
|
||||||
|
#max_wal_senders = 10 # max number of walsender processes
|
||||||
|
# (change requires restart)
|
||||||
|
#wal_keep_segments = 0 # in logfile segments; 0 disables
|
||||||
|
#wal_sender_timeout = 60s # in milliseconds; 0 disables
|
||||||
|
|
||||||
|
#max_replication_slots = 10 # max number of replication slots
|
||||||
|
# (change requires restart)
|
||||||
|
#track_commit_timestamp = off # collect timestamp of transaction commit
|
||||||
|
# (change requires restart)
|
||||||
|
|
||||||
|
# - Master Server -
|
||||||
|
|
||||||
|
# These settings are ignored on a standby server.
|
||||||
|
|
||||||
|
#synchronous_standby_names = '' # standby servers that provide sync rep
|
||||||
|
# method to choose sync standbys, number of sync standbys,
|
||||||
|
# and comma-separated list of application_name
|
||||||
|
# from standby(s); '*' = all
|
||||||
|
#vacuum_defer_cleanup_age = 0 # number of xacts by which cleanup is delayed
|
||||||
|
|
||||||
|
# - Standby Servers -
|
||||||
|
|
||||||
|
# These settings are ignored on a master server.
|
||||||
|
|
||||||
|
#primary_conninfo = '' # connection string to sending server
|
||||||
|
# (change requires restart)
|
||||||
|
#primary_slot_name = '' # replication slot on sending server
|
||||||
|
# (change requires restart)
|
||||||
|
#promote_trigger_file = '' # file name whose presence ends recovery
|
||||||
|
#hot_standby = on # "off" disallows queries during recovery
|
||||||
|
# (change requires restart)
|
||||||
|
#max_standby_archive_delay = 30s # max delay before canceling queries
|
||||||
|
# when reading WAL from archive;
|
||||||
|
# -1 allows indefinite delay
|
||||||
|
#max_standby_streaming_delay = 30s # max delay before canceling queries
|
||||||
|
# when reading streaming WAL;
|
||||||
|
# -1 allows indefinite delay
|
||||||
|
#wal_receiver_status_interval = 10s # send replies at least this often
|
||||||
|
# 0 disables
|
||||||
|
#hot_standby_feedback = off # send info from standby to prevent
|
||||||
|
# query conflicts
|
||||||
|
#wal_receiver_timeout = 60s # time that receiver waits for
|
||||||
|
# communication from master
|
||||||
|
# in milliseconds; 0 disables
|
||||||
|
#wal_retrieve_retry_interval = 5s # time to wait before retrying to
|
||||||
|
# retrieve WAL after a failed attempt
|
||||||
|
#recovery_min_apply_delay = 0 # minimum delay for applying changes during recovery
|
||||||
|
|
||||||
|
# - Subscribers -
|
||||||
|
|
||||||
|
# These settings are ignored on a publisher.
|
||||||
|
|
||||||
|
#max_logical_replication_workers = 4 # taken from max_worker_processes
|
||||||
|
# (change requires restart)
|
||||||
|
#max_sync_workers_per_subscription = 2 # taken from max_logical_replication_workers
|
||||||
|
|
||||||
|
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
# QUERY TUNING
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
# - Planner Method Configuration -
|
||||||
|
|
||||||
|
#enable_bitmapscan = on
|
||||||
|
#enable_hashagg = on
|
||||||
|
#enable_hashjoin = on
|
||||||
|
#enable_indexscan = on
|
||||||
|
#enable_indexonlyscan = on
|
||||||
|
#enable_material = on
|
||||||
|
#enable_mergejoin = on
|
||||||
|
#enable_nestloop = on
|
||||||
|
#enable_parallel_append = on
|
||||||
|
#enable_seqscan = on
#enable_sort = on
#enable_tidscan = on
#enable_partitionwise_join = off
#enable_partitionwise_aggregate = off
#enable_parallel_hash = on
#enable_partition_pruning = on

# - Planner Cost Constants -

#seq_page_cost = 1.0                    # measured on an arbitrary scale
#random_page_cost = 4.0                 # same scale as above
#cpu_tuple_cost = 0.01                  # same scale as above
#cpu_index_tuple_cost = 0.005           # same scale as above
#cpu_operator_cost = 0.0025             # same scale as above
#parallel_tuple_cost = 0.1              # same scale as above
#parallel_setup_cost = 1000.0           # same scale as above

#jit_above_cost = 100000                # perform JIT compilation if available
                                        # and query more expensive than this;
                                        # -1 disables
#jit_inline_above_cost = 500000         # inline small functions if query is
                                        # more expensive than this; -1 disables
#jit_optimize_above_cost = 500000       # use expensive JIT optimizations if
                                        # query is more expensive than this;
                                        # -1 disables

#min_parallel_table_scan_size = 8MB
#min_parallel_index_scan_size = 512kB
#effective_cache_size = 4GB

# - Genetic Query Optimizer -

#geqo = on
#geqo_threshold = 12
#geqo_effort = 5                        # range 1-10
#geqo_pool_size = 0                     # selects default based on effort
#geqo_generations = 0                   # selects default based on effort
#geqo_selection_bias = 2.0              # range 1.5-2.0
#geqo_seed = 0.0                        # range 0.0-1.0

# - Other Planner Options -

#default_statistics_target = 100        # range 1-10000
#constraint_exclusion = partition       # on, off, or partition
#cursor_tuple_fraction = 0.1            # range 0.0-1.0
#from_collapse_limit = 8
#join_collapse_limit = 8                # 1 disables collapsing of explicit
                                        # JOIN clauses
#force_parallel_mode = off
#jit = on                               # allow JIT compilation
#plan_cache_mode = auto                 # auto, force_generic_plan or
                                        # force_custom_plan


#------------------------------------------------------------------------------
# REPORTING AND LOGGING
#------------------------------------------------------------------------------

# - Where to Log -

#log_destination = 'stderr'             # Valid values are combinations of
                                        # stderr, csvlog, syslog, and eventlog,
                                        # depending on platform. csvlog
                                        # requires logging_collector to be on.

# This is used when logging to stderr:
#logging_collector = off                # Enable capturing of stderr and csvlog
                                        # into log files. Required to be on for
                                        # csvlogs.
                                        # (change requires restart)

# These are only used if logging_collector is on:
#log_directory = 'log'                  # directory where log files are written,
                                        # can be absolute or relative to PGDATA
#log_filename = 'postgresql-%Y-%m-%d_%H%M%S.log'        # log file name pattern,
                                        # can include strftime() escapes
#log_file_mode = 0600                   # creation mode for log files,
                                        # begin with 0 to use octal notation
#log_truncate_on_rotation = off         # If on, an existing log file with the
                                        # same name as the new log file will be
                                        # truncated rather than appended to.
                                        # But such truncation only occurs on
                                        # time-driven rotation, not on restarts
                                        # or size-driven rotation. Default is
                                        # off, meaning append to existing files
                                        # in all cases.
#log_rotation_age = 1d                  # Automatic rotation of logfiles will
                                        # happen after that time. 0 disables.
#log_rotation_size = 10MB               # Automatic rotation of logfiles will
                                        # happen after that much log output.
                                        # 0 disables.

# These are relevant when logging to syslog:
#syslog_facility = 'LOCAL0'
#syslog_ident = 'postgres'
#syslog_sequence_numbers = on
#syslog_split_messages = on

# This is only relevant when logging to eventlog (win32):
# (change requires restart)
#event_source = 'PostgreSQL'

# - When to Log -

#log_min_messages = warning             # values in order of decreasing detail:
                                        #   debug5
                                        #   debug4
                                        #   debug3
                                        #   debug2
                                        #   debug1
                                        #   info
                                        #   notice
                                        #   warning
                                        #   error
                                        #   log
                                        #   fatal
                                        #   panic

#log_min_error_statement = error        # values in order of decreasing detail:
                                        #   debug5
                                        #   debug4
                                        #   debug3
                                        #   debug2
                                        #   debug1
                                        #   info
                                        #   notice
                                        #   warning
                                        #   error
                                        #   log
                                        #   fatal
                                        #   panic (effectively off)

#log_min_duration_statement = -1        # -1 is disabled, 0 logs all statements
                                        # and their durations, > 0 logs only
                                        # statements running at least this number
                                        # of milliseconds

#log_transaction_sample_rate = 0.0      # Fraction of transactions whose statements
                                        # are logged regardless of their duration. 1.0 logs all
                                        # statements from all transactions, 0.0 never logs.

# - What to Log -

#debug_print_parse = off
#debug_print_rewritten = off
#debug_print_plan = off
#debug_pretty_print = on
#log_checkpoints = off
#log_connections = off
#log_disconnections = off
#log_duration = off
#log_error_verbosity = default          # terse, default, or verbose messages
#log_hostname = off
log_line_prefix = '%m [%p] %q%u@%d '    # special values:
                                        #   %a = application name
                                        #   %u = user name
                                        #   %d = database name
                                        #   %r = remote host and port
                                        #   %h = remote host
                                        #   %p = process ID
                                        #   %t = timestamp without milliseconds
                                        #   %m = timestamp with milliseconds
                                        #   %n = timestamp with milliseconds (as a Unix epoch)
                                        #   %i = command tag
                                        #   %e = SQL state
                                        #   %c = session ID
                                        #   %l = session line number
                                        #   %s = session start timestamp
                                        #   %v = virtual transaction ID
                                        #   %x = transaction ID (0 if none)
                                        #   %q = stop here in non-session
                                        #        processes
                                        #   %% = '%'
                                        # e.g. '<%u%%%d> '
#log_lock_waits = off                   # log lock waits >= deadlock_timeout
#log_statement = 'none'                 # none, ddl, mod, all
#log_replication_commands = off
#log_temp_files = -1                    # log temporary files equal or larger
                                        # than the specified size in kilobytes;
                                        # -1 disables, 0 logs all temp files
log_timezone = 'Etc/UTC'

#------------------------------------------------------------------------------
# PROCESS TITLE
#------------------------------------------------------------------------------

cluster_name = '12/main'                # added to process titles if nonempty
                                        # (change requires restart)
#update_process_title = on


#------------------------------------------------------------------------------
# STATISTICS
#------------------------------------------------------------------------------

# - Query and Index Statistics Collector -

#track_activities = on
#track_counts = on
#track_io_timing = off
#track_functions = none                 # none, pl, all
#track_activity_query_size = 1024       # (change requires restart)
stats_temp_directory = '/var/run/postgresql/12-main.pg_stat_tmp'

# - Monitoring -

#log_parser_stats = off
#log_planner_stats = off
#log_executor_stats = off
#log_statement_stats = off


#------------------------------------------------------------------------------
# AUTOVACUUM
#------------------------------------------------------------------------------

#autovacuum = on                        # Enable autovacuum subprocess? 'on'
                                        # requires track_counts to also be on.
#log_autovacuum_min_duration = -1       # -1 disables, 0 logs all actions and
                                        # their durations, > 0 logs only
                                        # actions running at least this number
                                        # of milliseconds.
#autovacuum_max_workers = 3             # max number of autovacuum subprocesses
                                        # (change requires restart)
#autovacuum_naptime = 1min              # time between autovacuum runs
#autovacuum_vacuum_threshold = 50       # min number of row updates before
                                        # vacuum
#autovacuum_analyze_threshold = 50      # min number of row updates before
                                        # analyze
#autovacuum_vacuum_scale_factor = 0.2   # fraction of table size before vacuum
#autovacuum_analyze_scale_factor = 0.1  # fraction of table size before analyze
#autovacuum_freeze_max_age = 200000000  # maximum XID age before forced vacuum
                                        # (change requires restart)
#autovacuum_multixact_freeze_max_age = 400000000        # maximum multixact age
                                        # before forced vacuum
                                        # (change requires restart)
#autovacuum_vacuum_cost_delay = 2ms     # default vacuum cost delay for
                                        # autovacuum, in milliseconds;
                                        # -1 means use vacuum_cost_delay
#autovacuum_vacuum_cost_limit = -1      # default vacuum cost limit for
                                        # autovacuum, -1 means use
                                        # vacuum_cost_limit


#------------------------------------------------------------------------------
# CLIENT CONNECTION DEFAULTS
#------------------------------------------------------------------------------

# - Statement Behavior -

#client_min_messages = notice           # values in order of decreasing detail:
                                        #   debug5
                                        #   debug4
                                        #   debug3
                                        #   debug2
                                        #   debug1
                                        #   log
                                        #   notice
                                        #   warning
                                        #   error
#search_path = '"$user", public'        # schema names
#row_security = on
#default_tablespace = ''                # a tablespace name, '' uses the default
#temp_tablespaces = ''                  # a list of tablespace names, '' uses
                                        # only default tablespace
#default_table_access_method = 'heap'
#check_function_bodies = on
#default_transaction_isolation = 'read committed'
#default_transaction_read_only = off
#default_transaction_deferrable = off
#session_replication_role = 'origin'
#statement_timeout = 0                  # in milliseconds, 0 is disabled
#lock_timeout = 0                       # in milliseconds, 0 is disabled
#idle_in_transaction_session_timeout = 0        # in milliseconds, 0 is disabled
#vacuum_freeze_min_age = 50000000
#vacuum_freeze_table_age = 150000000
#vacuum_multixact_freeze_min_age = 5000000
#vacuum_multixact_freeze_table_age = 150000000
#vacuum_cleanup_index_scale_factor = 0.1        # fraction of total number of tuples
                                        # before index cleanup, 0 always performs
                                        # index cleanup
#bytea_output = 'hex'                   # hex, escape
#xmlbinary = 'base64'
#xmloption = 'content'
#gin_fuzzy_search_limit = 0
#gin_pending_list_limit = 4MB

# - Locale and Formatting -

datestyle = 'iso, mdy'
#intervalstyle = 'postgres'
timezone = 'Etc/UTC'
#timezone_abbreviations = 'Default'     # Select the set of available time zone
                                        # abbreviations. Currently, there are
                                        #   Default
                                        #   Australia (historical usage)
                                        #   India
                                        # You can create your own file in
                                        # share/timezonesets/.
#extra_float_digits = 1                 # min -15, max 3; any value >0 actually
                                        # selects precise output mode
#client_encoding = sql_ascii            # actually, defaults to database
                                        # encoding

# These settings are initialized by initdb, but they can be changed.
lc_messages = 'C.UTF-8'                 # locale for system error message
                                        # strings
lc_monetary = 'C.UTF-8'                 # locale for monetary formatting
lc_numeric = 'C.UTF-8'                  # locale for number formatting
lc_time = 'C.UTF-8'                     # locale for time formatting

# default configuration for text search
default_text_search_config = 'pg_catalog.english'

# - Shared Library Preloading -

#shared_preload_libraries = ''          # (change requires restart)
#local_preload_libraries = ''
#session_preload_libraries = ''
#jit_provider = 'llvmjit'               # JIT library to use

# - Other Defaults -

#dynamic_library_path = '$libdir'


#------------------------------------------------------------------------------
# LOCK MANAGEMENT
#------------------------------------------------------------------------------

#deadlock_timeout = 1s
#max_locks_per_transaction = 64         # min 10
                                        # (change requires restart)
#max_pred_locks_per_transaction = 64    # min 10
                                        # (change requires restart)
#max_pred_locks_per_relation = -2       # negative values mean
                                        # (max_pred_locks_per_transaction
                                        #  / -max_pred_locks_per_relation) - 1
#max_pred_locks_per_page = 2            # min 0


#------------------------------------------------------------------------------
# VERSION AND PLATFORM COMPATIBILITY
#------------------------------------------------------------------------------

# - Previous PostgreSQL Versions -

#array_nulls = on
#backslash_quote = safe_encoding        # on, off, or safe_encoding
#escape_string_warning = on
#lo_compat_privileges = off
#operator_precedence_warning = off
#quote_all_identifiers = off
#standard_conforming_strings = on
#synchronize_seqscans = on

# - Other Platforms and Clients -

#transform_null_equals = off


#------------------------------------------------------------------------------
# ERROR HANDLING
#------------------------------------------------------------------------------

#exit_on_error = off                    # terminate session on any error?
#restart_after_crash = on               # reinitialize after backend crash?
#data_sync_retry = off                  # retry or panic on failure to fsync
                                        # data?
                                        # (change requires restart)


#------------------------------------------------------------------------------
# CONFIG FILE INCLUDES
#------------------------------------------------------------------------------

# These options allow settings to be loaded from files other than the
# default postgresql.conf. Note that these are directives, not variable
# assignments, so they can usefully be given more than once.

include_dir = 'conf.d'                  # include files ending in '.conf' from
                                        # a directory, e.g., 'conf.d'
#include_if_exists = '...'              # include file only if it exists
#include = '...'                        # include file


#------------------------------------------------------------------------------
# CUSTOMIZED OPTIONS
#------------------------------------------------------------------------------

# Add settings for extensions here
listen_addresses = '*'
port = 5432
wal_level = hot_standby
max_wal_senders = 10
wal_keep_segments = 250
superuser_reserved_connections= 10
min_wal_size =2048MB
max_wal_size= 4GB
wal_keep_segments= 64
hot_standby = on
listen_addresses = '*'
shared_buffers = 500MB
work_mem = 16MB
maintenance_work_mem = 128MB
wal_buffers = 1MB
random_page_cost = 2.0
xmloption = 'document'
max_parallel_maintenance_workers = 2
max_parallel_workers = 4
checkpoint_timeout = 30min
#archive_mode=on
#archive_command = 'test ! -f /opt/archivedir/%f && cp -r %p /opt/archivedir/%f'
primary_conninfo = 'host= port=5432 user=replicator password=replicator sslmode=require'
recovery_target_timeline=latest
recovery_target_action=promote
promote_trigger_file = '/tmp/pg_promote_master'

ssl = true
ssl_cert_file = '/etc/ssl/certs/ssl-cert-snakeoil.pem'
ssl_key_file = '/etc/ssl/private/ssl-cert-snakeoil.key'
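Note that the customized block at the end sets a few parameters twice (listen_addresses and wal_keep_segments each appear twice); PostgreSQL applies the last occurrence in the file, so wal_keep_segments effectively ends up at 64. A minimal sketch for checking the replication-related values once the server is running, assuming psycopg2 is available (it is not listed explicitly in requirements.txt) and the connection values are exported from the .env file:

```python
import os
import psycopg2  # assumption: psycopg2 (or psycopg2-binary) is installed

# Connect with the same values the compose stack reads from .env.
conn = psycopg2.connect(
    host=os.environ["PG_HOST"],
    port=os.environ["PG_PORT"],
    user=os.environ["POSTGRES_USER"],
    password=os.environ["POSTGRES_PASS"],
    dbname=os.environ["POSTGRES_DBNAME"],
)
with conn.cursor() as cur:
    for setting in ("wal_level", "max_wal_senders", "wal_keep_segments", "hot_standby"):
        cur.execute(f"SHOW {setting}")
        print(setting, "=", cur.fetchone()[0])
    # True on a standby started from this configuration, False on the primary.
    cur.execute("SELECT pg_is_in_recovery()")
    print("in_recovery =", cur.fetchone()[0])
conn.close()
```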
@@ -8,6 +8,7 @@ services:
     volumes:
       - postgres_data:/var/lib/postgresql
       - ./custom-postgresql.conf:/etc/postgresql/12/main/postgresql.conf
+      - ./rogaining.sql:/sql/rogaining.sql
     environment:
       - POSTGRES_USER=${POSTGRES_USER}
       - POSTGRES_PASS=${POSTGRES_PASS}
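The added volume line only mounts ./rogaining.sql into the container at /sql/rogaining.sql; nothing in this hunk executes it. A minimal sketch of loading the dump by hand, assuming the service is named postgres-db as in the other compose files in this commit and the .env values are exported:

```python
import os
import subprocess

# Run the mounted dump with psql inside the postgres-db container.
# The service name and the /sql/rogaining.sql path come from this commit's compose files.
subprocess.run(
    [
        "docker", "compose", "exec", "postgres-db",
        "psql",
        "-U", os.environ["POSTGRES_USER"],
        "-d", os.environ["POSTGRES_DBNAME"],
        "-f", "/sql/rogaining.sql",
    ],
    check=True,
)
```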
@@ -1,21 +1,4 @@
 services:
-  # postgres-db:
-  #   image: kartoza/postgis:12.0
-  #   ports:
-  #     - 5432:5432
-  #   volumes:
-  #     - postgres_data:/var/lib/postgresql
-  #     - ./custom-postgresql.conf:/etc/postgresql/12/main/postgresql.conf
-  #   environment:
-  #     - POSTGRES_USER=${POSTGRES_USER}
-  #     - POSTGRES_PASS=${POSTGRES_PASS}
-  #     - POSTGRES_DBNAME=${POSTGRES_DBNAME}
-  #     - POSTGRES_MAX_CONNECTIONS=600
-
-  #   restart: "on-failure"
-  #   networks:
-  #     - rog-api
-
   api:
     build:
       context: .
82  docker-compose.yaml.back2  Normal file
@@ -0,0 +1,82 @@
version: "3.9"

x-shared-env:
  # Django settings
  &shared_env
  - POSTGRES_USER=${POSTGRES_USER}
  - POSTGRES_PASS=${POSTGRES_PASS}
  - POSTGRES_DBNAME=${POSTGRES_DBNAME}
  - DATABASE=#{DATABASE}
  - PG_HOST=${PG_HOST}
  - PG_PORT=${PG_PORT}
  - GS_VERSION=${GS_VERSION}
  - GEOSERVER_PORT=${GEOSERVER_PORT}
  - GEOSERVER_DATA_DIR=${GEOSERVER_DATA_DIR}
  - GEOWEBCACHE_CACHE_DIR=${GEOWEBCACHE_CACHE_DIR}
  - GEOSERVER_ADMIN_PASSWORD=${GEOSERVER_ADMIN_PASSWORD}
  - GEOSERVER_ADMIN_USER=${GEOSERVER_ADMIN_USER}
  - INITIAL_MEMORY=${INITIAL_MEMORY}
  - MAXIMUM_MEMORY=${MAXIMUM_MEMORY}
  - SECRET_KEY=${SECRET_KEY}
  - DEBUG=${DEBUG}
  - ALLOWED_HOSTS=${ALLOWED_HOSTS}
  - S3_REGION=${S3_REGION}
  - S3_BUCKET_NAME=${S3_BUCKET_NAME}
  - S3_PREFIX=#{S3_PREFIX}
  - AWS_ACCESS_KEY=${AWS_ACCESS_KEY}
  - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY}
  - AWS_REGION=${AWS_REGION}

services:
  postgres-db:
    image: kartoza/postgis:12.0
    ports:
      - 5432:5432
    volumes:
      - postgres_data:/var/lib/postgresql
      - ./custom-postgresql.conf:/etc/postgresql/12/main/postgresql.conf
      - ./rogaining.sql:/sql/rogaining.sql
    environment: *shared_env
    restart: "on-failure"
    networks:
      - rog-api

  app:
    build:
      context: .
      dockerfile: Dockerfile.gdal
    command: gunicorn config.wsgi:application --bind 0.0.0.0:8000
    volumes:
      - .:/app
      - static_volume:/app/static
      - media_volume:/app/media
    environment: *shared_env
    restart: "on-failure"
    depends_on:
      - postgres-db
    networks:
      - rog-api

  nginx:
    image: nginx:1.19
    volumes:
      - ./nginx.conf:/etc/nginx/nginx.conf
      - static_volume:/app/static
      - media_volume:/app/media
    ports:
      - 8100:80
    environment: *shared_env
    depends_on:
      - app
    networks:
      - rog-api

networks:
  rog-api:
    driver: bridge

volumes:
  postgres_data:
  static_volume:
  media_volume:
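Two of the shared variables (`DATABASE=#{DATABASE}` and `S3_PREFIX=#{S3_PREFIX}`) use `#{...}`, which Docker Compose does not substitute; Compose only expands `${VAR}` references from the environment or the .env file, so those two values would be passed to the containers literally. A small sketch, with the file name taken from this commit, that flags such references before deploying:

```python
import re
from pathlib import Path

COMPOSE_FILE = Path("docker-compose.yaml.back2")  # path assumed from this commit

def find_unexpanded_refs(path: Path) -> list[str]:
    """Return lines containing #{...} placeholders, which docker compose will not expand."""
    text = path.read_text(encoding="utf-8")
    return [line.strip() for line in text.splitlines() if re.search(r"#\{[^}]+\}", line)]

if __name__ == "__main__":
    for line in find_unexpanded_refs(COMPOSE_FILE):
        print("not expanded by docker compose:", line)
```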
1  docker-compose.yml  Symbolic link
@@ -0,0 +1 @@
docker-compose-prod.yaml
17  docker-compose.yml.psql  Normal file
@@ -0,0 +1,17 @@
services:
  postgres-db:
    image: kartoza/postgis
    ports:
      - 5432:5432
    volumes:
      - postgres_data:/var/lib/postgresql
      - ./custom-postgresql.conf:/etc/postgresql/12/main/postgresql.conf
      - ./rogaining.sql:/sql/rogaining.sql
    environment:
      - POSTGRES_USER=${POSTGRES_USER}
      - POSTGRES_PASS=${POSTGRES_PASS}
      - POSTGRES_DBNAME=${POSTGRES_DBNAME}
      - POSTGRES_MAX_CONNECTIONS=600
    restart: "on-failure"
volumes:
  postgres_data:
@@ -66,3 +66,16 @@ django-phonenumber-field==6.1.0
 django-rest-knox==4.2.0
 dj-database-url==2.0.0
 django-cors-headers==4.3.0
+
+
+openpyxl
+psutil
+folium
+selenium
+webdriver-manager
+Pillow
+pandas
+
+reportlab
+networkx
+haversine
0  rog/.DS_Store (vendored)  Normal file → Executable file
0  rog/__init__.py  Normal file → Executable file
0  rog/admin.py  Normal file → Executable file
0  rog/apps.py  Normal file → Executable file
0  rog/backend.py  Normal file → Executable file
0  rog/choices.py  Normal file → Executable file
0  rog/forms.py  Normal file → Executable file
0  rog/gifuroge_team.csv  Normal file → Executable file
0  rog/management/commands/cleanup_temp_users.py  Normal file → Executable file

644  rog/management/commands/import_event_data.py  Normal file
@@ -0,0 +1,644 @@
import csv
import os
import logging
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction, connections
from django.utils import timezone
from django.conf import settings
from django.contrib.auth import get_user_model
from rog.models import Member, Team, NewEvent2, Entry, Location, NewCategory  # , GpsLog

CustomUser = get_user_model()
logger = logging.getLogger(__name__)


class Command(BaseCommand):
    help = 'CSVファイルからイベント参加者情報をインポートし、rogdbとgifurogeデータベースに登録します。'

    def add_arguments(self, parser):
        parser.add_argument('csv_file', type=str, help='インポートするCSVファイルのパス')
        parser.add_argument('event_code', type=str, help='登録するイベントコード')

    def handle(self, *args, **options):
        csv_file = options['csv_file']
        event_code = options['event_code']

        # 処理結果を保存するリストを初期化
        self.processed_entries = []

        if not os.path.exists(csv_file):
            raise CommandError(f'ファイルが見つかりません: {csv_file}')

        try:
            event = NewEvent2.objects.get(event_name=event_code)
        except NewEvent2.DoesNotExist:
            raise CommandError(f'イベントが見つかりません: {event_code}')

        self.stdout.write(self.style.SUCCESS(f'イベント "{event.event_name}" のデータをインポートします'))

        # CSVファイルを読み込み、rogdbデータベースに登録
        with open(csv_file, 'r', encoding='utf-8') as file:
            reader = csv.reader(file)
            next(reader)  # ヘッダー行をスキップ

            with transaction.atomic():
                for i, row in enumerate(reader, 1):
                    try:
                        self.process_entry(row, event)
                    except Exception as e:
                        self.stdout.write(self.style.ERROR(f'行 {i} のデータ処理中にエラーが発生しました: {str(e)}'))
                        self.stdout.write(self.style.WARNING(f'この行はスキップして続行します'))

        # gifurogeデータベースへの転送
        self.transfer_to_gifuroge(event)

        # 結果をCSVファイルに出力
        self.export_processed_entries(event_code)

        self.stdout.write(self.style.SUCCESS('データのインポートが完了しました'))

    def process_entry(self, row, event):
        """CSVの1行からエントリー情報を処理"""
        try:
            # 新しいCSVフォーマットに対応したインデックス
            participation_time = row[0]
            division = row[1]
            is_trial = row[2].strip() == 'お試し'  # 「お試し」フラグ
            division_number = row[3]
            team_name = row[4]
            leader_name = row[5]
            leader_kana = row[6]
            leader_gender = row[7]  # 新しく追加された性別フィールド
            password = row[8]  # インデックスが1つずれる
            member_count = int(row[9])  # インデックスが1つずれる
            zekken_label = row[10]  # インデックスが1つずれる
            zekken_number = row[11]  # インデックスが1つずれる
            leader_email = row[12]  # インデックスが1つずれる
            leader_birth_date = row[13]  # インデックスが1つずれる

            name_parts = leader_name.split(' ', 1)
            lastname = name_parts[0]
            firstname = name_parts[1] if len(name_parts) > 1 else ""

            # 半角数字を全角数字に変換する関数
            def to_fullwidth(s):
                """半角数字を全角数字に変換する"""
                result = ""
                for char in s:
                    if char.isdigit():
                        # 半角数字を全角数字に変換
                        result += chr(ord(char) + 0xFEE0)
                    else:
                        result += char
                return result

            # 日付フォーマットを変換する関数
            def format_date(date_str):
                """YYYY/MM/DD形式をYYYY-MM-DD形式に変換する"""
                if not date_str:
                    return None
                try:
                    # スラッシュやピリオドなどの区切り文字を処理
                    parts = None
                    if '/' in date_str:
                        parts = date_str.split('/')
                    elif '.' in date_str:
                        parts = date_str.split('.')
                    elif '-' in date_str:
                        # 既にハイフン形式の場合はそのまま返す
                        return date_str

                    if parts and len(parts) == 3:
                        year, month, day = parts
                        # 必要に応じて年を4桁に修正(例:'91' → '1991')
                        if len(year) == 2:
                            if int(year) > 50:  # 50より大きい場合は1900年代と仮定
                                year = f"19{year}"
                            else:
                                year = f"20{year}"

                        # 月と日が1桁の場合は2桁に揃える
                        month = month.zfill(2)
                        day = day.zfill(2)

                        return f"{year}-{month}-{day}"
                    return date_str  # 変換できない場合は元の文字列を返す
                except Exception as e:
                    self.stdout.write(self.style.ERROR(f'日付変換エラー: {date_str} - {str(e)}'))
                    return None

            # 代表者の生年月日をフォーマット変換
            formatted_leader_birth_date = format_date(leader_birth_date)

            # 参加時間を全角に変換
            fullwidth_participation_time = to_fullwidth(participation_time)

            # 代表者の性別を設定(Femaleならtrue、それ以外ならfalse)
            is_female = leader_gender.strip().lower() == "female"

            # 1. CustomUserを検索または作成
            new_user = False
            password_to_save = ""
            try:
                leader = CustomUser.objects.get(email=leader_email)
                # 既存ユーザー
                password_to_save = "(既存)"
                # 既存ユーザーの性別情報を更新
                if leader.female != is_female:
                    leader.female = is_female
                    leader.save()
                self.stdout.write(f'既存ユーザーを代表者として使用します: {leader_email} (性別: {leader_gender})')
            except CustomUser.DoesNotExist:
                # 新規ユーザーの場合
                # leader_nameを空白で分離
                leader = CustomUser.objects.create_user(
                    email=leader_email,
                    password=password,
                    firstname=firstname,
                    lastname=lastname,
                    date_of_birth=formatted_leader_birth_date,
                    group=event.event_name,
                    female=is_female,  # 性別を設定
                    is_active=True
                )
                password_to_save = password  # 新規ユーザーの場合は実際のパスワード
                self.stdout.write(f'代表者を新規作成しました: {leader_email} (パスワード: {password}, 性別: {leader_gender})')

            # 処理した代表者情報をリストに追加
            self.processed_entries.append({
                'leader_name': leader_name,
                'team_name': team_name,
                'email': leader_email,
                'password': password_to_save
            })

            # CSVの参加部門から対応するカテゴリーを検索
            # division + "-" + participation_time + "時間" の形式で検索
            category_name_with_time = f"{division}-{fullwidth_participation_time}時間"
            try:
                category = NewCategory.objects.get(category_name=category_name_with_time)
            except NewCategory.DoesNotExist:
                # カテゴリーが見つからない場合のエラーハンドリング
                self.stdout.write(self.style.ERROR(f'カテゴリーが見つかりません: {category_name_with_time}'))
                raise CommandError(f'カテゴリー "{category_name_with_time}" が存在しません。先にカテゴリーを作成してください。')

            # 2. チームの作成とメンバーの登録
            team = Team.objects.create(
                team_name=team_name,
                owner=leader,
                category=category  # eventではなくcategoryを使用
            )

            # メンバーの登録(代表者を含む)
            Member.objects.create(
                team=team,
                user=leader,
                firstname=firstname,
                lastname=lastname,
                date_of_birth=formatted_leader_birth_date,
                female=is_female,  # 性別を設定
                is_temporary=False  # 代表者は一時的なメンバーではない
            )

            # 追加メンバーの登録(CSVの14列目以降に存在する場合)
            for i in range(1, min(member_count, 5) + 1):  # 最大5人まで
                # 各メンバーは3つのフィールド(名前、生年月日、性別)を持つ
                member_name_idx = 14 + (i-1) * 3
                member_birth_idx = member_name_idx + 1
                member_gender_idx = member_name_idx + 2  # 性別のインデックス

                if len(row) > member_name_idx and row[member_name_idx]:
                    member_name = row[member_name_idx]
                    member_birth = row[member_birth_idx] if len(row) > member_birth_idx else None
                    # メンバーの生年月日もフォーマット変換
                    formatted_member_birth = format_date(member_birth) if member_birth else None
                    member_gender = row[member_gender_idx] if len(row) > member_gender_idx else "Male"
                    member_is_female = member_gender.strip().lower() == "female"

                    # 名前を分割
                    name_parts = member_name.split(' ', 1)
                    lastname = name_parts[0]
                    firstname = name_parts[1] if len(name_parts) > 1 else ""

                    # メンバー用のユーザー作成(メールアドレスは一時的なもの)
                    temp_email = f"{team_name.replace(' ', '_')}_{i}@example.com"

                    # 既存のメンバーチェック
                    try:
                        member_user = CustomUser.objects.filter(email=temp_email).first()
                        if not member_user:
                            raise CustomUser.DoesNotExist()

                        # 既存ユーザーの性別情報を更新
                        if member_user.female != member_is_female:
                            member_user.female = member_is_female
                            member_user.save()

                    except CustomUser.DoesNotExist:
                        import secrets
                        member_user = CustomUser.objects.create_user(
                            email=temp_email,
                            password=secrets.token_urlsafe(12),  # メンバーにはランダムパスワード
                            firstname=firstname,
                            lastname=lastname,
                            date_of_birth=formatted_member_birth,
                            female=member_is_female,  # 性別を設定
                            is_active=False  # メンバーは直接ログインしないのでFalse
                        )

                    Member.objects.create(
                        team=team,
                        user=member_user,
                        firstname=firstname,
                        lastname=lastname,
                        date_of_birth=formatted_member_birth,
                        female=member_is_female,  # 性別を設定
                        is_temporary=True  # 追加メンバーは一時的なメンバーとして設定
                    )

                    self.stdout.write(f' => メンバーを追加しました: {member_name} (性別: {member_gender})')

            # 3. エントリーの作成
            # イベントの実施日をエントリーに割り当て
            # イベントの開始日時から日付部分のみを取得
            entry_date = event.start_datetime.date() if event.start_datetime else None  # イベントの実施日
            entry = Entry.objects.create(
                team=team,
                event=event,
                date=entry_date,  # イベントの実施日をエントリーに割り当て
                zekken_number=zekken_number,
                zekken_label=zekken_label,
                category=category,
                owner=leader,
                is_trial=is_trial  # お試しフラグを設定
            )

            # エントリー登録完了のログ出力
            self.stdout.write(f'チーム "{team_name}" をイベント "{event.event_name}" に登録しました (ゼッケン: {zekken_label}, お試し: {is_trial})')
        except Exception as e:
            self.stdout.write(self.style.ERROR(f'エラーが発生しました: {str(e)}'))
            # エラーが発生してもスキップして続行するため、例外を再スローしない

        except Exception as e:
            self.stdout.write(self.style.ERROR(f'エラーが発生しました: {str(e)}'))
            raise

    def export_processed_entries(self, event_code):
        """処理した代表者情報をCSVファイルに出力"""
        if not self.processed_entries:
            self.stdout.write('処理したエントリーがありません')
            return

        output_file = f"{event_code}_leaders_{timezone.now().strftime('%Y%m%d_%H%M%S')}.csv"

        with open(output_file, 'w', encoding='utf-8', newline='') as csvfile:
            fieldnames = ['代表者名', 'チーム名', 'メールアドレス', 'パスワード']
            writer = csv.DictWriter(csvfile, fieldnames=fieldnames)

            writer.writeheader()
            for entry in self.processed_entries:
                writer.writerow({
                    '代表者名': entry['leader_name'],
                    'チーム名': entry['team_name'],
                    'メールアドレス': entry['email'],
                    'パスワード': entry['password']
                })

        self.stdout.write(self.style.SUCCESS(f'代表者情報をCSVファイルに出力しました: {output_file}'))

    def process_entry_old(self, row, event):
        """CSVの1行からエントリー情報を処理"""
        self.stdout.write(self.style.SUCCESS(f'イベント "{event.event_name}", row="{row}"'))
        try:
            # 新しいCSVフォーマットに対応したインデックス
            participation_time = row[0]
            division = row[1]
            is_trial = row[2].strip() == 'お試し'  # 「お試し」フラグ
            division_number = row[3]
            team_name = row[4]
            leader_name = row[5]
            leader_kana = row[6]
            password = row[7]  # 新しいフィールド:パスワード
            member_count = int(row[8])
            zekken_label = row[9]  # ゼッケンラベル
            zekken_number = row[10]  # ナンバー
            leader_email = row[11]
            leader_birth_date = row[12]

            # 1. CustomUserを検索または作成
            try:
                leader = CustomUser.objects.get(email=leader_email)
                self.stdout.write(f'既存ユーザーを代表者として使用します: {leader_email}')
            except CustomUser.DoesNotExist:
                # 新規ユーザーの場合はランダムパスワードを生成
                import secrets
                # leader_nameを空白で分離
                name_parts = leader_name.split(' ', 1)
                lastname = name_parts[0]
                firstname = name_parts[1] if len(name_parts) > 1 else ""

                leader = CustomUser.objects.create_user(
                    email=leader_email,
                    password=password,
                    firstname=firstname,  # 名前の後半部分
                    lastname=lastname,  # 名前の前半部分
                    birth_date=leader_birth_date,
                    is_active=True
                )
                self.stdout.write(f'代表者を新規作成しました: {leader_email} (パスワード: {password})')

            # CSVの参加部門から対応するカテゴリーを検索
            try:
                category = NewCategory.objects.get(category_name=division)
                self.stdout.write(f'カテゴリーを見つけました: {category.category_name}')
            except NewCategory.DoesNotExist:
                # カテゴリーが見つからない場合のエラーハンドリング
                self.stdout.write(self.style.ERROR(f'カテゴリーが見つかりません: {division}'))
                raise CommandError(f'カテゴリー "{division}" が存在しません。先にカテゴリーを作成してください。')

            # 2. チームの作成とメンバーの登録
            team = Team.objects.create(
                team_name=team_name,
                owner=leader,
                category=category
            )

            Member.objects.create(
                team=team,
                user=leader,
                is_leader=True,
                firstname=leader.firstname,
                lastname=leader.lastname,
                date_of_birth=leader.date_of_birth,
                is_temporary=False  # 代表者は一時的なメンバーではない
            )

            # 追加メンバーの登録(CSVの13列目以降に存在する場合)
            for i in range(1, min(member_count, 5) + 1):  # 最大5人まで
                member_name_idx = 13 + (i-1) * 2
                member_birth_idx = member_name_idx + 1

                if len(row) > member_name_idx and row[member_name_idx]:
                    member_name = row[member_name_idx]
                    member_birth = row[member_birth_idx] if len(row) > member_birth_idx else None

                    # メンバー用のユーザー作成(メールアドレスは一時的なもの)
                    temp_email = f"{team_name.replace(' ', '_')}_{i}@example.com"

                    # 既存のメンバーチェック
                    try:
                        member_user = CustomUser.objects.filter(name=member_name).first()
                        if not member_user:
                            raise CustomUser.DoesNotExist()
                    except CustomUser.DoesNotExist:
                        import secrets
                        member_user = CustomUser.objects.create_user(
                            email=temp_email,
                            password=secrets.token_urlsafe(12),  # メンバーにはランダムパスワード
                            name=member_name,
                            birth_date=member_birth,
                            is_active=False  # メンバーは直接ログインしないのでFalse
                        )

                    # 名前を分割(姓と名の分離)
                    name_parts = member_name.split(' ', 1)
                    firstname = name_parts[1] if len(name_parts) > 1 else ""
                    lastname = name_parts[0]

                    Member.objects.create(
                        team=team,
                        user=member_user,
                        is_leader=False,
                        firstname=firstname,
                        lastname=lastname,
                        date_of_birth=member_birth,
                        is_temporary=True  # 追加メンバーは一時的なメンバーとして設定
                    )

            # 3. エントリーの作成
            entry = Entry.objects.create(
                team=team,
                event=event,
                zekken_number=zekken_number,
                zekken_label=zekken_label,  # 新しいフィールドに設定
                class_type=division,
                leader=leader,
            )

            # スタート記録の追加
            # GpsLog.record_start(entry)

            self.stdout.write(f'チーム "{team_name}" を登録しました (お試し: {is_trial})')

        except Exception as e:
            self.stdout.write(self.style.ERROR(f'エラーが発生しました: {str(e)}'))
            raise

    def transfer_to_gifuroge(self, event):
        """rogdbからgifurogeデータベースへデータを転送"""
        self.stdout.write('gifurogeデータベースへのデータ転送を開始します')

        with connections['gifuroge'].cursor() as cursor:
            try:
                # 1. Event data transfer from NewEvent2 to event_table
                self.stdout.write('イベントデータを転送中...')

                # Extract fields from the event object
                event_code = event.event_name
                event_name = event.event_description or event.event_name
                start_datetime = event.start_datetime

                # Format start_datetime to get only the date part
                event_date = start_datetime.date() if start_datetime else None

                cursor.execute("""
                    INSERT INTO event_table
                    (event_code, event_name, start_time, event_day)
                    VALUES (%s, %s, %s, %s)
                    ON CONFLICT (event_code) DO UPDATE
                    SET event_name = %s, start_time = %s, event_day = %s
                """, [
                    event_code, event_name, start_datetime, event_date,
                    event_name, start_datetime, event_date
                ])

                self.stdout.write(f'イベント "{event_code}" を転送しました')

                # 4. Locationテーブルからcheckpoint_tableへの転送
                self.stdout.write('checkpointデータを転送中...')
                locations = Location.objects.filter(group=event.event_name)
                # Print the number of location records
                location_count = locations.count()
                self.stdout.write(f'checkpointデータ: {location_count}件を転送中...')

                for location in locations:
                    # Display the cp_number, event_code, and colabo_company_memo
                    # self.stdout.write(f' CP: {location.cp}, Event: {event.event_name}, Memo: {"" or "(empty)"}')
                    cursor.execute("""
                        INSERT INTO checkpoint_table
                        (cp_number, event_code, cp_name, latitude, longitude, photo_point, buy_point, sample_photo, colabo_company_memo)
                        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)
                        ON CONFLICT (cp_number, event_code,colabo_company_memo) DO UPDATE
                        SET cp_name = %s, latitude = %s, longitude = %s, photo_point = %s, buy_point = %s, sample_photo = %s, colabo_company_memo = %s
                    """, [
                        location.cp, event.event_name, location.location_name,
                        location.latitude, location.longitude, location.checkin_point,
                        location.buy_point, location.photos, '',
                        location.location_name, location.latitude, location.longitude,
                        location.checkin_point, location.buy_point, location.photos, ''
                    ])

                    # If cp=-1, insert another record with cp=-2
                    if location.cp == -1:
                        cursor.execute("""
                            INSERT INTO checkpoint_table
                            (cp_number, event_code, cp_name, latitude, longitude, photo_point, buy_point, sample_photo, colabo_company_memo)
                            VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)
                            ON CONFLICT (cp_number, event_code,colabo_company_memo) DO UPDATE
                            SET cp_name = %s, latitude = %s, longitude = %s, photo_point = %s, buy_point = %s, sample_photo = %s, colabo_company_memo = %s
                        """, [
                            -2, event.event_name, location.location_name,
                            location.latitude, location.longitude, location.checkin_point,
                            location.buy_point, location.photos, '',
                            location.location_name, location.latitude, location.longitude,
                            location.checkin_point, location.buy_point, location.photos, ''
                        ])

                # 5. user_tableへの転送をスキップ
                self.stdout.write('ユーザーデータの転送をスキップします')

                # 6. Teamテーブルからteam_tableへの転送(修正版)
                entries = Entry.objects.filter(event__event_name=event.event_name)
                # Print the number of team entries
                entry_count = entries.count()
                self.stdout.write(f'チームデータ: {entry_count}件を転送中...')

                for entry in entries:
                    team = entry.team

                    # 「お試し」かどうかを判定
                    is_trial = False
                    if hasattr(entry, 'zekken_label') and entry.zekken_label and 'お試し' in entry.zekken_label:
                        is_trial = True

                    # パスワード処理
                    leader = team.owner
                    user_password = ''

                    # リーダーが新規登録のユーザーかどうかを確認
                    if hasattr(leader, '_password') and leader._password:
                        user_password = leader._password
                    else:
                        # 既存のユーザーの場合はパスワードを空にする
                        user_password = '(existing)'

                    cursor.execute("""
                        INSERT INTO team_table
                        (zekken_number, event_code, team_name, class_name, password, trial)
                        VALUES (%s, %s, %s, %s, %s, %s)
                        ON CONFLICT (zekken_number, event_code) DO UPDATE
                        SET team_name = %s, class_name = %s, password = %s, trial = %s
                    """, [
                        entry.zekken_label, event.event_name, team.team_name,
                        team.category.category_name, user_password, is_trial,
                        team.team_name, team.category.category_name,
                        user_password, is_trial
                    ])

                self.stdout.write(self.style.SUCCESS('gifurogeデータベースへの転送が完了しました'))

            except Exception as e:
                self.stdout.write(self.style.ERROR(f'転送中にエラーが発生しました: {str(e)}'))
                raise

    def transfer_to_gifuroge_old(self, event):
        """rogdbからgifurogeデータベースへデータを転送"""
        self.stdout.write('gifurogeデータベースへのデータ転送を開始します')

        with connections['gifuroge'].cursor() as cursor:
            try:
                # 4. Locationテーブルからcheckpoint_tableへの転送
                self.stdout.write('checkpointデータを転送中...')
                locations = Location.objects.filter(event=event)

                for location in locations:
                    cursor.execute("""
                        INSERT INTO checkpoint_table
                        (checkpoint_id, checkpoint_name, point_value, latitude, longitude, event_code)
                        VALUES (%s, %s, %s, %s, %s, %s)
                        ON CONFLICT (checkpoint_id, event_code) DO UPDATE
                        SET checkpoint_name = %s, point_value = %s, latitude = %s, longitude = %s
                    """, [
                        location.id, location.name, location.point_value,
                        location.latitude, location.longitude, event.event_name,
                        location.name, location.point_value,
                        location.latitude, location.longitude
                    ])

                # 5. CustomUserテーブルからuser_tableへの転送
                self.stdout.write('ユーザーデータを転送中...')
                entries = Entry.objects.filter(event=event)
                users = CustomUser.objects.filter(entry__event=event).distinct()

                for user in users:
                    cursor.execute("""
                        INSERT INTO user_table
                        (user_id, name, email, event_code)
                        VALUES (%s, %s, %s, %s)
                        ON CONFLICT (user_id, event_code) DO UPDATE
                        SET name = %s, email = %s
                    """, [
                        user.id, user.name, user.email, event.event_name,
                        user.name, user.email
                    ])

                # 6. Teamテーブルからteam_tableへの転送(trialフィールドを追加)
                self.stdout.write('チームデータを転送中...')
                teams = Team.objects.filter(entry__event=event).distinct()

                for team in teams:
                    entry = Entry.objects.get(team=team, event=event)

                    # CSVで「お試し」フラグがあったかどうかを確認
                    # ここでは仮にTeamモデルから判断できないので別途Entry.zekken_labelとの比較などで判断
                    is_trial = False
                    try:
                        # お試しフラグの判定ロジックを実装
                        # 実際のデータ構造に基づいて修正が必要
                        entries_with_trial = Entry.objects.filter(
                            team=team, event=event
                        ).first()
                        if entries_with_trial:
                            # ここでお試しフラグを設定する実装が必要
                            # 例えば特定のゼッケンラベルパターンでお試し判定など
                            pass
                    except:
                        is_trial = False

                    cursor.execute("""
                        INSERT INTO team_table
                        (team_id, team_name, class_type, zekken_number, leader_id, event_code, trial)
                        VALUES (%s, %s, %s, %s, %s, %s, %s)
                        ON CONFLICT (team_id, event_code) DO UPDATE
                        SET team_name = %s, class_type = %s, zekken_number = %s, leader_id = %s, trial = %s
                    """, [
                        team.id, team.team_name, entry.class_type, entry.zekken_number,
                        team.leader.id, event.event_name, is_trial,
                        team.team_name, entry.class_type, entry.zekken_number, team.leader.id, is_trial
                    ])

                self.stdout.write(self.style.SUCCESS('gifurogeデータベースへの転送が完了しました'))

            except Exception as e:
                self.stdout.write(self.style.ERROR(f'転送中にエラーが発生しました: {str(e)}'))
                raise
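For reference, a minimal sketch of invoking this management command from Python; the CSV path and event code below are placeholders:

```python
# Hypothetical invocation of the import command defined above.
# "participants.csv" and "GIFU2025" are placeholder arguments.
from django.core.management import call_command

call_command("import_event_data", "participants.csv", "GIFU2025")
```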
0  rog/mapping.py  Normal file → Executable file
0  rog/middleware/__init__.py  Normal file → Executable file
0  rog/middleware/ip_blocking.py  Normal file → Executable file
0  rog/migration_scripts.py  Normal file → Executable file
@@ -1,351 +0,0 @@
|
|||||||
# Generated by Django 3.2.9 on 2022-05-04 15:05
|
|
||||||
|
|
||||||
from django.conf import settings
|
|
||||||
import django.contrib.gis.db.models.fields
|
|
||||||
from django.db import migrations, models
|
|
||||||
import django.db.models.deletion
|
|
||||||
import rog.models
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
|
|
||||||
initial = True
|
|
||||||
|
|
||||||
dependencies = [
|
|
||||||
('auth', '0012_alter_user_first_name_max_length'),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.CreateModel(
|
|
||||||
name='JpnAdminMainPerf',
|
|
||||||
fields=[
|
|
||||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
|
||||||
('geom', django.contrib.gis.db.models.fields.MultiPolygonField(blank=True, null=True, srid=4326)),
|
|
||||||
('adm0_en', models.CharField(blank=True, max_length=254, null=True)),
|
|
||||||
('adm0_ja', models.CharField(blank=True, max_length=254, null=True)),
|
|
||||||
('adm0_pcode', models.CharField(blank=True, max_length=254, null=True)),
|
|
||||||
('adm1_en', models.CharField(blank=True, max_length=254, null=True)),
|
|
||||||
('adm1_ja', models.CharField(blank=True, max_length=254, null=True)),
|
|
||||||
('adm1_pcode', models.CharField(blank=True, max_length=254, null=True)),
|
|
||||||
],
|
|
||||||
options={
|
|
||||||
'db_table': 'jpn_admin_main_perf',
|
|
||||||
'managed': False,
|
|
||||||
},
|
|
||||||
),
|
|
||||||
migrations.CreateModel(
|
|
||||||
name='JpnAdminPerf',
|
|
||||||
fields=[
|
|
||||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
|
||||||
('geom', django.contrib.gis.db.models.fields.MultiLineStringField(blank=True, null=True, srid=4326)),
|
|
||||||
('et_id', models.IntegerField(blank=True, null=True)),
|
|
||||||
('et_right', models.CharField(blank=True, max_length=80, null=True)),
|
|
||||||
('et_left', models.CharField(blank=True, max_length=80, null=True)),
|
|
||||||
('adm2_l', models.CharField(blank=True, max_length=50, null=True)),
|
|
||||||
('adm1_l', models.CharField(blank=True, max_length=50, null=True)),
|
|
||||||
('adm0_l', models.CharField(blank=True, max_length=50, null=True)),
|
|
||||||
('adm0_r', models.CharField(blank=True, max_length=50, null=True)),
|
|
||||||
('adm1_r', models.CharField(blank=True, max_length=50, null=True)),
|
|
||||||
('adm2_r', models.CharField(blank=True, max_length=50, null=True)),
|
|
||||||
('admlevel', models.IntegerField(blank=True, null=True)),
|
|
||||||
],
|
|
||||||
options={
|
|
||||||
'db_table': 'jpn_admin_perf',
|
|
||||||
'managed': False,
|
|
||||||
},
|
|
||||||
),
|
|
||||||
migrations.CreateModel(
|
|
||||||
name='JpnSubPerf',
|
|
||||||
fields=[
|
|
||||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
|
||||||
('geom', django.contrib.gis.db.models.fields.MultiPolygonField(blank=True, null=True, srid=4326)),
|
|
||||||
('adm0_en', models.CharField(blank=True, max_length=254, null=True)),
|
|
||||||
('adm0_ja', models.CharField(blank=True, max_length=254, null=True)),
|
|
||||||
('adm0_pcode', models.CharField(blank=True, max_length=254, null=True)),
|
|
||||||
('adm1_en', models.CharField(blank=True, max_length=254, null=True)),
|
|
||||||
('adm1_ja', models.CharField(blank=True, max_length=254, null=True)),
|
|
||||||
('adm1_pcode', models.CharField(blank=True, max_length=254, null=True)),
|
|
||||||
('adm2_ja', models.CharField(blank=True, max_length=254, null=True)),
|
|
||||||
('adm2_en', models.CharField(blank=True, max_length=254, null=True)),
|
|
||||||
('adm2_pcode', models.CharField(blank=True, max_length=254, null=True)),
|
|
||||||
],
|
|
||||||
options={
|
|
||||||
'db_table': 'jpn_sub_perf',
|
|
||||||
'managed': False,
|
|
||||||
},
|
|
||||||
),
|
|
||||||
migrations.CreateModel(
|
|
||||||
name='CustomUser',
|
|
||||||
fields=[
|
|
||||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
|
||||||
('password', models.CharField(max_length=128, verbose_name='password')),
|
|
||||||
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
|
|
||||||
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
|
|
||||||
('email', models.EmailField(max_length=254, unique=True, verbose_name='email address')),
|
|
||||||
('is_staff', models.BooleanField(default=False)),
|
|
||||||
('is_active', models.BooleanField(default=False)),
|
|
||||||
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
|
|
||||||
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
|
|
||||||
],
|
|
||||||
options={
|
|
||||||
'abstract': False,
|
|
||||||
},
|
|
||||||
),
|
|
||||||
migrations.CreateModel(
|
|
||||||
name='Location',
|
|
||||||
fields=[
|
|
||||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
|
||||||
('location_id', models.IntegerField(blank=True, null=True, verbose_name='Location id')),
|
|
||||||
('location_name', models.CharField(default='--- 場所をお願いします --', max_length=255, verbose_name='Location Name')),
|
|
||||||
('category', models.CharField(blank=True, max_length=255, null=True, verbose_name='Category')),
|
|
||||||
('zip', models.CharField(blank=True, max_length=12, null=True, verbose_name='Zip code')),
|
|
||||||
('address', models.CharField(blank=True, max_length=512, null=True, verbose_name='Address')),
|
|
||||||
('prefecture', models.CharField(blank=True, max_length=255, null=True, verbose_name='Prefecture')),
|
|
||||||
('area', models.CharField(blank=True, max_length=255, null=True, verbose_name='Area')),
|
|
||||||
('city', models.CharField(blank=True, max_length=255, null=True, verbose_name='City')),
|
|
||||||
('latitude', models.FloatField(blank=True, null=True, verbose_name='Latitude')),
|
|
||||||
('longitude', models.FloatField(blank=True, null=True, verbose_name='Latitude')),
|
|
||||||
('photos', models.CharField(blank=True, max_length=255, null=True, verbose_name='Phptos')),
|
|
||||||
('videos', models.CharField(blank=True, max_length=255, null=True, verbose_name='Videos')),
|
|
||||||
('webcontents', models.CharField(blank=True, max_length=255, null=True, verbose_name='Web Content')),
|
|
||||||
('status', models.CharField(blank=True, max_length=255, null=True, verbose_name='Status')),
|
|
||||||
('portal', models.CharField(blank=True, max_length=255, null=True, verbose_name='Portal')),
|
|
||||||
                ('group', models.CharField(blank=True, max_length=255, null=True, verbose_name='Group')),
                ('phone', models.CharField(blank=True, max_length=255, null=True, verbose_name='Phone')),
                ('fax', models.CharField(blank=True, max_length=255, null=True, verbose_name='Fax')),
                ('email', models.EmailField(blank=True, max_length=255, null=True, verbose_name='Email')),
                ('facility', models.CharField(blank=True, max_length=255, null=True, verbose_name='Facility')),
                ('remark', models.CharField(blank=True, max_length=255, null=True, verbose_name='Remarks')),
                ('tags', models.CharField(blank=True, max_length=512, null=True, verbose_name='Tags')),
                ('parammeters', models.CharField(blank=True, max_length=512, null=True, verbose_name='Parameters')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_updated_at', models.DateTimeField(auto_now=True)),
                ('geom', django.contrib.gis.db.models.fields.MultiPointField(srid=4326)),
                ('last_updated_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='location_updated_user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='ShapeLayers',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, verbose_name='Shape Layer')),
                ('file', models.FileField(blank=True, upload_to=rog.models.get_file_path)),
                ('uploaded_date', models.DateField(auto_now_add=True)),
                ('layerof', models.IntegerField(choices=[(1, 'location'), (2, 'Location_line'), (3, 'Location_polygon')], default=1)),
                ('table_name', models.CharField(blank=True, max_length=255, verbose_name='Table name')),
            ],
        ),
        migrations.CreateModel(
            name='TestModel',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('testbane', models.CharField(max_length=355, verbose_name='test field')),
                ('wanttogo', models.BooleanField(default=False)),
                ('like', models.BooleanField(default=False)),
                ('checkin', models.BooleanField(default=False)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_updated_at', models.DateTimeField(auto_now=True)),
            ],
        ),
        migrations.CreateModel(
            name='TravelList',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('travel_id', models.IntegerField(verbose_name='Travel Id')),
                ('start_date', models.DateTimeField(blank=True, null=True, verbose_name='Start date')),
                ('finish_date', models.DateTimeField(blank=True, null=True, verbose_name='End date')),
                ('category', models.CharField(choices=[('PRIVATE', 'Private'), ('GROUP', 'Group'), ('AGENT', 'Agent'), ('ROGAINING', 'Rogaining')], max_length=256)),
                ('title', models.CharField(max_length=255, verbose_name='Title')),
                ('transportation', models.CharField(blank=True, max_length=255, null=True, verbose_name='Transpotation')),
                ('moving_distance', models.IntegerField(blank=True, null=True)),
                ('duration', models.DurationField(blank=True, null=True, verbose_name='Duration')),
                ('eta', models.DateTimeField(blank=True, null=True)),
                ('parammeters', models.CharField(blank=True, max_length=512, null=True, verbose_name='Parameters')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_updated_at', models.DateTimeField(auto_now=True)),
                ('last_updated_user', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, related_name='travel_list_updated_user', to=settings.AUTH_USER_MODEL)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Useractions',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('wanttogo', models.BooleanField(default=False)),
                ('like', models.BooleanField(default=False)),
                ('checkin', models.BooleanField(default=False)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_updated_at', models.DateTimeField(auto_now=True)),
                ('location', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='action_location', to='rog.location')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='action_user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='TravelPoint',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('distance', models.FloatField(blank=True, null=True)),
                ('transportation', models.CharField(blank=True, max_length=255, null=True, verbose_name='Transpotation')),
                ('eta', models.DateTimeField(blank=True, null=True)),
                ('order_number', models.IntegerField(blank=True, null=True)),
                ('parammeters', models.CharField(blank=True, max_length=512, null=True, verbose_name='Parameters')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_updated_at', models.DateTimeField(auto_now=True)),
                ('last_updated_user', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, related_name='travelpoint_updated_user', to=settings.AUTH_USER_MODEL)),
                ('location', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rog.location')),
                ('travel_list', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='rog.travellist')),
            ],
        ),
        migrations.CreateModel(
            name='SystemSettings',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('setting_name', models.CharField(max_length=255, verbose_name='Settings Name')),
                ('version', models.CharField(blank=True, max_length=10, null=True, verbose_name='Version')),
                ('effective_date', models.DateTimeField()),
                ('end_date', models.DateTimeField()),
                ('parammeters', models.CharField(max_length=512, verbose_name='Parameters')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_updated_at', models.DateTimeField(auto_now=True)),
                ('last_updated_user', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, related_name='system_setting_updated_user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='RogUser',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('email', models.EmailField(max_length=254, verbose_name='Email')),
                ('phone', models.CharField(max_length=55, verbose_name='Phone Number')),
                ('first_name', models.CharField(max_length=255, verbose_name='First Name')),
                ('middle_name', models.CharField(blank=True, max_length=255, null=True, verbose_name='Middle Name')),
                ('last_name', models.CharField(max_length=255, verbose_name='last_name')),
                ('nickname', models.CharField(blank=True, max_length=255, null=True, verbose_name='Nickname')),
                ('country', models.CharField(default='Japan', max_length=255, verbose_name='Country')),
                ('language', models.CharField(default='Japanese', max_length=255, verbose_name='Language')),
                ('prefecture', models.CharField(blank=True, max_length=255, null=True, verbose_name='Prefecture')),
                ('sex', models.CharField(blank=True, default='unknown', max_length=255, null=True, verbose_name='Sex')),
                ('birthyear', models.IntegerField(blank=True, null=True, verbose_name='Birth year')),
                ('family_structure', models.IntegerField(blank=True, null=True, verbose_name='Family Structure')),
                ('level', models.IntegerField(blank=True, default=0, null=True, verbose_name='Level')),
                ('parammeters', models.CharField(max_length=512, verbose_name='Parameters')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_updated_at', models.DateTimeField(auto_now=True)),
                ('introducer', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, related_name='introduced_uesr', to=settings.AUTH_USER_MODEL)),
                ('last_updated_user', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, related_name='roguser_updated_user', to=settings.AUTH_USER_MODEL)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Location_polygon',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('location_id', models.IntegerField(blank=True, null=True, verbose_name='Location id')),
                ('location_name', models.CharField(max_length=255, verbose_name='Location Name')),
                ('category', models.CharField(blank=True, max_length=255, null=True, verbose_name='Category')),
                ('zip', models.CharField(blank=True, max_length=12, null=True, verbose_name='Zip code')),
                ('address', models.CharField(blank=True, max_length=512, null=True, verbose_name='Address')),
                ('prefecture', models.CharField(blank=True, max_length=255, null=True, verbose_name='Prefecture')),
                ('area', models.CharField(blank=True, max_length=255, null=True, verbose_name='Area')),
                ('city', models.CharField(blank=True, max_length=255, null=True, verbose_name='City')),
                ('photos', models.CharField(blank=True, max_length=255, null=True, verbose_name='Phptos')),
                ('videos', models.CharField(blank=True, max_length=255, null=True, verbose_name='Videos')),
                ('webcontents', models.CharField(blank=True, max_length=255, null=True, verbose_name='Web Content')),
                ('status', models.CharField(blank=True, max_length=255, null=True, verbose_name='Status')),
                ('portal', models.CharField(blank=True, max_length=255, null=True, verbose_name='Portal')),
                ('group', models.CharField(blank=True, max_length=255, null=True, verbose_name='Group')),
                ('phone', models.CharField(blank=True, max_length=255, null=True, verbose_name='Phone')),
                ('fax', models.CharField(blank=True, max_length=255, null=True, verbose_name='Fax')),
                ('email', models.EmailField(blank=True, max_length=255, null=True, verbose_name='Email')),
                ('facility', models.CharField(blank=True, max_length=255, null=True, verbose_name='Facility')),
                ('remark', models.CharField(blank=True, max_length=255, null=True, verbose_name='Remarks')),
                ('tags', models.CharField(blank=True, max_length=512, null=True, verbose_name='Tags')),
                ('parammeters', models.CharField(blank=True, max_length=512, null=True, verbose_name='Parameters')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_updated_at', models.DateTimeField(auto_now=True)),
                ('geom', django.contrib.gis.db.models.fields.MultiPolygonField(blank=True, null=True, srid=4326)),
                ('last_updated_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='location_polygon_updated_user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Location_line',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('location_id', models.IntegerField(blank=True, null=True, verbose_name='Location id')),
                ('location_name', models.CharField(max_length=255, verbose_name='Location Name')),
                ('category', models.CharField(blank=True, max_length=255, null=True, verbose_name='Category')),
                ('zip', models.CharField(blank=True, max_length=12, null=True, verbose_name='Zip code')),
                ('address', models.CharField(blank=True, max_length=512, null=True, verbose_name='Address')),
                ('prefecture', models.CharField(blank=True, max_length=255, null=True, verbose_name='Prefecture')),
                ('area', models.CharField(blank=True, max_length=255, null=True, verbose_name='Area')),
                ('city', models.CharField(blank=True, max_length=255, null=True, verbose_name='City')),
                ('photos', models.CharField(blank=True, max_length=255, null=True, verbose_name='Phptos')),
                ('videos', models.CharField(blank=True, max_length=255, null=True, verbose_name='Videos')),
                ('webcontents', models.CharField(blank=True, max_length=255, null=True, verbose_name='Web Content')),
                ('status', models.CharField(blank=True, max_length=255, null=True, verbose_name='Status')),
                ('portal', models.CharField(blank=True, max_length=255, null=True, verbose_name='Portal')),
                ('group', models.CharField(blank=True, max_length=255, null=True, verbose_name='Group')),
                ('phone', models.CharField(blank=True, max_length=255, null=True, verbose_name='Phone')),
                ('fax', models.CharField(blank=True, max_length=255, null=True, verbose_name='Fax')),
                ('email', models.EmailField(blank=True, max_length=255, null=True, verbose_name='Email')),
                ('facility', models.CharField(blank=True, max_length=255, null=True, verbose_name='Facility')),
                ('remark', models.CharField(blank=True, max_length=255, null=True, verbose_name='Remarks')),
                ('tags', models.CharField(blank=True, max_length=512, null=True, verbose_name='Tags')),
                ('parammeters', models.CharField(blank=True, max_length=512, null=True, verbose_name='Parameters')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_updated_at', models.DateTimeField(auto_now=True)),
                ('geom', django.contrib.gis.db.models.fields.MultiLineStringField(blank=True, null=True, srid=4326)),
                ('last_updated_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='location_line_updated_user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='JoinedEvent',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('tagname', models.CharField(blank=True, max_length=255, null=True, verbose_name='Tag Name')),
                ('status', models.CharField(choices=[('REGISTERED', 'Registered'), ('ACCEPTED', 'accepted'), ('PAID', 'paid'), ('JOINED', 'joined'), ('CANCELED', 'Canceled')], max_length=256)),
                ('registrationid', models.CharField(max_length=56, verbose_name='Registration Id')),
                ('payment_code', models.CharField(max_length=255, verbose_name='Payment Code')),
                ('paid', models.IntegerField(default=0, verbose_name='Paid Amount')),
                ('remark', models.CharField(blank=True, max_length=255, null=True, verbose_name='Remark')),
                ('parammeters', models.CharField(max_length=512, verbose_name='Parameters')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_updated_at', models.DateTimeField(auto_now=True)),
                ('last_updated_user', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, related_name='joined_event_updated_user', to=settings.AUTH_USER_MODEL)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Favorite',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('good', models.IntegerField(default=0, verbose_name='Good')),
                ('favorite', models.IntegerField(default=0, verbose_name='Favorite')),
                ('evaluation', models.IntegerField(default=0, verbose_name='Evaluation')),
                ('number_visit', models.IntegerField(default=0, verbose_name='Good')),
                ('last_visited', models.DateTimeField(blank=True, null=True, verbose_name='Last Visited')),
                ('parammeters', models.CharField(blank=True, max_length=512, null=True, verbose_name='Parameters')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_updated_at', models.DateTimeField(auto_now=True)),
                ('last_updated_user', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, related_name='favorite_updated_user', to=settings.AUTH_USER_MODEL)),
                ('location', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rog.location')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Event',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('tagname', models.CharField(blank=True, max_length=512, null=True, verbose_name='Parameters')),
                ('status', models.CharField(choices=[('PREPARING', 'Preparing'), ('PROMOTION', 'Promotion'), ('EVENT', 'Event'), ('END', 'End')], max_length=256)),
                ('price', models.IntegerField(default=0, verbose_name='Paid Amount')),
                ('promotion_date', models.DateTimeField(blank=True, null=True, verbose_name='Promotion date')),
                ('event_start', models.DateTimeField(blank=True, null=True, verbose_name='Promotion date')),
                ('event_end', models.DateTimeField(blank=True, null=True, verbose_name='Promotion date')),
                ('remark', models.CharField(blank=True, max_length=256, null=True)),
                ('parammeters', models.CharField(blank=True, max_length=512, null=True, verbose_name='Parameters')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_updated_at', models.DateTimeField(auto_now=True)),
                ('last_updated_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='event_updated_user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
@ -1,23 +0,0 @@
# Generated by Django 3.2.9 on 2022-05-11 11:17

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='useractions',
            name='order',
            field=models.IntegerField(default=-1),
        ),
        migrations.AlterField(
            model_name='customuser',
            name='is_active',
            field=models.BooleanField(default=True),
        ),
    ]
@ -1,18 +0,0 @@
# Generated by Django 3.2.9 on 2022-05-11 17:52

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0002_auto_20220511_2017'),
    ]

    operations = [
        migrations.AlterField(
            model_name='useractions',
            name='order',
            field=models.IntegerField(default=0),
        ),
    ]
@ -1,55 +0,0 @@
# Generated by Django 3.2.9 on 2022-06-05 15:23

import django.contrib.gis.db.models.fields
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0003_alter_useractions_order'),
    ]

    operations = [
        migrations.CreateModel(
            name='GifuAreas',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('geom', django.contrib.gis.db.models.fields.MultiPolygonField(blank=True, null=True, srid=4326)),
                ('adm0_en', models.CharField(blank=True, max_length=254, null=True)),
                ('adm0_ja', models.CharField(blank=True, max_length=254, null=True)),
                ('adm0_pcode', models.CharField(blank=True, max_length=254, null=True)),
                ('adm1_en', models.CharField(blank=True, max_length=254, null=True)),
                ('adm1_ja', models.CharField(blank=True, max_length=254, null=True)),
                ('adm1_pcode', models.CharField(blank=True, max_length=254, null=True)),
                ('adm2_ja', models.CharField(blank=True, max_length=254, null=True)),
                ('adm2_en', models.CharField(blank=True, max_length=254, null=True)),
                ('adm2_pcode', models.CharField(blank=True, max_length=254, null=True)),
                ('area_nm', models.CharField(blank=True, max_length=254, null=True)),
            ],
            options={
                'db_table': 'gifu_areas',
                'managed': False,
            },
        ),
        migrations.AddField(
            model_name='location',
            name='auto_checkin',
            field=models.BooleanField(default=False, verbose_name='Is Autologin'),
        ),
        migrations.AddField(
            model_name='location',
            name='checkin_radious',
            field=models.IntegerField(blank=True, null=True, verbose_name='Checkin Radious'),
        ),
        migrations.AddField(
            model_name='location',
            name='event_active',
            field=models.BooleanField(default=True, verbose_name='Is Autologin'),
        ),
        migrations.AddField(
            model_name='location',
            name='event_name',
            field=models.CharField(blank=True, max_length=512, null=True, verbose_name='Tags'),
        ),
    ]
@ -1,23 +0,0 @@
# Generated by Django 3.2.9 on 2022-06-06 06:23

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0004_auto_20220606_0023'),
    ]

    operations = [
        migrations.AlterField(
            model_name='location',
            name='event_active',
            field=models.BooleanField(default=True, verbose_name='Is Event active'),
        ),
        migrations.AlterField(
            model_name='location',
            name='event_name',
            field=models.CharField(blank=True, max_length=512, null=True, verbose_name='Event name'),
        ),
    ]
@ -1,18 +0,0 @@
# Generated by Django 3.2.9 on 2022-06-07 13:00

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0005_auto_20220606_1523'),
    ]

    operations = [
        migrations.AddField(
            model_name='location',
            name='paid',
            field=models.BooleanField(default=False, verbose_name='Is Paid'),
        ),
    ]
@ -1,22 +0,0 @@
# Generated by Django 3.2.9 on 2022-06-07 13:07

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0006_location_paid'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='location',
            name='paid',
        ),
        migrations.AddField(
            model_name='roguser',
            name='paid',
            field=models.BooleanField(default=False, verbose_name='Is Paid'),
        ),
    ]
@ -1,18 +0,0 @@
# Generated by Django 3.2.9 on 2022-06-07 13:09

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0007_auto_20220607_2207'),
    ]

    operations = [
        migrations.AlterField(
            model_name='roguser',
            name='parammeters',
            field=models.CharField(blank=True, max_length=512, null=True, verbose_name='Parameters'),
        ),
    ]
@ -1,17 +0,0 @@
# Generated by Django 3.2.9 on 2022-06-07 14:24

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0008_alter_roguser_parammeters'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='roguser',
            name='email',
        ),
    ]
@ -1,18 +0,0 @@
# Generated by Django 3.2.9 on 2022-06-10 06:25

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0009_remove_roguser_email'),
    ]

    operations = [
        migrations.AddField(
            model_name='useractions',
            name='checkinimage',
            field=models.FileField(blank=True, null=True, upload_to='%y%m%d'),
        ),
    ]
@ -1,25 +0,0 @@
# Generated by Django 3.2.9 on 2022-06-12 18:11

from django.conf import settings
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0010_useractions_checkinimage'),
    ]

    operations = [
        migrations.CreateModel(
            name='UserTracks',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('geom', django.contrib.gis.db.models.fields.MultiPointField(srid=4326)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
@ -1,113 +0,0 @@
# Generated by Django 3.2.9 on 2022-06-13 08:58

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0011_usertracks'),
    ]

    operations = [
        migrations.AlterField(
            model_name='location',
            name='address',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Address'),
        ),
        migrations.AlterField(
            model_name='location',
            name='area',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Area'),
        ),
        migrations.AlterField(
            model_name='location',
            name='category',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Category'),
        ),
        migrations.AlterField(
            model_name='location',
            name='city',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='City'),
        ),
        migrations.AlterField(
            model_name='location',
            name='email',
            field=models.EmailField(blank=True, max_length=2048, null=True, verbose_name='Email'),
        ),
        migrations.AlterField(
            model_name='location',
            name='event_name',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Event name'),
        ),
        migrations.AlterField(
            model_name='location',
            name='facility',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Facility'),
        ),
        migrations.AlterField(
            model_name='location',
            name='fax',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Fax'),
        ),
        migrations.AlterField(
            model_name='location',
            name='group',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Group'),
        ),
        migrations.AlterField(
            model_name='location',
            name='location_name',
            field=models.CharField(default='--- 場所をお願いします --', max_length=2048, verbose_name='Location Name'),
        ),
        migrations.AlterField(
            model_name='location',
            name='parammeters',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Parameters'),
        ),
        migrations.AlterField(
            model_name='location',
            name='phone',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Phone'),
        ),
        migrations.AlterField(
            model_name='location',
            name='photos',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Phptos'),
        ),
        migrations.AlterField(
            model_name='location',
            name='portal',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Portal'),
        ),
        migrations.AlterField(
            model_name='location',
            name='prefecture',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Prefecture'),
        ),
        migrations.AlterField(
            model_name='location',
            name='remark',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Remarks'),
        ),
        migrations.AlterField(
            model_name='location',
            name='status',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Status'),
        ),
        migrations.AlterField(
            model_name='location',
            name='tags',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Tags'),
        ),
        migrations.AlterField(
            model_name='location',
            name='videos',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Videos'),
        ),
        migrations.AlterField(
            model_name='location',
            name='webcontents',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Web Content'),
        ),
    ]
@ -1,88 +0,0 @@
# Generated by Django 3.2.9 on 2022-06-18 09:47

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0012_auto_20220613_1758'),
    ]

    operations = [
        migrations.AddField(
            model_name='location',
            name='buy_point',
            field=models.IntegerField(blank=True, default=0, null=True, verbose_name='buy Point'),
        ),
        migrations.AddField(
            model_name='location',
            name='checkin_point',
            field=models.IntegerField(blank=True, default=10, null=True, verbose_name='Checkin Point'),
        ),
        migrations.AddField(
            model_name='location',
            name='checkin_radius',
            field=models.IntegerField(blank=True, default=15, null=True, verbose_name='Checkin radious'),
        ),
        migrations.AddField(
            model_name='location',
            name='evaluation_value',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Evaluation value (評価)'),
        ),
        migrations.AddField(
            model_name='location',
            name='hidden_location',
            field=models.BooleanField(default=False, verbose_name='Is Hidden Location'),
        ),
        migrations.AddField(
            model_name='location',
            name='opening_hours_fri',
            field=models.TimeField(blank=True, null=True, verbose_name='Opening hours frinday (金曜)'),
        ),
        migrations.AddField(
            model_name='location',
            name='opening_hours_mon',
            field=models.TimeField(blank=True, null=True, verbose_name='Opening hours monday (月曜)'),
        ),
        migrations.AddField(
            model_name='location',
            name='opening_hours_sat',
            field=models.TimeField(blank=True, null=True, verbose_name='Opening hours saturday (土曜)'),
        ),
        migrations.AddField(
            model_name='location',
            name='opening_hours_sun',
            field=models.TimeField(blank=True, null=True, verbose_name='Opening hours sunday (日曜)'),
        ),
        migrations.AddField(
            model_name='location',
            name='opening_hours_thu',
            field=models.TimeField(blank=True, null=True, verbose_name='Opening hours thursday (木曜)'),
        ),
        migrations.AddField(
            model_name='location',
            name='opening_hours_tue',
            field=models.TimeField(blank=True, null=True, verbose_name='Opening hours tuesday (火曜)'),
        ),
        migrations.AddField(
            model_name='location',
            name='opening_hours_wed',
            field=models.TimeField(blank=True, null=True, verbose_name='Opening hours wednesday (水曜)'),
        ),
        migrations.AddField(
            model_name='location',
            name='shop_closed',
            field=models.BooleanField(default=False, verbose_name='Shop Closed (休業)'),
        ),
        migrations.AddField(
            model_name='location',
            name='shop_shutdown',
            field=models.BooleanField(default=False, null=True, verbose_name='Shop Shutdown (閉業)'),
        ),
        migrations.AlterField(
            model_name='location',
            name='auto_checkin',
            field=models.BooleanField(default=False, verbose_name='Is AutoCheckin'),
        ),
    ]
@ -1,18 +0,0 @@
# Generated by Django 3.2.9 on 2022-06-18 09:52

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0013_auto_20220618_1847'),
    ]

    operations = [
        migrations.AlterField(
            model_name='location',
            name='shop_shutdown',
            field=models.BooleanField(default=False, verbose_name='Shop Shutdown (閉業)'),
        ),
    ]
@ -1,57 +0,0 @@
# Generated by Django 3.2.9 on 2022-06-19 07:11

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0014_alter_location_shop_shutdown'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='location',
            name='checkin_radious',
        ),
        migrations.AlterField(
            model_name='location',
            name='opening_hours_fri',
            field=models.CharField(blank=True, max_length=512, null=True, verbose_name='Opening hours frinday (金曜)'),
        ),
        migrations.AlterField(
            model_name='location',
            name='opening_hours_mon',
            field=models.CharField(blank=True, max_length=512, null=True, verbose_name='Opening hours monday (月曜)'),
        ),
        migrations.AlterField(
            model_name='location',
            name='opening_hours_sat',
            field=models.CharField(blank=True, max_length=512, null=True, verbose_name='Opening hours saturday (土曜)'),
        ),
        migrations.AlterField(
            model_name='location',
            name='opening_hours_sun',
            field=models.CharField(blank=True, max_length=512, null=True, verbose_name='Opening hours sunday (日曜)'),
        ),
        migrations.AlterField(
            model_name='location',
            name='opening_hours_thu',
            field=models.CharField(blank=True, max_length=512, null=True, verbose_name='Opening hours thursday (木曜)'),
        ),
        migrations.AlterField(
            model_name='location',
            name='opening_hours_tue',
            field=models.CharField(blank=True, max_length=512, null=True, verbose_name='Opening hours tuesday (火曜)'),
        ),
        migrations.AlterField(
            model_name='location',
            name='opening_hours_wed',
            field=models.CharField(blank=True, max_length=512, null=True, verbose_name='Opening hours wednesday (水曜)'),
        ),
        migrations.AlterField(
            model_name='location',
            name='photos',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Photos'),
        ),
    ]
@ -1,21 +0,0 @@
# Generated by Django 3.2.9 on 2022-06-21 09:46

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0015_auto_20220619_1611'),
    ]

    operations = [
        migrations.CreateModel(
            name='ShapeFileLocations',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('shapefile', models.CharField(blank=True, max_length=2048, null=True, verbose_name='Shapelayer')),
                ('locid', models.IntegerField(blank=True, null=True)),
            ],
        ),
    ]
@ -1,23 +0,0 @@
# Generated by Django 3.2.9 on 2022-07-25 07:05

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0016_shapefilelocations'),
    ]

    operations = [
        migrations.AddField(
            model_name='location',
            name='cp',
            field=models.IntegerField(blank=True, null=True, verbose_name='Check Point'),
        ),
        migrations.AddField(
            model_name='location',
            name='subcategory',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Sub Category'),
        ),
    ]
@ -1,23 +0,0 @@
# Generated by Django 3.2.9 on 2022-08-16 07:16

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0017_auto_20220725_1605'),
    ]

    operations = [
        migrations.AddField(
            model_name='location',
            name='sub_loc_id',
            field=models.IntegerField(blank=True, null=True, verbose_name='Sub location id'),
        ),
        migrations.AlterField(
            model_name='location',
            name='cp',
            field=models.FloatField(blank=True, null=True, verbose_name='Check Point'),
        ),
    ]
@ -1,18 +0,0 @@
# Generated by Django 3.2.9 on 2022-08-16 07:25

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0018_auto_20220816_1616'),
    ]

    operations = [
        migrations.AlterField(
            model_name='location',
            name='checkin_radius',
            field=models.FloatField(blank=True, default=15.0, null=True, verbose_name='Checkin radious'),
        ),
    ]
@ -1,23 +0,0 @@
# Generated by Django 3.2.9 on 2022-08-16 07:27

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0019_alter_location_checkin_radius'),
    ]

    operations = [
        migrations.AlterField(
            model_name='location',
            name='buy_point',
            field=models.FloatField(blank=True, default=0, null=True, verbose_name='buy Point'),
        ),
        migrations.AlterField(
            model_name='location',
            name='checkin_point',
            field=models.FloatField(blank=True, default=10, null=True, verbose_name='Checkin Point'),
        ),
    ]
@ -1,69 +0,0 @@
# Generated by Django 3.2.9 on 2022-08-17 05:48

from django.conf import settings
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0020_auto_20220816_1627'),
    ]

    operations = [
        migrations.CreateModel(
            name='templocation',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('location_id', models.IntegerField(blank=True, null=True, verbose_name='Location id')),
                ('sub_loc_id', models.IntegerField(blank=True, null=True, verbose_name='Sub location id')),
                ('cp', models.FloatField(blank=True, null=True, verbose_name='Check Point')),
                ('location_name', models.CharField(default='--- 場所をお願いします --', max_length=2048, verbose_name='Location Name')),
                ('category', models.CharField(blank=True, max_length=2048, null=True, verbose_name='Category')),
                ('subcategory', models.CharField(blank=True, max_length=2048, null=True, verbose_name='Sub Category')),
                ('zip', models.CharField(blank=True, max_length=12, null=True, verbose_name='Zip code')),
                ('address', models.CharField(blank=True, max_length=2048, null=True, verbose_name='Address')),
                ('prefecture', models.CharField(blank=True, max_length=2048, null=True, verbose_name='Prefecture')),
                ('area', models.CharField(blank=True, max_length=2048, null=True, verbose_name='Area')),
                ('city', models.CharField(blank=True, max_length=2048, null=True, verbose_name='City')),
                ('latitude', models.FloatField(blank=True, null=True, verbose_name='Latitude')),
                ('longitude', models.FloatField(blank=True, null=True, verbose_name='Latitude')),
                ('photos', models.CharField(blank=True, max_length=2048, null=True, verbose_name='Photos')),
                ('videos', models.CharField(blank=True, max_length=2048, null=True, verbose_name='Videos')),
                ('webcontents', models.CharField(blank=True, max_length=2048, null=True, verbose_name='Web Content')),
                ('status', models.CharField(blank=True, max_length=2048, null=True, verbose_name='Status')),
                ('portal', models.CharField(blank=True, max_length=2048, null=True, verbose_name='Portal')),
                ('group', models.CharField(blank=True, max_length=2048, null=True, verbose_name='Group')),
                ('phone', models.CharField(blank=True, max_length=2048, null=True, verbose_name='Phone')),
                ('fax', models.CharField(blank=True, max_length=2048, null=True, verbose_name='Fax')),
                ('email', models.EmailField(blank=True, max_length=2048, null=True, verbose_name='Email')),
                ('facility', models.CharField(blank=True, max_length=2048, null=True, verbose_name='Facility')),
                ('remark', models.CharField(blank=True, max_length=2048, null=True, verbose_name='Remarks')),
                ('tags', models.CharField(blank=True, max_length=2048, null=True, verbose_name='Tags')),
                ('event_name', models.CharField(blank=True, max_length=2048, null=True, verbose_name='Event name')),
                ('event_active', models.BooleanField(default=True, verbose_name='Is Event active')),
                ('hidden_location', models.BooleanField(default=False, verbose_name='Is Hidden Location')),
                ('auto_checkin', models.BooleanField(default=False, verbose_name='Is AutoCheckin')),
                ('checkin_radius', models.FloatField(blank=True, default=15.0, null=True, verbose_name='Checkin radious')),
                ('checkin_point', models.FloatField(blank=True, default=10, null=True, verbose_name='Checkin Point')),
                ('buy_point', models.FloatField(blank=True, default=0, null=True, verbose_name='buy Point')),
                ('evaluation_value', models.CharField(blank=True, max_length=2048, null=True, verbose_name='Evaluation value (評価)')),
                ('shop_closed', models.BooleanField(default=False, verbose_name='Shop Closed (休業)')),
                ('shop_shutdown', models.BooleanField(default=False, verbose_name='Shop Shutdown (閉業)')),
                ('opening_hours_mon', models.CharField(blank=True, max_length=512, null=True, verbose_name='Opening hours monday (月曜)')),
                ('opening_hours_tue', models.CharField(blank=True, max_length=512, null=True, verbose_name='Opening hours tuesday (火曜)')),
                ('opening_hours_wed', models.CharField(blank=True, max_length=512, null=True, verbose_name='Opening hours wednesday (水曜)')),
                ('opening_hours_thu', models.CharField(blank=True, max_length=512, null=True, verbose_name='Opening hours thursday (木曜)')),
                ('opening_hours_fri', models.CharField(blank=True, max_length=512, null=True, verbose_name='Opening hours frinday (金曜)')),
                ('opening_hours_sat', models.CharField(blank=True, max_length=512, null=True, verbose_name='Opening hours saturday (土曜)')),
                ('opening_hours_sun', models.CharField(blank=True, max_length=512, null=True, verbose_name='Opening hours sunday (日曜)')),
                ('parammeters', models.CharField(blank=True, max_length=2048, null=True, verbose_name='Parameters')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_updated_at', models.DateTimeField(auto_now=True)),
                ('geom', django.contrib.gis.db.models.fields.MultiPointField(srid=4326)),
                ('last_updated_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='temp_location_updated_user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
@ -1,23 +0,0 @@
# Generated by Django 3.2.9 on 2022-08-22 14:19

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0021_templocation'),
    ]

    operations = [
        migrations.AlterField(
            model_name='shapelayers',
            name='layerof',
            field=models.IntegerField(choices=[(1, 'templocation'), (2, 'Location_line'), (3, 'Location_polygon')], default=1),
        ),
        migrations.AlterField(
            model_name='templocation',
            name='cp',
            field=models.FloatField(default=0, null=True, verbose_name='Check Point'),
        ),
    ]
@ -1,18 +0,0 @@
# Generated by Django 3.2.9 on 2022-08-22 14:19

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0022_auto_20220822_2319'),
    ]

    operations = [
        migrations.AlterField(
            model_name='location',
            name='cp',
            field=models.FloatField(default=0, null=True, verbose_name='Check Point'),
        ),
    ]
@ -1,18 +0,0 @@
# Generated by Django 3.2.9 on 2022-08-29 14:11

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0023_alter_location_cp'),
    ]

    operations = [
        migrations.AddField(
            model_name='customuser',
            name='group',
            field=models.CharField(choices=[('G1', '大垣-初心者'), ('G2', '大垣-3時間'), ('G3', '大垣-5時間')], default='G1', max_length=2),
        ),
    ]
@ -1,23 +0,0 @@
# Generated by Django 3.2.9 on 2022-08-30 05:26

from django.db import migrations, models
import rog.models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0024_customuser_group'),
    ]

    operations = [
        migrations.CreateModel(
            name='UserUpload',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, verbose_name='User uploads')),
                ('file', models.FileField(blank=True, upload_to=rog.models.get_file_path)),
                ('uploaded_date', models.DateField(auto_now_add=True)),
            ],
        ),
    ]
@ -1,21 +0,0 @@
# Generated by Django 3.2.9 on 2022-08-30 10:05

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0025_userupload'),
    ]

    operations = [
        migrations.CreateModel(
            name='UserUploadUser',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('userfile', models.CharField(blank=True, max_length=2048, null=True, verbose_name='User file')),
                ('email', models.CharField(max_length=255, verbose_name='User Email')),
            ],
        ),
    ]
@ -1,18 +0,0 @@
# Generated by Django 3.2.9 on 2022-08-30 10:22

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0026_useruploaduser'),
    ]

    operations = [
        migrations.AlterField(
            model_name='customuser',
            name='group',
            field=models.CharField(choices=[('G1', '大垣-初心者'), ('G2', '大垣-3時間'), ('G3', '大垣-5時間')], default='G1', max_length=255),
        ),
    ]
@ -1,18 +0,0 @@
# Generated by Django 3.2.9 on 2022-08-30 10:58

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0027_alter_customuser_group'),
    ]

    operations = [
        migrations.AlterField(
            model_name='customuser',
            name='group',
            field=models.CharField(choices=[('大垣-初心者', '大垣-初心者'), ('大垣-3時間', '大垣-3時間'), ('大垣-5時間', '大垣-5時間')], default='大垣-初心者', max_length=255),
        ),
    ]
@ -1,24 +0,0 @@
# Generated by Django 3.2.9 on 2022-08-31 04:17

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0028_alter_customuser_group'),
    ]

    operations = [
        migrations.AddField(
            model_name='customuser',
            name='area',
            field=models.CharField(default='city', max_length=255),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='customuser',
            name='cities',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
    ]
@ -1,21 +0,0 @@
# Generated by Django 3.2.9 on 2022-08-31 05:40

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0029_auto_20220831_1317'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='customuser',
            name='area',
        ),
        migrations.RemoveField(
            model_name='customuser',
            name='cities',
        ),
    ]
@ -1,28 +0,0 @@
# Generated by Django 3.2.9 on 2022-08-31 13:44

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0030_auto_20220831_1440'),
    ]

    operations = [
        migrations.CreateModel(
            name='EventUser',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('event', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='event', to='rog.event')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.AddField(
            model_name='event',
            name='user',
            field=models.ManyToManyField(related_name='even', through='rog.EventUser', to=settings.AUTH_USER_MODEL),
        ),
    ]
@ -1,18 +0,0 @@
# Generated by Django 3.2.9 on 2022-09-07 09:22

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0031_auto_20220831_2244'),
    ]

    operations = [
        migrations.AlterField(
            model_name='location',
            name='sub_loc_id',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Sub location id'),
        ),
    ]
@ -1,274 +0,0 @@
# Generated by Django 3.2.9 on 2024-07-24 05:31

import datetime
from django.conf import settings
import django.contrib.postgres.indexes
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import uuid


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0032_alter_location_sub_loc_id'),
    ]

    operations = [
        migrations.CreateModel(
            name='Category',
            fields=[
                ('category_name', models.CharField(max_length=255, primary_key=True, serialize=False)),
                ('category_number', models.IntegerField(unique=True)),
                ('duration', models.DurationField(default=datetime.timedelta(seconds=18000))),
                ('num_of_member', models.IntegerField(default=1)),
                ('family', models.BooleanField(default=False)),
                ('female', models.BooleanField(default=False)),
            ],
        ),
        migrations.CreateModel(
            name='CheckinImages',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('checkinimage', models.FileField(blank=True, null=True, upload_to='checkin/%y%m%d')),
                ('checkintime', models.DateTimeField(verbose_name='Goal time')),
                ('team_name', models.CharField(max_length=255, verbose_name='Team name')),
                ('event_code', models.CharField(max_length=255, verbose_name='event code')),
                ('cp_number', models.IntegerField(verbose_name='CP numner')),
            ],
        ),
        migrations.CreateModel(
            name='Entry',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.DateTimeField()),
            ],
        ),
        migrations.CreateModel(
            name='EntryMember',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('is_temporary', models.BooleanField(default=False)),
            ],
        ),
        migrations.CreateModel(
            name='GoalImages',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('goalimage', models.FileField(blank=True, null=True, upload_to='goals/%y%m%d')),
                ('goaltime', models.DateTimeField(verbose_name='Goal time')),
                ('team_name', models.CharField(max_length=255, verbose_name='Team name')),
                ('event_code', models.CharField(max_length=255, verbose_name='event code')),
                ('cp_number', models.IntegerField(verbose_name='CP numner')),
            ],
        ),
        migrations.CreateModel(
            name='Member',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('is_temporary', models.BooleanField(default=False)),
            ],
        ),
        migrations.CreateModel(
            name='NewEvent',
            fields=[
                ('event_name', models.CharField(max_length=255, primary_key=True, serialize=False)),
                ('start_datetime', models.DateTimeField(default=django.utils.timezone.now)),
                ('end_datetime', models.DateTimeField()),
            ],
        ),
        migrations.CreateModel(
            name='Team',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('zekken_number', models.CharField(max_length=255, unique=True)),
                ('team_name', models.CharField(max_length=255)),
            ],
        ),
        migrations.CreateModel(
            name='TempUser',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('email', models.EmailField(max_length=254, unique=True)),
                ('password', models.CharField(max_length=128)),
                ('is_rogaining', models.BooleanField(default=False)),
                ('zekken_number', models.CharField(blank=True, max_length=255, null=True)),
                ('event_code', models.CharField(blank=True, max_length=255, null=True)),
                ('team_name', models.CharField(blank=True, max_length=255, null=True)),
                ('group', models.CharField(max_length=255)),
                ('firstname', models.CharField(blank=True, max_length=255, null=True)),
                ('lastname', models.CharField(blank=True, max_length=255, null=True)),
                ('date_of_birth', models.DateField(blank=True, null=True)),
                ('female', models.BooleanField(default=False)),
                ('verification_code', models.UUIDField(default=uuid.uuid4, editable=False)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('expires_at', models.DateTimeField()),
            ],
        ),
        migrations.DeleteModel(
            name='JpnAdminPerf',
        ),
        migrations.DeleteModel(
            name='JpnSubPerf',
        ),
        migrations.AddField(
            model_name='customuser',
            name='date_of_birth',
            field=models.DateField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='customuser',
            name='event_code',
            field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Event Code'),
        ),
        migrations.AddField(
            model_name='customuser',
            name='female',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='customuser',
            name='firstname',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AddField(
            model_name='customuser',
            name='is_rogaining',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='customuser',
            name='lastname',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AddField(
            model_name='customuser',
            name='team_name',
            field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Team Name'),
        ),
        migrations.AddField(
            model_name='customuser',
            name='zekken_number',
            field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Zekken Number'),
        ),
        migrations.AlterField(
            model_name='customuser',
            name='email',
            field=models.CharField(max_length=255, unique=True, verbose_name='Email'),
        ),
        migrations.AlterField(
            model_name='location',
            name='category',
            field=models.CharField(blank=True, db_index=True, max_length=2048, null=True, verbose_name='Category'),
        ),
        migrations.AlterField(
            model_name='location',
            name='event_active',
            field=models.BooleanField(db_index=True, default=True, verbose_name='Is Event active'),
        ),
        migrations.AlterField(
            model_name='location',
            name='event_name',
            field=models.CharField(blank=True, db_index=True, max_length=2048, null=True, verbose_name='Event name'),
        ),
        migrations.AlterField(
            model_name='location',
            name='group',
            field=models.CharField(blank=True, db_index=True, max_length=2048, null=True, verbose_name='Group'),
        ),
        migrations.AlterField(
            model_name='location',
            name='location_id',
            field=models.IntegerField(blank=True, db_index=True, null=True, verbose_name='Location id'),
|
|
||||||
),
|
|
||||||
migrations.AlterField(
|
|
||||||
model_name='templocation',
|
|
||||||
name='sub_loc_id',
|
|
||||||
field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Sub location id'),
|
|
||||||
),
|
|
||||||
migrations.AddIndex(
|
|
||||||
model_name='location',
|
|
||||||
index=django.contrib.postgres.indexes.GistIndex(fields=['geom'], name='rog_locatio_geom_4793cc_gist'),
|
|
||||||
),
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='team',
|
|
||||||
name='category',
|
|
||||||
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='rog.category'),
|
|
||||||
),
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='team',
|
|
||||||
name='owner',
|
|
||||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='owned_teams', to=settings.AUTH_USER_MODEL),
|
|
||||||
),
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='member',
|
|
||||||
name='team',
|
|
||||||
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rog.team'),
|
|
||||||
),
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='member',
|
|
||||||
name='user',
|
|
||||||
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
|
|
||||||
),
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='goalimages',
|
|
||||||
name='user',
|
|
||||||
field=models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL),
|
|
||||||
),
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='entrymember',
|
|
||||||
name='entry',
|
|
||||||
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rog.entry'),
|
|
||||||
),
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='entrymember',
|
|
||||||
name='member',
|
|
||||||
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rog.member'),
|
|
||||||
),
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='entry',
|
|
||||||
name='category',
|
|
||||||
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rog.category'),
|
|
||||||
),
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='entry',
|
|
||||||
name='event',
|
|
||||||
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rog.newevent'),
|
|
||||||
),
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='entry',
|
|
||||||
name='owner',
|
|
||||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
|
|
||||||
),
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='entry',
|
|
||||||
name='team',
|
|
||||||
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rog.team'),
|
|
||||||
),
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='checkinimages',
|
|
||||||
name='user',
|
|
||||||
field=models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL),
|
|
||||||
),
|
|
||||||
migrations.AlterUniqueTogether(
|
|
||||||
name='category',
|
|
||||||
unique_together={('category_name', 'category_number')},
|
|
||||||
),
|
|
||||||
migrations.AlterUniqueTogether(
|
|
||||||
name='team',
|
|
||||||
unique_together={('zekken_number', 'category')},
|
|
||||||
),
|
|
||||||
migrations.AlterUniqueTogether(
|
|
||||||
name='member',
|
|
||||||
unique_together={('team', 'user')},
|
|
||||||
),
|
|
||||||
migrations.AlterUniqueTogether(
|
|
||||||
name='entrymember',
|
|
||||||
unique_together={('entry', 'member')},
|
|
||||||
),
|
|
||||||
migrations.AlterUniqueTogether(
|
|
||||||
name='entry',
|
|
||||||
unique_together={('team', 'event', 'date', 'owner')},
|
|
||||||
),
|
|
||||||
]
|
|
||||||
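For orientation only, a minimal sketch (not the project's actual rog/models.py): the AlterUniqueTogether operations above are the kind of thing Django generates for Meta options roughly like these, assuming the models live in an installed app and the referenced models/fields exist as in this migration.

from django.conf import settings
from django.db import models


class Team(models.Model):
    # Mirrors the CreateModel/AddField operations above.
    zekken_number = models.CharField(max_length=255, unique=True)
    team_name = models.CharField(max_length=255)
    category = models.ForeignKey('rog.Category', on_delete=models.CASCADE, default=1)

    class Meta:
        # One team per bib number within a category.
        unique_together = {('zekken_number', 'category')}


class Entry(models.Model):
    date = models.DateTimeField()
    team = models.ForeignKey(Team, on_delete=models.CASCADE)
    event = models.ForeignKey('rog.NewEvent', on_delete=models.CASCADE)
    owner = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, blank=True, null=True)

    class Meta:
        # A team can enter a given event on a given date only once per owner.
        unique_together = {('team', 'event', 'date', 'owner')}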
@ -1,34 +0,0 @@
# Generated by Django 3.2.9 on 2024-07-24 06:22

import datetime
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0033_auto_20240724_1431'),
    ]

    operations = [
        migrations.AlterField(
            model_name='category',
            name='category_number',
            field=models.IntegerField(default=0),
        ),
        migrations.CreateModel(
            name='NewCategory',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('category_name', models.CharField(max_length=255, unique=True)),
                ('category_number', models.IntegerField(default=0)),
                ('duration', models.DurationField(default=datetime.timedelta(seconds=18000))),
                ('num_of_member', models.IntegerField(default=1)),
                ('family', models.BooleanField(default=False)),
                ('female', models.BooleanField(default=False)),
            ],
            options={
                'unique_together': {('category_name', 'category_number')},
            },
        ),
    ]
@ -1,29 +0,0 @@
# Generated by Django 3.2.9 on 2024-07-24 06:29

from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0034_auto_20240724_1522'),
    ]

    operations = [
        migrations.CreateModel(
            name='NewEvent2',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('event_name', models.CharField(max_length=255, unique=True)),
                ('start_datetime', models.DateTimeField(default=django.utils.timezone.now)),
                ('end_datetime', models.DateTimeField()),
            ],
        ),
        migrations.AlterField(
            model_name='entry',
            name='event',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rog.newevent2'),
        ),
    ]
@ -1,20 +0,0 @@
# Generated by Django 3.2.9 on 2024-07-24 06:58

from django.db import migrations, models
import django.db.models.deletion
import rog.models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0035_auto_20240724_1529'),
    ]

    operations = [
        migrations.AlterField(
            model_name='team',
            name='category',
            field=models.ForeignKey(default=rog.models.get_default_category, on_delete=django.db.models.deletion.SET_DEFAULT, to='rog.newcategory'),
        ),
    ]
@ -1,19 +0,0 @@
# Generated by Django 3.2.9 on 2024-07-24 15:20

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0036_alter_team_category'),
    ]

    operations = [
        migrations.AlterField(
            model_name='member',
            name='team',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='members', to='rog.team'),
        ),
    ]
@ -1,19 +0,0 @@
# Generated by Django 3.2.9 on 2024-07-25 01:21

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0037_alter_member_team'),
    ]

    operations = [
        migrations.AlterField(
            model_name='entry',
            name='category',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rog.newcategory'),
        ),
    ]
@ -1,29 +0,0 @@
# Generated by Django 3.2.9 on 2024-07-26 06:08

from django.db import migrations, models
import django.utils.timezone


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0038_alter_entry_category'),
    ]

    operations = [
        migrations.AddField(
            model_name='customuser',
            name='date_joined',
            field=models.DateTimeField(default=django.utils.timezone.now),
        ),
        migrations.AlterField(
            model_name='customuser',
            name='email',
            field=models.EmailField(max_length=254, unique=True),
        ),
        migrations.AlterField(
            model_name='customuser',
            name='group',
            field=models.CharField(blank=True, max_length=255),
        ),
    ]
@ -1,33 +0,0 @@
# Generated by Django 3.2.9 on 2024-08-01 08:29

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0039_auto_20240726_1508'),
    ]

    operations = [
        migrations.AddField(
            model_name='member',
            name='date_of_birth',
            field=models.DateField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='member',
            name='female',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='member',
            name='firstname',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AddField(
            model_name='member',
            name='lastname',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
    ]
@ -1,37 +0,0 @@
# Generated by Django 3.2.9 on 2024-08-02 15:11

import django.contrib.gis.db.models.fields
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0040_auto_20240801_1729'),
    ]

    operations = [
        migrations.CreateModel(
            name='JpnSubPerf',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('geom', django.contrib.gis.db.models.fields.MultiPolygonField(blank=True, null=True, srid=4326)),
                ('adm0_en', models.CharField(blank=True, max_length=254, null=True)),
                ('adm0_ja', models.CharField(blank=True, max_length=254, null=True)),
                ('adm0_pcode', models.CharField(blank=True, max_length=254, null=True)),
                ('adm1_en', models.CharField(blank=True, max_length=254, null=True)),
                ('adm1_ja', models.CharField(blank=True, max_length=254, null=True)),
                ('adm1_pcode', models.CharField(blank=True, max_length=254, null=True)),
                ('adm2_ja', models.CharField(blank=True, max_length=254, null=True)),
                ('adm2_en', models.CharField(blank=True, max_length=254, null=True)),
                ('adm2_pcode', models.CharField(blank=True, max_length=254, null=True)),
                ('name_modified', models.CharField(blank=True, max_length=254, null=True)),
                ('area_name', models.CharField(blank=True, max_length=254, null=True)),
                ('list_order', models.IntegerField(default=0)),
            ],
            options={
                'db_table': 'jpn_sub_perf',
                'managed': False,
            },
        ),
    ]
0  rog/migrations_backup/__init__.py  Normal file → Executable file
143  rog/models.py  Normal file → Executable file
@ -422,6 +422,7 @@ class Entry(models.Model):
    date = models.DateTimeField()
    owner = models.ForeignKey(CustomUser, on_delete=models.CASCADE, blank=True, null=True)  # Akira 2024-7-24
    zekken_number = models.IntegerField(default=0)
    zekken_label = models.CharField(max_length=255, blank=True, null=True)
    is_active = models.BooleanField(default=True)  # newly added
    hasParticipated = models.BooleanField(default=False)  # newly added
    hasGoaled = models.BooleanField(default=False)  # newly added
@ -1402,3 +1403,145 @@ def publish_data(sender, instance, created, **kwargs):
            insertUserUploadUser(instance.name, fields)
        except Exception as e:
            print('######## user csv file ##########', e)


# Kept for backward compatibility.
# Added alongside the existing models => should be renamed to something better suited to a check-in (pass-through) record.
class GpsLog(models.Model):
    """
    Model that manages GPS check-in information.
    Maps to the gps_information table.
    """
    serial_number = models.IntegerField(null=False)

    # Newly added
    entry = models.ForeignKey(Entry, on_delete=models.CASCADE, related_name='checkpoints')

    # To be migrated to Entry
    zekken_number = models.TextField(null=False)
    event_code = models.TextField(null=False)

    cp_number = models.TextField(null=True, blank=True)
    image_address = models.TextField(null=True, blank=True)

    # Newly added
    checkin_time = models.DateTimeField(auto_now_add=True)

    goal_time = models.TextField(null=True, blank=True)
    late_point = models.IntegerField(null=True, blank=True)
    create_at = models.DateTimeField(null=True, blank=True)
    create_user = models.TextField(null=True, blank=True)
    update_at = models.DateTimeField(null=True, blank=True)
    update_user = models.TextField(null=True, blank=True)
    buy_flag = models.BooleanField(null=True, blank=True)
    minus_photo_flag = models.BooleanField(null=True, blank=True)
    colabo_company_memo = models.TextField(null=False, default='')

    # Newly added
    is_service_checked = models.BooleanField(default=False)

    # Added for goal records
    score = models.IntegerField(default=0, null=True, blank=True)
    scoreboard_url = models.URLField(blank=True, null=True)

    class Meta:
        db_table = 'gps_information'
        # Composite key constraint
        unique_together = [['serial_number', 'zekken_number', 'event_code', 'colabo_company_memo']]
        # Indexes (as needed)
        indexes = [
            models.Index(fields=['zekken_number', 'event_code'], name='gpslog_zekken_event_idx'),
            models.Index(fields=['create_at'], name='gpslog_create_at_idx'),
        ]

    def __str__(self):
        return f"{self.event_code}-{self.zekken_number}-{self.serial_number}"

    @classmethod
    def record_start(cls, entry):
        """
        Record a team's start information.
        Functionality previously handled by the TeamStart model.
        """
        return cls.objects.create(
            serial_number=0,  # special value marking a start log
            entry=entry,
            zekken_number=entry.zekken_number,
            event_code=entry.event.event_name,
            cp_number="START",
            create_at=timezone.now(),
            update_at=timezone.now(),
            buy_flag=False,
            colabo_company_memo=""
        )

    @classmethod
    def record_goal(cls, entry, goal_time=None, image_url=None, score=0, scoreboard_url=None):
        """
        Record a team's goal information.
        Functionality previously handled by the TeamGoal model.
        """
        if goal_time is None:
            goal_time = timezone.now()

        return cls.objects.create(
            serial_number=9999,  # special value marking a goal log
            entry=entry,
            zekken_number=entry.zekken_number,
            event_code=entry.event.event_name,
            cp_number="GOAL",
            image_address=image_url,
            create_at=goal_time,
            update_at=timezone.now(),
            goal_time=goal_time.strftime('%Y-%m-%d %H:%M:%S'),
            buy_flag=False,
            score=score,
            scoreboard_url=scoreboard_url,
            colabo_company_memo=""
        )

    def is_start_record(self):
        """Return True if this record is a start record."""
        return self.cp_number == "START" and self.serial_number == 0

    def is_goal_record(self):
        """Return True if this record is a goal record."""
        return self.cp_number == "GOAL" and self.serial_number == 9999

    @property
    def start_time(self):
        """Return the start time (for compatibility with TeamStart)."""
        return self.create_at or self.checkin_time

    @property
    def goal_datetime(self):
        """Return the goal time as a datetime (for compatibility with TeamGoal)."""
        if self.is_goal_record() and self.create_at:
            return self.create_at
        return None


class Waypoint(models.Model):
    entry = models.ForeignKey('Entry', on_delete=models.CASCADE, related_name='waypoints')
    latitude = models.FloatField()
    longitude = models.FloatField()
    altitude = models.FloatField(null=True, blank=True)
    accuracy = models.FloatField(null=True, blank=True)
    speed = models.FloatField(null=True, blank=True)
    recorded_at = models.DateTimeField()
    created_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        ordering = ['recorded_at']
        indexes = [
            models.Index(fields=['entry', 'recorded_at']),
        ]

    def __str__(self):
        return f"{self.entry.team_name} - {self.recorded_at.strftime('%Y-%m-%d %H:%M:%S')}"
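Usage sketch for the new GpsLog and Waypoint models (assumptions: run inside this project's Django shell, an Entry for the team already exists, and the event name, bib number, and URLs below are placeholder values). It shows how the TeamStart/TeamGoal replacements above are meant to be called.

from django.utils import timezone
from rog.models import Entry, GpsLog, Waypoint

# Look up an existing entry (placeholder values).
entry = Entry.objects.get(zekken_number=101, event__event_name="gifu-2024")

# Start log: serial_number=0, cp_number="START".
start_log = GpsLog.record_start(entry)

# Goal log: serial_number=9999, cp_number="GOAL", with score and scoreboard URL.
goal_log = GpsLog.record_goal(
    entry,
    score=120,
    image_url="https://example.com/goal.jpg",
    scoreboard_url="https://example.com/scoreboard.pdf",
)

# Raw GPS track point tied to the same entry.
Waypoint.objects.create(entry=entry, latitude=35.42, longitude=136.76,
                        recorded_at=timezone.now())

print(start_log.is_start_record(), goal_log.is_goal_record(), goal_log.goal_datetime)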
0  rog/permissions.py  Normal file → Executable file
0  rog/postgres_views.sql  Normal file → Executable file
3  rog/serializers.py  Normal file → Executable file
@ -441,7 +441,8 @@ class EntrySerializer(serializers.ModelSerializer):
    event = serializers.PrimaryKeyRelatedField(queryset=NewEvent2.objects.all())
    category = serializers.PrimaryKeyRelatedField(queryset=NewCategory.objects.all())
    owner = serializers.PrimaryKeyRelatedField(read_only=True)
    #date = serializers.DateTimeField(input_formats=['%Y-%m-%d'])
    date = serializers.DateField(required=False, allow_null=True)  # use DateField instead of DateTimeField
    zekken_number = serializers.IntegerField()

    #date = serializers.DateTimeField(default_timezone=timezone.get_current_timezone())
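A sketch of what this change means for callers (assumed DRF usage; the field set and primary-key values below are illustrative, not taken from the actual views): with date declared as an optional DateField, a bare "YYYY-MM-DD" string validates and the field may be omitted entirely.

from rog.serializers import EntrySerializer

payload = {
    "event": 1,            # NewEvent2 primary key (hypothetical)
    "category": 1,         # NewCategory primary key (hypothetical)
    "zekken_number": 101,
    "date": "2024-08-01",  # plain date string; no time component required anymore
}

serializer = EntrySerializer(data=payload)
print(serializer.is_valid(), serializer.errors)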
0  rog/services/__init__.py  Normal file → Executable file
0  rog/services/csv_processor.py  Normal file → Executable file
0  rog/templates/.DS_Store (vendored)  Normal file → Executable file
0  rog/templates/activation-template.html  Normal file → Executable file
0  rog/templates/activation_success.html  Normal file → Executable file
0  rog/templates/admin/.DS_Store (vendored)  Normal file → Executable file
0  rog/templates/admin/csv_upload.html  Normal file → Executable file
0  rog/templates/admin/entry/change_list.html  Normal file → Executable file
0  rog/templates/admin/entry/upload_csv.html  Normal file → Executable file
0  rog/templates/admin/gifuroge_register_changelist.html  Normal file → Executable file
0  rog/templates/admin/load_users.html  Normal file → Executable file
0  rog/templates/admin/rog/.DS_Store (vendored)  Normal file → Executable file
0  rog/templates/admin/rog/customuser/change_list.html  Normal file → Executable file
0  rog/templates/admin/rog/gifurogeregister/change_list.html  Normal file → Executable file
0  rog/templates/admin/rog/gifurogeregister/upload-csv.html  Normal file → Executable file
0  rog/templates/email/invitation_and_verification_email.txt  Normal file → Executable file
0  rog/templates/email/invitation_existing_email.txt  Normal file → Executable file
0  rog/templates/email/invitation_new_email.txt  Normal file → Executable file
0  rog/templates/email/reset_password_email.txt  Normal file → Executable file
0  rog/templates/email/verification_email.txt  Normal file → Executable file
0  rog/templates/password-reset-component.tsx  Normal file → Executable file
0  rog/templates/password_reset.html  Normal file → Executable file
0  rog/templates/password_reset_invalid.html  Normal file → Executable file
Some files were not shown because too many files have changed in this diff.