Fix migration issues

This commit is contained in:
2025-08-25 14:28:30 +09:00
parent 961c577ec8
commit 6886ba15c8
7 changed files with 403 additions and 28 deletions

View File

@ -24,7 +24,7 @@ ENV CPLUS_INCLUDE_PATH=/usr/include/gdal
ENV C_INCLUDE_PATH=/usr/include/gdal ENV C_INCLUDE_PATH=/usr/include/gdal
RUN apt-get update \ RUN apt-get update \
&& apt-get -y install netcat gcc postgresql \ && apt-get -y install netcat gcc postgresql curl \
&& apt-get clean && apt-get clean
RUN apt-get update \ RUN apt-get update \

View File

@ -0,0 +1,174 @@
-- Manual-creation SQL for the rog_location2025 table (run directly on a deployment target
-- when Django migrations cannot be applied).
-- Verify that the required extensions are enabled before executing:
-- CREATE EXTENSION IF NOT EXISTS postgis;
-- Drop an existing table first if necessary (left commented out on purpose):
-- DROP TABLE IF EXISTS rog_location2025;
-- Create the rog_location2025 table: 2025-edition checkpoint (CP) master, one row per CP per event.
CREATE TABLE IF NOT EXISTS rog_location2025 (
id BIGSERIAL PRIMARY KEY,
cp_number INTEGER NOT NULL,  -- CP number; unique together with event_id (constraint added further down)
event_id INTEGER NOT NULL,  -- references rog_newevent2(id); FK added conditionally further down
cp_name VARCHAR(255) NOT NULL,
latitude DOUBLE PRECISION,  -- kept alongside "location" for consumers that cannot read PostGIS geometry
longitude DOUBLE PRECISION,
location GEOMETRY(POINT, 4326),  -- WGS84 point (PostGIS); indexed with GiST further down
cp_point INTEGER NOT NULL DEFAULT 10,  -- base score for checking in at this CP
photo_point INTEGER NOT NULL DEFAULT 0,
buy_point INTEGER NOT NULL DEFAULT 0,
checkin_radius DOUBLE PRECISION NOT NULL DEFAULT 15.0,  -- check-in radius in meters
auto_checkin BOOLEAN NOT NULL DEFAULT false,
shop_closed BOOLEAN NOT NULL DEFAULT false,  -- temporarily closed
shop_shutdown BOOLEAN NOT NULL DEFAULT false,  -- permanently out of business
opening_hours TEXT,
address VARCHAR(512),
phone VARCHAR(32),
website VARCHAR(200),
description TEXT,
is_active BOOLEAN NOT NULL DEFAULT true,
sort_order INTEGER NOT NULL DEFAULT 0,
csv_source_file VARCHAR(255),  -- provenance: name of the CSV file this row was imported from
csv_upload_date TIMESTAMP WITH TIME ZONE,
csv_upload_user_id BIGINT,  -- references rog_customuser(id); FK added conditionally further down
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT CURRENT_TIMESTAMP,  -- maintained by the trigger defined further down
created_by_id BIGINT,
updated_by_id BIGINT
);
-- Indexes. All names carry the full table-name prefix; the original mixed
-- "rog_location2025_*" and bare "location2025_*" names in one file.
-- NOTE(review): the single-column event_id index is largely redundant with the
-- (event_id, cp_number) composite below; both are kept so re-running the
-- script against an existing database stays a no-op.
CREATE INDEX IF NOT EXISTS rog_location2025_cp_number_idx ON rog_location2025 (cp_number);
CREATE INDEX IF NOT EXISTS rog_location2025_event_id_idx ON rog_location2025 (event_id);
CREATE INDEX IF NOT EXISTS rog_location2025_is_active_idx ON rog_location2025 (is_active);
CREATE INDEX IF NOT EXISTS rog_location2025_event_cp_idx ON rog_location2025 (event_id, cp_number);
CREATE INDEX IF NOT EXISTS rog_location2025_event_active_idx ON rog_location2025 (event_id, is_active);
CREATE INDEX IF NOT EXISTS rog_location2025_csv_date_idx ON rog_location2025 (csv_upload_date);
-- Spatial index on the geometry column (requires PostGIS).
CREATE INDEX IF NOT EXISTS rog_location2025_location_gist_idx ON rog_location2025 USING GIST (location);
-- Add foreign-key and unique constraints, but only when the referenced tables
-- exist, so the script also runs on partially provisioned databases.
-- Each existence probe filters on table_name AS WELL AS constraint_name: the
-- original matched the constraint name alone, so a same-named constraint on
-- any other table would have silently skipped the ALTER.
DO $$
BEGIN
    -- FK: event_id -> rog_newevent2(id)
    IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'rog_newevent2') THEN
        IF NOT EXISTS (
            SELECT 1 FROM information_schema.table_constraints
            WHERE table_name = 'rog_location2025'
              AND constraint_name = 'rog_location2025_event_id_fkey'
        ) THEN
            ALTER TABLE rog_location2025
                ADD CONSTRAINT rog_location2025_event_id_fkey
                FOREIGN KEY (event_id) REFERENCES rog_newevent2(id) DEFERRABLE INITIALLY DEFERRED;
        END IF;
    END IF;

    -- FKs to rog_customuser(id): a single shared table-existence guard instead
    -- of repeating the identical probe three times.
    IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'rog_customuser') THEN
        -- FK: csv_upload_user_id (user who imported the CSV)
        IF NOT EXISTS (
            SELECT 1 FROM information_schema.table_constraints
            WHERE table_name = 'rog_location2025'
              AND constraint_name = 'rog_location2025_csv_upload_user_id_fkey'
        ) THEN
            ALTER TABLE rog_location2025
                ADD CONSTRAINT rog_location2025_csv_upload_user_id_fkey
                FOREIGN KEY (csv_upload_user_id) REFERENCES rog_customuser(id) DEFERRABLE INITIALLY DEFERRED;
        END IF;

        -- FK: created_by_id
        IF NOT EXISTS (
            SELECT 1 FROM information_schema.table_constraints
            WHERE table_name = 'rog_location2025'
              AND constraint_name = 'rog_location2025_created_by_id_fkey'
        ) THEN
            ALTER TABLE rog_location2025
                ADD CONSTRAINT rog_location2025_created_by_id_fkey
                FOREIGN KEY (created_by_id) REFERENCES rog_customuser(id) DEFERRABLE INITIALLY DEFERRED;
        END IF;

        -- FK: updated_by_id
        IF NOT EXISTS (
            SELECT 1 FROM information_schema.table_constraints
            WHERE table_name = 'rog_location2025'
              AND constraint_name = 'rog_location2025_updated_by_id_fkey'
        ) THEN
            ALTER TABLE rog_location2025
                ADD CONSTRAINT rog_location2025_updated_by_id_fkey
                FOREIGN KEY (updated_by_id) REFERENCES rog_customuser(id) DEFERRABLE INITIALLY DEFERRED;
        END IF;
    END IF;

    -- Unique: exactly one row per (cp_number, event_id) pair.
    IF NOT EXISTS (
        SELECT 1 FROM information_schema.table_constraints
        WHERE table_name = 'rog_location2025'
          AND constraint_name = 'rog_location2025_cp_number_event_id_unique'
    ) THEN
        ALTER TABLE rog_location2025
            ADD CONSTRAINT rog_location2025_cp_number_event_id_unique
            UNIQUE (cp_number, event_id);
    END IF;
END $$;
-- Trigger function that stamps updated_at with the current time on every UPDATE.
CREATE OR REPLACE FUNCTION update_rog_location2025_updated_at()
RETURNS TRIGGER AS $$
BEGIN
NEW.updated_at = CURRENT_TIMESTAMP;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- Recreate the trigger idempotently: DROP first so re-running this script is safe.
DROP TRIGGER IF EXISTS rog_location2025_updated_at_trigger ON rog_location2025;
CREATE TRIGGER rog_location2025_updated_at_trigger
BEFORE UPDATE ON rog_location2025
FOR EACH ROW
EXECUTE FUNCTION update_rog_location2025_updated_at();
-- Post-creation sanity check: confirm the table exists and show its owner.
SELECT
schemaname,
tablename,
tableowner
FROM pg_tables
WHERE tablename = 'rog_location2025';
-- Column-level check: dump the resulting column definitions for visual review.
SELECT
column_name,
data_type,
is_nullable,
column_default
FROM information_schema.columns
WHERE table_name = 'rog_location2025'
ORDER BY ordinal_position;
-- Catalog comments for the table and its columns (Japanese labels, matching the
-- admin UI); the literals are stored in the database, so they are left untranslated.
COMMENT ON TABLE rog_location2025 IS '2025年版チェックポイント管理テーブル';
COMMENT ON COLUMN rog_location2025.cp_number IS 'CP番号';
COMMENT ON COLUMN rog_location2025.event_id IS 'イベントID';
COMMENT ON COLUMN rog_location2025.cp_name IS 'CP名';
COMMENT ON COLUMN rog_location2025.latitude IS '緯度';
COMMENT ON COLUMN rog_location2025.longitude IS '経度';
COMMENT ON COLUMN rog_location2025.location IS '位置(PostGIS Point)';
COMMENT ON COLUMN rog_location2025.cp_point IS 'チェックポイント得点';
COMMENT ON COLUMN rog_location2025.photo_point IS '写真ポイント';
COMMENT ON COLUMN rog_location2025.buy_point IS '買い物ポイント';
COMMENT ON COLUMN rog_location2025.checkin_radius IS 'チェックイン範囲(m)';
COMMENT ON COLUMN rog_location2025.auto_checkin IS '自動チェックイン';
COMMENT ON COLUMN rog_location2025.shop_closed IS '休業中';
COMMENT ON COLUMN rog_location2025.shop_shutdown IS '閉業';
COMMENT ON COLUMN rog_location2025.opening_hours IS '営業時間';
COMMENT ON COLUMN rog_location2025.address IS '住所';
COMMENT ON COLUMN rog_location2025.phone IS '電話番号';
COMMENT ON COLUMN rog_location2025.website IS 'ウェブサイト';
COMMENT ON COLUMN rog_location2025.description IS '説明';
COMMENT ON COLUMN rog_location2025.is_active IS '有効';
COMMENT ON COLUMN rog_location2025.sort_order IS '表示順';
COMMENT ON COLUMN rog_location2025.csv_source_file IS 'CSVファイル名';
COMMENT ON COLUMN rog_location2025.csv_upload_date IS 'CSVアップロード日時';
COMMENT ON COLUMN rog_location2025.csv_upload_user_id IS 'CSVアップロードユーザーID';
COMMENT ON COLUMN rog_location2025.created_at IS '作成日時';
COMMENT ON COLUMN rog_location2025.updated_at IS '更新日時';
COMMENT ON COLUMN rog_location2025.created_by_id IS '作成者ID';
COMMENT ON COLUMN rog_location2025.updated_by_id IS '更新者ID';

View File

@ -1,5 +1,3 @@
version: "3.9"
services: services:
postgres-db: postgres-db:
image: kartoza/postgis:12.0 image: kartoza/postgis:12.0
@ -9,11 +7,25 @@ services:
- postgres_data:/var/lib/postgresql - postgres_data:/var/lib/postgresql
- ./custom-postgresql.conf:/etc/postgresql/12/main/postgresql.conf - ./custom-postgresql.conf:/etc/postgresql/12/main/postgresql.conf
- ./rogaining.sql:/sql/rogaining.sql - ./rogaining.sql:/sql/rogaining.sql
- ./sqls:/sqls
- ./create_location2025_table.sql:/sql/create_location2025_table.sql
environment: environment:
- POSTGRES_USER=${POSTGRES_USER} - POSTGRES_USER=${POSTGRES_USER}
- POSTGRES_PASS=${POSTGRES_PASS} - POSTGRES_PASS=${POSTGRES_PASS}
- POSTGRES_DBNAME=${POSTGRES_DBNAME} - POSTGRES_DBNAME=${POSTGRES_DBNAME}
- POSTGRES_MAX_CONNECTIONS=600 - POSTGRES_MAX_CONNECTIONS=600
deploy:
resources:
limits:
memory: 2G
reservations:
memory: 1G
healthcheck:
test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DBNAME}"]
interval: 30s
timeout: 10s
retries: 5
start_period: 30s
restart: "on-failure" restart: "on-failure"
networks: networks:
- rog-api - rog-api
@ -22,16 +34,23 @@ services:
build: build:
context: . context: .
dockerfile: Dockerfile.gdal dockerfile: Dockerfile.gdal
command: bash -c "./wait-for-postgres.sh postgres-db && python manage.py migrate && gunicorn config.wsgi:application --bind 0.0.0.0:8000" command: bash -c "./wait-for-postgres.sh postgres-db && gunicorn config.wsgi:application --bind 0.0.0.0:8000"
volumes: volumes:
- .:/app - .:/app
- static_volume:/app/static - static_volume:/app/static
- media_volume:/app/media - media_volume:/app/media
env_file: env_file:
- .env - .env
healthcheck:
test: ["CMD-SHELL", "python -c \"import urllib.request; urllib.request.urlopen('http://localhost:8000')\" || exit 1"]
interval: 30s
timeout: 10s
retries: 3
start_period: 60s
restart: "on-failure" restart: "on-failure"
depends_on: depends_on:
- postgres-db postgres-db:
condition: service_healthy
networks: networks:
- rog-api - rog-api

60
docker-compose-simple.yml Normal file
View File

@ -0,0 +1,60 @@
# docker-compose-simple.yml — simplified stack for one-shot local debugging:
# same services as the main compose file but with restart disabled and no
# healthchecks, so failures surface immediately instead of looping.
# (Indentation reconstructed; the source rendering had stripped it.)
services:
  postgres-db:
    image: kartoza/postgis:12.0
    ports:
      - 5432:5432
    volumes:
      - postgres_data:/var/lib/postgresql
      - ./custom-postgresql.conf:/etc/postgresql/12/main/postgresql.conf
      - ./rogaining.sql:/sql/rogaining.sql
      - ./sqls:/sqls
      - ./create_location2025_table.sql:/sql/create_location2025_table.sql
    environment:
      - POSTGRES_USER=${POSTGRES_USER}
      - POSTGRES_PASS=${POSTGRES_PASS}
      - POSTGRES_DBNAME=${POSTGRES_DBNAME}
      - POSTGRES_MAX_CONNECTIONS=600
    # Quoted on purpose: a bare `no` is the YAML boolean false.
    restart: "no"
    networks:
      - rog-api

  app:
    build:
      context: .
      dockerfile: Dockerfile.gdal
    # Wait for the database, apply Django migrations, then serve via Gunicorn.
    command: bash -c "./wait-for-postgres.sh postgres-db && python manage.py migrate && gunicorn config.wsgi:application --bind 0.0.0.0:8000"
    volumes:
      - .:/app
      - static_volume:/app/static
      - media_volume:/app/media
    env_file:
      - .env
    restart: "no"
    depends_on:
      - postgres-db
    networks:
      - rog-api

  nginx:
    image: nginx:1.19
    volumes:
      - ./nginx.conf:/etc/nginx/nginx.conf
      - static_volume:/app/static
      - media_volume:/app/media
      - ./supervisor/html:/usr/share/nginx/html
    ports:
      - 8100:80
    restart: "no"
    depends_on:
      - app
    networks:
      - rog-api

networks:
  rog-api:
    driver: bridge

volumes:
  postgres_data:
  static_volume:
  media_volume:

View File

@ -89,6 +89,19 @@ def check_database_connectivity():
source_cursor.execute("SELECT COUNT(*) FROM gps_information") source_cursor.execute("SELECT COUNT(*) FROM gps_information")
source_count = source_cursor.fetchone()[0] source_count = source_cursor.fetchone()[0]
print(f"✅ gifuroge DB接続成功: gps_information {source_count}") print(f"✅ gifuroge DB接続成功: gps_information {source_count}")
# テーブル構造確認
source_cursor.execute("""
SELECT column_name, data_type
FROM information_schema.columns
WHERE table_name = 'gps_information'
ORDER BY ordinal_position
""")
columns = source_cursor.fetchall()
print("📋 gps_informationテーブル構造:")
for col_name, col_type in columns:
print(f" {col_name}: {col_type}")
source_conn.close() source_conn.close()
# rogdb DB接続確認 # rogdb DB接続確認
@ -97,6 +110,19 @@ def check_database_connectivity():
target_cursor.execute("SELECT COUNT(*) FROM rog_gpscheckin") target_cursor.execute("SELECT COUNT(*) FROM rog_gpscheckin")
target_count = target_cursor.fetchone()[0] target_count = target_cursor.fetchone()[0]
print(f"✅ rogdb DB接続成功: rog_gpscheckin {target_count}") print(f"✅ rogdb DB接続成功: rog_gpscheckin {target_count}")
# 移行先テーブル構造確認
target_cursor.execute("""
SELECT column_name, data_type
FROM information_schema.columns
WHERE table_name = 'rog_gpscheckin'
ORDER BY ordinal_position
""")
target_columns = target_cursor.fetchall()
print("📋 rog_gpscheckinテーブル構造:")
for col_name, col_type in target_columns:
print(f" {col_name}: {col_type}")
target_conn.close() target_conn.close()
return True return True
@ -225,17 +251,61 @@ def migrate_gps_data(source_cursor, target_cursor):
print("\n=== GPS記録データの移行 ===") print("\n=== GPS記録データの移行 ===")
try: try:
# GPS記録のみを取得(不正な写真記録データを除外) # 移行元テーブルの構造を確認
source_cursor.execute(""" source_cursor.execute("""
SELECT SELECT column_name
serial_number, team_name, cp_number, record_time, FROM information_schema.columns
goal_time, late_point, buy_flag, image_address, WHERE table_name = 'gps_information'
minus_photo_flag, create_user, update_user, ORDER BY ordinal_position
colabo_company_memo """)
source_columns = [row[0] for row in source_cursor.fetchall()]
print(f"📋 移行元カラム: {source_columns}")
# 移行先テーブルの構造を確認
target_cursor.execute("""
SELECT column_name
FROM information_schema.columns
WHERE table_name = 'rog_gpscheckin'
ORDER BY ordinal_position
""")
target_columns = [row[0] for row in target_cursor.fetchall()]
print(f"📋 移行先カラム: {target_columns}")
# 必要なカラムのマッピングを確認
column_mapping = {
'serial_number': 'serial_number',
'team_name': 'team_name' if 'team_name' in source_columns else None,
'zekken_number': 'zekken_number' if 'zekken_number' in source_columns else None,
'event_code': 'event_code' if 'event_code' in source_columns else None,
'cp_number': 'cp_number',
'record_time': 'create_at' if 'create_at' in source_columns else 'record_time',
'goal_time': 'goal_time',
'late_point': 'late_point',
'buy_flag': 'buy_flag',
'image_address': 'image_address',
'minus_photo_flag': 'minus_photo_flag',
'create_user': 'create_user',
'update_user': 'update_user',
'colabo_company_memo': 'colabo_company_memo'
}
# 実際に存在するカラムでクエリを構築
select_columns = []
for key, column in column_mapping.items():
if column and column in source_columns:
select_columns.append(f"{column} as {key}")
else:
select_columns.append(f"NULL as {key}")
query = f"""
SELECT {', '.join(select_columns)}
FROM gps_information FROM gps_information
WHERE serial_number < 20000 -- GPS専用データのみ WHERE serial_number < 20000 -- GPS専用データのみ
ORDER BY serial_number ORDER BY serial_number
""") """
print(f"📋 実行クエリ: {query}")
source_cursor.execute(query)
gps_records = source_cursor.fetchall() gps_records = source_cursor.fetchall()
print(f"移行対象GPS記録数: {len(gps_records)}") print(f"移行対象GPS記録数: {len(gps_records)}")
@ -245,10 +315,25 @@ def migrate_gps_data(source_cursor, target_cursor):
for record in gps_records: for record in gps_records:
try: try:
(serial_number, team_name, cp_number, record_time, # レコードを解析NULLの場合はデフォルト値を設定
goal_time, late_point, buy_flag, image_address, record_data = {}
minus_photo_flag, create_user, update_user, for i, key in enumerate(column_mapping.keys()):
colabo_company_memo) = record record_data[key] = record[i] if i < len(record) else None
serial_number = record_data['serial_number']
team_name = record_data['team_name'] or f"Team_{record_data['zekken_number'] or serial_number}"
zekken_number = record_data['zekken_number'] or serial_number
event_code = record_data['event_code'] or 'unknown'
cp_number = record_data['cp_number']
record_time = record_data['record_time']
goal_time = record_data['goal_time']
late_point = record_data['late_point']
buy_flag = record_data['buy_flag']
image_address = record_data['image_address']
minus_photo_flag = record_data['minus_photo_flag']
create_user = record_data['create_user']
update_user = record_data['update_user']
colabo_company_memo = record_data['colabo_company_memo']
# UTC時刻をJST時刻に変換 # UTC時刻をJST時刻に変換
record_time_jst = convert_utc_to_jst(record_time) record_time_jst = convert_utc_to_jst(record_time)
@ -265,18 +350,29 @@ def migrate_gps_data(source_cursor, target_cursor):
elif isinstance(goal_time, datetime): elif isinstance(goal_time, datetime):
goal_time_utc = convert_utc_to_jst(goal_time) goal_time_utc = convert_utc_to_jst(goal_time)
# rog_gpscheckinに挿入(マイグレーション用マーカー付き) # 移行先テーブルに合わせてINSERT文を動的構築
target_cursor.execute(""" insert_columns = ['serial_number', 'cp_number', 'record_time', 'goal_time',
INSERT INTO rog_gpscheckin 'late_point', 'buy_flag', 'image_address', 'minus_photo_flag',
(serial_number, team_name, cp_number, record_time, goal_time, 'create_user', 'update_user', 'comment']
late_point, buy_flag, image_address, minus_photo_flag, insert_values = [serial_number, cp_number, record_time_jst, goal_time_utc,
create_user, update_user, comment) late_point, buy_flag, image_address, minus_photo_flag,
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) create_user, update_user, f'migrated_from_gifuroge_team_{team_name}_zekken_{zekken_number}_event_{event_code}']
""", (
serial_number, team_name, cp_number, record_time_jst, goal_time_utc, # 移行先テーブルに存在するカラムのみを使用
late_point, buy_flag, image_address, minus_photo_flag, final_columns = []
create_user, update_user, 'migrated_from_gifuroge' final_values = []
)) for i, col in enumerate(insert_columns):
if col in target_columns:
final_columns.append(col)
final_values.append(insert_values[i])
placeholders = ', '.join(['%s'] * len(final_columns))
columns_str = ', '.join(final_columns)
target_cursor.execute(f"""
INSERT INTO rog_gpscheckin ({columns_str})
VALUES ({placeholders})
""", final_values)
migrated_count += 1 migrated_count += 1
@ -287,6 +383,11 @@ def migrate_gps_data(source_cursor, target_cursor):
except Exception as e: except Exception as e:
error_count += 1 error_count += 1
print(f" レコード移行エラー(serial_number={serial_number}): {e}") print(f" レコード移行エラー(serial_number={serial_number}): {e}")
# トランザクションエラーの場合はロールバックして続行
try:
target_cursor.connection.rollback()
except:
pass
if error_count > 100: # エラー上限 if error_count > 100: # エラー上限
print("❌ エラー数が上限を超えました。移行を中止します。") print("❌ エラー数が上限を超えました。移行を中止します。")
raise raise

View File

21
start-app.sh Normal file
View File

@ -0,0 +1,21 @@
#!/bin/bash
# Entry point for the app container: wait for the database, apply Django
# migrations with graceful fallbacks, then start Gunicorn.
set -e

echo "Waiting for PostgreSQL..."
./wait-for-postgres.sh postgres-db

echo "Checking migration status..."
python manage.py showmigrations

echo "Attempting migration with error handling..."
# 1) `migrate --check` exits 0 only when nothing needs applying.
# 2) Otherwise run a NORMAL migrate first. The original script skipped this
#    step and jumped straight to --fake-initial (then its else-branch ran
#    --fake-initial a second time), which could mark real schema changes as
#    applied without executing them.
# 3) Last resort: --fake-initial handles tables that already exist but whose
#    initial migration is unrecorded; never block startup on its failure.
if python manage.py migrate --check; then
    echo "No migrations needed"
elif python manage.py migrate; then
    echo "Migrations applied successfully"
else
    echo "Migration failed, trying fake-initial..."
    python manage.py migrate --fake-initial || echo "Fake-initial also failed, proceeding anyway..."
fi

echo "Starting Gunicorn..."
# exec replaces the shell so Gunicorn receives container signals (SIGTERM) directly.
exec gunicorn config.wsgi:application --bind 0.0.0.0:8000