Compare commits: exdb...9eb45d7e97
39 Commits
| SHA1 |
|---|
| 9eb45d7e97 |
| 2aaecb6b22 |
| 6e472cf634 |
| 106ab0e94e |
| 7f4d37d40c |
| 4a2a5de476 |
| 15815d5f06 |
| 768dd6e261 |
| 139c0987bc |
| ceb783d6bd |
| a714557eef |
| 586f341897 |
| 0c2dfec7dd |
| d6464c1369 |
| 338643b0d7 |
| e992e834da |
| c6969d7afa |
| 82d0e55945 |
| b872f377b2 |
| a6b816c9f2 |
| 2913a435c1 |
| 051916f9f6 |
| b8d7029965 |
| 6f0d8d15fd |
| 80ccaace3d |
| 95b787c819 |
| 3d195973fc |
| d851e7e4ad |
| 9d0d3ea102 |
| 37a253e63a |
| bc74b14cbc |
| 49b3ee7342 |
| 26e8e68dbd |
| 44ad30093c |
| bcfcceb068 |
| 9215ba8f9f |
| c0fb177d02 |
| 09e39987e2 |
| 6f79d9a4be |
4  .gitignore  (vendored)
@@ -157,6 +157,10 @@ dmypy.json
 # Cython debug symbols
 cython_debug/
 
+# migration files
+rog/migrations/
+
+
 # PyCharm
 # JetBrains specific template is maintainted in a separate JetBrains.gitignore that can
 # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
@@ -3,6 +3,7 @@ FROM osgeo/gdal:ubuntu-small-3.4.0
 
 WORKDIR /app
 
+
 LABEL maintainer="nouffer@gmail.com"
 LABEL description="Development image for the Rogaining JP"
 
@@ -38,8 +39,29 @@ RUN apt-get install -y python3
 RUN apt-get update && apt-get install -y \
     python3-pip
 
+# Update the base image and install packages
+RUN apt-get update && \
+    apt-get install -y \
+    libreoffice \
+    libreoffice-calc \
+    libreoffice-writer \
+    python3-uno  # LibreOffice Python bindings
+
+# Set up working directories and permissions
+RUN mkdir -p /app/docbase /tmp/libreoffice && \
+    chmod -R 777 /app/docbase /tmp/libreoffice
+
+
 RUN pip install --upgrade pip
 
+# Copy the package directory first
+COPY SumasenLibs/excel_lib /app/SumasenLibs/excel_lib
+COPY ./docbase /app/docbase
+
+# Install the package in editable mode
+RUN pip install -e /app/SumasenLibs/excel_lib
+
+
 RUN apt-get update
 
 COPY ./requirements.txt /app/requirements.txt
@@ -51,7 +73,10 @@ RUN pip install gunicorn
 
 #RUN ["chmod", "+x", "wait-for.sh"]
 
-RUN pip install -r requirements.txt
+# Add xlsxwriter
+RUN pip install -r requirements.txt \
+    && pip install django-cors-headers \
+    && pip install xlsxwriter gunicorn
 
 COPY . /app
35  Dockerfile.supervisor  (new file)
@@ -0,0 +1,35 @@
FROM nginx:alpine

# Create necessary directories and set permissions
RUN mkdir -p /usr/share/nginx/html \
    && mkdir -p /var/log/nginx \
    && mkdir -p /var/cache/nginx \
    && chown -R nginx:nginx /usr/share/nginx/html \
    && chown -R nginx:nginx /var/log/nginx \
    && chown -R nginx:nginx /var/cache/nginx \
    && chmod -R 755 /usr/share/nginx/html

# Copy files - notice the change in the source path
COPY supervisor/html/* /usr/share/nginx/html/
COPY supervisor/nginx/default.conf /etc/nginx/conf.d/default.conf

# Create the media directory
RUN mkdir -p /app/media && chmod 755 /app/media

# Copy static files
#COPY ./static /usr/share/nginx/html/static

# Set permissions
RUN chown -R nginx:nginx /app/media

# Set final permissions
RUN chown -R nginx:nginx /usr/share/nginx/html \
    && chmod -R 755 /usr/share/nginx/html \
    && touch /var/log/nginx/access.log \
    && touch /var/log/nginx/error.log \
    && chown -R nginx:nginx /var/log/nginx \
    && chown -R nginx:nginx /etc/nginx/conf.d

EXPOSE 80

CMD ["nginx", "-g", "daemon off;"]
8066  LineBot/MobServer_gifuroge.rb  (new file) — file diff suppressed because it is too large
1087  LineBot/userpostgres.rb  (new file) — file diff suppressed because it is too large
BIN   SumasenLibs/certificate_template.xlsx  (new file) — binary file not shown
19  SumasenLibs/excel_lib/README.md  (new file)
@@ -0,0 +1,19 @@
# SumasenExcel Library

A simple Python library for working with Excel files.

## Installation

```bash
pip install -e .
```

## Usage

from sumaexcel import SumasenExcel

excel = SumasenExcel("path/to/file.xlsx")
data = excel.read_excel()

## License

MIT License
20  SumasenLibs/excel_lib/docker/docker-compose.yml  (new file)
@@ -0,0 +1,20 @@
version: '3.8'

services:
  python:
    build:
      context: ..
      dockerfile: docker/python/Dockerfile
    volumes:
      - ..:/app
    environment:
      - PYTHONPATH=/app
      - POSTGRES_DB=rogdb
      - POSTGRES_USER=admin
      - POSTGRES_PASSWORD=admin123456
      - POSTGRES_HOST=localhost
      - POSTGRES_PORT=5432
    network_mode: "host"
    tty: true
    container_name: python_container  # name the container explicitly
26  SumasenLibs/excel_lib/docker/python/Dockerfile  (new file)
@@ -0,0 +1,26 @@
FROM python:3.9-slim

WORKDIR /app

# Refresh GPG keys and install packages
RUN apt-get update --allow-insecure-repositories && \
    apt-get install -y --allow-unauthenticated python3-dev libpq-dev postgresql-client && \
    rm -rf /var/lib/apt/lists/*

# Install Python packages
COPY requirements.txt .
COPY setup.py .
COPY README.md .
COPY . .

RUN pip install --no-cache-dir -r requirements.txt

# Install development packages
RUN pip install --no-cache-dir --upgrade pip \
    pytest \
    pytest-cov \
    flake8

# Install the package
RUN pip install -e .
6  SumasenLibs/excel_lib/requirements.txt  (new file)
@@ -0,0 +1,6 @@
openpyxl>=3.0.0
pandas>=1.0.0
pillow>=8.0.0
configparser>=5.0.0
psycopg2-binary==2.9.9
requests
25  SumasenLibs/excel_lib/setup.py  (new file)
@@ -0,0 +1,25 @@
# setup.py
from setuptools import setup, find_packages

setup(
    name="sumaexcel",
    version="0.1.0",
    packages=find_packages(),
    install_requires=[
        "openpyxl>=3.0.0",
        "pandas>=1.0.0"
    ],
    author="Akira Miyata",
    author_email="akira.miyata@sumasen.net",
    description="Excel handling library",
    long_description=open("README.md").read(),
    long_description_content_type="text/markdown",
    url="https://github.com/akiramiyata/sumaexcel",
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    python_requires=">=3.6",
)
4  SumasenLibs/excel_lib/sumaexcel/__init__.py  (new file)
@@ -0,0 +1,4 @@
from .sumaexcel import SumasenExcel

__version__ = "0.1.0"
__all__ = ["SumasenExcel"]
102  SumasenLibs/excel_lib/sumaexcel/conditional.py  (new file)
@@ -0,0 +1,102 @@
# sumaexcel/conditional.py
from typing import Dict, Any, List, Union
from openpyxl.formatting.rule import Rule, ColorScaleRule, DataBarRule, IconSetRule
from openpyxl.styles import PatternFill, Font, Border, Side
from openpyxl.worksheet.worksheet import Worksheet


class ConditionalFormatManager:
    """Handle conditional formatting in Excel"""

    def __init__(self, worksheet: Worksheet):
        self.worksheet = worksheet

    def add_color_scale(
        self,
        cell_range: str,
        min_color: str = "00FF0000",  # Red
        mid_color: str = "00FFFF00",  # Yellow
        max_color: str = "0000FF00"   # Green
    ) -> None:
        """Add color scale conditional formatting"""
        rule = ColorScaleRule(
            start_type='min',
            start_color=min_color,
            mid_type='percentile',
            mid_value=50,
            mid_color=mid_color,
            end_type='max',
            end_color=max_color
        )
        self.worksheet.conditional_formatting.add(cell_range, rule)

    def add_data_bar(
        self,
        cell_range: str,
        color: str = "000000FF",  # Blue
        show_value: bool = True
    ) -> None:
        """Add data bar conditional formatting"""
        rule = DataBarRule(
            start_type='min',
            end_type='max',
            color=color,
            showValue=show_value
        )
        self.worksheet.conditional_formatting.add(cell_range, rule)

    def add_icon_set(
        self,
        cell_range: str,
        icon_style: str = '3Arrows',  # '3Arrows', '3TrafficLights', '3Signs'
        reverse_icons: bool = False
    ) -> None:
        """Add icon set conditional formatting"""
        rule = IconSetRule(
            icon_style=icon_style,
            type='percent',
            values=[0, 33, 67],
            reverse_icons=reverse_icons
        )
        self.worksheet.conditional_formatting.add(cell_range, rule)

    def add_custom_rule(
        self,
        cell_range: str,
        rule_type: str,
        formula: str,
        fill_color: str = None,
        font_color: str = None,
        bold: bool = None,
        border_style: str = None,
        border_color: str = None
    ) -> None:
        """Add custom conditional formatting rule"""
        dxf = {}
        if fill_color:
            dxf['fill'] = PatternFill(start_color=fill_color, end_color=fill_color)
        if font_color or bold is not None:
            dxf['font'] = Font(color=font_color, bold=bold)
        if border_style and border_color:
            side = Side(style=border_style, color=border_color)
            dxf['border'] = Border(left=side, right=side, top=side, bottom=side)

        rule = Rule(type=rule_type, formula=[formula], dxf=dxf)
        self.worksheet.conditional_formatting.add(cell_range, rule)

    def copy_conditional_format(
        self,
        source_range: str,
        target_range: str
    ) -> None:
        """Copy conditional formatting from one range to another"""
        source_rules = self.worksheet.conditional_formatting.get(source_range)
        if source_rules:
            for rule in source_rules:
                self.worksheet.conditional_formatting.add(target_range, rule)

    def clear_conditional_format(
        self,
        cell_range: str
    ) -> None:
        """Clear conditional formatting from specified range"""
        self.worksheet.conditional_formatting.delete(cell_range)
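For orientation, a minimal usage sketch of the ConditionalFormatManager added above, driven from a plain openpyxl workbook; the data, ranges, and output file name are illustrative assumptions, not part of the diff.

```python
# Sketch only: exercises add_color_scale and add_data_bar as defined above.
from openpyxl import Workbook
from sumaexcel.conditional import ConditionalFormatManager  # module path assumed

wb = Workbook()
ws = wb.active
for row in range(1, 11):
    ws.cell(row=row, column=1, value=row * 10)  # sample numbers in A1:A10

cf = ConditionalFormatManager(ws)
cf.add_color_scale("A1:A10")                 # red -> yellow -> green scale
cf.add_data_bar("A1:A10", color="000000FF")  # blue data bars, values shown
wb.save("conditional_demo.xlsx")             # hypothetical output path
```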
166  SumasenLibs/excel_lib/sumaexcel/config_handler.py  (new file)
@@ -0,0 +1,166 @@
# config_handler.py
#
import configparser
import os
from typing import Any, Dict, Optional

import configparser
import os
import re
from typing import Any, Dict, Optional


class ConfigHandler:
    """Configuration-file manager with variable substitution"""

    def __init__(self, ini_file_path: str, variables: Dict[str, str] = None):
        """
        Args:
            ini_file_path (str): path to the INI file
            variables (Dict[str, str], optional): dictionary of substitution variables
        """
        self.ini_file_path = ini_file_path
        self.variables = variables or {}
        self.config = configparser.ConfigParser()
        self.load_config()

    def _substitute_variables(self, text: str) -> str:
        """
        Substitute variables inside the given text.

        Args:
            text (str): text to substitute into

        Returns:
            str: text after substitution
        """
        # Substitute ${var}-style variables
        pattern1 = r'\${([^}]+)}'
        # Substitute [var]-style variables
        pattern2 = r'\[([^\]]+)\]'

        def replace_var(match):
            var_name = match.group(1)
            return self.variables.get(var_name, match.group(0))

        # Run the substitution for both patterns
        text = re.sub(pattern1, replace_var, text)
        text = re.sub(pattern2, replace_var, text)

        return text

    def load_config(self) -> None:
        """Load the configuration file and substitute variables"""
        if not os.path.exists(self.ini_file_path):
            raise FileNotFoundError(f"設定ファイルが見つかりません: {self.ini_file_path}")

        # Read the raw text first
        with open(self.ini_file_path, 'r', encoding='utf-8') as f:
            content = f.read()

        # Substitute variables
        substituted_content = self._substitute_variables(content)

        # Feed the substituted content to configparser via StringIO
        from io import StringIO
        self.config.read_file(StringIO(substituted_content))

    def get_value(self, section: str, key: str, default: Any = None) -> Optional[str]:
        """
        Get the value of a key in the given section.

        Args:
            section (str): section name
            key (str): key name
            default (Any): default value (optional)

        Returns:
            Optional[str]: the configured value, or the default if it does not exist
        """
        try:
            return self.config[section][key]
        except KeyError:
            return default

    def get_section(self, section: str) -> Dict[str, str]:
        """
        Get all settings of the given section.

        Args:
            section (str): section name

        Returns:
            Dict[str, str]: the section's settings as a dictionary
        """
        try:
            return dict(self.config[section])
        except KeyError:
            return {}

    def get_all_sections(self) -> Dict[str, Dict[str, str]]:
        """
        Get the settings of every section.

        Returns:
            Dict[str, Dict[str, str]]: all sections as a nested dictionary
        """
        return {section: dict(self.config[section]) for section in self.config.sections()}


# Usage example
if __name__ == "__main__":
    # Create a sample INI file
    sample_ini = """
[Database]
host = localhost
port = 5432
database = mydb
user = admin
password = secret

[Application]
debug = true
log_level = INFO
max_connections = 100

[Paths]
data_dir = /var/data
log_file = /var/log/app.log
"""

    # Write the sample INI file
    with open('config.ini', 'w', encoding='utf-8') as f:
        f.write(sample_ini)

    # Load and use the configuration
    config = ConfigHandler('config.ini')

    # Get specific values
    db_host = config.get_value('Database', 'host')
    db_port = config.get_value('Database', 'port')
    print(f"Database connection: {db_host}:{db_port}")

    # Get a whole section
    db_config = config.get_section('Database')
    print("Database configuration:", db_config)

    # Get all settings
    all_config = config.get_all_sections()
    print("All configurations:", all_config)


# Example:
# # Read values from the configuration file
# config = ConfigHandler('config.ini')
#
# # Get database settings
# db_host = config.get_value('Database', 'host')
# db_port = config.get_value('Database', 'port')
# db_name = config.get_value('Database', 'database')
#
# # Get application settings
# debug_mode = config.get_value('Application', 'debug')
# log_level = config.get_value('Application', 'log_level')
#
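For orientation, a small sketch of the two substitution patterns ConfigHandler recognizes (`${var}` and `[var]`); the demo.ini file and the "app" variable are made up for illustration and are not part of the diff.

```python
# Sketch only: both ${var} and [var] placeholders are expanded before configparser runs.
from sumaexcel.config_handler import ConfigHandler  # module path assumed

with open("demo.ini", "w", encoding="utf-8") as f:
    f.write("[Paths]\nlog_file = /var/log/${app}.log\ndata_dir = /data/[app]\n")

cfg = ConfigHandler("demo.ini", variables={"app": "rogaining"})
print(cfg.get_value("Paths", "log_file"))  # /var/log/rogaining.log
print(cfg.get_value("Paths", "data_dir"))  # /data/rogaining
```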
77  SumasenLibs/excel_lib/sumaexcel/image.py  (new file)
@@ -0,0 +1,77 @@
# sumaexcel/image.py
from typing import Optional, Tuple, Union
from pathlib import Path
import os
from PIL import Image
from openpyxl.drawing.image import Image as XLImage
from openpyxl.worksheet.worksheet import Worksheet


class ImageManager:
    """Handle image operations in Excel"""

    def __init__(self, worksheet: Worksheet):
        self.worksheet = worksheet
        self.temp_dir = Path("/tmp/sumaexcel_images")
        self.temp_dir.mkdir(parents=True, exist_ok=True)

    def add_image(
        self,
        image_path: Union[str, Path],
        cell_coordinates: Tuple[int, int],
        size: Optional[Tuple[int, int]] = None,
        keep_aspect_ratio: bool = True,
        anchor_type: str = 'absolute'
    ) -> None:
        """Add image to worksheet at specified position"""
        # Convert path to Path object
        image_path = Path(image_path)

        # Open and process image
        with Image.open(image_path) as img:
            # Get original size
            orig_width, orig_height = img.size

            # Calculate new size if specified
            if size:
                target_width, target_height = size
                if keep_aspect_ratio:
                    ratio = min(target_width/orig_width, target_height/orig_height)
                    target_width = int(orig_width * ratio)
                    target_height = int(orig_height * ratio)

                # Resize image
                img = img.resize((target_width, target_height), Image.LANCZOS)

                # Save temporary resized image
                temp_path = self.temp_dir / f"temp_{image_path.name}"
                img.save(temp_path)
                image_path = temp_path

        # Create Excel image object
        excel_image = XLImage(str(image_path))

        # Add to worksheet
        self.worksheet.add_image(excel_image, anchor=f'{cell_coordinates[0]}{cell_coordinates[1]}')

    def add_image_absolute(
        self,
        image_path: Union[str, Path],
        position: Tuple[int, int],
        size: Optional[Tuple[int, int]] = None
    ) -> None:
        """Add image with absolute positioning"""
        excel_image = XLImage(str(image_path))
        if size:
            excel_image.width, excel_image.height = size
        excel_image.anchor = 'absolute'
        excel_image.top, excel_image.left = position
        self.worksheet.add_image(excel_image)

    def cleanup(self) -> None:
        """Clean up temporary files"""
        for file in self.temp_dir.glob("temp_*"):
            file.unlink()

    def __del__(self):
        """Cleanup on object destruction"""
        self.cleanup()
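For orientation, a minimal sketch of ImageManager.add_image; note that the anchor is built as f"{cell_coordinates[0]}{cell_coordinates[1]}", so passing ("B", 2) targets cell B2. The image path and output file are illustrative assumptions.

```python
# Sketch only: resizes logo.png (hypothetical) and anchors it at B2.
from openpyxl import Workbook
from sumaexcel.image import ImageManager  # module path assumed

wb = Workbook()
ws = wb.active
images = ImageManager(ws)
images.add_image("logo.png", cell_coordinates=("B", 2), size=(200, 100))
wb.save("image_demo.xlsx")
images.cleanup()  # removes temp_* copies under /tmp/sumaexcel_images
```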
96  SumasenLibs/excel_lib/sumaexcel/merge.py  (new file)
@@ -0,0 +1,96 @@
# sumaexcel/merge.py
from typing import List, Tuple, Dict
from openpyxl.worksheet.worksheet import Worksheet
from openpyxl.worksheet.merge import MergedCellRange


class MergeManager:
    """Handle merge cell operations"""

    def __init__(self, worksheet: Worksheet):
        self.worksheet = worksheet
        self._merged_ranges: List[MergedCellRange] = []
        self._load_merged_ranges()

    def _load_merged_ranges(self) -> None:
        """Load existing merged ranges from worksheet"""
        self._merged_ranges = list(self.worksheet.merged_cells.ranges)

    def merge_cells(
        self,
        start_row: int,
        start_col: int,
        end_row: int,
        end_col: int
    ) -> None:
        """Merge cells in specified range"""
        self.worksheet.merge_cells(
            start_row=start_row,
            start_column=start_col,
            end_row=end_row,
            end_column=end_col
        )
        self._load_merged_ranges()

    def unmerge_cells(
        self,
        start_row: int,
        start_col: int,
        end_row: int,
        end_col: int
    ) -> None:
        """Unmerge cells in specified range"""
        self.worksheet.unmerge_cells(
            start_row=start_row,
            start_column=start_col,
            end_row=end_row,
            end_column=end_col
        )
        self._load_merged_ranges()

    def copy_merged_cells(
        self,
        source_range: Tuple[int, int, int, int],
        target_start_row: int,
        target_start_col: int
    ) -> None:
        """Copy merged cells from source range to target position"""
        src_row1, src_col1, src_row2, src_col2 = source_range
        row_offset = target_start_row - src_row1
        col_offset = target_start_col - src_col1

        for merged_range in self._merged_ranges:
            if (src_row1 <= merged_range.min_row <= src_row2 and
                    src_col1 <= merged_range.min_col <= src_col2):
                new_row1 = merged_range.min_row + row_offset
                new_col1 = merged_range.min_col + col_offset
                new_row2 = merged_range.max_row + row_offset
                new_col2 = merged_range.max_col + col_offset

                self.merge_cells(new_row1, new_col1, new_row2, new_col2)

    def shift_merged_cells(
        self,
        start_row: int,
        rows: int = 0,
        cols: int = 0
    ) -> None:
        """Shift merged cells by specified number of rows and columns"""
        new_ranges = []
        for merged_range in self._merged_ranges:
            if merged_range.min_row >= start_row:
                new_row1 = merged_range.min_row + rows
                new_col1 = merged_range.min_col + cols
                new_row2 = merged_range.max_row + rows
                new_col2 = merged_range.max_col + cols

                self.worksheet.unmerge_cells(
                    start_row=merged_range.min_row,
                    start_column=merged_range.min_col,
                    end_row=merged_range.max_row,
                    end_column=merged_range.max_col
                )

                new_ranges.append((new_row1, new_col1, new_row2, new_col2))

        for new_range in new_ranges:
            self.merge_cells(*new_range)
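For orientation, a minimal sketch of MergeManager: merge a header band, then push every merged range at or below row 1 down by one row. The ranges and output file name are illustrative assumptions.

```python
# Sketch only: merge A1:E1, then shift merged ranges down one row (to A2:E2).
from openpyxl import Workbook
from sumaexcel.merge import MergeManager  # module path assumed

wb = Workbook()
ws = wb.active
merges = MergeManager(ws)
merges.merge_cells(start_row=1, start_col=1, end_row=1, end_col=5)
merges.shift_merged_cells(start_row=1, rows=1)
wb.save("merge_demo.xlsx")
```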
148  SumasenLibs/excel_lib/sumaexcel/page.py  (new file)
@@ -0,0 +1,148 @@
# sumaexcel/page.py
from typing import Optional, Dict, Any, Union
from openpyxl.worksheet.worksheet import Worksheet
from openpyxl.worksheet.page import PageMargins, PrintPageSetup

# sumaexcel/page.py (continued)

class PageManager:
    """Handle page setup and header/footer settings"""

    def __init__(self, worksheet: Worksheet):
        self.worksheet = worksheet

    def set_page_setup(
        self,
        orientation: str = 'portrait',
        paper_size: int = 9,  # A4
        fit_to_height: Optional[int] = None,
        fit_to_width: Optional[int] = None,
        scale: Optional[int] = None
    ) -> None:
        """Configure page setup

        Args:
            orientation: 'portrait' or 'landscape'
            paper_size: paper size (e.g., 9 for A4)
            fit_to_height: number of pages tall
            fit_to_width: number of pages wide
            scale: zoom scale (1-400)
        """
        setup = PrintPageSetup(
            orientation=orientation,
            paperSize=paper_size,
            scale=scale,
            fitToHeight=fit_to_height,
            fitToWidth=fit_to_width
        )
        self.worksheet.page_setup = setup

    def set_margins(
        self,
        left: float = 0.7,
        right: float = 0.7,
        top: float = 0.75,
        bottom: float = 0.75,
        header: float = 0.3,
        footer: float = 0.3
    ) -> None:
        """Set page margins in inches"""
        margins = PageMargins(
            left=left,
            right=right,
            top=top,
            bottom=bottom,
            header=header,
            footer=footer
        )
        self.worksheet.page_margins = margins

    def set_header_footer(
        self,
        odd_header: Optional[str] = None,
        odd_footer: Optional[str] = None,
        even_header: Optional[str] = None,
        even_footer: Optional[str] = None,
        first_header: Optional[str] = None,
        first_footer: Optional[str] = None,
        different_first: bool = False,
        different_odd_even: bool = False
    ) -> None:
        """Set headers and footers

        Format codes:
        - &P: Page number
        - &N: Total pages
        - &D: Date
        - &T: Time
        - &[Tab]: Sheet name
        - &[Path]: File path
        - &[File]: File name
        - &[Tab]: Worksheet name
        """
        self.worksheet.oddHeader.left = odd_header or ""
        self.worksheet.oddFooter.left = odd_footer or ""

        if different_odd_even:
            self.worksheet.evenHeader.left = even_header or ""
            self.worksheet.evenFooter.left = even_footer or ""

        if different_first:
            self.worksheet.firstHeader.left = first_header or ""
            self.worksheet.firstFooter.left = first_footer or ""

        self.worksheet.differentFirst = different_first
        self.worksheet.differentOddEven = different_odd_even

    def set_print_area(self, range_string: str) -> None:
        """Set print area

        Args:
            range_string: Cell range in A1 notation (e.g., 'A1:H42')
        """
        self.worksheet.print_area = range_string

    def set_print_title_rows(self, rows: str) -> None:
        """Set rows to repeat at top of each page

        Args:
            rows: Row range (e.g., '1:3')
        """
        self.worksheet.print_title_rows = rows

    def set_print_title_columns(self, cols: str) -> None:
        """Set columns to repeat at left of each page

        Args:
            cols: Column range (e.g., 'A:B')
        """
        self.worksheet.print_title_cols = cols

    def set_print_options(
        self,
        grid_lines: bool = False,
        horizontal_centered: bool = False,
        vertical_centered: bool = False,
        headers: bool = False
    ) -> None:
        """Set print options"""
        self.worksheet.print_gridlines = grid_lines
        self.worksheet.print_options.horizontalCentered = horizontal_centered
        self.worksheet.print_options.verticalCentered = vertical_centered
        self.worksheet.print_options.headers = headers


class PaperSizes:
    """Standard paper size constants"""
    LETTER = 1
    LETTER_SMALL = 2
    TABLOID = 3
    LEDGER = 4
    LEGAL = 5
    STATEMENT = 6
    EXECUTIVE = 7
    A3 = 8
    A4 = 9
    A4_SMALL = 10
    A5 = 11
    B4 = 12
    B5 = 13
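For orientation, a minimal sketch of PageManager using the same options the ini files elsewhere in this diff request (orientation=portrait, fit_to_width=1); the print area and file name are illustrative assumptions.

```python
# Sketch only: A4 portrait, one page wide, explicit print area.
from openpyxl import Workbook
from sumaexcel.page import PageManager, PaperSizes  # module path assumed

wb = Workbook()
ws = wb.active
page = PageManager(ws)
page.set_page_setup(orientation='portrait', paper_size=PaperSizes.A4, fit_to_width=1)
page.set_margins(top=0.5, bottom=0.5)
page.set_print_area('A1:J13')
wb.save("page_demo.xlsx")
```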
115  SumasenLibs/excel_lib/sumaexcel/styles.py  (new file)
@@ -0,0 +1,115 @@
# sumaexcel/styles.py
from typing import Dict, Any, Optional, Union
from openpyxl.styles import (
    Font, PatternFill, Alignment, Border, Side,
    NamedStyle, Protection, Color
)
from openpyxl.styles.differential import DifferentialStyle
from openpyxl.formatting.rule import Rule
from openpyxl.worksheet.worksheet import Worksheet


class StyleManager:
    """Excel style management class"""

    @staticmethod
    def create_font(
        name: str = "Arial",
        size: int = 11,
        bold: bool = False,
        italic: bool = False,
        color: str = "000000",
        underline: str = None,
        strike: bool = False
    ) -> Font:
        """Create a Font object with specified parameters"""
        return Font(
            name=name,
            size=size,
            bold=bold,
            italic=italic,
            color=color,
            underline=underline,
            strike=strike
        )

    @staticmethod
    def create_fill(
        fill_type: str = "solid",
        start_color: str = "FFFFFF",
        end_color: str = None
    ) -> PatternFill:
        """Create a PatternFill object"""
        return PatternFill(
            fill_type=fill_type,
            start_color=start_color,
            end_color=end_color or start_color
        )

    @staticmethod
    def create_border(
        style: str = "thin",
        color: str = "000000"
    ) -> Border:
        """Create a Border object"""
        side = Side(style=style, color=color)
        return Border(
            left=side,
            right=side,
            top=side,
            bottom=side
        )

    @staticmethod
    def create_alignment(
        horizontal: str = "general",
        vertical: str = "bottom",
        wrap_text: bool = False,
        shrink_to_fit: bool = False,
        indent: int = 0
    ) -> Alignment:
        """Create an Alignment object"""
        return Alignment(
            horizontal=horizontal,
            vertical=vertical,
            wrap_text=wrap_text,
            shrink_to_fit=shrink_to_fit,
            indent=indent
        )

    @staticmethod
    def copy_style(source_cell: Any, target_cell: Any) -> None:
        """Copy all style properties from source cell to target cell"""
        target_cell.font = Font(
            name=source_cell.font.name,
            size=source_cell.font.size,
            bold=source_cell.font.bold,
            italic=source_cell.font.italic,
            color=source_cell.font.color,
            underline=source_cell.font.underline,
            strike=source_cell.font.strike
        )

        if source_cell.fill.patternType != None:
            target_cell.fill = PatternFill(
                fill_type=source_cell.fill.patternType,
                start_color=source_cell.fill.start_color.rgb,
                end_color=source_cell.fill.end_color.rgb
            )

        target_cell.border = Border(
            left=source_cell.border.left,
            right=source_cell.border.right,
            top=source_cell.border.top,
            bottom=source_cell.border.bottom
        )

        target_cell.alignment = Alignment(
            horizontal=source_cell.alignment.horizontal,
            vertical=source_cell.alignment.vertical,
            wrap_text=source_cell.alignment.wrap_text,
            shrink_to_fit=source_cell.alignment.shrink_to_fit,
            indent=source_cell.alignment.indent
        )

        if source_cell.number_format:
            target_cell.number_format = source_cell.number_format
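For orientation, a minimal sketch of StyleManager: style one cell with the factory helpers, then mirror it with copy_style. Cell values and the file name are illustrative assumptions.

```python
# Sketch only: builds a header style and copies it to a second cell.
from openpyxl import Workbook
from sumaexcel.styles import StyleManager  # module path assumed

wb = Workbook()
ws = wb.active
ws["A1"] = "Header"
ws["A1"].font = StyleManager.create_font(bold=True, size=12)
ws["A1"].fill = StyleManager.create_fill(start_color="DDDDDD")
ws["A1"].border = StyleManager.create_border(style="thin")

ws["B1"] = "Copied"
StyleManager.copy_style(ws["A1"], ws["B1"])  # font, fill, border, alignment, number format
wb.save("styles_demo.xlsx")
```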
1015  SumasenLibs/excel_lib/sumaexcel/sumaexcel.py  (new file) — file diff suppressed because it is too large
BIN   SumasenLibs/excel_lib/testdata/certificate_5033.xlsx  (vendored, new file) — binary file not shown
BIN   SumasenLibs/excel_lib/testdata/certificate_template.xlsx  (vendored, new file) — binary file not shown
28  SumasenLibs/excel_lib/testdata/sample.py  (vendored, new file)
@@ -0,0 +1,28 @@
from sumaexcel import SumasenExcel
import logging

# Initialization
# Initialization
variables = {
    "zekken_number": "5033",
    "event_code": "FC岐阜",
    "db": "rogdb",
    "username": "admin",
    "password": "admin123456",
    "host": "localhost",
    "port": "5432"
}
excel = SumasenExcel(document="test", variables=variables, docbase="./testdata")

logging.info("Excelファイル作成 step-1")

# Initialize the sheet
ret = excel.make_report(variables=variables)
logging.info(f"Excelファイル作成 step-2 : ret={ret}")
if ret["status"] == True:
    filepath = ret["filepath"]
    logging.info(f"Excelファイル作成 : ret.filepath={filepath}")
else:
    message = ret.get("message", "No message provided")
    logging.error(f"Excelファイル作成失敗 : ret.message={message}")
26  SumasenLibs/excel_lib/testdata/test.ini  (vendored, new file)
@@ -0,0 +1,26 @@
[basic]
template_file=certificate_template.xlsx
doc_file=certificate_[zekken_number].xlsx
sections=section1
maxcol=10
column_width=3,5,16,16,16,16,16,8,8,12,3

[section1]
template_sheet=certificate
sheet_name=certificate
groups=group1,group2
fit_to_width=1
orientation=portrait

[section1.group1]
table_name=mv_entry_details
where=zekken_number='[zekken_number]' and event_name='[event_code]'
group_range=A1:J12


[section1.group2]
table_name=v_checkins_locations
where=zekken_number='[zekken_number]' and event_code='[event_code]'
sort=path_order
group_range=A13:J13
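For orientation, the [zekken_number] and [event_code] placeholders in this ini are the [var] pattern that ConfigHandler substitutes, so a caller only needs to supply the matching dictionary (the same keys testdata/sample.py uses). A hedged sketch, assuming ConfigHandler is the component that loads these files:

```python
# Sketch only: expands the placeholders in test.ini with the sample.py variables.
from sumaexcel.config_handler import ConfigHandler  # module path assumed

cfg = ConfigHandler(
    "SumasenLibs/excel_lib/testdata/test.ini",
    variables={"zekken_number": "5033", "event_code": "FC岐阜"},
)
print(cfg.get_value("basic", "doc_file"))         # certificate_5033.xlsx
print(cfg.get_value("section1.group1", "where"))  # zekken_number='5033' and event_name='FC岐阜'
```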
@@ -53,10 +53,14 @@ INSTALLED_APPS = [
     'leaflet',
     'leaflet_admin_list',
     'rog.apps.RogConfig',
+    'corsheaders',  # added
     'django_filters'
 ]
 
 MIDDLEWARE = [
+    'corsheaders.middleware.CorsMiddleware',  # keep this as close to the top as possible
+    'django.middleware.common.CommonMiddleware',
+
     'django.middleware.security.SecurityMiddleware',
     'django.contrib.sessions.middleware.SessionMiddleware',
     'django.middleware.common.CommonMiddleware',
@@ -68,10 +72,47 @@ MIDDLEWARE = [
 
 ROOT_URLCONF = 'config.urls'
 
+CORS_ALLOW_ALL_ORIGINS = True  # development environment only
+CORS_ALLOW_CREDENTIALS = True
+
+CORS_ALLOWED_METHODS = [
+    'GET',
+    'POST',
+    'PUT',
+    'PATCH',
+    'DELETE',
+    'OPTIONS'
+]
+CORS_ALLOWED_HEADERS = [
+    'accept',
+    'accept-encoding',
+    'authorization',
+    'content-type',
+    'dnt',
+    'origin',
+    'user-agent',
+    'x-csrftoken',
+    'x-requested-with',
+]
+
+# In production, restrict this as follows
+CORS_ALLOWED_ORIGINS = [
+    "https://rogaining.sumasen.net",
+    "http://rogaining.sumasen.net",
+]
+
+# CSRF settings
+CSRF_TRUSTED_ORIGINS = [
+    "http://rogaining.sumasen.net",
+    "https://rogaining.sumasen.net",
+]
+
+
+
 TEMPLATES = [
     {
         'BACKEND': 'django.template.backends.django.DjangoTemplates',
-        'DIRS': [BASE_DIR / 'templates'],
+        'DIRS': [os.path.join(BASE_DIR, 'templates')],
         'APP_DIRS': True,
         'OPTIONS': {
             'context_processors': [
@@ -138,10 +179,12 @@ USE_TZ = True
 STATIC_URL = '/static/'
 
 #STATIC_URL = '/static2/'
-STATIC_ROOT = BASE_DIR / "static"
+#STATIC_ROOT = BASE_DIR / "static"
+STATIC_ROOT = os.path.join(BASE_DIR, 'static')
 
 MEDIA_URL = '/media/'
-MEDIA_ROOT = BASE_DIR / "media/"
+#MEDIA_ROOT = BASE_DIR / "media/"
+MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
 
 #STATICFILES_DIRS = (os.path.join(BASE_DIR, "static2"),os.path.join(BASE_DIR, "media"))
 
@@ -174,3 +217,87 @@ REST_FRAMEWORK = {
     'DEFAULT_FILTER_BACKENDS': ['django_filters.rest_framework.DjangoFilterBackend'],
     'DEFAULT_AUTHENTICATION_CLASSES': ('knox.auth.TokenAuthentication', ),
 }
+
+
+#FRONTEND_URL = 'https://rogaining.intranet.sumasen.net'  # change to the frontend URL as needed
+FRONTEND_URL = 'https://rogaining.sumasen.net'  # change to the frontend URL as needed
+
+# With this setting, mail is not actually sent; it is written to the console instead.
+EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
+
+EMAIL_HOST = 'smtp.outlook.com'
+EMAIL_PORT = 587
+EMAIL_USE_TLS = True
+EMAIL_HOST_USER = 'rogaining@gifuai.net'
+EMAIL_HOST_PASSWORD = 'ctcpy9823"x~'
+DEFAULT_FROM_EMAIL = 'rogaining@gifuai.net'
+
+APP_DOWNLOAD_LINK = 'https://apps.apple.com/jp/app/%E5%B2%90%E9%98%9C%E3%83%8A%E3%83%93/id6444221792'
+ANDROID_DOWNLOAD_LINK = 'https://play.google.com/store/apps/details?id=com.dvox.gifunavi&hl=ja'
+
+SERVICE_NAME = '岐阜ナビ(岐阜ロゲのアプリ)'
+
+# settings.py
+DEFAULT_CHARSET = 'utf-8'
+
+#REST_FRAMEWORK = {
+#    'DEFAULT_RENDERER_CLASSES': [
+#        'rest_framework.renderers.JSONRenderer',
+#    ],
+#    'JSON_UNICODE_ESCAPE': False,
+#}
+
+LOGGING = {
+    'version': 1,
+    'disable_existing_loggers': False,
+    'formatters': {
+        'verbose': {
+            'format': '{levelname} {asctime} {module} {message}',
+            'style': '{',
+        },
+    },
+    'handlers': {
+        #'file': {
+        #    'level': 'DEBUG',
+        #    'class': 'logging.FileHandler',
+        #    'filename': os.path.join(BASE_DIR, 'logs/debug.log'),
+        #    'formatter': 'verbose',
+        #},
+        'console': {
+            'level': 'DEBUG',
+            'class': 'logging.StreamHandler',
+            'formatter': 'verbose',
+        },
+    },
+    'root': {
+        'handlers': ['console'],
+        'level': 'DEBUG',
+    },
+    'loggers': {
+        'django': {
+            'handlers': ['console'],
+            'level': 'INFO',
+            'propagate': False,
+        },
+        'django.request': {
+            'handlers': ['console'],
+            'level': 'DEBUG',
+        },
+        'rog': {
+            #'handlers': ['file','console'],
+            'handlers': ['console'],
+            'level': 'DEBUG',
+            'propagate': True,
+        },
+    },
+}
+
+PASSWORD_HASHERS = [
+    'django.contrib.auth.hashers.PBKDF2PasswordHasher',
+    'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
+    'django.contrib.auth.hashers.Argon2PasswordHasher',
+    'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
+]
+
+BLACKLISTED_IPS = ['44.230.58.114']  # add IP addresses you want to block to this list
@@ -18,6 +18,21 @@ from django.urls import path, include
 from django.conf import settings
 from django.conf.urls.static import static
 
+
+# Import the debug_urls view from the rog application's views
+from rog import views as rog_views
+
+DEBUG = True
+ALLOWED_HOSTS = ['rogaining.sumasen.net', 'localhost', '127.0.0.1']
+
+# CORS settings
+CORS_ALLOW_ALL_ORIGINS = True
+CORS_ALLOWED_ORIGINS = [
+    "http://rogaining.sumasen.net",
+    "http://localhost",
+    "http://127.0.0.1",
+]
+
 urlpatterns = [
     path('admin/', admin.site.urls),
     path('auth/', include('knox.urls')),
@@ -27,3 +42,8 @@ urlpatterns = [
 admin.site.site_header = "ROGANING"
 admin.site.site_title = "Roganing Admin Portal"
 admin.site.index_title = "Welcome to Roganing Portal"
+
+# Serve static files in the development environment
+if settings.DEBUG:
+    urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
+    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
26  docbase/certificate.ini  (new file)
@@ -0,0 +1,26 @@
[basic]
template_file=certificate_template.xlsx
doc_file=certificate_[zekken_number].xlsx
sections=section1
maxcol=10
column_width=3,5,16,16,16,16,16,8,8,12,3

[section1]
template_sheet=certificate
sheet_name=certificate
groups=group1,group2
fit_to_width=1
orientation=portrait

[section1.group1]
table_name=mv_entry_details
where=zekken_number='[zekken_number]' and event_name='[event_code]'
group_range=A1:J12


[section1.group2]
table_name=v_checkins_locations
where=zekken_number='[zekken_number]' and event_code='[event_code]'
sort=path_order
group_range=A13:J13
BIN   docbase/certificate_template.xlsx  (new file) — binary file not shown
@@ -30,12 +30,41 @@ services:
     env_file:
       - .env
     restart: "on-failure"
-    # depends_on:
-    #   - postgres-db
     networks:
       - rog-api
-    #entrypoint: ["/app/wait-for.sh", "postgres-db:5432", "--", ""]
-    #command: python3 manage.py runserver 0.0.0.0:8100
+  supervisor-web:
+    build:
+      context: .
+      dockerfile: Dockerfile.supervisor
+    volumes:
+      - type: bind
+        source: ./supervisor/html
+        target: /usr/share/nginx/html/supervisor
+        read_only: true
+      - type: bind
+        source: ./supervisor/nginx/default.conf
+        target: /etc/nginx/conf.d/default.conf
+        read_only: true
+      - type: volume
+        source: static_volume
+        target: /app/static
+        read_only: true
+      - type: volume
+        source: nginx_logs
+        target: /var/log/nginx
+      - type: bind
+        source: ./media
+        target: /usr/share/nginx/html/media
+    ports:
+      - "80:80"
+    depends_on:
+      - api
+    networks:
+      - rog-api
+    restart: always
+
+
+
 networks:
   rog-api:
@@ -44,3 +73,5 @@ networks:
 volumes:
   postgres_data:
   geoserver-data:
+  static_volume:
+  nginx_logs:
81  docker-compose.yaml.ssl  (new file)
@@ -0,0 +1,81 @@
version: "3.9"

services:
#  postgres-db:
#    image: kartoza/postgis:12.0
#    ports:
#      - 5432:5432
#    volumes:
#      - postgres_data:/var/lib/postgresql
#      - ./custom-postgresql.conf:/etc/postgresql/12/main/postgresql.conf
#    environment:
#      - POSTGRES_USER=${POSTGRES_USER}
#      - POSTGRES_PASS=${POSTGRES_PASS}
#      - POSTGRES_DBNAME=${POSTGRES_DBNAME}
#      - POSTGRES_MAX_CONNECTIONS=600

#    restart: "on-failure"
#    networks:
#      - rog-api

  api:
    build:
      context: .
      dockerfile: Dockerfile.gdal
    command: python3 manage.py runserver 0.0.0.0:8100
    volumes:
      - .:/app
    ports:
      - 8100:8100
    env_file:
      - .env
    restart: "on-failure"
    # depends_on:
    #   - postgres-db
    networks:
      - rog-api
    #entrypoint: ["/app/wait-for.sh", "postgres-db:5432", "--", ""]
    #command: python3 manage.py runserver 0.0.0.0:8100

  supervisor-web:
    build:
      context: .
      dockerfile: Dockerfile.supervisor
    volumes:
      - type: bind
        source: /etc/letsencrypt
        target: /etc/nginx/ssl
        read_only: true
      - type: bind
        source: ./supervisor/html
        target: /usr/share/nginx/html
        read_only: true
      - type: bind
        source: ./supervisor/nginx/default.conf
        target: /etc/nginx/conf.d/default.conf
        read_only: true
      - type: volume
        source: static_volume
        target: /app/static
        read_only: true
      - type: volume
        source: nginx_logs
        target: /var/log/nginx
    ports:
      - "80:80"
    depends_on:
      - api
    networks:
      - rog-api
    restart: always


networks:
  rog-api:
    driver: bridge

volumes:
  postgres_data:
  geoserver-data:
  static_volume:
  nginx_logs:
1  dump_rog_data.sql  (new file)
@@ -0,0 +1 @@
pg_dump: error: connection to database "rogdb" failed: FATAL: Peer authentication failed for user "postgres"
10  entrypoint.sh  (new file)
@@ -0,0 +1,10 @@
#!/bin/sh

# Collect static files
python manage.py collectstatic --noinput

# Apply database migrations
python manage.py migrate

# Start Gunicorn
exec "$@"
14  nginx.conf
@@ -29,17 +29,21 @@ http {
         listen 80;
         server_name localhost;
 
+        # Serve static files
         location /static/ {
             alias /app/static/;
         }
 
-        location /media/ {
-            alias /app/media/;
+        # Supervisor web application
+        location / {
+            root /usr/share/nginx/html;
+            index index.html;
+            try_files $uri $uri/ /index.html;
         }
 
-        location / {
-            proxy_pass http://app:8000;
+        # Django API proxy
+        location /api/ {
+            proxy_pass http://api:8000;
             proxy_set_header Host $host;
             proxy_set_header X-Real-IP $remote_addr;
             proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
@@ -65,3 +65,4 @@ django-extra-fields==3.0.2
 django-phonenumber-field==6.1.0
 django-rest-knox==4.2.0
 dj-database-url==2.0.0
+django-cors-headers==4.3.0
777
rog/admin.py
777
rog/admin.py
@ -1,16 +1,618 @@
|
|||||||
import email
|
import email
|
||||||
from django.contrib import admin
|
from django.contrib import admin
|
||||||
from django.shortcuts import render
|
from django.shortcuts import render,redirect
|
||||||
from leaflet.admin import LeafletGeoAdmin
|
from leaflet.admin import LeafletGeoAdmin
|
||||||
from leaflet.admin import LeafletGeoAdminMixin
|
from leaflet.admin import LeafletGeoAdminMixin
|
||||||
from leaflet_admin_list.admin import LeafletAdminListMixin
|
from leaflet_admin_list.admin import LeafletAdminListMixin
|
||||||
from .models import RogUser, Location, SystemSettings, JoinedEvent, Favorite, TravelList, TravelPoint, ShapeLayers, Event, Location_line, Location_polygon, JpnAdminMainPerf, Useractions, CustomUser, GifuAreas, UserTracks, templocation, UserUpload, EventUser, GoalImages, CheckinImages
|
from .models import RogUser, Location, SystemSettings, JoinedEvent, Favorite, TravelList, TravelPoint, ShapeLayers, Event, Location_line, Location_polygon, JpnAdminMainPerf, Useractions, CustomUser, GifuAreas, UserTracks, templocation, UserUpload, EventUser, GoalImages, CheckinImages, NewEvent2, Team, NewCategory, Entry, Member, TempUser,GifurogeRegister
|
||||||
from django.contrib.auth.admin import UserAdmin
|
from django.contrib.auth.admin import UserAdmin
|
||||||
from django.urls import path
|
from django.urls import path,reverse
|
||||||
from django.shortcuts import render
|
from django.shortcuts import render
|
||||||
from django import forms;
|
from django import forms;
|
||||||
import requests
|
import requests
|
||||||
|
|
||||||
|
from django.http import HttpResponseRedirect
|
||||||
|
from django.utils.html import format_html
|
||||||
|
from .forms import CSVUploadForm
|
||||||
|
from .views import process_csv_upload
|
||||||
|
|
||||||
|
from django.db.models import F # F式をインポート
|
||||||
|
from django.db import transaction
|
||||||
|
from django.contrib import messages
|
||||||
|
import csv
|
||||||
|
from io import StringIO,TextIOWrapper
|
||||||
|
from datetime import timedelta
|
||||||
|
from django.contrib.auth.hashers import make_password
|
||||||
|
from datetime import datetime, date
|
||||||
|
from django.core.exceptions import ValidationError
|
||||||
|
|
||||||
|
from django.contrib.auth.forms import UserChangeForm, UserCreationForm
|
||||||
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
|
||||||
|
@admin.register(GifurogeRegister)
|
||||||
|
class GifurogeRegisterAdmin(admin.ModelAdmin):
|
||||||
|
list_display = ('event_code', 'time', 'owner_name', 'email', 'team_name', 'department')
|
||||||
|
change_list_template = 'admin/rog/gifurogeregister/change_list.html' # この行を追加
|
||||||
|
|
||||||
|
def find_matching_category(self, time, department):
|
||||||
|
"""
|
||||||
|
時間とdepartmentに基づいて適切なカテゴリを見つける
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
duration = timedelta(hours=time)
|
||||||
|
|
||||||
|
# 検索前の情報出力
|
||||||
|
print(f" Searching for category with parameters:")
|
||||||
|
print(f" - Duration: {duration}")
|
||||||
|
print(f" - Department: {department}")
|
||||||
|
|
||||||
|
# 利用可能なカテゴリの一覧を出力
|
||||||
|
all_categories = NewCategory.objects.all()
|
||||||
|
print(" Available categories:")
|
||||||
|
for cat in all_categories:
|
||||||
|
#print(f" - ID: {cat.id}")
|
||||||
|
print(f" - Name: {cat.category_name}")
|
||||||
|
print(f" - Duration: {cat.duration}")
|
||||||
|
print(f" - Number: {cat.category_number}")
|
||||||
|
|
||||||
|
# カテゴリ検索のクエリをログ出力
|
||||||
|
query = NewCategory.objects.filter(
|
||||||
|
duration=duration,
|
||||||
|
category_name__startswith=department
|
||||||
|
)
|
||||||
|
print(f" Query SQL: {query.query}")
|
||||||
|
|
||||||
|
# 検索結果の取得
|
||||||
|
category = query.first()
|
||||||
|
|
||||||
|
if category:
|
||||||
|
print(f" Found matching category:")
|
||||||
|
print(f" - Name: {category.category_name}")
|
||||||
|
print(f" - Duration: {category.duration}")
|
||||||
|
print(f" - Category Number: {getattr(category, 'category_number', 'N/A')}")
|
||||||
|
|
||||||
|
else:
|
||||||
|
print(" No matching category found with the following filters:")
|
||||||
|
print(f" - Duration equals: {duration}")
|
||||||
|
print(f" - Category name starts with: {department}")
|
||||||
|
|
||||||
|
return category
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
print(f"Error finding category: {e}")
|
||||||
|
print(f"Exception type: {type(e)}")
|
||||||
|
import traceback
|
||||||
|
print(f"Traceback: {traceback.format_exc()}")
|
||||||
|
return None
|
||||||
|
|
||||||
|
    def create_entry_with_number(self, team, category, owner, event):
        """
        Create an entry while incrementing the category number.
        """
        try:
            with transaction.atomic():
                # Pre-validation
                try:
                    # Check the gender of the team members
                    if category.female:
                        for member in team.members.all():
                            print(f" Check existing member {member.user.lastname} {member.user.firstname} female:{member.user.female}")
                            if not member.user.female:
                                raise ValidationError(f"チーム '{team.team_name}' に男性メンバーが含まれているため、"
                                                      f"カテゴリー '{category.category_name}' には参加できません。")
                except ValidationError as e:
                    print(f"Pre-validation error: {str(e)}")
                    raise

                # Re-fetch the category with a row lock
                category_for_update = NewCategory.objects.select_for_update().get(
                    category_name=category.category_name
                )

                print(f" Creating entry with following details:")
                print(f" - Category: {category_for_update.category_name}")
                print(f" - Current category number: {category_for_update.category_number}")

                # Get the event date
                entry_date = event.start_datetime.date()

                # Check for an existing entry
                existing_entry = Entry.objects.filter(
                    team=team,
                    event=event,
                    date=entry_date,
                    is_active=True  # only check active entries
                ).first()

                if existing_entry:
                    print(f" Found existing entry for team {team.team_name} on {entry_date}")
                    raise ValidationError(
                        f"Team {team.team_name} already has an entry for event {event.event_name} on {entry_date}"
                    )

                # Take the current number, then increment it
                current_number = category_for_update.category_number
                zekken_number = current_number

                # Increment the category number
                category_for_update.category_number = F('category_number') + 1
                category_for_update.save()

                # Reload and show the updated value
                category_for_update.refresh_from_db()
                print(f" Updated category number: {category_for_update.category_number}")

                # Create the entry
                try:
                    entry = Entry.objects.create(
                        date=event.start_datetime,
                        team=team,
                        category=category,
                        owner=owner,
                        event=event,
                        zekken_number=zekken_number,
                        is_active=True
                    )
                    # Run validation
                    entry.full_clean()
                    # Save if everything is fine
                    entry.save()

                    print(f" Created entry:")
                    print(f" - Team: {team.team_name}")
                    print(f" - Event: {event.event_name}")
                    print(f" - Category: {category.category_name}")
                    print(f" - Zekken Number: {zekken_number}")

                    return entry

                except ValidationError as e:
                    print(f"Entry validation error: {str(e)}")
                    raise

        except Exception as e:
            print(f"Error creating entry: {e}")
            print(f"Exception type: {type(e)}")
            import traceback
            print(f"Traceback: {traceback.format_exc()}")
            raise

    def split_full_name(self, full_name):
        """
        Split a full name into last name and first name.
        Handles both half-width and full-width spaces.
        """
        try:
            # Split on whitespace (half-width space, full-width space, tab, etc.)
            parts = full_name.replace('　', ' ').split()
            if len(parts) >= 2:
                last_name = parts[0]
                first_name = ' '.join(parts[1:])  # handle first names made of several words
                return last_name, first_name
            else:
                # If it cannot be split, treat the whole string as the last name
                return full_name, ''
        except Exception as e:
            print(f"Error splitting name '{full_name}': {e}")
            return full_name, ''

    def convert_japanese_date(self, date_text):
        """
        Convert a Japanese-style date string into a DateField value.
        Example: '1990年1月1日' -> datetime.date(1990, 1, 1)
        """
        try:
            if not date_text or date_text.strip() == '':
                return None

            # Convert full-width digits to half-width digits
            date_text = date_text.translate(str.maketrans('０１２３４５６７８９', '0123456789'))
            date_text = date_text.strip()

            # Detect the separator and split
            if '年' in date_text:
                # Year/month/day (年月日) format
                date_parts = date_text.replace('年', '-').replace('月', '-').replace('日', '').split('-')
            elif '/' in date_text:
                # Slash-separated format
                date_parts = date_text.split('/')
            elif '-' in date_text:
                date_parts = date_text.split('-')
            else:
                print(f"Unsupported date format: {date_text}")
                return None

            # Check the number of parts
            if len(date_parts) != 3:
                print(f"Invalid date parts count: {len(date_parts)} in '{date_text}'")
                return None

            year = int(date_parts[0])
            month = int(date_parts[1])
            day = int(date_parts[2])

            # Simple sanity checks
            if not (1900 <= year <= 2100):
                print(f"Invalid year: {year}")
                return None
            if not (1 <= month <= 12):
                print(f"Invalid month: {month}")
                return None
            if not (1 <= day <= 31):  # per-month day counts are not checked
                print(f"Invalid day: {day}")
                return None

            print(f"Converted from {date_text} to year-{year} / month-{month} / day-{day}")

            return date(year, month, day)

        except Exception as e:
            print(f"Error converting date '{date_text}': {str(e)}")
            return None

    def create_owner_member(self, team, row):
        """
        Register the owner as team member 1.
        Existing members are updated.
        """
        try:
            owner_name = row.get('owner_name').strip()
            # Split the full name
            last_name, first_name = self.split_full_name(owner_name)
            print(f" Split name - Last: {last_name}, First: {first_name}")
            # Handle the birthday
            birthday = row.get('owner_birthday', '').strip()
            birth_date = self.convert_japanese_date(birthday)
            print(f" Converted birthday: {birth_date}")

            # Handle the sex field
            sex = row.get('owner_sex', '').strip()
            is_female = sex in ['女性', '女', '女子', 'female', 'girl', 'lady']
            print(f" Sex: {sex}, is_female: {is_female}")

            # Create the member
            member, created = Member.objects.get_or_create(
                team=team,
                user=team.owner,
                defaults={
                    'is_temporary': True  # provisional registration
                }
            )

            # Update the record for an existing member
            if not created:
                member.lastname = last_name
                member.firstname = first_name
                member.date_of_birth = birth_date
                member.female = is_female
                member.is_temporary = True
                member.save()
                print(f" Updated existing member {last_name} {first_name}")
            else:
                print(f" Created new member {last_name} {first_name}")

            return member

        except Exception as e:
            print(f"Error creating/updating member: {e}")
            raise

    def create_members(self, team, row):
        """
        Create the team's members.
        Existing members are updated.
        """
        try:
            created_members = []

            # Register the owner as a member
            member = self.create_owner_member(team, row)
            created_members.append(member)

            # Process members 2 through 5
            for i in range(2, 6):
                member_name = row.get(f'member{i}', '').strip()
                if member_name:
                    print(f"===== Processing member: {member_name} =====")

                    # Split the full name
                    last_name, first_name = self.split_full_name(member_name)
                    print(f" Split name - Last: {last_name}, First: {first_name}")

                    # Handle the birthday
                    birthday = row.get(f'birthday{i}', '').strip()
                    birth_date = self.convert_japanese_date(birthday)
                    print(f" Converted birthday: {birth_date}")

                    # Handle the sex field
                    sex = row.get(f'sex{i}', '').strip()
                    is_female = sex in ['女性', '女', '女子', 'female', 'girl', 'lady']
                    print(f" Sex: {sex}, is_female: {is_female}")

                    # Create a user for the member
                    email = f"dummy_{team.id}_{i}@gifuai.net".lower()
                    member_user, created = CustomUser.objects.get_or_create(
                        email=email,
                        defaults={
                            'password': make_password('temporary_password'),
                            'lastname': last_name,
                            'firstname': first_name,
                            'date_of_birth': birth_date,
                            'female': is_female
                        }
                    )

                    # Update the name fields for existing users as well
                    if not created:
                        member_user.lastname = last_name
                        member_user.firstname = first_name
                        member_user.date_of_birth = birth_date
                        member_user.female = is_female
                        member_user.save()

                    try:
                        # Create the member
                        member, created = Member.objects.get_or_create(
                            team=team,
                            user=member_user,
                            defaults={
                                'is_temporary': True  # provisional registration
                            }
                        )

                        # Update the record for an existing member
                        if not created:
                            member.is_temporary = True
                            member.save()
                            print(f" Updated existing member {member_user.lastname} {member_user.firstname}")
                        else:
                            print(f" Created new member {member_user.lastname} {member_user.firstname}")

                        created_members.append(member)
                        print(f" - Birthday: {member_user.date_of_birth}")
                        print(f" - Sex: {'Female' if member_user.female else 'Male'}")

                    except Exception as e:
                        print(f"Error creating/updating member: {e}")
                        raise

            return created_members

        except Exception as e:
            print(f"Error creating members: {e}")
            print(f"Exception type: {type(e)}")
            import traceback
            print(f"Traceback: {traceback.format_exc()}")
            raise

    def get_urls(self):
        urls = super().get_urls()
        custom_urls = [
            path('upload-csv/', self.upload_csv, name='gifuroge_register_upload_csv'),
        ]
        return custom_urls + urls

    def upload_csv(self, request):
        print("upload_csv")
        if request.method == 'POST':
            print("POST")
            if 'csv_file' not in request.FILES:
                messages.error(request, 'No file was uploaded.')
                return redirect('..')

            csv_file = request.FILES['csv_file']
            print(f"csv_file(1) = {csv_file}")
            if not csv_file.name.endswith('.csv'):
                messages.error(request, 'File is not CSV type')
                return redirect('..')

            try:
                # Read the file, allowing for a BOM
                file_content = csv_file.read()
                # Strip the BOM if present
                if file_content.startswith(b'\xef\xbb\xbf'):
                    file_content = file_content[3:]

                # Decode
                file_content = file_content.decode('utf-8')
                csv_file = StringIO(file_content)
                reader = csv.DictReader(csv_file)

                print(f"csv_file(2) = {csv_file}")
                print(f"reader = {reader}")

                with transaction.atomic():
                    for row in reader:
                        print(f" row={row}")

                        # Split the owner's full name
                        owner_lastname, owner_firstname = self.split_full_name(row['owner_name'])

                        # Hash the password
                        hashed_password = make_password(row['password'])

                        # Handle the owner's birthday
                        owner_birthday = row.get('owner_birthday', '').strip()
                        owner_birth_date = self.convert_japanese_date(owner_birthday)
                        print(f" Owner birthday: {owner_birth_date}")

                        # Handle the owner's sex field
                        owner_sex = row.get('owner_sex', '').strip()
                        owner_is_female = owner_sex in ['女性', '女', '女子', 'female', 'girl', 'lady']
                        print(f" Owner sex: {owner_sex}, is_female: {owner_is_female}")

                        # Get or create the user
                        user, created = CustomUser.objects.get_or_create(
                            email=row['email'],
                            defaults={
                                'password': hashed_password,  # make_password(row['password'])
                                'lastname': owner_lastname,
                                'firstname': owner_firstname,
                                'date_of_birth': owner_birth_date,
                                'female': owner_is_female
                            }
                        )

                        if not created:
                            # For existing users, fill in any fields that are still empty
                            should_update = False
                            update_fields = []

                            print(f" Checking existing user data for {user.email}:")
                            print(f" - Current lastname: '{user.lastname}'")
                            print(f" - Current firstname: '{user.firstname}'")
                            print(f" - Current birth date: {user.date_of_birth}")
                            print(f" - Current female: {user.female}")

                            # Last name empty or None
                            if not user.lastname or user.lastname.strip() == '':
                                user.lastname = owner_lastname
                                should_update = True
                                update_fields.append('lastname')
                                print(f" - Updating lastname to: {owner_lastname}")

                            # First name empty or None
                            if not user.firstname or user.firstname.strip() == '':
                                user.firstname = owner_firstname
                                should_update = True
                                update_fields.append('firstname')
                                print(f" - Updating firstname to: {owner_birth_date if False else owner_firstname}")

                            # Date of birth empty or None
                            if not user.date_of_birth and owner_birth_date:
                                user.date_of_birth = owner_birth_date
                                should_update = True
                                update_fields.append('date_of_birth')
                                print(f" - Updating birth date to: {owner_birth_date}")

                            # Sex not set yet
                            # (BooleanField, so test with "is None")
                            if user.female is None:
                                user.female = owner_is_female
                                should_update = True
                                update_fields.append('female')
                                print(f" - Updating female to: {owner_is_female}")

                            # Only update the password when it is not '登録済み' (already registered)
                            if row['password'] != '登録済み':
                                user.password = hashed_password
                                should_update = True
                                update_fields.append('password')
                                print(f" - Updating password")

                            # Save only when something changed
                            if should_update:
                                try:
                                    # Update only the listed fields
                                    user.save(update_fields=update_fields)
                                    print(f" Updated user {user.email} fields: {', '.join(update_fields)}")
                                except Exception as e:
                                    print(f" Error updating user {user.email}: {str(e)}")
                                    raise
                            else:
                                print(f" No updates needed for user {user.email}")

                        print(f" user created...")
                        print(f" Owner member created: {user.lastname} {user.firstname}")
                        print(f" - Birthday: {user.date_of_birth}")
                        print(f" - Sex: {'Female' if user.female else 'Male'}")

                        # Find the appropriate category
                        category = self.find_matching_category(
                            time=int(row['time']),
                            department=row['department']
                        )

                        if not category:
                            raise ValueError(
                                f"No matching category found for time={row['time']} hours "
                                f"and department={row['department']}"
                            )

                        print(f" Using category: {category.category_name}")

                        # Create the Team (or fetch it if it already exists)
                        team, created = Team.objects.get_or_create(
                            team_name=row['team_name'],
                            defaults={
                                'owner': user,
                                'category': category
                            }
                        )

                        # Update the category even for an existing team
                        if not created:
                            team.category = category
                            team.save()

                        print(" team created/updated...")

                        self.create_members(team, row)

                        # Look up the event
                        try:
                            event_code = row['event_code']
                            event = NewEvent2.objects.get(event_name=event_code)
                            print(f" Found event: {event.event_name}")
                        except NewEvent2.DoesNotExist:
                            raise ValueError(f"Event with code {event_code} does not exist")

                        try:
                            # Create the entry
                            entry = self.create_entry_with_number(
                                team=team,
                                category=category,
                                owner=user,
                                event=event,
                            )

                            print(" entry created...")
                        except ValidationError as e:
                            messages.error(request, str(e))
                            return redirect('..')

                        gifuroge_register = GifurogeRegister.objects.create(
                            event_code=row['event_code'],
                            time=int(row['time']),
                            owner_name_kana=row['owner_name_kana'],
                            owner_name=row['owner_name'],
                            owner_birthday=self.convert_japanese_date(row['owner_birthday']),
                            owner_sex=row['owner_sex'],
                            email=row['email'],
                            password=row['password'],
                            team_name=row['team_name'],
                            department=row['department'],
                            members_count=int(row['members_count']),
                            member2=row.get('member2', '') or None,
                            birthday2=self.convert_japanese_date(row.get('birthday2', '')),
                            sex2=row.get('sex2', '') or None,
                            member3=row.get('member3', '') or None,
                            birthday3=self.convert_japanese_date(row.get('birthday3', '')),
                            sex3=row.get('sex3', '') or None,
                            member4=row.get('member4', '') or None,
                            birthday4=self.convert_japanese_date(row.get('birthday4', '')),
                            sex4=row.get('sex4', '') or None,
                            member5=row.get('member5', '') or None,
                            birthday5=self.convert_japanese_date(row.get('birthday5', '')),
                            sex5=row.get('sex5', '') or None
                        )
                        print(f" saved gifuroge_register...")

            except UnicodeDecodeError:
                messages.error(request, 'File encoding error. Please ensure the file is UTF-8 encoded.')
                return redirect('..')
            except Exception as e:
                print(f"Error processing row: {e}")
                raise

            messages.success(request, 'CSV file uploaded successfully')
            return redirect('..')

        return render(request, 'admin/rog/gifurogeregister/upload-csv.html')

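The upload handler above drives everything off the column names in the CSV header row. As a quick illustration, here is a minimal sketch (not part of the repository) that writes a one-row registration CSV using the column names the handler reads via row[...] and row.get(...); every value shown is hypothetical, and optional member2..5 columns are simply left blank.

import csv

fieldnames = [
    'event_code', 'time', 'owner_name_kana', 'owner_name', 'owner_birthday',
    'owner_sex', 'email', 'password', 'team_name', 'department', 'members_count',
    'member2', 'birthday2', 'sex2', 'member3', 'birthday3', 'sex3',
    'member4', 'birthday4', 'sex4', 'member5', 'birthday5', 'sex5',
]

with open('sample_register.csv', 'w', newline='', encoding='utf-8') as f:
    writer = csv.DictWriter(f, fieldnames=fieldnames)  # missing keys default to ''
    writer.writeheader()
    writer.writerow({
        'event_code': '関ケ原2',            # hypothetical event name (matched against NewEvent2.event_name)
        'time': '5',                        # hours, matched against NewCategory.duration
        'owner_name_kana': 'ヤマダ タロウ',
        'owner_name': '山田 太郎',
        'owner_birthday': '1990年1月1日',
        'owner_sex': '男性',
        'email': 'taro@example.com',
        'password': 'secret',               # '登録済み' keeps the existing password
        'team_name': 'サンプルチーム',
        'department': 'ソロ男子',           # must be the prefix of an existing category name
        'members_count': '1',
    })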
class RogAdmin(LeafletAdminListMixin, LeafletGeoAdminMixin, admin.ModelAdmin):
    list_display = ['title', 'venue', 'at_date',]

@@ -53,21 +655,25 @@ class UserAdminConfig(UserAdmin):
    print("-------Event code--------")
    for i in data:
        _exist = CustomUser.objects.filter(email=i["zekken_number"]).delete()
        other_fields.setdefault('zekken_number', i['zekken_number'])
        other_fields.setdefault('is_staff', True)
        other_fields.setdefault('is_superuser', False)
        other_fields.setdefault('is_active', True)
        other_fields.setdefault('event_code', i['event_code'])
        other_fields.setdefault('team_name', i['team_name'])
        other_fields.setdefault('group', '大垣-初心者')

        usr = CustomUser.objects.create_user(
            email=i["zekken_number"],
            password=i['password'],
            **other_fields
        )

    form = loadUserForm()
    data = {'form': form}
    return render(request, 'admin/load_users.html', data)

    """
    fieldsets = (
        (None, {'fields':('email', 'group', 'zekken_number', 'event_code', 'team_name',)}),
        ('Permissions', {'fields':('is_staff', 'is_active', 'is_rogaining')}),
@@ -76,6 +682,35 @@ class UserAdminConfig(UserAdmin):
    add_fieldsets = (
        (None, {'classes':('wide',), 'fields':('email', 'group','zekken_number', 'event_code', 'team_name', 'password1', 'password2')}),
    )
    """
    # Set readonly_fields explicitly
    readonly_fields = ('date_joined',)  # only fields that must stay unchanged => Personal Info remains editable

    fieldsets = (
        (None, {'fields': ('email', 'password')}),
        (_('Personal info'), {
            'fields': ('firstname', 'lastname', 'date_of_birth', 'female'),
            'classes': ('wide',)  # widen the field display
        }),
        (_('Permissions'), {'fields': ('is_staff', 'is_active', 'is_rogaining', 'user_permissions')}),
        (_('Rogaining info'), {
            'fields': ('zekken_number', 'event_code', 'team_name', 'group'),
            'classes': ('wide',)
        }),
        (_('Important dates'), {
            'fields': ('date_joined', 'last_login'),
            'classes': ('wide',)
        }),  # read-only
    )
    add_fieldsets = (
        (None, {
            'classes': ('wide',),
            #'fields': ('email', 'password1', 'password2', 'is_staff', 'is_active', 'is_rogaining')}
            'fields': ('email', 'password1', 'password2', 'lastname', 'firstname', 'date_of_birth', 'female', 'is_staff', 'is_active', 'is_rogaining')}
        ),
    )
    search_fields = ('email', 'firstname', 'lastname', 'zekken_number', 'team_name')
    ordering = ('email',)

class JpnSubPerfAdmin(LeafletGeoAdmin):
    search_fields = ('adm0_ja', 'adm1_ja', 'adm2_ja', 'name_modified', 'area_name',)
@@ -194,6 +829,126 @@ class TempLocationAdmin(LeafletGeoAdmin):
    actions = [tranfer_to_location,]


@admin.register(NewEvent2)
class NewEvent2Admin(admin.ModelAdmin):
    list_display = ['event_name', 'start_datetime', 'end_datetime', 'csv_upload_button']

    def get_urls(self):
        urls = super().get_urls()
        my_urls = [
            path('csv-upload/', self.admin_site.admin_view(self.csv_upload_view), name='newevent2_csv_upload'),
        ]
        return my_urls + urls

    def csv_upload_view(self, request):
        if request.method == 'POST':
            form = CSVUploadForm(request.POST, request.FILES)
            if form.is_valid():
                csv_file = request.FILES['csv_file']
                event = form.cleaned_data['event']
                process_csv_upload(csv_file, event)
                self.message_user(request, "CSV file has been processed successfully.")
                return HttpResponseRedirect("../")
        else:
            form = CSVUploadForm()

        return render(request, 'admin/csv_upload.html', {'form': form})

    def csv_upload_button(self, obj):
        url = reverse('admin:newevent2_csv_upload')
        return format_html('<a class="button" href="{}">CSVアップロード</a>', url)
    csv_upload_button.short_description = 'CSV Upload'

    def changelist_view(self, request, extra_context=None):
        extra_context = extra_context or {}
        extra_context['csv_upload_url'] = reverse('admin:newevent2_csv_upload')
        return super().changelist_view(request, extra_context=extra_context)


@admin.register(Team)
class TeamAdmin(admin.ModelAdmin):
    list_display = ['team_name', 'owner']
    search_fields = ['team_name', 'owner__email']

@admin.register(NewCategory)
class NewCategoryAdmin(admin.ModelAdmin):
    list_display = ['category_name', 'category_number', 'duration', 'num_of_member', 'family', 'female']
    list_filter = ['family', 'female']
    search_fields = ['category_name']

@admin.register(Entry)
class EntryAdmin(admin.ModelAdmin):
    list_display = ['team', 'event', 'category', 'date']
    list_filter = ['event', 'category']
    search_fields = ['team__team_name', 'event__event_name']

@admin.register(Member)
class MemberAdmin(admin.ModelAdmin):
    list_display = ['team', 'user']
    search_fields = ['team__team_name', 'user__email']

@admin.register(TempUser)
class TempUserAdmin(admin.ModelAdmin):
    list_display = ['email', 'is_rogaining', 'zekken_number', 'event_code', 'team_name', 'group', 'created_at', 'expires_at']
    list_filter = ['is_rogaining', 'group']
    search_fields = ['email', 'zekken_number', 'team_name']


# Revised CustomUserAdmin (updates the existing one)
class CustomUserChangeForm(UserChangeForm):
    class Meta(UserChangeForm.Meta):
        model = CustomUser
        fields = '__all__'

class CustomUserCreationForm(UserCreationForm):
    class Meta(UserCreationForm.Meta):
        model = CustomUser
        fields = ('email', 'lastname', 'firstname', 'date_of_birth', 'female')

class CustomUserAdmin(UserAdmin):
    form = CustomUserChangeForm
    add_form = CustomUserCreationForm
    model = CustomUser

    list_display = ('email', 'is_staff', 'is_active', 'is_rogaining', 'zekken_number', 'event_code', 'team_name', 'group', 'firstname', 'lastname')
    list_filter = ('is_staff', 'is_active', 'is_rogaining', 'group')

    # Set readonly_fields explicitly
    readonly_fields = ('date_joined',)  # only fields that must stay unchanged => Personal Info remains editable

    fieldsets = (
        (None, {'fields': ('email', 'password')}),
        (_('Personal info'), {
            'fields': ('firstname', 'lastname', 'date_of_birth', 'female'),
            'classes': ('wide',)  # widen the field display
        }),
        (_('Permissions'), {'fields': ('is_staff', 'is_active', 'is_rogaining', 'user_permissions')}),
        (_('Rogaining info'), {
            'fields': ('zekken_number', 'event_code', 'team_name', 'group'),
            'classes': ('wide',)
        }),
        (_('Important dates'), {
            'fields': ('date_joined', 'last_login'),
            'classes': ('wide',)
        }),  # read-only
    )
    add_fieldsets = (
        (None, {
            'classes': ('wide',),
            #'fields': ('email', 'password1', 'password2', 'is_staff', 'is_active', 'is_rogaining')}
            'fields': ('email', 'password1', 'password2', 'lastname', 'firstname', 'date_of_birth', 'female', 'is_staff', 'is_active', 'is_rogaining')}
        ),
    )
    search_fields = ('email', 'firstname', 'lastname', 'zekken_number', 'team_name')
    ordering = ('email',)

    def get_readonly_fields(self, request, obj=None):
        # Keep read-only fields minimal for superusers
        if request.user.is_superuser:
            return self.readonly_fields
        # Additional restrictions can be applied for regular staff users
        return self.readonly_fields + ('is_staff', 'is_superuser')


admin.site.register(Useractions)
admin.site.register(RogUser, admin.ModelAdmin)
@@ -220,3 +975,7 @@ admin.site.register(CustomUser, UserAdminConfig)
admin.site.register(templocation, TempLocationAdmin)
admin.site.register(GoalImages, admin.ModelAdmin)
admin.site.register(CheckinImages, admin.ModelAdmin)

@@ -3,12 +3,18 @@ from django.conf import settings
from .models import CustomUser
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth import get_user_model
from django.contrib.auth.hashers import check_password
import logging

logger = logging.getLogger(__name__)


class EmailOrUsernameModelBackend(ModelBackend):
    """
    This is a ModelBackend that allows authentication
    with either a username or an email address.

    """
    """
    def authenticate(self, username=None, password=None):
        if '@' in username:
@@ -27,3 +33,34 @@ class EmailOrUsernameModelBackend(ModelBackend):
            return CustomUser.objects.get(pk=username)
        except get_user_model().DoesNotExist:
            return None
    """

    def authenticate(self, request, username=None, password=None, **kwargs):
        if '@' in username:
            kwargs = {'email': username}
        else:
            kwargs = {'username': username}
        try:
            user = CustomUser.objects.get(**kwargs)
            if check_password(password, user.password):
                logger.info(f"User authenticated successfully: {username}")
                return user
            else:
                logger.warning(f"Password mismatch for user: {username}")
        except CustomUser.DoesNotExist:
            logger.warning(f"User does not exist: {username}")
        except Exception as e:
            logger.error(f"Authentication error for {username}: {str(e)}")
        return None

    def get_user(self, user_id):
        try:
            user = CustomUser.objects.get(pk=user_id)
            logger.info(f"User retrieved: {user.username or user.email}")
            return user
        except CustomUser.DoesNotExist:
            logger.warning(f"User with id {user_id} does not exist")
            return None
        except Exception as e:
            logger.error(f"Error retrieving user with id {user_id}: {str(e)}")
            return None
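For the custom backend above to take effect it also has to be listed in AUTHENTICATION_BACKENDS. A minimal settings sketch follows; the diff does not show the backend module's file name, so the 'rog.backends' path is an assumption to adjust to wherever EmailOrUsernameModelBackend actually lives.

# settings.py — sketch only; 'rog.backends' is an assumed module path.
AUTHENTICATION_BACKENDS = [
    'rog.backends.EmailOrUsernameModelBackend',
    'django.contrib.auth.backends.ModelBackend',  # keep Django's default as a fallback
]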
7  rog/forms.py  Normal file
@@ -0,0 +1,7 @@
from django import forms
from .models import NewEvent2

class CSVUploadForm(forms.Form):
    event = forms.ModelChoiceField(queryset=NewEvent2.objects.all(), label="イベント選択")
    csv_file = forms.FileField(label="CSVファイル")

40  rog/gifuroge_team.csv  Normal file
@@ -0,0 +1,40 @@
4019,関ケ原2,Best Wishes,ソロ女子-5時間,pbkdf2_sha256$260000$RPvncicp11ENXxwpcpMXi1$9e/fKcfwaX3sJ91q9S70KWQcrNlraliguiHjF/UCW/I=
4010,関ケ原2,まつげん,ソロ女子-5時間,pbkdf2_sha256$260000$LMvH0KtHeHbCuuUZ5n88VZ$Lnsqs/u45QKoFN6lUdqC79nIMz5LwaKWMpmX/0aEXa8=
4021,大垣3,まつげん,ソロ女子-5時間,pbkdf2_sha256$260000$LMvH0KtHeHbCuuUZ5n88VZ$Lnsqs/u45QKoFN6lUdqC79nIMz5LwaKWMpmX/0aEXa8=
5,関ケ原2,てすとあきら1,ソロ男子-5時間,pbkdf2_sha256$260000$0GY5pt5V127jGd8HkkEort$8ZL0eY2qTZHydyzUUN5LNKZnmmibfu1x3QQ/7rJX1Vc=
3003,関ケ原2,てすとあきら1,ソロ男子-5時間,pbkdf2_sha256$260000$0GY5pt5V127jGd8HkkEort$8ZL0eY2qTZHydyzUUN5LNKZnmmibfu1x3QQ/7rJX1Vc=
3115,関ケ原2,Best Wishes,ソロ男子-5時間,pbkdf2_sha256$260000$tlNrgHyqDtfbM9f3GLv5G1$jRcR/ieTB174TZ9jW7obCBUMpyz86aywqDKw3VmhVQQ=
1010,大垣3,ハヤノテスト,一般-5時間,pbkdf2_sha256$260000$IeGmRkkUkwXXc1zO9oxvCe$ijnJTH7xhwidit+uCggSgjj/7g/vMK539IpOMA5GlnM=
1012,大垣3,てすとあきら1,一般-5時間,pbkdf2_sha256$260000$0GY5pt5V127jGd8HkkEort$8ZL0eY2qTZHydyzUUN5LNKZnmmibfu1x3QQ/7rJX1Vc=
1014,各務原2,てすとあきら1,一般-5時間,pbkdf2_sha256$260000$0GY5pt5V127jGd8HkkEort$8ZL0eY2qTZHydyzUUN5LNKZnmmibfu1x3QQ/7rJX1Vc=
1018,下呂2,てすとあきら1,一般-5時間,pbkdf2_sha256$260000$0GY5pt5V127jGd8HkkEort$8ZL0eY2qTZHydyzUUN5LNKZnmmibfu1x3QQ/7rJX1Vc=
1024,関ケ原2,てすとあきら1,一般-5時間,pbkdf2_sha256$260000$0GY5pt5V127jGd8HkkEort$8ZL0eY2qTZHydyzUUN5LNKZnmmibfu1x3QQ/7rJX1Vc=
1026,美濃加茂2,てすとあきら1,一般-5時間,pbkdf2_sha256$260000$0GY5pt5V127jGd8HkkEort$8ZL0eY2qTZHydyzUUN5LNKZnmmibfu1x3QQ/7rJX1Vc=
1028,多治見2,てすとあきら1,一般-5時間,pbkdf2_sha256$260000$0GY5pt5V127jGd8HkkEort$8ZL0eY2qTZHydyzUUN5LNKZnmmibfu1x3QQ/7rJX1Vc=
3006,関ケ原2,山本哲也,ソロ男子-5時間,pbkdf2_sha256$260000$EkYrRHZwKunjO4jiHvxyB2$kYGN0STzV9c70IKAIxK1Ija3K1y90+ote0HDTP+iSPw=
3009,養老2,山本哲也,ソロ男子-5時間,pbkdf2_sha256$260000$EkYrRHZwKunjO4jiHvxyB2$kYGN0STzV9c70IKAIxK1Ija3K1y90+ote0HDTP+iSPw=
3011,郡上2,山本哲也,ソロ男子-5時間,pbkdf2_sha256$260000$EkYrRHZwKunjO4jiHvxyB2$kYGN0STzV9c70IKAIxK1Ija3K1y90+ote0HDTP+iSPw=
3013,大垣3,山本哲也,ソロ男子-5時間,pbkdf2_sha256$260000$EkYrRHZwKunjO4jiHvxyB2$kYGN0STzV9c70IKAIxK1Ija3K1y90+ote0HDTP+iSPw=
3015,各務原2,山本哲也,ソロ男子-5時間,pbkdf2_sha256$260000$EkYrRHZwKunjO4jiHvxyB2$kYGN0STzV9c70IKAIxK1Ija3K1y90+ote0HDTP+iSPw=
3017,多治見2,山本哲也,ソロ男子-5時間,pbkdf2_sha256$260000$EkYrRHZwKunjO4jiHvxyB2$kYGN0STzV9c70IKAIxK1Ija3K1y90+ote0HDTP+iSPw=
3019,下呂2,山本哲也,ソロ男子-5時間,pbkdf2_sha256$260000$EkYrRHZwKunjO4jiHvxyB2$kYGN0STzV9c70IKAIxK1Ija3K1y90+ote0HDTP+iSPw=
3021,高山2,山本哲也,ソロ男子-5時間,pbkdf2_sha256$260000$EkYrRHZwKunjO4jiHvxyB2$kYGN0STzV9c70IKAIxK1Ija3K1y90+ote0HDTP+iSPw=
3023,美濃加茂2,山本哲也,ソロ男子-5時間,pbkdf2_sha256$260000$EkYrRHZwKunjO4jiHvxyB2$kYGN0STzV9c70IKAIxK1Ija3K1y90+ote0HDTP+iSPw=
4008,下呂2,GO!GO!YOKO,ソロ女子-5時間,pbkdf2_sha256$260000$tuv8ajw2VSmCooIxNHJhdD$m7q0fqPIsAs7L9uubt+PUVsmexwpJPXPCgVs9GjY12c=
3121,関ケ原2,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$sCLRTCAxQIClyDmvfbMDm0$cU3dSGTPwKHl8T3EBZ6R19oZJGkadD48pKqywAhtJOk=
3126,大垣3,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$7KsSngw2Ho719jpXsOrC8v$jfHFxglG/L0htA13t01LAy91dS+FnlAZubg6Lmd/m2Y=
3128,多治見2,MASA,ソロ男子-5時間,pbkdf2_sha256$260000$qpaSbqryD4f5bZaY893Ug4$Gk8XuqsJbSkX9Hxrl/xg9LtjM8JQkpgNkpbbNzTmhzY=
3124,関ケ原2,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$7KsSngw2Ho719jpXsOrC8v$jfHFxglG/L0htA13t01LAy91dS+FnlAZubg6Lmd/m2Y=
3132,各務原2,岐阜市イイとこあるある探検隊,ソロ男子-5時間,pbkdf2_sha256$260000$QWc5BpSBUbkUwP9UlIzyE5$do+VKkH8mNibg6PJDsm6AJ/VMFh3NWdzwZ9IQW/26xA=
3135,大垣3,akira,ソロ男子-5時間,pbkdf2_sha256$260000$mmM2N8sSE84YaNNuDzQKxb$ox9U6rdgZq4ANzi4NizskphZWIrf7o2+JEfvC4wcn7U=
3137,関ケ原2,akira,ソロ男子-5時間,pbkdf2_sha256$260000$mmM2N8sSE84YaNNuDzQKxb$ox9U6rdgZq4ANzi4NizskphZWIrf7o2+JEfvC4wcn7U=
3139,養老2,akira,ソロ男子-5時間,pbkdf2_sha256$260000$mmM2N8sSE84YaNNuDzQKxb$ox9U6rdgZq4ANzi4NizskphZWIrf7o2+JEfvC4wcn7U=
3073,養老2,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$sCLRTCAxQIClyDmvfbMDm0$cU3dSGTPwKHl8T3EBZ6R19oZJGkadD48pKqywAhtJOk=
3075,高山2,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$sCLRTCAxQIClyDmvfbMDm0$cU3dSGTPwKHl8T3EBZ6R19oZJGkadD48pKqywAhtJOk=
3077,郡上2,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$sCLRTCAxQIClyDmvfbMDm0$cU3dSGTPwKHl8T3EBZ6R19oZJGkadD48pKqywAhtJOk=
3081,美濃加茂2,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$sCLRTCAxQIClyDmvfbMDm0$cU3dSGTPwKHl8T3EBZ6R19oZJGkadD48pKqywAhtJOk=
3083,多治見2,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$sCLRTCAxQIClyDmvfbMDm0$cU3dSGTPwKHl8T3EBZ6R19oZJGkadD48pKqywAhtJOk=
3085,各務原2,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$sCLRTCAxQIClyDmvfbMDm0$cU3dSGTPwKHl8T3EBZ6R19oZJGkadD48pKqywAhtJOk=
3079,下呂2,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$sCLRTCAxQIClyDmvfbMDm0$cU3dSGTPwKHl8T3EBZ6R19oZJGkadD48pKqywAhtJOk=
3093,関ケ原2,岐阜愛,ソロ男子-5時間,pbkdf2_sha256$260000$LFOINdd30aKaXoT9CNYY8A$eoAzV10+gp+tufabtcFOx6uoOktZUngzzDJ0WWs/v24=
3099,高山2,岐阜愛,ソロ男子-5時間,pbkdf2_sha256$260000$LFOINdd30aKaXoT9CNYY8A$eoAzV10+gp+tufabtcFOx6uoOktZUngzzDJ0WWs/v24=
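gifuroge_team.csv ships without a header row. Judging by the bulk loader shown earlier in admin.py (i["zekken_number"], i['event_code'], i['team_name'], i['password']), the columns appear to be zekken number, event code, team name, category/group label, and an already-hashed password, though the exact meaning of the fourth column is an assumption. A minimal sketch of reading it with those assumed field names:

import csv

# Assumed column names; the CSV file itself has no header row.
FIELDS = ['zekken_number', 'event_code', 'team_name', 'group', 'password']

with open('rog/gifuroge_team.csv', newline='', encoding='utf-8') as f:
    for i in csv.DictReader(f, fieldnames=FIELDS):
        print(i['zekken_number'], i['event_code'], i['team_name'])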
3  rog/middleware/__init__.py  Normal file
@@ -0,0 +1,3 @@
from .ip_blocking import IPBlockingMiddleware

__all__ = ['IPBlockingMiddleware']
42  rog/middleware/ip_blocking.py  Normal file
@@ -0,0 +1,42 @@
from django.core.exceptions import PermissionDenied
from django.core.cache import cache
from django.conf import settings

class IPBlockingMiddleware:
    def __init__(self, get_response):
        self.get_response = get_response
        # List of IP addresses that are blocked up front
        self.blacklisted_ips = getattr(settings, 'BLACKLISTED_IPS', [])

    def __call__(self, request):
        ip = self.get_client_ip(request)

        # Get the block list from the cache
        blocked_ips = cache.get('blocked_ips', set())

        # Check against pre-blocked IPs and IPs blocked in the cache
        if ip in self.blacklisted_ips or ip in blocked_ips:
            raise PermissionDenied

        # The abuse-detection logic is implemented here
        if self.is_suspicious(ip):
            blocked_ips.add(ip)
            cache.set('blocked_ips', blocked_ips, timeout=3600)  # block for one hour
            raise PermissionDenied

        response = self.get_response(request)
        return response

    def is_suspicious(self, ip):
        request_count = cache.get(f'request_count_{ip}', 0)
        cache.set(f'request_count_{ip}', request_count + 1, timeout=60)
        return request_count > 100  # more than 100 requests per minute is treated as suspicious

    def get_client_ip(self, request):
        x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
        if x_forwarded_for:
            ip = x_forwarded_for.split(',')[0]
        else:
            ip = request.META.get('REMOTE_ADDR')
        return ip

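IPBlockingMiddleware only runs once it is added to MIDDLEWARE, and it reads its static block list from BLACKLISTED_IPS. A sketch of the relevant settings follows; the position in the stack and the sample address are assumptions, and because the dynamic block list lives in Django's cache, a shared cache backend is needed for the rate limiting to apply across processes.

# settings.py — sketch only; placement and the example address are assumptions.
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'rog.middleware.IPBlockingMiddleware',  # exported via rog/middleware/__init__.py
    # ... the rest of the middleware stack ...
]

BLACKLISTED_IPS = [
    '203.0.113.7',  # documentation-range example address
]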
148  rog/migration_scripts.py  Normal file
@@ -0,0 +1,148 @@
"""
This is temporary conversion code for integrating the 永栄 code into the Nouffer code.
Once the migration is fully done and the rankings and run history are in place, it will no longer be needed.
"""
import psycopg2
from PIL import Image
import PIL.ExifTags
from datetime import datetime
import os


def get_gps_from_image(image_path):
    """
    Extract GPS information from an image file.
    Returns: (latitude, longitude), or (None, None) if it cannot be obtained.
    """
    try:
        with Image.open(image_path) as img:
            exif = {
                PIL.ExifTags.TAGS[k]: v
                for k, v in img._getexif().items()
                if k in PIL.ExifTags.TAGS
            }

            if 'GPSInfo' in exif:
                gps_info = exif['GPSInfo']

                # Compute the latitude
                lat = gps_info[2]
                lat = lat[0] + lat[1]/60 + lat[2]/3600
                if gps_info[1] == 'S':
                    lat = -lat

                # Compute the longitude
                lon = gps_info[4]
                lon = lon[0] + lon[1]/60 + lon[2]/3600
                if gps_info[3] == 'W':
                    lon = -lon

                return lat, lon
    except Exception as e:
        print(f"GPS情報の抽出に失敗: {e}")

    return None, None


def migrate_data():
    # Connection settings for the container environment
    source_db = {
        'dbname': 'gifuroge',
        'user': 'admin',            # change to match your environment
        'password': 'admin123456',  # change to match your environment
        'host': 'localhost',        # the Docker service name
        'port': '5432'
    }

    target_db = {
        'dbname': 'rogdb',
        'user': 'admin',            # change to match your environment
        'password': 'admin123456',  # change to match your environment
        'host': 'localhost',        # the Docker service name
        'port': '5432'
    }

    source_conn = None
    target_conn = None
    source_cur = None
    target_cur = None

    try:
        print("ソースDBへの接続を試みています...")
        source_conn = psycopg2.connect(**source_db)
        source_cur = source_conn.cursor()
        print("ソースDBへの接続が成功しました")

        print("ターゲットDBへの接続を試みています...")
        target_conn = psycopg2.connect(**target_db)
        target_cur = target_conn.cursor()
        print("ターゲットDBへの接続が成功しました")

        print("データの取得を開始します...")
        source_cur.execute("""
            SELECT serial_number, zekken_number, event_code, cp_number, image_address,
                   goal_time, late_point, create_at, create_user,
                   update_at, update_user, buy_flag, colabo_company_memo
            FROM gps_information
        """)

        rows = source_cur.fetchall()
        print(f"取得したレコード数: {len(rows)}")

        processed_count = 0
        for row in rows:
            (serial_number, zekken_number, event_code, cp_number, image_address,
             goal_time, late_point, create_at, create_user,
             update_at, update_user, buy_flag, colabo_company_memo) = row

            latitude, longitude = None, None
            if image_address and os.path.exists(image_address):
                latitude, longitude = get_gps_from_image(image_address)

            target_cur.execute("""
                INSERT INTO gps_checkins (
                    path_order, zekken_number, event_code, cp_number,
                    lattitude, longitude, image_address,
                    image_receipt, image_QR, validate_location,
                    goal_time, late_point, create_at,
                    create_user, update_at, update_user,
                    buy_flag, colabo_company_memo, points
                ) VALUES (
                    %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,
                    %s, %s, %s, %s, %s, %s, %s, %s, %s
                )
            """, (
                serial_number,
                zekken_number, event_code, cp_number,
                latitude, longitude, image_address,
                True, True, True,
                goal_time, late_point, create_at,
                create_user, update_at, update_user,
                buy_flag if buy_flag is not None else False,
                colabo_company_memo if colabo_company_memo else '',
                0
            ))

            processed_count += 1
            if processed_count % 100 == 0:
                print(f"処理済みレコード数: {processed_count}")

        target_conn.commit()
        print(f"移行完了: {processed_count}件のレコードを処理しました")

    except Exception as e:
        print(f"エラーが発生しました: {e}")
        if target_conn:
            target_conn.rollback()

    finally:
        if source_cur:
            source_cur.close()
        if target_cur:
            target_cur.close()
        if source_conn:
            source_conn.close()
        if target_conn:
            target_conn.close()
        print("すべての接続をクローズしました")


if __name__ == "__main__":
    migrate_data()
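Because migrate_data() is guarded by the __main__ check, the one-off migration can either be run as a script or imported and called. A minimal sketch (the invocation path and environment are assumptions, and the hard-coded credentials above must match the running databases):

# Run it as a one-off script from the project root (assumed layout):
#   python rog/migration_scripts.py
# or import and call it, e.g. from a Django shell:
from rog.migration_scripts import migrate_data

migrate_data()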
Binary files not shown (42 files).
351  rog/migrations_backup/0001_initial.py  Normal file
@@ -0,0 +1,351 @@
# Generated by Django 3.2.9 on 2022-05-04 15:05

from django.conf import settings
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion
import rog.models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ('auth', '0012_alter_user_first_name_max_length'),
    ]

    operations = [
        migrations.CreateModel(
            name='JpnAdminMainPerf',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('geom', django.contrib.gis.db.models.fields.MultiPolygonField(blank=True, null=True, srid=4326)),
                ('adm0_en', models.CharField(blank=True, max_length=254, null=True)),
                ('adm0_ja', models.CharField(blank=True, max_length=254, null=True)),
                ('adm0_pcode', models.CharField(blank=True, max_length=254, null=True)),
                ('adm1_en', models.CharField(blank=True, max_length=254, null=True)),
                ('adm1_ja', models.CharField(blank=True, max_length=254, null=True)),
                ('adm1_pcode', models.CharField(blank=True, max_length=254, null=True)),
            ],
            options={
                'db_table': 'jpn_admin_main_perf',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='JpnAdminPerf',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('geom', django.contrib.gis.db.models.fields.MultiLineStringField(blank=True, null=True, srid=4326)),
                ('et_id', models.IntegerField(blank=True, null=True)),
                ('et_right', models.CharField(blank=True, max_length=80, null=True)),
                ('et_left', models.CharField(blank=True, max_length=80, null=True)),
                ('adm2_l', models.CharField(blank=True, max_length=50, null=True)),
                ('adm1_l', models.CharField(blank=True, max_length=50, null=True)),
                ('adm0_l', models.CharField(blank=True, max_length=50, null=True)),
                ('adm0_r', models.CharField(blank=True, max_length=50, null=True)),
                ('adm1_r', models.CharField(blank=True, max_length=50, null=True)),
                ('adm2_r', models.CharField(blank=True, max_length=50, null=True)),
                ('admlevel', models.IntegerField(blank=True, null=True)),
            ],
            options={
                'db_table': 'jpn_admin_perf',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='JpnSubPerf',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('geom', django.contrib.gis.db.models.fields.MultiPolygonField(blank=True, null=True, srid=4326)),
                ('adm0_en', models.CharField(blank=True, max_length=254, null=True)),
                ('adm0_ja', models.CharField(blank=True, max_length=254, null=True)),
                ('adm0_pcode', models.CharField(blank=True, max_length=254, null=True)),
                ('adm1_en', models.CharField(blank=True, max_length=254, null=True)),
                ('adm1_ja', models.CharField(blank=True, max_length=254, null=True)),
                ('adm1_pcode', models.CharField(blank=True, max_length=254, null=True)),
                ('adm2_ja', models.CharField(blank=True, max_length=254, null=True)),
                ('adm2_en', models.CharField(blank=True, max_length=254, null=True)),
                ('adm2_pcode', models.CharField(blank=True, max_length=254, null=True)),
            ],
            options={
                'db_table': 'jpn_sub_perf',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='CustomUser',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
                ('email', models.EmailField(max_length=254, unique=True, verbose_name='email address')),
                ('is_staff', models.BooleanField(default=False)),
                ('is_active', models.BooleanField(default=False)),
                ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
                ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Location',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('location_id', models.IntegerField(blank=True, null=True, verbose_name='Location id')),
                ('location_name', models.CharField(default='--- 場所をお願いします --', max_length=255, verbose_name='Location Name')),
                ('category', models.CharField(blank=True, max_length=255, null=True, verbose_name='Category')),
                ('zip', models.CharField(blank=True, max_length=12, null=True, verbose_name='Zip code')),
                ('address', models.CharField(blank=True, max_length=512, null=True, verbose_name='Address')),
                ('prefecture', models.CharField(blank=True, max_length=255, null=True, verbose_name='Prefecture')),
                ('area', models.CharField(blank=True, max_length=255, null=True, verbose_name='Area')),
                ('city', models.CharField(blank=True, max_length=255, null=True, verbose_name='City')),
                ('latitude', models.FloatField(blank=True, null=True, verbose_name='Latitude')),
                ('longitude', models.FloatField(blank=True, null=True, verbose_name='Latitude')),
                ('photos', models.CharField(blank=True, max_length=255, null=True, verbose_name='Phptos')),
                ('videos', models.CharField(blank=True, max_length=255, null=True, verbose_name='Videos')),
                ('webcontents', models.CharField(blank=True, max_length=255, null=True, verbose_name='Web Content')),
                ('status', models.CharField(blank=True, max_length=255, null=True, verbose_name='Status')),
                ('portal', models.CharField(blank=True, max_length=255, null=True, verbose_name='Portal')),
                ('group', models.CharField(blank=True, max_length=255, null=True, verbose_name='Group')),
                ('phone', models.CharField(blank=True, max_length=255, null=True, verbose_name='Phone')),
                ('fax', models.CharField(blank=True, max_length=255, null=True, verbose_name='Fax')),
                ('email', models.EmailField(blank=True, max_length=255, null=True, verbose_name='Email')),
                ('facility', models.CharField(blank=True, max_length=255, null=True, verbose_name='Facility')),
                ('remark', models.CharField(blank=True, max_length=255, null=True, verbose_name='Remarks')),
                ('tags', models.CharField(blank=True, max_length=512, null=True, verbose_name='Tags')),
                ('parammeters', models.CharField(blank=True, max_length=512, null=True, verbose_name='Parameters')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_updated_at', models.DateTimeField(auto_now=True)),
                ('geom', django.contrib.gis.db.models.fields.MultiPointField(srid=4326)),
                ('last_updated_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='location_updated_user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='ShapeLayers',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, verbose_name='Shape Layer')),
                ('file', models.FileField(blank=True, upload_to=rog.models.get_file_path)),
                ('uploaded_date', models.DateField(auto_now_add=True)),
                ('layerof', models.IntegerField(choices=[(1, 'location'), (2, 'Location_line'), (3, 'Location_polygon')], default=1)),
                ('table_name', models.CharField(blank=True, max_length=255, verbose_name='Table name')),
            ],
        ),
        migrations.CreateModel(
            name='TestModel',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('testbane', models.CharField(max_length=355, verbose_name='test field')),
                ('wanttogo', models.BooleanField(default=False)),
                ('like', models.BooleanField(default=False)),
                ('checkin', models.BooleanField(default=False)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_updated_at', models.DateTimeField(auto_now=True)),
            ],
        ),
        migrations.CreateModel(
            name='TravelList',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('travel_id', models.IntegerField(verbose_name='Travel Id')),
                ('start_date', models.DateTimeField(blank=True, null=True, verbose_name='Start date')),
                ('finish_date', models.DateTimeField(blank=True, null=True, verbose_name='End date')),
                ('category', models.CharField(choices=[('PRIVATE', 'Private'), ('GROUP', 'Group'), ('AGENT', 'Agent'), ('ROGAINING', 'Rogaining')], max_length=256)),
                ('title', models.CharField(max_length=255, verbose_name='Title')),
                ('transportation', models.CharField(blank=True, max_length=255, null=True, verbose_name='Transpotation')),
                ('moving_distance', models.IntegerField(blank=True, null=True)),
                ('duration', models.DurationField(blank=True, null=True, verbose_name='Duration')),
                ('eta', models.DateTimeField(blank=True, null=True)),
                ('parammeters', models.CharField(blank=True, max_length=512, null=True, verbose_name='Parameters')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_updated_at', models.DateTimeField(auto_now=True)),
                ('last_updated_user', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, related_name='travel_list_updated_user', to=settings.AUTH_USER_MODEL)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Useractions',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('wanttogo', models.BooleanField(default=False)),
                ('like', models.BooleanField(default=False)),
                ('checkin', models.BooleanField(default=False)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_updated_at', models.DateTimeField(auto_now=True)),
                ('location', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='action_location', to='rog.location')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='action_user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='TravelPoint',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('distance', models.FloatField(blank=True, null=True)),
                ('transportation', models.CharField(blank=True, max_length=255, null=True, verbose_name='Transpotation')),
                ('eta', models.DateTimeField(blank=True, null=True)),
                ('order_number', models.IntegerField(blank=True, null=True)),
                ('parammeters', models.CharField(blank=True, max_length=512, null=True, verbose_name='Parameters')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_updated_at', models.DateTimeField(auto_now=True)),
                ('last_updated_user', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, related_name='travelpoint_updated_user', to=settings.AUTH_USER_MODEL)),
                ('location', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rog.location')),
                ('travel_list', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='rog.travellist')),
            ],
        ),
        migrations.CreateModel(
            name='SystemSettings',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('setting_name', models.CharField(max_length=255, verbose_name='Settings Name')),
                ('version', models.CharField(blank=True, max_length=10, null=True, verbose_name='Version')),
                ('effective_date', models.DateTimeField()),
                ('end_date', models.DateTimeField()),
                ('parammeters', models.CharField(max_length=512, verbose_name='Parameters')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_updated_at', models.DateTimeField(auto_now=True)),
                ('last_updated_user', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, related_name='system_setting_updated_user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='RogUser',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('email', models.EmailField(max_length=254, verbose_name='Email')),
                ('phone', models.CharField(max_length=55, verbose_name='Phone Number')),
                ('first_name', models.CharField(max_length=255, verbose_name='First Name')),
                ('middle_name', models.CharField(blank=True, max_length=255, null=True, verbose_name='Middle Name')),
                ('last_name', models.CharField(max_length=255, verbose_name='last_name')),
                ('nickname', models.CharField(blank=True, max_length=255, null=True, verbose_name='Nickname')),
                ('country', models.CharField(default='Japan', max_length=255, verbose_name='Country')),
                ('language', models.CharField(default='Japanese', max_length=255, verbose_name='Language')),
                ('prefecture', models.CharField(blank=True, max_length=255, null=True, verbose_name='Prefecture')),
                ('sex', models.CharField(blank=True, default='unknown', max_length=255, null=True, verbose_name='Sex')),
                ('birthyear', models.IntegerField(blank=True, null=True, verbose_name='Birth year')),
                ('family_structure', models.IntegerField(blank=True, null=True, verbose_name='Family Structure')),
                ('level', models.IntegerField(blank=True, default=0, null=True, verbose_name='Level')),
                ('parammeters', models.CharField(max_length=512, verbose_name='Parameters')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_updated_at', models.DateTimeField(auto_now=True)),
|
||||||
|
('introducer', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, related_name='introduced_uesr', to=settings.AUTH_USER_MODEL)),
|
||||||
|
('last_updated_user', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, related_name='roguser_updated_user', to=settings.AUTH_USER_MODEL)),
|
||||||
|
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='Location_polygon',
|
||||||
|
fields=[
|
||||||
|
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||||
|
('location_id', models.IntegerField(blank=True, null=True, verbose_name='Location id')),
|
||||||
|
('location_name', models.CharField(max_length=255, verbose_name='Location Name')),
|
||||||
|
('category', models.CharField(blank=True, max_length=255, null=True, verbose_name='Category')),
|
||||||
|
('zip', models.CharField(blank=True, max_length=12, null=True, verbose_name='Zip code')),
|
||||||
|
('address', models.CharField(blank=True, max_length=512, null=True, verbose_name='Address')),
|
||||||
|
('prefecture', models.CharField(blank=True, max_length=255, null=True, verbose_name='Prefecture')),
|
||||||
|
('area', models.CharField(blank=True, max_length=255, null=True, verbose_name='Area')),
|
||||||
|
('city', models.CharField(blank=True, max_length=255, null=True, verbose_name='City')),
|
||||||
|
('photos', models.CharField(blank=True, max_length=255, null=True, verbose_name='Phptos')),
|
||||||
|
('videos', models.CharField(blank=True, max_length=255, null=True, verbose_name='Videos')),
|
||||||
|
('webcontents', models.CharField(blank=True, max_length=255, null=True, verbose_name='Web Content')),
|
||||||
|
('status', models.CharField(blank=True, max_length=255, null=True, verbose_name='Status')),
|
||||||
|
('portal', models.CharField(blank=True, max_length=255, null=True, verbose_name='Portal')),
|
||||||
|
('group', models.CharField(blank=True, max_length=255, null=True, verbose_name='Group')),
|
||||||
|
('phone', models.CharField(blank=True, max_length=255, null=True, verbose_name='Phone')),
|
||||||
|
('fax', models.CharField(blank=True, max_length=255, null=True, verbose_name='Fax')),
|
||||||
|
('email', models.EmailField(blank=True, max_length=255, null=True, verbose_name='Email')),
|
||||||
|
('facility', models.CharField(blank=True, max_length=255, null=True, verbose_name='Facility')),
|
||||||
|
('remark', models.CharField(blank=True, max_length=255, null=True, verbose_name='Remarks')),
|
||||||
|
('tags', models.CharField(blank=True, max_length=512, null=True, verbose_name='Tags')),
|
||||||
|
('parammeters', models.CharField(blank=True, max_length=512, null=True, verbose_name='Parameters')),
|
||||||
|
('created_at', models.DateTimeField(auto_now_add=True)),
|
||||||
|
('last_updated_at', models.DateTimeField(auto_now=True)),
|
||||||
|
('geom', django.contrib.gis.db.models.fields.MultiPolygonField(blank=True, null=True, srid=4326)),
|
||||||
|
('last_updated_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='location_polygon_updated_user', to=settings.AUTH_USER_MODEL)),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='Location_line',
|
||||||
|
fields=[
|
||||||
|
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||||
|
('location_id', models.IntegerField(blank=True, null=True, verbose_name='Location id')),
|
||||||
|
('location_name', models.CharField(max_length=255, verbose_name='Location Name')),
|
||||||
|
('category', models.CharField(blank=True, max_length=255, null=True, verbose_name='Category')),
|
||||||
|
('zip', models.CharField(blank=True, max_length=12, null=True, verbose_name='Zip code')),
|
||||||
|
('address', models.CharField(blank=True, max_length=512, null=True, verbose_name='Address')),
|
||||||
|
('prefecture', models.CharField(blank=True, max_length=255, null=True, verbose_name='Prefecture')),
|
||||||
|
('area', models.CharField(blank=True, max_length=255, null=True, verbose_name='Area')),
|
||||||
|
('city', models.CharField(blank=True, max_length=255, null=True, verbose_name='City')),
|
||||||
|
('photos', models.CharField(blank=True, max_length=255, null=True, verbose_name='Phptos')),
|
||||||
|
('videos', models.CharField(blank=True, max_length=255, null=True, verbose_name='Videos')),
|
||||||
|
('webcontents', models.CharField(blank=True, max_length=255, null=True, verbose_name='Web Content')),
|
||||||
|
('status', models.CharField(blank=True, max_length=255, null=True, verbose_name='Status')),
|
||||||
|
('portal', models.CharField(blank=True, max_length=255, null=True, verbose_name='Portal')),
|
||||||
|
('group', models.CharField(blank=True, max_length=255, null=True, verbose_name='Group')),
|
||||||
|
('phone', models.CharField(blank=True, max_length=255, null=True, verbose_name='Phone')),
|
||||||
|
('fax', models.CharField(blank=True, max_length=255, null=True, verbose_name='Fax')),
|
||||||
|
('email', models.EmailField(blank=True, max_length=255, null=True, verbose_name='Email')),
|
||||||
|
('facility', models.CharField(blank=True, max_length=255, null=True, verbose_name='Facility')),
|
||||||
|
('remark', models.CharField(blank=True, max_length=255, null=True, verbose_name='Remarks')),
|
||||||
|
('tags', models.CharField(blank=True, max_length=512, null=True, verbose_name='Tags')),
|
||||||
|
('parammeters', models.CharField(blank=True, max_length=512, null=True, verbose_name='Parameters')),
|
||||||
|
('created_at', models.DateTimeField(auto_now_add=True)),
|
||||||
|
('last_updated_at', models.DateTimeField(auto_now=True)),
|
||||||
|
('geom', django.contrib.gis.db.models.fields.MultiLineStringField(blank=True, null=True, srid=4326)),
|
||||||
|
('last_updated_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='location_line_updated_user', to=settings.AUTH_USER_MODEL)),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='JoinedEvent',
|
||||||
|
fields=[
|
||||||
|
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||||
|
('tagname', models.CharField(blank=True, max_length=255, null=True, verbose_name='Tag Name')),
|
||||||
|
('status', models.CharField(choices=[('REGISTERED', 'Registered'), ('ACCEPTED', 'accepted'), ('PAID', 'paid'), ('JOINED', 'joined'), ('CANCELED', 'Canceled')], max_length=256)),
|
||||||
|
('registrationid', models.CharField(max_length=56, verbose_name='Registration Id')),
|
||||||
|
('payment_code', models.CharField(max_length=255, verbose_name='Payment Code')),
|
||||||
|
('paid', models.IntegerField(default=0, verbose_name='Paid Amount')),
|
||||||
|
('remark', models.CharField(blank=True, max_length=255, null=True, verbose_name='Remark')),
|
||||||
|
('parammeters', models.CharField(max_length=512, verbose_name='Parameters')),
|
||||||
|
('created_at', models.DateTimeField(auto_now_add=True)),
|
||||||
|
('last_updated_at', models.DateTimeField(auto_now=True)),
|
||||||
|
('last_updated_user', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, related_name='joined_event_updated_user', to=settings.AUTH_USER_MODEL)),
|
||||||
|
('user', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL)),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='Favorite',
|
||||||
|
fields=[
|
||||||
|
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||||
|
('good', models.IntegerField(default=0, verbose_name='Good')),
|
||||||
|
('favorite', models.IntegerField(default=0, verbose_name='Favorite')),
|
||||||
|
('evaluation', models.IntegerField(default=0, verbose_name='Evaluation')),
|
||||||
|
('number_visit', models.IntegerField(default=0, verbose_name='Good')),
|
||||||
|
('last_visited', models.DateTimeField(blank=True, null=True, verbose_name='Last Visited')),
|
||||||
|
('parammeters', models.CharField(blank=True, max_length=512, null=True, verbose_name='Parameters')),
|
||||||
|
('created_at', models.DateTimeField(auto_now_add=True)),
|
||||||
|
('last_updated_at', models.DateTimeField(auto_now=True)),
|
||||||
|
('last_updated_user', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, related_name='favorite_updated_user', to=settings.AUTH_USER_MODEL)),
|
||||||
|
('location', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rog.location')),
|
||||||
|
('user', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL)),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='Event',
|
||||||
|
fields=[
|
||||||
|
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||||
|
('tagname', models.CharField(blank=True, max_length=512, null=True, verbose_name='Parameters')),
|
||||||
|
('status', models.CharField(choices=[('PREPARING', 'Preparing'), ('PROMOTION', 'Promotion'), ('EVENT', 'Event'), ('END', 'End')], max_length=256)),
|
||||||
|
('price', models.IntegerField(default=0, verbose_name='Paid Amount')),
|
||||||
|
('promotion_date', models.DateTimeField(blank=True, null=True, verbose_name='Promotion date')),
|
||||||
|
('event_start', models.DateTimeField(blank=True, null=True, verbose_name='Promotion date')),
|
||||||
|
('event_end', models.DateTimeField(blank=True, null=True, verbose_name='Promotion date')),
|
||||||
|
('remark', models.CharField(blank=True, max_length=256, null=True)),
|
||||||
|
('parammeters', models.CharField(blank=True, max_length=512, null=True, verbose_name='Parameters')),
|
||||||
|
('created_at', models.DateTimeField(auto_now_add=True)),
|
||||||
|
('last_updated_at', models.DateTimeField(auto_now=True)),
|
||||||
|
('last_updated_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='event_updated_user', to=settings.AUTH_USER_MODEL)),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
]
|
||||||
23 rog/migrations_backup/0002_auto_20220511_2017.py Normal file
@ -0,0 +1,23 @@
# Generated by Django 3.2.9 on 2022-05-11 11:17

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='useractions',
            name='order',
            field=models.IntegerField(default=-1),
        ),
        migrations.AlterField(
            model_name='customuser',
            name='is_active',
            field=models.BooleanField(default=True),
        ),
    ]
18 rog/migrations_backup/0003_alter_useractions_order.py Normal file
@ -0,0 +1,18 @@
# Generated by Django 3.2.9 on 2022-05-11 17:52

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0002_auto_20220511_2017'),
    ]

    operations = [
        migrations.AlterField(
            model_name='useractions',
            name='order',
            field=models.IntegerField(default=0),
        ),
    ]
55 rog/migrations_backup/0004_auto_20220606_0023.py Normal file
@ -0,0 +1,55 @@
# Generated by Django 3.2.9 on 2022-06-05 15:23

import django.contrib.gis.db.models.fields
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0003_alter_useractions_order'),
    ]

    operations = [
        migrations.CreateModel(
            name='GifuAreas',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('geom', django.contrib.gis.db.models.fields.MultiPolygonField(blank=True, null=True, srid=4326)),
                ('adm0_en', models.CharField(blank=True, max_length=254, null=True)),
                ('adm0_ja', models.CharField(blank=True, max_length=254, null=True)),
                ('adm0_pcode', models.CharField(blank=True, max_length=254, null=True)),
                ('adm1_en', models.CharField(blank=True, max_length=254, null=True)),
                ('adm1_ja', models.CharField(blank=True, max_length=254, null=True)),
                ('adm1_pcode', models.CharField(blank=True, max_length=254, null=True)),
                ('adm2_ja', models.CharField(blank=True, max_length=254, null=True)),
                ('adm2_en', models.CharField(blank=True, max_length=254, null=True)),
                ('adm2_pcode', models.CharField(blank=True, max_length=254, null=True)),
                ('area_nm', models.CharField(blank=True, max_length=254, null=True)),
            ],
            options={
                'db_table': 'gifu_areas',
                'managed': False,
            },
        ),
        migrations.AddField(
            model_name='location',
            name='auto_checkin',
            field=models.BooleanField(default=False, verbose_name='Is Autologin'),
        ),
        migrations.AddField(
            model_name='location',
            name='checkin_radious',
            field=models.IntegerField(blank=True, null=True, verbose_name='Checkin Radious'),
        ),
        migrations.AddField(
            model_name='location',
            name='event_active',
            field=models.BooleanField(default=True, verbose_name='Is Autologin'),
        ),
        migrations.AddField(
            model_name='location',
            name='event_name',
            field=models.CharField(blank=True, max_length=512, null=True, verbose_name='Tags'),
        ),
    ]
23 rog/migrations_backup/0005_auto_20220606_1523.py Normal file
@ -0,0 +1,23 @@
# Generated by Django 3.2.9 on 2022-06-06 06:23

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0004_auto_20220606_0023'),
    ]

    operations = [
        migrations.AlterField(
            model_name='location',
            name='event_active',
            field=models.BooleanField(default=True, verbose_name='Is Event active'),
        ),
        migrations.AlterField(
            model_name='location',
            name='event_name',
            field=models.CharField(blank=True, max_length=512, null=True, verbose_name='Event name'),
        ),
    ]
18 rog/migrations_backup/0006_location_paid.py Normal file
@ -0,0 +1,18 @@
# Generated by Django 3.2.9 on 2022-06-07 13:00

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0005_auto_20220606_1523'),
    ]

    operations = [
        migrations.AddField(
            model_name='location',
            name='paid',
            field=models.BooleanField(default=False, verbose_name='Is Paid'),
        ),
    ]
22 rog/migrations_backup/0007_auto_20220607_2207.py Normal file
@ -0,0 +1,22 @@
# Generated by Django 3.2.9 on 2022-06-07 13:07

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0006_location_paid'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='location',
            name='paid',
        ),
        migrations.AddField(
            model_name='roguser',
            name='paid',
            field=models.BooleanField(default=False, verbose_name='Is Paid'),
        ),
    ]
18 rog/migrations_backup/0008_alter_roguser_parammeters.py Normal file
@ -0,0 +1,18 @@
# Generated by Django 3.2.9 on 2022-06-07 13:09

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0007_auto_20220607_2207'),
    ]

    operations = [
        migrations.AlterField(
            model_name='roguser',
            name='parammeters',
            field=models.CharField(blank=True, max_length=512, null=True, verbose_name='Parameters'),
        ),
    ]
17 rog/migrations_backup/0009_remove_roguser_email.py Normal file
@ -0,0 +1,17 @@
# Generated by Django 3.2.9 on 2022-06-07 14:24

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0008_alter_roguser_parammeters'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='roguser',
            name='email',
        ),
    ]
18 rog/migrations_backup/0010_useractions_checkinimage.py Normal file
@ -0,0 +1,18 @@
# Generated by Django 3.2.9 on 2022-06-10 06:25

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0009_remove_roguser_email'),
    ]

    operations = [
        migrations.AddField(
            model_name='useractions',
            name='checkinimage',
            field=models.FileField(blank=True, null=True, upload_to='%y%m%d'),
        ),
    ]
25 rog/migrations_backup/0011_usertracks.py Normal file
@ -0,0 +1,25 @@
# Generated by Django 3.2.9 on 2022-06-12 18:11

from django.conf import settings
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0010_useractions_checkinimage'),
    ]

    operations = [
        migrations.CreateModel(
            name='UserTracks',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('geom', django.contrib.gis.db.models.fields.MultiPointField(srid=4326)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
113 rog/migrations_backup/0012_auto_20220613_1758.py Normal file
@ -0,0 +1,113 @@
# Generated by Django 3.2.9 on 2022-06-13 08:58

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0011_usertracks'),
    ]

    operations = [
        migrations.AlterField(
            model_name='location',
            name='address',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Address'),
        ),
        migrations.AlterField(
            model_name='location',
            name='area',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Area'),
        ),
        migrations.AlterField(
            model_name='location',
            name='category',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Category'),
        ),
        migrations.AlterField(
            model_name='location',
            name='city',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='City'),
        ),
        migrations.AlterField(
            model_name='location',
            name='email',
            field=models.EmailField(blank=True, max_length=2048, null=True, verbose_name='Email'),
        ),
        migrations.AlterField(
            model_name='location',
            name='event_name',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Event name'),
        ),
        migrations.AlterField(
            model_name='location',
            name='facility',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Facility'),
        ),
        migrations.AlterField(
            model_name='location',
            name='fax',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Fax'),
        ),
        migrations.AlterField(
            model_name='location',
            name='group',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Group'),
        ),
        migrations.AlterField(
            model_name='location',
            name='location_name',
            field=models.CharField(default='--- 場所をお願いします --', max_length=2048, verbose_name='Location Name'),
        ),
        migrations.AlterField(
            model_name='location',
            name='parammeters',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Parameters'),
        ),
        migrations.AlterField(
            model_name='location',
            name='phone',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Phone'),
        ),
        migrations.AlterField(
            model_name='location',
            name='photos',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Phptos'),
        ),
        migrations.AlterField(
            model_name='location',
            name='portal',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Portal'),
        ),
        migrations.AlterField(
            model_name='location',
            name='prefecture',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Prefecture'),
        ),
        migrations.AlterField(
            model_name='location',
            name='remark',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Remarks'),
        ),
        migrations.AlterField(
            model_name='location',
            name='status',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Status'),
        ),
        migrations.AlterField(
            model_name='location',
            name='tags',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Tags'),
        ),
        migrations.AlterField(
            model_name='location',
            name='videos',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Videos'),
        ),
        migrations.AlterField(
            model_name='location',
            name='webcontents',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Web Content'),
        ),
    ]
88 rog/migrations_backup/0013_auto_20220618_1847.py Normal file
@ -0,0 +1,88 @@
# Generated by Django 3.2.9 on 2022-06-18 09:47

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0012_auto_20220613_1758'),
    ]

    operations = [
        migrations.AddField(
            model_name='location',
            name='buy_point',
            field=models.IntegerField(blank=True, default=0, null=True, verbose_name='buy Point'),
        ),
        migrations.AddField(
            model_name='location',
            name='checkin_point',
            field=models.IntegerField(blank=True, default=10, null=True, verbose_name='Checkin Point'),
        ),
        migrations.AddField(
            model_name='location',
            name='checkin_radius',
            field=models.IntegerField(blank=True, default=15, null=True, verbose_name='Checkin radious'),
        ),
        migrations.AddField(
            model_name='location',
            name='evaluation_value',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Evaluation value (評価)'),
        ),
        migrations.AddField(
            model_name='location',
            name='hidden_location',
            field=models.BooleanField(default=False, verbose_name='Is Hidden Location'),
        ),
        migrations.AddField(
            model_name='location',
            name='opening_hours_fri',
            field=models.TimeField(blank=True, null=True, verbose_name='Opening hours frinday (金曜)'),
        ),
        migrations.AddField(
            model_name='location',
            name='opening_hours_mon',
            field=models.TimeField(blank=True, null=True, verbose_name='Opening hours monday (月曜)'),
        ),
        migrations.AddField(
            model_name='location',
            name='opening_hours_sat',
            field=models.TimeField(blank=True, null=True, verbose_name='Opening hours saturday (土曜)'),
        ),
        migrations.AddField(
            model_name='location',
            name='opening_hours_sun',
            field=models.TimeField(blank=True, null=True, verbose_name='Opening hours sunday (日曜)'),
        ),
        migrations.AddField(
            model_name='location',
            name='opening_hours_thu',
            field=models.TimeField(blank=True, null=True, verbose_name='Opening hours thursday (木曜)'),
        ),
        migrations.AddField(
            model_name='location',
            name='opening_hours_tue',
            field=models.TimeField(blank=True, null=True, verbose_name='Opening hours tuesday (火曜)'),
        ),
        migrations.AddField(
            model_name='location',
            name='opening_hours_wed',
            field=models.TimeField(blank=True, null=True, verbose_name='Opening hours wednesday (水曜)'),
        ),
        migrations.AddField(
            model_name='location',
            name='shop_closed',
            field=models.BooleanField(default=False, verbose_name='Shop Closed (休業)'),
        ),
        migrations.AddField(
            model_name='location',
            name='shop_shutdown',
            field=models.BooleanField(default=False, null=True, verbose_name='Shop Shutdown (閉業)'),
        ),
        migrations.AlterField(
            model_name='location',
            name='auto_checkin',
            field=models.BooleanField(default=False, verbose_name='Is AutoCheckin'),
        ),
    ]
18 rog/migrations_backup/0014_alter_location_shop_shutdown.py Normal file
@ -0,0 +1,18 @@
# Generated by Django 3.2.9 on 2022-06-18 09:52

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0013_auto_20220618_1847'),
    ]

    operations = [
        migrations.AlterField(
            model_name='location',
            name='shop_shutdown',
            field=models.BooleanField(default=False, verbose_name='Shop Shutdown (閉業)'),
        ),
    ]
57 rog/migrations_backup/0015_auto_20220619_1611.py Normal file
@ -0,0 +1,57 @@
# Generated by Django 3.2.9 on 2022-06-19 07:11

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0014_alter_location_shop_shutdown'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='location',
            name='checkin_radious',
        ),
        migrations.AlterField(
            model_name='location',
            name='opening_hours_fri',
            field=models.CharField(blank=True, max_length=512, null=True, verbose_name='Opening hours frinday (金曜)'),
        ),
        migrations.AlterField(
            model_name='location',
            name='opening_hours_mon',
            field=models.CharField(blank=True, max_length=512, null=True, verbose_name='Opening hours monday (月曜)'),
        ),
        migrations.AlterField(
            model_name='location',
            name='opening_hours_sat',
            field=models.CharField(blank=True, max_length=512, null=True, verbose_name='Opening hours saturday (土曜)'),
        ),
        migrations.AlterField(
            model_name='location',
            name='opening_hours_sun',
            field=models.CharField(blank=True, max_length=512, null=True, verbose_name='Opening hours sunday (日曜)'),
        ),
        migrations.AlterField(
            model_name='location',
            name='opening_hours_thu',
            field=models.CharField(blank=True, max_length=512, null=True, verbose_name='Opening hours thursday (木曜)'),
        ),
        migrations.AlterField(
            model_name='location',
            name='opening_hours_tue',
            field=models.CharField(blank=True, max_length=512, null=True, verbose_name='Opening hours tuesday (火曜)'),
        ),
        migrations.AlterField(
            model_name='location',
            name='opening_hours_wed',
            field=models.CharField(blank=True, max_length=512, null=True, verbose_name='Opening hours wednesday (水曜)'),
        ),
        migrations.AlterField(
            model_name='location',
            name='photos',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Photos'),
        ),
    ]
21 rog/migrations_backup/0016_shapefilelocations.py Normal file
@ -0,0 +1,21 @@
# Generated by Django 3.2.9 on 2022-06-21 09:46

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0015_auto_20220619_1611'),
    ]

    operations = [
        migrations.CreateModel(
            name='ShapeFileLocations',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('shapefile', models.CharField(blank=True, max_length=2048, null=True, verbose_name='Shapelayer')),
                ('locid', models.IntegerField(blank=True, null=True)),
            ],
        ),
    ]
23 rog/migrations_backup/0017_auto_20220725_1605.py Normal file
@ -0,0 +1,23 @@
# Generated by Django 3.2.9 on 2022-07-25 07:05

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('rog', '0016_shapefilelocations'),
    ]

    operations = [
        migrations.AddField(
            model_name='location',
            name='cp',
            field=models.IntegerField(blank=True, null=True, verbose_name='Check Point'),
        ),
        migrations.AddField(
            model_name='location',
            name='subcategory',
            field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Sub Category'),
        ),
    ]
Some files were not shown because too many files have changed in this diff.