Compare commits: exdb ... ae87890eec (80 commits)

Commits in this comparison (SHA1):
ae87890eec, 43c89dec9a, 005de98ecc, 82fa3c2249, acf6e36e71, a0f2b01f29, a3c90902ec, fccc55cf18,
19f12652b9, cdae8dc7ec, c4e25de121, 09810a2a9a, a9b959a807, de3e87b963, 0453494cca, 60337c6863,
a3f602b360, fd973575be, 872f252923, 5e2b5add5c, 9e3a940ec2, 158dbeee40, 10bf6e8fa1, 18f3370f29,
0abfd6cdb6, 2f8b86b683, b85b04412a, efbce943b6, 02f483aa68, 7c659a0865, 3f91e2080a, 56e13457ab,
7d6635ef01, 2ca77b604b, 27aed10a4a, e6e6d059ac, e1928564fa, a0c3a82720, 4e4bd7ac5d, 2bf7d44cd3,
d22e8b5a23, 9eb45d7e97, 2aaecb6b22, 6e472cf634, 106ab0e94e, 7f4d37d40c, 4a2a5de476, 15815d5f06,
768dd6e261, 139c0987bc, ceb783d6bd, a714557eef, 586f341897, 0c2dfec7dd, d6464c1369, 338643b0d7,
e992e834da, c6969d7afa, 82d0e55945, b872f377b2, a6b816c9f2, 2913a435c1, 051916f9f6, b8d7029965,
6f0d8d15fd, 80ccaace3d, 95b787c819, 3d195973fc, d851e7e4ad, 9d0d3ea102, 37a253e63a, bc74b14cbc,
49b3ee7342, 26e8e68dbd, 44ad30093c, bcfcceb068, 9215ba8f9f, c0fb177d02, 09e39987e2, 6f79d9a4be
.gitignore (vendored, 4 additions)

@@ -157,6 +157,10 @@ dmypy.json
 # Cython debug symbols
 cython_debug/
 
+# migration files
+rog/migrations/
+
+
 # PyCharm
 # JetBrains specific template is maintainted in a separate JetBrains.gitignore that can
 # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
@@ -3,6 +3,7 @@ FROM osgeo/gdal:ubuntu-small-3.4.0
 
 WORKDIR /app
 
+
 LABEL maintainer="nouffer@gmail.com"
 LABEL description="Development image for the Rogaining JP"
 

@@ -38,12 +39,63 @@ RUN apt-get install -y python3
 RUN apt-get update && apt-get install -y \
     python3-pip
 
+# ベースイメージの更新とパッケージのインストール
+RUN apt-get update && \
+    apt-get install -y \
+    libreoffice \
+    libreoffice-calc \
+    libreoffice-writer \
+    libreoffice-java-common \
+    fonts-ipafont \
+    fonts-ipafont-gothic \
+    fonts-ipafont-mincho \
+    language-pack-ja \
+    fontconfig \
+    locales \
+    python3-uno  # LibreOffice Python バインディング
+
+
+# 日本語ロケールの設定
+RUN locale-gen ja_JP.UTF-8
+ENV LANG=ja_JP.UTF-8
+ENV LC_ALL=ja_JP.UTF-8
+ENV LANGUAGE=ja_JP:ja
+
+# フォント設定ファイルをコピー
+COPY config/fonts.conf /etc/fonts/local.conf
+
+# フォントキャッシュの更新
+RUN fc-cache -f -v
+
+# LibreOfficeの作業ディレクトリを作成
+RUN mkdir -p /var/cache/libreoffice && \
+    chmod 777 /var/cache/libreoffice
+
+# フォント設定の権限を設定
+RUN chmod 644 /etc/fonts/local.conf
+
+
+# 作業ディレクトリとパーミッションの設定
+RUN mkdir -p /app/docbase /tmp/libreoffice && \
+    chmod -R 777 /app/docbase /tmp/libreoffice
+
+
 RUN pip install --upgrade pip
 
+# Copy the package directory first
+COPY SumasenLibs/excel_lib /app/SumasenLibs/excel_lib
+COPY ./docbase /app/docbase
+
+# Install the package in editable mode
+RUN pip install -e /app/SumasenLibs/excel_lib
+
 
 RUN apt-get update
 
 COPY ./requirements.txt /app/requirements.txt
 
+RUN pip install boto3==1.26.137
+
 # Install Gunicorn
 RUN pip install gunicorn
 

@@ -51,7 +103,10 @@ RUN pip install gunicorn
 
 #RUN ["chmod", "+x", "wait-for.sh"]
 
-RUN pip install -r requirements.txt
+# xlsxwriterを追加
+RUN pip install -r requirements.txt \
+    && pip install django-cors-headers \
+    && pip install xlsxwriter gunicorn
 
 COPY . /app
 
Dockerfile.supervisor (new file, 35 lines)

FROM nginx:alpine

# Create necessary directories and set permissions
RUN mkdir -p /usr/share/nginx/html \
    && mkdir -p /var/log/nginx \
    && mkdir -p /var/cache/nginx \
    && chown -R nginx:nginx /usr/share/nginx/html \
    && chown -R nginx:nginx /var/log/nginx \
    && chown -R nginx:nginx /var/cache/nginx \
    && chmod -R 755 /usr/share/nginx/html

# Copy files - notice the change in the source path
COPY supervisor/html/* /usr/share/nginx/html/
COPY supervisor/nginx/default.conf /etc/nginx/conf.d/default.conf

# メディアディレクトリを作成
RUN mkdir -p /app/media && chmod 755 /app/media

# 静的ファイルをコピー
#COPY ./static /usr/share/nginx/html/static

# 権限の設定
RUN chown -R nginx:nginx /app/media

# Set final permissions
RUN chown -R nginx:nginx /usr/share/nginx/html \
    && chmod -R 755 /usr/share/nginx/html \
    && touch /var/log/nginx/access.log \
    && touch /var/log/nginx/error.log \
    && chown -R nginx:nginx /var/log/nginx \
    && chown -R nginx:nginx /etc/nginx/conf.d

#EXPOSE 8100

CMD ["nginx", "-g", "daemon off;"]
LineBot/MobServer_gifuroge.rb (new file, 8066 lines): file diff suppressed because it is too large
LineBot/userpostgres.rb (new file, 1087 lines): file diff suppressed because it is too large
SumasenLibs/certificate_template.xlsx (new binary file): binary file not shown
SumasenLibs/excel_lib/README.md (new file, 19 lines)

# SumasenExcel Library

Excel操作のためのシンプルなPythonライブラリです。

## インストール方法

```bash
pip install -e .

## 使用方法

from sumaexcel import SumasenExcel

excel = SumasenExcel("path/to/file.xlsx")
data = excel.read_excel()

## ライセンス

MIT License
SumasenLibs/excel_lib/docker/docker-compose.yml (new file, 20 lines)

version: '3.8'

services:
  python:
    build:
      context: ..
      dockerfile: docker/python/Dockerfile
    volumes:
      - ..:/app
    environment:
      - PYTHONPATH=/app
      - POSTGRES_DB=rogdb
      - POSTGRES_USER=admin
      - POSTGRES_PASSWORD=admin123456
      - POSTGRES_HOST=localhost
      - POSTGRES_PORT=5432
    network_mode: "host"
    tty: true
    container_name: python_container  # コンテナ名を明示的に指定
SumasenLibs/excel_lib/docker/python/Dockerfile (new file, 26 lines)

FROM python:3.9-slim

WORKDIR /app

# GPGキーの更新とパッケージのインストール
RUN apt-get update --allow-insecure-repositories && \
    apt-get install -y --allow-unauthenticated python3-dev libpq-dev postgresql-client && \
    rm -rf /var/lib/apt/lists/*

# Pythonパッケージのインストール
COPY requirements.txt .
COPY setup.py .
COPY README.md .
COPY . .

RUN pip install --no-cache-dir -r requirements.txt

# 開発用パッケージのインストール
RUN pip install --no-cache-dir --upgrade pip \
    pytest \
    pytest-cov \
    flake8

# パッケージのインストール
RUN pip install -e .
SumasenLibs/excel_lib/requirements.txt (new file, 6 lines)

openpyxl>=3.0.0
pandas>=1.0.0
pillow>=8.0.0
configparser>=5.0.0
psycopg2-binary==2.9.9
requests
SumasenLibs/excel_lib/setup.py (new file, 25 lines)

# setup.py
from setuptools import setup, find_packages

setup(
    name="sumaexcel",
    version="0.1.0",
    packages=find_packages(),
    install_requires=[
        "openpyxl>=3.0.0",
        "pandas>=1.0.0"
    ],
    author="Akira Miyata",
    author_email="akira.miyata@sumasen.net",
    description="Excel handling library",
    long_description=open("README.md").read(),
    long_description_content_type="text/markdown",
    url="https://github.com/akiramiyata/sumaexcel",
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    python_requires=">=3.6",
)
SumasenLibs/excel_lib/sumaexcel/__init__.py (new file, 4 lines)

from .sumaexcel import SumasenExcel

__version__ = "0.1.0"
__all__ = ["SumasenExcel"]
SumasenLibs/excel_lib/sumaexcel/conditional.py (new file, 102 lines)

# sumaexcel/conditional.py
from typing import Dict, Any, List, Union
from openpyxl.formatting.rule import Rule, ColorScaleRule, DataBarRule, IconSetRule
from openpyxl.styles import PatternFill, Font, Border, Side
from openpyxl.worksheet.worksheet import Worksheet


class ConditionalFormatManager:
    """Handle conditional formatting in Excel"""

    def __init__(self, worksheet: Worksheet):
        self.worksheet = worksheet

    def add_color_scale(
        self,
        cell_range: str,
        min_color: str = "00FF0000",  # Red
        mid_color: str = "00FFFF00",  # Yellow
        max_color: str = "0000FF00"   # Green
    ) -> None:
        """Add color scale conditional formatting"""
        rule = ColorScaleRule(
            start_type='min',
            start_color=min_color,
            mid_type='percentile',
            mid_value=50,
            mid_color=mid_color,
            end_type='max',
            end_color=max_color
        )
        self.worksheet.conditional_formatting.add(cell_range, rule)

    def add_data_bar(
        self,
        cell_range: str,
        color: str = "000000FF",  # Blue
        show_value: bool = True
    ) -> None:
        """Add data bar conditional formatting"""
        rule = DataBarRule(
            start_type='min',
            end_type='max',
            color=color,
            showValue=show_value
        )
        self.worksheet.conditional_formatting.add(cell_range, rule)

    def add_icon_set(
        self,
        cell_range: str,
        icon_style: str = '3Arrows',  # '3Arrows', '3TrafficLights', '3Signs'
        reverse_icons: bool = False
    ) -> None:
        """Add icon set conditional formatting"""
        rule = IconSetRule(
            icon_style=icon_style,
            type='percent',
            values=[0, 33, 67],
            reverse_icons=reverse_icons
        )
        self.worksheet.conditional_formatting.add(cell_range, rule)

    def add_custom_rule(
        self,
        cell_range: str,
        rule_type: str,
        formula: str,
        fill_color: str = None,
        font_color: str = None,
        bold: bool = None,
        border_style: str = None,
        border_color: str = None
    ) -> None:
        """Add custom conditional formatting rule"""
        dxf = {}
        if fill_color:
            dxf['fill'] = PatternFill(start_color=fill_color, end_color=fill_color)
        if font_color or bold is not None:
            dxf['font'] = Font(color=font_color, bold=bold)
        if border_style and border_color:
            side = Side(style=border_style, color=border_color)
            dxf['border'] = Border(left=side, right=side, top=side, bottom=side)

        rule = Rule(type=rule_type, formula=[formula], dxf=dxf)
        self.worksheet.conditional_formatting.add(cell_range, rule)

    def copy_conditional_format(
        self,
        source_range: str,
        target_range: str
    ) -> None:
        """Copy conditional formatting from one range to another"""
        source_rules = self.worksheet.conditional_formatting.get(source_range)
        if source_rules:
            for rule in source_rules:
                self.worksheet.conditional_formatting.add(target_range, rule)

    def clear_conditional_format(
        self,
        cell_range: str
    ) -> None:
        """Clear conditional formatting from specified range"""
        self.worksheet.conditional_formatting.delete(cell_range)
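For orientation, a minimal sketch of driving the ConditionalFormatManager above from plain openpyxl. The workbook, sample values and output path are made up for illustration and are not part of this diff:

```python
from openpyxl import Workbook
from sumaexcel.conditional import ConditionalFormatManager  # module added above; import path assumed

wb = Workbook()
ws = wb.active

# Fill A1:A10 with sample numbers so the rules have something to colour
for row in range(1, 11):
    ws.cell(row=row, column=1, value=row * 10)

mgr = ConditionalFormatManager(ws)
mgr.add_color_scale("A1:A10")                  # red -> yellow -> green scale
mgr.add_data_bar("A1:A10", color="000000FF")   # blue data bars over the same range
wb.save("conditional_demo.xlsx")               # hypothetical output file
```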
SumasenLibs/excel_lib/sumaexcel/config_handler.py (new file, 166 lines)

# config_handler.py
#
import configparser
import os
from typing import Any, Dict, Optional

import configparser
import os
import re
from typing import Any, Dict, Optional


class ConfigHandler:
    """変数置換機能付きの設定ファイル管理クラス"""

    def __init__(self, ini_file_path: str, variables: Dict[str, str] = None):
        """
        Args:
            ini_file_path (str): INIファイルのパス
            variables (Dict[str, str], optional): 置換用の変数辞書
        """
        self.ini_file_path = ini_file_path
        self.variables = variables or {}
        self.config = configparser.ConfigParser()
        self.load_config()

    def _substitute_variables(self, text: str) -> str:
        """
        テキスト内の変数を置換する

        Args:
            text (str): 置換対象のテキスト

        Returns:
            str: 置換後のテキスト
        """
        # ${var}形式の変数を置換
        pattern1 = r'\${([^}]+)}'
        # [var]形式の変数を置換
        pattern2 = r'\[([^\]]+)\]'

        def replace_var(match):
            var_name = match.group(1)
            return self.variables.get(var_name, match.group(0))

        # 両方のパターンで置換を実行
        text = re.sub(pattern1, replace_var, text)
        text = re.sub(pattern2, replace_var, text)

        return text

    def load_config(self) -> None:
        """設定ファイルを読み込み、変数を置換する"""
        if not os.path.exists(self.ini_file_path):
            raise FileNotFoundError(f"設定ファイルが見つかりません: {self.ini_file_path}")

        # まず生のテキストとして読み込む
        with open(self.ini_file_path, 'r', encoding='utf-8') as f:
            content = f.read()

        # 変数を置換
        substituted_content = self._substitute_variables(content)

        # 置換済みの内容を StringIO 経由で configparser に読み込ませる
        from io import StringIO
        self.config.read_file(StringIO(substituted_content))

    def get_value(self, section: str, key: str, default: Any = None) -> Optional[str]:
        """
        指定されたセクションのキーの値を取得する

        Args:
            section (str): セクション名
            key (str): キー名
            default (Any): デフォルト値(オプション)

        Returns:
            Optional[str]: 設定値。存在しない場合はデフォルト値
        """
        try:
            return self.config[section][key]
        except KeyError:
            return default

    def get_section(self, section: str) -> Dict[str, str]:
        """
        指定されたセクションの全ての設定を取得する

        Args:
            section (str): セクション名

        Returns:
            Dict[str, str]: セクションの設定をディクショナリで返す
        """
        try:
            return dict(self.config[section])
        except KeyError:
            return {}

    def get_all_sections(self) -> Dict[str, Dict[str, str]]:
        """
        全てのセクションの設定を取得する

        Returns:
            Dict[str, Dict[str, str]]: 全セクションの設定をネストされたディクショナリで返す
        """
        return {section: dict(self.config[section]) for section in self.config.sections()}


# 使用例
if __name__ == "__main__":
    # サンプルのINIファイル作成
    sample_ini = """
[Database]
host = localhost
port = 5432
database = mydb
user = admin
password = secret

[Application]
debug = true
log_level = INFO
max_connections = 100

[Paths]
data_dir = /var/data
log_file = /var/log/app.log
"""

    # サンプルINIファイルを作成
    with open('config.ini', 'w', encoding='utf-8') as f:
        f.write(sample_ini)

    # 設定を読み込んで使用
    config = ConfigHandler('config.ini')

    # 特定の値を取得
    db_host = config.get_value('Database', 'host')
    db_port = config.get_value('Database', 'port')
    print(f"Database connection: {db_host}:{db_port}")

    # セクション全体を取得
    db_config = config.get_section('Database')
    print("Database configuration:", db_config)

    # 全ての設定を取得
    all_config = config.get_all_sections()
    print("All configurations:", all_config)

    # サンプル:
    # # 設定ファイルから値を取得
    # config = ConfigHandler('config.ini')
    #
    # # データベース設定を取得
    # db_host = config.get_value('Database', 'host')
    # db_port = config.get_value('Database', 'port')
    # db_name = config.get_value('Database', 'database')
    #
    # # アプリケーション設定を取得
    # debug_mode = config.get_value('Application', 'debug')
    # log_level = config.get_value('Application', 'log_level')
    #
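A short illustrative sketch of the two placeholder syntaxes ConfigHandler rewrites before parsing, since the INI templates later in this diff (test.ini, certificate.ini) rely on the [var] form. The file name and variable values here are invented for the example:

```python
from sumaexcel.config_handler import ConfigHandler  # module added above; import path assumed

# A tiny INI mixing both forms: ${var} and [var] are replaced from `variables`
# before configparser ever sees the text; unknown names are left untouched.
with open("demo.ini", "w", encoding="utf-8") as f:
    f.write("[report]\n"
            "doc_file = certificate_[zekken_number].xlsx\n"
            "title = ${event_code} result\n")

config = ConfigHandler("demo.ini", variables={"zekken_number": "5033",
                                              "event_code": "FC岐阜"})
print(config.get_value("report", "doc_file"))  # -> certificate_5033.xlsx
print(config.get_value("report", "title"))     # -> FC岐阜 result
```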
SumasenLibs/excel_lib/sumaexcel/image.py (new file, 77 lines)

# sumaexcel/image.py
from typing import Optional, Tuple, Union
from pathlib import Path
import os
from PIL import Image
from openpyxl.drawing.image import Image as XLImage
from openpyxl.worksheet.worksheet import Worksheet


class ImageManager:
    """Handle image operations in Excel"""

    def __init__(self, worksheet: Worksheet):
        self.worksheet = worksheet
        self.temp_dir = Path("/tmp/sumaexcel_images")
        self.temp_dir.mkdir(parents=True, exist_ok=True)

    def add_image(
        self,
        image_path: Union[str, Path],
        cell_coordinates: Tuple[int, int],
        size: Optional[Tuple[int, int]] = None,
        keep_aspect_ratio: bool = True,
        anchor_type: str = 'absolute'
    ) -> None:
        """Add image to worksheet at specified position"""
        # Convert path to Path object
        image_path = Path(image_path)

        # Open and process image
        with Image.open(image_path) as img:
            # Get original size
            orig_width, orig_height = img.size

            # Calculate new size if specified
            if size:
                target_width, target_height = size
                if keep_aspect_ratio:
                    ratio = min(target_width/orig_width, target_height/orig_height)
                    target_width = int(orig_width * ratio)
                    target_height = int(orig_height * ratio)

                # Resize image
                img = img.resize((target_width, target_height), Image.LANCZOS)

                # Save temporary resized image
                temp_path = self.temp_dir / f"temp_{image_path.name}"
                img.save(temp_path)
                image_path = temp_path

        # Create Excel image object
        excel_image = XLImage(str(image_path))

        # Add to worksheet
        self.worksheet.add_image(excel_image, anchor=f'{cell_coordinates[0]}{cell_coordinates[1]}')

    def add_image_absolute(
        self,
        image_path: Union[str, Path],
        position: Tuple[int, int],
        size: Optional[Tuple[int, int]] = None
    ) -> None:
        """Add image with absolute positioning"""
        excel_image = XLImage(str(image_path))
        if size:
            excel_image.width, excel_image.height = size
        excel_image.anchor = 'absolute'
        excel_image.top, excel_image.left = position
        self.worksheet.add_image(excel_image)

    def cleanup(self) -> None:
        """Clean up temporary files"""
        for file in self.temp_dir.glob("temp_*"):
            file.unlink()

    def __del__(self):
        """Cleanup on object destruction"""
        self.cleanup()
SumasenLibs/excel_lib/sumaexcel/merge.py (new file, 96 lines)

# sumaexcel/merge.py
from typing import List, Tuple, Dict
from openpyxl.worksheet.worksheet import Worksheet
from openpyxl.worksheet.merge import MergedCellRange


class MergeManager:
    """Handle merge cell operations"""

    def __init__(self, worksheet: Worksheet):
        self.worksheet = worksheet
        self._merged_ranges: List[MergedCellRange] = []
        self._load_merged_ranges()

    def _load_merged_ranges(self) -> None:
        """Load existing merged ranges from worksheet"""
        self._merged_ranges = list(self.worksheet.merged_cells.ranges)

    def merge_cells(
        self,
        start_row: int,
        start_col: int,
        end_row: int,
        end_col: int
    ) -> None:
        """Merge cells in specified range"""
        self.worksheet.merge_cells(
            start_row=start_row,
            start_column=start_col,
            end_row=end_row,
            end_column=end_col
        )
        self._load_merged_ranges()

    def unmerge_cells(
        self,
        start_row: int,
        start_col: int,
        end_row: int,
        end_col: int
    ) -> None:
        """Unmerge cells in specified range"""
        self.worksheet.unmerge_cells(
            start_row=start_row,
            start_column=start_col,
            end_row=end_row,
            end_column=end_col
        )
        self._load_merged_ranges()

    def copy_merged_cells(
        self,
        source_range: Tuple[int, int, int, int],
        target_start_row: int,
        target_start_col: int
    ) -> None:
        """Copy merged cells from source range to target position"""
        src_row1, src_col1, src_row2, src_col2 = source_range
        row_offset = target_start_row - src_row1
        col_offset = target_start_col - src_col1

        for merged_range in self._merged_ranges:
            if (src_row1 <= merged_range.min_row <= src_row2 and
                    src_col1 <= merged_range.min_col <= src_col2):
                new_row1 = merged_range.min_row + row_offset
                new_col1 = merged_range.min_col + col_offset
                new_row2 = merged_range.max_row + row_offset
                new_col2 = merged_range.max_col + col_offset

                self.merge_cells(new_row1, new_col1, new_row2, new_col2)

    def shift_merged_cells(
        self,
        start_row: int,
        rows: int = 0,
        cols: int = 0
    ) -> None:
        """Shift merged cells by specified number of rows and columns"""
        new_ranges = []
        for merged_range in self._merged_ranges:
            if merged_range.min_row >= start_row:
                new_row1 = merged_range.min_row + rows
                new_col1 = merged_range.min_col + cols
                new_row2 = merged_range.max_row + rows
                new_col2 = merged_range.max_col + cols

                self.worksheet.unmerge_cells(
                    start_row=merged_range.min_row,
                    start_column=merged_range.min_col,
                    end_row=merged_range.max_row,
                    end_column=merged_range.max_col
                )

                new_ranges.append((new_row1, new_col1, new_row2, new_col2))

        for new_range in new_ranges:
            self.merge_cells(*new_range)
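A rough usage sketch for the MergeManager above; the workbook, ranges and file name are hypothetical and only illustrate the call pattern:

```python
from openpyxl import Workbook
from sumaexcel.merge import MergeManager  # module added above; import path assumed

wb = Workbook()
ws = wb.active
ws["A1"] = "Title"

mm = MergeManager(ws)
mm.merge_cells(1, 1, 1, 4)  # merge A1:D1 into one title cell
# Repeat the same merged layout starting at row 3 (e.g. a second report block)
mm.copy_merged_cells((1, 1, 1, 4), target_start_row=3, target_start_col=1)
wb.save("merge_demo.xlsx")  # hypothetical output file
```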
SumasenLibs/excel_lib/sumaexcel/page.py (new file, 148 lines)

# sumaexcel/page.py
from typing import Optional, Dict, Any, Union
from openpyxl.worksheet.worksheet import Worksheet
from openpyxl.worksheet.page import PageMargins, PrintPageSetup

# sumaexcel/page.py (continued)


class PageManager:
    """Handle page setup and header/footer settings"""

    def __init__(self, worksheet: Worksheet):
        self.worksheet = worksheet

    def set_page_setup(
        self,
        orientation: str = 'portrait',
        paper_size: int = 9,  # A4
        fit_to_height: Optional[int] = None,
        fit_to_width: Optional[int] = None,
        scale: Optional[int] = None
    ) -> None:
        """Configure page setup

        Args:
            orientation: 'portrait' or 'landscape'
            paper_size: paper size (e.g., 9 for A4)
            fit_to_height: number of pages tall
            fit_to_width: number of pages wide
            scale: zoom scale (1-400)
        """
        setup = PrintPageSetup(
            orientation=orientation,
            paperSize=paper_size,
            scale=scale,
            fitToHeight=fit_to_height,
            fitToWidth=fit_to_width
        )
        self.worksheet.page_setup = setup

    def set_margins(
        self,
        left: float = 0.7,
        right: float = 0.7,
        top: float = 0.75,
        bottom: float = 0.75,
        header: float = 0.3,
        footer: float = 0.3
    ) -> None:
        """Set page margins in inches"""
        margins = PageMargins(
            left=left,
            right=right,
            top=top,
            bottom=bottom,
            header=header,
            footer=footer
        )
        self.worksheet.page_margins = margins

    def set_header_footer(
        self,
        odd_header: Optional[str] = None,
        odd_footer: Optional[str] = None,
        even_header: Optional[str] = None,
        even_footer: Optional[str] = None,
        first_header: Optional[str] = None,
        first_footer: Optional[str] = None,
        different_first: bool = False,
        different_odd_even: bool = False
    ) -> None:
        """Set headers and footers

        Format codes:
        - &P: Page number
        - &N: Total pages
        - &D: Date
        - &T: Time
        - &[Tab]: Sheet name
        - &[Path]: File path
        - &[File]: File name
        - &[Tab]: Worksheet name
        """
        self.worksheet.oddHeader.left = odd_header or ""
        self.worksheet.oddFooter.left = odd_footer or ""

        if different_odd_even:
            self.worksheet.evenHeader.left = even_header or ""
            self.worksheet.evenFooter.left = even_footer or ""

        if different_first:
            self.worksheet.firstHeader.left = first_header or ""
            self.worksheet.firstFooter.left = first_footer or ""

        self.worksheet.differentFirst = different_first
        self.worksheet.differentOddEven = different_odd_even

    def set_print_area(self, range_string: str) -> None:
        """Set print area

        Args:
            range_string: Cell range in A1 notation (e.g., 'A1:H42')
        """
        self.worksheet.print_area = range_string

    def set_print_title_rows(self, rows: str) -> None:
        """Set rows to repeat at top of each page

        Args:
            rows: Row range (e.g., '1:3')
        """
        self.worksheet.print_title_rows = rows

    def set_print_title_columns(self, cols: str) -> None:
        """Set columns to repeat at left of each page

        Args:
            cols: Column range (e.g., 'A:B')
        """
        self.worksheet.print_title_cols = cols

    def set_print_options(
        self,
        grid_lines: bool = False,
        horizontal_centered: bool = False,
        vertical_centered: bool = False,
        headers: bool = False
    ) -> None:
        """Set print options"""
        self.worksheet.print_gridlines = grid_lines
        self.worksheet.print_options.horizontalCentered = horizontal_centered
        self.worksheet.print_options.verticalCentered = vertical_centered
        self.worksheet.print_options.headers = headers


class PaperSizes:
    """Standard paper size constants"""
    LETTER = 1
    LETTER_SMALL = 2
    TABLOID = 3
    LEDGER = 4
    LEGAL = 5
    STATEMENT = 6
    EXECUTIVE = 7
    A3 = 8
    A4 = 9
    A4_SMALL = 10
    A5 = 11
    B4 = 12
    B5 = 13
SumasenLibs/excel_lib/sumaexcel/styles.py (new file, 115 lines)

# sumaexcel/styles.py
from typing import Dict, Any, Optional, Union
from openpyxl.styles import (
    Font, PatternFill, Alignment, Border, Side,
    NamedStyle, Protection, Color
)
from openpyxl.styles.differential import DifferentialStyle
from openpyxl.formatting.rule import Rule
from openpyxl.worksheet.worksheet import Worksheet


class StyleManager:
    """Excel style management class"""

    @staticmethod
    def create_font(
        name: str = "Arial",
        size: int = 11,
        bold: bool = False,
        italic: bool = False,
        color: str = "000000",
        underline: str = None,
        strike: bool = False
    ) -> Font:
        """Create a Font object with specified parameters"""
        return Font(
            name=name,
            size=size,
            bold=bold,
            italic=italic,
            color=color,
            underline=underline,
            strike=strike
        )

    @staticmethod
    def create_fill(
        fill_type: str = "solid",
        start_color: str = "FFFFFF",
        end_color: str = None
    ) -> PatternFill:
        """Create a PatternFill object"""
        return PatternFill(
            fill_type=fill_type,
            start_color=start_color,
            end_color=end_color or start_color
        )

    @staticmethod
    def create_border(
        style: str = "thin",
        color: str = "000000"
    ) -> Border:
        """Create a Border object"""
        side = Side(style=style, color=color)
        return Border(
            left=side,
            right=side,
            top=side,
            bottom=side
        )

    @staticmethod
    def create_alignment(
        horizontal: str = "general",
        vertical: str = "bottom",
        wrap_text: bool = False,
        shrink_to_fit: bool = False,
        indent: int = 0
    ) -> Alignment:
        """Create an Alignment object"""
        return Alignment(
            horizontal=horizontal,
            vertical=vertical,
            wrap_text=wrap_text,
            shrink_to_fit=shrink_to_fit,
            indent=indent
        )

    @staticmethod
    def copy_style(source_cell: Any, target_cell: Any) -> None:
        """Copy all style properties from source cell to target cell"""
        target_cell.font = Font(
            name=source_cell.font.name,
            size=source_cell.font.size,
            bold=source_cell.font.bold,
            italic=source_cell.font.italic,
            color=source_cell.font.color,
            underline=source_cell.font.underline,
            strike=source_cell.font.strike
        )

        if source_cell.fill.patternType != None:
            target_cell.fill = PatternFill(
                fill_type=source_cell.fill.patternType,
                start_color=source_cell.fill.start_color.rgb,
                end_color=source_cell.fill.end_color.rgb
            )

        target_cell.border = Border(
            left=source_cell.border.left,
            right=source_cell.border.right,
            top=source_cell.border.top,
            bottom=source_cell.border.bottom
        )

        target_cell.alignment = Alignment(
            horizontal=source_cell.alignment.horizontal,
            vertical=source_cell.alignment.vertical,
            wrap_text=source_cell.alignment.wrap_text,
            shrink_to_fit=source_cell.alignment.shrink_to_fit,
            indent=source_cell.alignment.indent
        )

        if source_cell.number_format:
            target_cell.number_format = source_cell.number_format
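As a minimal sketch (cell values, colors and file name are invented), the static helpers above compose directly onto openpyxl cells:

```python
from openpyxl import Workbook
from sumaexcel.styles import StyleManager  # module added above; import path assumed

wb = Workbook()
ws = wb.active
ws["A1"] = "Header"

# Build style pieces with the factory helpers and attach them to a cell
ws["A1"].font = StyleManager.create_font(name="Arial", size=12, bold=True)
ws["A1"].fill = StyleManager.create_fill(start_color="DDEBF7")
ws["A1"].border = StyleManager.create_border(style="thin")
ws["A1"].alignment = StyleManager.create_alignment(horizontal="center")

# copy_style replicates font/fill/border/alignment onto another cell
ws["B1"] = "Copy"
StyleManager.copy_style(ws["A1"], ws["B1"])
wb.save("styles_demo.xlsx")  # hypothetical output file
```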
SumasenLibs/excel_lib/sumaexcel/sumaexcel.py (new file, 1444 lines): file diff suppressed because it is too large
SumasenLibs/excel_lib/testdata/certificate_5033.xlsx (new binary file, vendored): binary file not shown
SumasenLibs/excel_lib/testdata/certificate_template.xlsx (new binary file, vendored): binary file not shown
SumasenLibs/excel_lib/testdata/sample.py (vendored, new file, 28 lines)

from sumaexcel import SumasenExcel
import logging

# 初期化
# 初期化
variables = {
    "zekken_number":"5033",
    "event_code":"FC岐阜",
    "db":"rogdb",
    "username":"admin",
    "password":"admin123456",
    "host":"localhost",
    "port":"5432"
}
excel = SumasenExcel(document="test", variables=variables, docbase="./testdata")

logging.info("Excelファイル作成 step-1")

# シート初期化
ret = excel.make_report(variables=variables)
logging.info(f"Excelファイル作成 step-2 : ret={ret}")
if ret["status"]==True:
    filepath=ret["filepath"]
    logging.info(f"Excelファイル作成 : ret.filepath={filepath}")
else:
    message = ret.get("message", "No message provided")
    logging.error(f"Excelファイル作成失敗 : ret.message={message}")
SumasenLibs/excel_lib/testdata/test.ini (vendored, new file, 26 lines)

[basic]
template_file=certificate_template.xlsx
doc_file=certificate_[zekken_number].xlsx
sections=section1
maxcol=10
column_width=3,5,16,16,16,16,16,8,8,12,3

[section1]
template_sheet=certificate
sheet_name=certificate
groups=group1,group2
fit_to_width=1
orientation=portrait

[section1.group1]
table_name=mv_entry_details
where=zekken_number='[zekken_number]' and event_name='[event_code]'
group_range=A1:J12


[section1.group2]
table_name=v_checkins_locations
where=zekken_number='[zekken_number]' and event_code='[event_code]'
sort=path_order
group_range=A13:J13
checkpoint_summary.csv (new file, 164 lines)

event_id,event_name,cp_number,sub_loc_id,location_name,category_id,category_name,normal_checkins,purchase_checkins
10,FC岐阜,-1,#-1(0),スタート(長良川競技場芝生広場),5,ソロ男子-3時間,7,0
10,FC岐阜,-1,#-1(0),スタート(長良川競技場芝生広場),6,ソロ女子-3時間,2,0
10,FC岐阜,-1,#-1(0),スタート(長良川競技場芝生広場),7,ファミリー-3時間,2,0
10,FC岐阜,-1,#-1(0),スタート(長良川競技場芝生広場),8,一般-3時間,8,0
10,FC岐阜,1,#1(35),長良公園(枝広館跡),8,一般-3時間,2,0
10,FC岐阜,3,#3(28),長良川うかいミュージアム(岐阜市長良川鵜飼伝承館),7,ファミリー-3時間,1,0
10,FC岐阜,3,#3(28),長良川うかいミュージアム(岐阜市長良川鵜飼伝承館),8,一般-3時間,4,0
10,FC岐阜,4,#4(15),高橋尚子ゴールドメダル記念碑(足形),5,ソロ男子-3時間,7,0
10,FC岐阜,4,#4(15),高橋尚子ゴールドメダル記念碑(足形),6,ソロ女子-3時間,1,0
10,FC岐阜,4,#4(15),高橋尚子ゴールドメダル記念碑(足形),7,ファミリー-3時間,2,0
10,FC岐阜,4,#4(15),高橋尚子ゴールドメダル記念碑(足形),8,一般-3時間,7,0
10,FC岐阜,4,#4(15),高橋尚子ゴールドメダル記念碑(足形),9,お試し-3時間,1,0
10,FC岐阜,5,#5(10),崇福寺・稲葉一鉄寄贈の鐘楼,5,ソロ男子-3時間,5,0
10,FC岐阜,5,#5(10),崇福寺・稲葉一鉄寄贈の鐘楼,6,ソロ女子-3時間,2,0
10,FC岐阜,5,#5(10),崇福寺・稲葉一鉄寄贈の鐘楼,7,ファミリー-3時間,2,0
10,FC岐阜,5,#5(10),崇福寺・稲葉一鉄寄贈の鐘楼,8,一般-3時間,6,0
10,FC岐阜,6,#6(40),鷺山城跡,6,ソロ女子-3時間,1,0
10,FC岐阜,6,#6(40),鷺山城跡,8,一般-3時間,2,0
10,FC岐阜,7,#7(30),岐阜県立岐阜商業高等学校,5,ソロ男子-3時間,2,0
10,FC岐阜,7,#7(30),岐阜県立岐阜商業高等学校,6,ソロ女子-3時間,1,0
10,FC岐阜,7,#7(30),岐阜県立岐阜商業高等学校,8,一般-3時間,4,0
10,FC岐阜,8,#8(45+80),パティスリー kura,5,ソロ男子-3時間,2,1
10,FC岐阜,8,#8(45+80),パティスリー kura,8,一般-3時間,4,4
10,FC岐阜,9,#9(55),大垣共立銀行 則武支店,5,ソロ男子-3時間,2,0
10,FC岐阜,9,#9(55),大垣共立銀行 則武支店,8,一般-3時間,4,0
10,FC岐阜,10,#10(48+30),ポッカサッポロ自販機-BOOKOFF則武店,6,ソロ女子-3時間,1,1
10,FC岐阜,10,#10(48+30),ポッカサッポロ自販機-BOOKOFF則武店,8,一般-3時間,2,2
10,FC岐阜,11,#11(72),御嶽神社茅萱宮,5,ソロ男子-3時間,1,0
10,FC岐阜,11,#11(72),御嶽神社茅萱宮,6,ソロ女子-3時間,1,0
10,FC岐阜,12,#12(55),眞中(みなか)神社,6,ソロ女子-3時間,1,0
10,FC岐阜,13,#13(60),江口の鵜飼発祥の地/史跡 江口のわたし,5,ソロ男子-3時間,1,0
10,FC岐阜,13,#13(60),江口の鵜飼発祥の地/史跡 江口のわたし,6,ソロ女子-3時間,1,0
10,FC岐阜,14,#14(85),鏡島湊跡(かがみしまみなと),5,ソロ男子-3時間,2,0
10,FC岐阜,14,#14(85),鏡島湊跡(かがみしまみなと),6,ソロ女子-3時間,1,0
10,FC岐阜,15,#15(45),鏡島弘法(乙津寺),5,ソロ男子-3時間,2,0
10,FC岐阜,15,#15(45),鏡島弘法(乙津寺),6,ソロ女子-3時間,1,0
10,FC岐阜,16,#16(65),岐阜市立岐阜商業高等学校,5,ソロ男子-3時間,2,0
10,FC岐阜,17,#17(43),立政寺,5,ソロ男子-3時間,2,0
10,FC岐阜,18,#18(35),本莊神社,5,ソロ男子-3時間,2,0
10,FC岐阜,19,#19(40),岐阜県美術館,5,ソロ男子-3時間,2,0
10,FC岐阜,20,#20(55+30),ポッカサッポロ自販機-大垣共立銀行エブリデープラザ,5,ソロ男子-3時間,2,2
10,FC岐阜,21,#21(62),武藤嘉門爺像,5,ソロ男子-3時間,1,0
10,FC岐阜,23,#23(95),岐阜県立岐阜総合学園高等学校,5,ソロ男子-3時間,1,0
10,FC岐阜,25,#25(76),鶉田神社,5,ソロ男子-3時間,1,0
10,FC岐阜,26,#26(74),茜部神社,5,ソロ男子-3時間,1,0
10,FC岐阜,33,#33(60),馬頭観世音菩薩,5,ソロ男子-3時間,1,0
10,FC岐阜,33,#33(60),馬頭観世音菩薩,6,ソロ女子-3時間,1,0
10,FC岐阜,34,#34(70),陸上自衛隊 日野基本射撃場,6,ソロ女子-3時間,1,0
10,FC岐阜,37,#37(45+30),ポッカサッポロ自販機-セリア茜部店,5,ソロ男子-3時間,1,1
10,FC岐阜,38,#38(40),比奈守神社,5,ソロ男子-3時間,1,0
10,FC岐阜,39,#39(35),岐阜県立加納高等学校前バス停,5,ソロ男子-3時間,1,0
10,FC岐阜,41,#41(32),中山道往来の松,5,ソロ男子-3時間,2,0
10,FC岐阜,42,#42(30),問屋町ウォールアート,5,ソロ男子-3時間,4,0
10,FC岐阜,43,#43(22),黄金の信長像,5,ソロ男子-3時間,4,0
10,FC岐阜,44,#44(25+80),名鉄協商パーキング 岐阜第2,5,ソロ男子-3時間,2,0
10,FC岐阜,45,#45(30),本荘公園,5,ソロ男子-3時間,1,0
10,FC岐阜,45,#45(30),本荘公園,6,ソロ女子-3時間,1,0
10,FC岐阜,46,#46(30),大縄場大橋公園,5,ソロ男子-3時間,2,0
10,FC岐阜,46,#46(30),大縄場大橋公園,6,ソロ女子-3時間,1,0
10,FC岐阜,46,#46(30),大縄場大橋公園,8,一般-3時間,1,0
10,FC岐阜,47,#47(25),金神社/おもかる石,5,ソロ男子-3時間,4,0
10,FC岐阜,48,#48(46),OKB岐阜中央プラザ わくわくベースG,5,ソロ男子-3時間,8,0
10,FC岐阜,48,#48(46),OKB岐阜中央プラザ わくわくベースG,6,ソロ女子-3時間,1,0
10,FC岐阜,48,#48(46),OKB岐阜中央プラザ わくわくベースG,8,一般-3時間,1,0
10,FC岐阜,51,#51(20),梅林公園,5,ソロ男子-3時間,1,0
10,FC岐阜,51,#51(20),梅林公園,6,ソロ女子-3時間,1,0
10,FC岐阜,52,#52(60),柳ヶ瀬FC岐阜勝ち神社,5,ソロ男子-3時間,7,0
10,FC岐阜,52,#52(60),柳ヶ瀬FC岐阜勝ち神社,6,ソロ女子-3時間,1,0
10,FC岐阜,52,#52(60),柳ヶ瀬FC岐阜勝ち神社,7,ファミリー-3時間,1,0
10,FC岐阜,52,#52(60),柳ヶ瀬FC岐阜勝ち神社,8,一般-3時間,1,0
10,FC岐阜,53,#53(25),美殿町の郵便ポスト,5,ソロ男子-3時間,5,0
10,FC岐阜,53,#53(25),美殿町の郵便ポスト,6,ソロ女子-3時間,1,0
10,FC岐阜,53,#53(25),美殿町の郵便ポスト,7,ファミリー-3時間,1,0
10,FC岐阜,53,#53(25),美殿町の郵便ポスト,8,一般-3時間,1,0
10,FC岐阜,54,#54(150),水道山展望台,5,ソロ男子-3時間,5,0
10,FC岐阜,54,#54(150),水道山展望台,6,ソロ女子-3時間,1,0
10,FC岐阜,54,#54(150),水道山展望台,7,ファミリー-3時間,1,0
10,FC岐阜,54,#54(150),水道山展望台,8,一般-3時間,1,0
10,FC岐阜,55,#55(30),岐阜新聞社,5,ソロ男子-3時間,4,0
10,FC岐阜,55,#55(30),岐阜新聞社,7,ファミリー-3時間,1,0
10,FC岐阜,55,#55(30),岐阜新聞社,8,一般-3時間,3,0
10,FC岐阜,56,#56(24),弥八地蔵尊堂,5,ソロ男子-3時間,2,0
10,FC岐阜,56,#56(24),弥八地蔵尊堂,7,ファミリー-3時間,1,0
10,FC岐阜,56,#56(24),弥八地蔵尊堂,8,一般-3時間,1,0
10,FC岐阜,57,#57(25),建勲神社 (岐阜 信長神社),5,ソロ男子-3時間,5,0
10,FC岐阜,57,#57(25),建勲神社 (岐阜 信長神社),6,ソロ女子-3時間,1,0
10,FC岐阜,57,#57(25),建勲神社 (岐阜 信長神社),7,ファミリー-3時間,1,0
10,FC岐阜,58,#58(65),伊奈波神社・黒龍神社龍頭石,7,ファミリー-3時間,2,0
10,FC岐阜,58,#58(65),伊奈波神社・黒龍神社龍頭石,8,一般-3時間,2,0
10,FC岐阜,59,#59(12),日下部邸跡・岐阜町本陣跡,5,ソロ男子-3時間,2,0
10,FC岐阜,59,#59(12),日下部邸跡・岐阜町本陣跡,7,ファミリー-3時間,2,0
10,FC岐阜,59,#59(12),日下部邸跡・岐阜町本陣跡,8,一般-3時間,3,0
10,FC岐阜,60,#60(25),メディアコスモスみんなの森,5,ソロ男子-3時間,1,0
10,FC岐阜,60,#60(25),メディアコスモスみんなの森,7,ファミリー-3時間,1,0
10,FC岐阜,60,#60(25),メディアコスモスみんなの森,8,一般-3時間,3,0
10,FC岐阜,61,#61(15+80),ナガラガワフレーバー,5,ソロ男子-3時間,1,0
10,FC岐阜,61,#61(15+80),ナガラガワフレーバー,7,ファミリー-3時間,2,2
10,FC岐阜,61,#61(15+80),ナガラガワフレーバー,8,一般-3時間,8,8
10,FC岐阜,62,#62(15),庚申堂,5,ソロ男子-3時間,1,0
10,FC岐阜,62,#62(15),庚申堂,7,ファミリー-3時間,2,0
10,FC岐阜,62,#62(15),庚申堂,8,一般-3時間,7,0
10,FC岐阜,63,#63(15+80),和菓子処 緑水庵 川原町店,5,ソロ男子-3時間,3,0
10,FC岐阜,63,#63(15+80),和菓子処 緑水庵 川原町店,6,ソロ女子-3時間,1,0
10,FC岐阜,63,#63(15+80),和菓子処 緑水庵 川原町店,7,ファミリー-3時間,2,1
10,FC岐阜,63,#63(15+80),和菓子処 緑水庵 川原町店,8,一般-3時間,8,8
10,FC岐阜,63,#63(15+80),和菓子処 緑水庵 川原町店,9,お試し-3時間,1,1
10,FC岐阜,64,#64(16),日中友好庭園,5,ソロ男子-3時間,4,0
10,FC岐阜,64,#64(16),日中友好庭園,6,ソロ女子-3時間,1,0
10,FC岐阜,64,#64(16),日中友好庭園,7,ファミリー-3時間,2,0
10,FC岐阜,64,#64(16),日中友好庭園,8,一般-3時間,8,0
10,FC岐阜,64,#64(16),日中友好庭園,9,お試し-3時間,1,0
10,FC岐阜,65,#65(15),板垣死すとも自由は死なず,5,ソロ男子-3時間,3,0
10,FC岐阜,65,#65(15),板垣死すとも自由は死なず,7,ファミリー-3時間,2,0
10,FC岐阜,65,#65(15),板垣死すとも自由は死なず,8,一般-3時間,6,0
10,FC岐阜,65,#65(15),板垣死すとも自由は死なず,9,お試し-3時間,1,0
10,FC岐阜,66,#66(40),岐阜大仏(正法寺),5,ソロ男子-3時間,3,0
10,FC岐阜,66,#66(40),岐阜大仏(正法寺),7,ファミリー-3時間,2,0
10,FC岐阜,66,#66(40),岐阜大仏(正法寺),8,一般-3時間,3,0
10,FC岐阜,66,#66(40),岐阜大仏(正法寺),9,お試し-3時間,1,0
10,FC岐阜,67,#67(100),めいそうの小道:中間地点,5,ソロ男子-3時間,5,0
10,FC岐阜,67,#67(100),めいそうの小道:中間地点,6,ソロ女子-3時間,1,0
10,FC岐阜,67,#67(100),めいそうの小道:中間地点,7,ファミリー-3時間,2,0
10,FC岐阜,67,#67(100),めいそうの小道:中間地点,8,一般-3時間,3,0
10,FC岐阜,68,#68(160),岐阜城,5,ソロ男子-3時間,4,0
10,FC岐阜,68,#68(160),岐阜城,6,ソロ女子-3時間,1,0
10,FC岐阜,68,#68(160),岐阜城,7,ファミリー-3時間,2,0
10,FC岐阜,68,#68(160),岐阜城,8,一般-3時間,6,0
10,FC岐阜,68,#68(160),岐阜城,9,お試し-3時間,1,0
10,FC岐阜,69,#69(150),金華山展望デッキ,5,ソロ男子-3時間,5,0
10,FC岐阜,69,#69(150),金華山展望デッキ,6,ソロ女子-3時間,1,0
10,FC岐阜,69,#69(150),金華山展望デッキ,7,ファミリー-3時間,2,0
10,FC岐阜,69,#69(150),金華山展望デッキ,8,一般-3時間,6,0
10,FC岐阜,70,#70(180),七曲り登山道:岐阜城まで1000m,5,ソロ男子-3時間,5,0
10,FC岐阜,70,#70(180),七曲り登山道:岐阜城まで1000m,6,ソロ女子-3時間,1,0
10,FC岐阜,70,#70(180),七曲り登山道:岐阜城まで1000m,7,ファミリー-3時間,2,0
10,FC岐阜,70,#70(180),七曲り登山道:岐阜城まで1000m,8,一般-3時間,5,0
10,FC岐阜,70,#70(180),七曲り登山道:岐阜城まで1000m,9,お試し-3時間,1,0
10,FC岐阜,71,#71(5+5),練習ポイント,5,ソロ男子-3時間,6,5
10,FC岐阜,71,#71(5+5),練習ポイント,6,ソロ女子-3時間,2,2
10,FC岐阜,71,#71(5+5),練習ポイント,7,ファミリー-3時間,1,1
10,FC岐阜,71,#71(5+5),練習ポイント,8,一般-3時間,8,7
10,FC岐阜,71,#71(5+5),練習ポイント,9,お試し-3時間,1,1
10,FC岐阜,72,#72(5+80),岐阜ロゲコーヒー,5,ソロ男子-3時間,3,1
10,FC岐阜,72,#72(5+80),岐阜ロゲコーヒー,6,ソロ女子-3時間,1,0
10,FC岐阜,72,#72(5+80),岐阜ロゲコーヒー,7,ファミリー-3時間,1,1
10,FC岐阜,72,#72(5+80),岐阜ロゲコーヒー,8,一般-3時間,4,3
10,FC岐阜,72,#72(5+80),岐阜ロゲコーヒー,9,お試し-3時間,1,1
10,FC岐阜,73,#73(5+80),FC岐阜+岐阜バス,5,ソロ男子-3時間,6,1
10,FC岐阜,73,#73(5+80),FC岐阜+岐阜バス,8,一般-3時間,2,0
10,FC岐阜,73,#73(5+80),FC岐阜+岐阜バス,9,お試し-3時間,1,0
10,FC岐阜,74,#74(5+80),MKPポイントカード発行,5,ソロ男子-3時間,2,1
10,FC岐阜,74,#74(5+80),MKPポイントカード発行,6,ソロ女子-3時間,1,1
10,FC岐阜,74,#74(5+80),MKPポイントカード発行,7,ファミリー-3時間,1,1
10,FC岐阜,74,#74(5+80),MKPポイントカード発行,8,一般-3時間,7,3
10,FC岐阜,74,#74(5+80),MKPポイントカード発行,9,お試し-3時間,1,1
10,FC岐阜,75,#75(5+80),小屋垣内(権太)農園,5,ソロ男子-3時間,1,0
10,FC岐阜,75,#75(5+80),小屋垣内(権太)農園,7,ファミリー-3時間,2,2
10,FC岐阜,75,#75(5+80),小屋垣内(権太)農園,8,一般-3時間,5,5
10,FC岐阜,75,#75(5+80),小屋垣内(権太)農園,9,お試し-3時間,1,0
10,FC岐阜,200,#200(15+15),穂積駅,5,ソロ男子-3時間,1,1
10,FC岐阜,201,#201(15+15),大垣駅,5,ソロ男子-3時間,1,1
10,FC岐阜,202,#202(15+15),関ケ原駅,5,ソロ男子-3時間,1,1
10,FC岐阜,204,#204(15+15),名古屋駅,5,ソロ男子-3時間,1,1
config/fonts.conf (new file, 69 lines)

<?xml version="1.0"?>
<!DOCTYPE fontconfig SYSTEM "fonts.dtd">
<fontconfig>
    <dir>/usr/share/fonts</dir>

    <!-- デフォルトのサンセリフフォントをIPAexGothicに設定 -->
    <match target="pattern">
        <test qual="any" name="family">
            <string>sans-serif</string>
        </test>
        <edit name="family" mode="assign" binding="same">
            <string>IPAexGothic</string>
        </edit>
    </match>

    <!-- デフォルトのセリフフォントをIPAexMinchoに設定 -->
    <match target="pattern">
        <test qual="any" name="family">
            <string>serif</string>
        </test>
        <edit name="family" mode="assign" binding="same">
            <string>IPAexMincho</string>
        </edit>
    </match>

    <!-- MS Gothic の代替としてIPAexGothicを使用 -->
    <match target="pattern">
        <test name="family">
            <string>MS Gothic</string>
        </test>
        <edit name="family" mode="assign" binding="same">
            <string>IPAexGothic</string>
        </edit>
    </match>

    <!-- MS Mincho の代替としてIPAexMinchoを使用 -->
    <match target="pattern">
        <test name="family">
            <string>MS Mincho</string>
        </test>
        <edit name="family" mode="assign" binding="same">
            <string>IPAexMincho</string>
        </edit>
    </match>

    <!-- ビットマップフォントを無効化 -->
    <match target="font">
        <edit name="embeddedbitmap" mode="assign">
            <bool>false</bool>
        </edit>
    </match>

    <!-- フォントのヒンティング設定 -->
    <match target="font">
        <edit name="hintstyle" mode="assign">
            <const>hintslight</const>
        </edit>
        <edit name="rgba" mode="assign">
            <const>rgb</const>
        </edit>
    </match>

    <!-- アンチエイリアス設定 -->
    <match target="font">
        <edit name="antialias" mode="assign">
            <bool>true</bool>
        </edit>
    </match>
</fontconfig>
@@ -53,10 +53,14 @@ INSTALLED_APPS = [
     'leaflet',
     'leaflet_admin_list',
     'rog.apps.RogConfig',
+    'corsheaders',   # added
     'django_filters'
 ]
 
 MIDDLEWARE = [
+    'corsheaders.middleware.CorsMiddleware',  # できるだけ上部に
+    'django.middleware.common.CommonMiddleware',
+
     'django.middleware.security.SecurityMiddleware',
     'django.contrib.sessions.middleware.SessionMiddleware',
     'django.middleware.common.CommonMiddleware',

@@ -68,10 +72,47 @@ MIDDLEWARE = [
 
 ROOT_URLCONF = 'config.urls'
 
+CORS_ALLOW_ALL_ORIGINS = True  # 開発環境のみ
+CORS_ALLOW_CREDENTIALS = True
+
+CORS_ALLOWED_METHODS = [
+    'GET',
+    'POST',
+    'PUT',
+    'PATCH',
+    'DELETE',
+    'OPTIONS'
+]
+CORS_ALLOWED_HEADERS = [
+    'accept',
+    'accept-encoding',
+    'authorization',
+    'content-type',
+    'dnt',
+    'origin',
+    'user-agent',
+    'x-csrftoken',
+    'x-requested-with',
+]
+
+# 本番環境では以下のように制限する
+CORS_ALLOWED_ORIGINS = [
+    "https://rogaining.sumasen.net",
+    "http://rogaining.sumasen.net",
+]
+
+# CSRFの設定
+CSRF_TRUSTED_ORIGINS = [
+    "http://rogaining.sumasen.net",
+    "https://rogaining.sumasen.net",
+]
+
+
+
 TEMPLATES = [
     {
         'BACKEND': 'django.template.backends.django.DjangoTemplates',
-        'DIRS': [BASE_DIR / 'templates'],
+        'DIRS': [os.path.join(BASE_DIR, 'templates')],
         'APP_DIRS': True,
         'OPTIONS': {
             'context_processors': [

@@ -138,10 +179,12 @@ USE_TZ = True
 STATIC_URL = '/static/'
 
 #STATIC_URL = '/static2/'
-STATIC_ROOT = BASE_DIR / "static"
+#STATIC_ROOT = BASE_DIR / "static"
+STATIC_ROOT = os.path.join(BASE_DIR, 'static')
 
 MEDIA_URL = '/media/'
-MEDIA_ROOT = BASE_DIR / "media/"
+#MEDIA_ROOT = BASE_DIR / "media/"
+MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
 
 #STATICFILES_DIRS = (os.path.join(BASE_DIR, "static2"),os.path.join(BASE_DIR, "media"))
 

@@ -174,3 +217,87 @@ REST_FRAMEWORK = {
     'DEFAULT_FILTER_BACKENDS': ['django_filters.rest_framework.DjangoFilterBackend'],
     'DEFAULT_AUTHENTICATION_CLASSES': ('knox.auth.TokenAuthentication', ),
 }
+
+
+#FRONTEND_URL = 'https://rogaining.intranet.sumasen.net' # フロントエンドのURLに適宜変更してください
+FRONTEND_URL = 'https://rogaining.sumasen.net' # フロントエンドのURLに適宜変更してください
+
+# この設定により、メールは実際には送信されず、代わりにコンソールに出力されます。
+EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
+
+EMAIL_HOST = 'smtp.outlook.com'
+EMAIL_PORT = 587
+EMAIL_USE_TLS = True
+EMAIL_HOST_USER = 'rogaining@gifuai.net'
+EMAIL_HOST_PASSWORD = 'ctcpy9823"x~'
+DEFAULT_FROM_EMAIL = 'rogaining@gifuai.net'
+
+APP_DOWNLOAD_LINK = 'https://apps.apple.com/jp/app/%E5%B2%90%E9%98%9C%E3%83%8A%E3%83%93/id6444221792'
+ANDROID_DOWNLOAD_LINK = 'https://play.google.com/store/apps/details?id=com.dvox.gifunavi&hl=ja'
+
+SERVICE_NAME = '岐阜ナビ(岐阜ロゲのアプリ)'
+
+# settings.py
+DEFAULT_CHARSET = 'utf-8'
+
+#REST_FRAMEWORK = {
+#    'DEFAULT_RENDERER_CLASSES': [
+#        'rest_framework.renderers.JSONRenderer',
+#    ],
+#    'JSON_UNICODE_ESCAPE': False,
+#}
+
+LOGGING = {
+    'version': 1,
+    'disable_existing_loggers': False,
+    'formatters': {
+        'verbose': {
+            'format': '{levelname} {asctime} {module} {message}',
+            'style': '{',
+        },
+    },
+    'handlers': {
+        #'file': {
+        #    'level': 'DEBUG',
+        #    'class': 'logging.FileHandler',
+        #    'filename': os.path.join(BASE_DIR, 'logs/debug.log'),
+        #    'formatter': 'verbose',
+        #},
+        'console': {
+            'level': 'DEBUG',
+            'class': 'logging.StreamHandler',
+            'formatter': 'verbose',
+        },
+    },
+    'root': {
+        'handlers': ['console'],
+        'level': 'DEBUG',
+    },
+    'loggers': {
+        'django': {
+            'handlers': ['console'],
+            'level': 'INFO',
+            'propagate': False,
+        },
+        'django.request': {
+            'handlers': ['console'],
+            'level': 'DEBUG',
+        },
+        'rog': {
+            #'handlers': ['file','console'],
+            'handlers': ['console'],
+            'level': 'DEBUG',
+            'propagate': True,
+        },
+    },
+}
+
+PASSWORD_HASHERS = [
+    'django.contrib.auth.hashers.PBKDF2PasswordHasher',
+    'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
+    'django.contrib.auth.hashers.Argon2PasswordHasher',
+    'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
+]
+
+BLACKLISTED_IPS = ['44.230.58.114'] # ブロックしたい IP アドレスをここにリストとして追加
@@ -18,6 +18,21 @@ from django.urls import path, include
 from django.conf import settings
 from django.conf.urls.static import static
 
+
+# debug_urlsビューをrogアプリケーションのviewsからインポート
+from rog import views as rog_views
+
+DEBUG = True
+ALLOWED_HOSTS = ['rogaining.sumasen.net', 'localhost', '127.0.0.1']
+
+# CORSの設定
+CORS_ALLOW_ALL_ORIGINS = True
+CORS_ALLOWED_ORIGINS = [
+    "http://rogaining.sumasen.net",
+    "http://localhost",
+    "http://127.0.0.1",
+]
+
 urlpatterns = [
     path('admin/', admin.site.urls),
     path('auth/', include('knox.urls')),

@@ -27,3 +42,8 @@ urlpatterns = [
 admin.site.site_header = "ROGANING"
 admin.site.site_title = "Roganing Admin Portal"
 admin.site.index_title = "Welcome to Roganing Portal"
+
+# 開発環境での静的ファイル配信
+if settings.DEBUG:
+    urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
+    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
27
docbase/certificate.ini
Normal file
27
docbase/certificate.ini
Normal file
@ -0,0 +1,27 @@
[basic]
template_file=certificate_template.xlsx
doc_file=certificate_[zekken_number].xlsx
sections=section1
maxcol=10
column_width=3,5,16,16,16,20,16,8,8,12,3
output_path=media/reports/[event_code]

[section1]
template_sheet=certificate
sheet_name=certificate
groups=group1,group2
fit_to_width=1
orientation=portrait

[section1.group1]
table_name=mv_entry_details
where=zekken_number='[zekken_number]' and event_name='[event_code]'
group_range=A1:K15


[section1.group2]
table_name=v_checkins_locations
where=zekken_number='[zekken_number]' and event_code='[event_code]'
sort=path_order
group_range=A16:J16
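certificate.ini drives the per-entry certificate report: the bracketed tokens are filled in before the template queries run. A minimal sketch of that substitution, assuming a plain string replacement (the resolve() helper and the sample values are illustrative, not taken from the project code):

    import configparser

    def resolve(value, zekken_number, event_code):
        # naive placeholder substitution for the bracketed tokens used in certificate.ini
        return value.replace('[zekken_number]', zekken_number).replace('[event_code]', event_code)

    config = configparser.ConfigParser()
    config.read('docbase/certificate.ini')

    doc_file = resolve(config['basic']['doc_file'], '3003', '大垣3')
    where = resolve(config['section1.group1']['where'], '3003', '大垣3')
    # doc_file -> 'certificate_3003.xlsx'
    # where    -> "zekken_number='3003' and event_name='大垣3'"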
BIN
docbase/certificate_template.xlsx
Normal file
BIN
docbase/certificate_template.xlsx
Normal file
Binary file not shown.
@ -1,5 +1,3 @@
version: "3.9"

services:
  # postgres-db:
  #   image: kartoza/postgis:12.0
@ -22,20 +20,49 @@ services:
    build:
      context: .
      dockerfile: Dockerfile.gdal
    command: python3 manage.py runserver 0.0.0.0:8100
    # command: python3 manage.py runserver 0.0.0.0:8100
    volumes:
      - .:/app
    ports:
      - 8100:8100
      - 8000:8000
    env_file:
      - .env
    restart: "on-failure"
    # depends_on:
    #   - postgres-db
    networks:
      - rog-api
    #entrypoint: ["/app/wait-for.sh", "postgres-db:5432", "--", ""]
    #command: python3 manage.py runserver 0.0.0.0:8100

  supervisor-web:
    build:
      context: .
      dockerfile: Dockerfile.supervisor
    volumes:
      - type: bind
        source: ./supervisor/html
        target: /usr/share/nginx/html/supervisor
        read_only: true
      - type: bind
        source: ./supervisor/nginx/default.conf
        target: /etc/nginx/conf.d/default.conf
        read_only: true
      - type: volume
        source: static_volume
        target: /app/static
        read_only: true
      - type: volume
        source: nginx_logs
        target: /var/log/nginx
      - type: bind
        source: ./media
        target: /usr/share/nginx/html/media
    ports:
      - "8100:8100"
    depends_on:
      - api
    networks:
      - rog-api
    restart: always


networks:
  rog-api:
@ -44,3 +71,5 @@ networks:
volumes:
  postgres_data:
  geoserver-data:
  static_volume:
  nginx_logs:
81
docker-compose.yaml.ssl
Normal file
81
docker-compose.yaml.ssl
Normal file
@ -0,0 +1,81 @@
version: "3.9"

services:
  # postgres-db:
  #   image: kartoza/postgis:12.0
  #   ports:
  #     - 5432:5432
  #   volumes:
  #     - postgres_data:/var/lib/postgresql
  #     - ./custom-postgresql.conf:/etc/postgresql/12/main/postgresql.conf
  #   environment:
  #     - POSTGRES_USER=${POSTGRES_USER}
  #     - POSTGRES_PASS=${POSTGRES_PASS}
  #     - POSTGRES_DBNAME=${POSTGRES_DBNAME}
  #     - POSTGRES_MAX_CONNECTIONS=600

  #   restart: "on-failure"
  #   networks:
  #     - rog-api

  api:
    build:
      context: .
      dockerfile: Dockerfile.gdal
    command: python3 manage.py runserver 0.0.0.0:8100
    volumes:
      - .:/app
    ports:
      - 8100:8100
    env_file:
      - .env
    restart: "on-failure"
    # depends_on:
    #   - postgres-db
    networks:
      - rog-api
    #entrypoint: ["/app/wait-for.sh", "postgres-db:5432", "--", ""]
    #command: python3 manage.py runserver 0.0.0.0:8100

  supervisor-web:
    build:
      context: .
      dockerfile: Dockerfile.supervisor
    volumes:
      - type: bind
        source: /etc/letsencrypt
        target: /etc/nginx/ssl
        read_only: true
      - type: bind
        source: ./supervisor/html
        target: /usr/share/nginx/html
        read_only: true
      - type: bind
        source: ./supervisor/nginx/default.conf
        target: /etc/nginx/conf.d/default.conf
        read_only: true
      - type: volume
        source: static_volume
        target: /app/static
        read_only: true
      - type: volume
        source: nginx_logs
        target: /var/log/nginx
    ports:
      - "80:80"
    depends_on:
      - api
    networks:
      - rog-api
    restart: always


networks:
  rog-api:
    driver: bridge

volumes:
  postgres_data:
  geoserver-data:
  static_volume:
  nginx_logs:
1
dump_rog_data.sql
Normal file
1
dump_rog_data.sql
Normal file
@ -0,0 +1 @@
pg_dump: error: connection to database "rogdb" failed: FATAL: Peer authentication failed for user "postgres"
10
entrypoint.sh
Normal file
10
entrypoint.sh
Normal file
@ -0,0 +1,10 @@
#!/bin/sh

# Collect static files
python manage.py collectstatic --noinput

# Apply database migrations
python manage.py migrate

# Start Gunicorn
exec "$@"
26
nginx.conf
26
nginx.conf
@ -26,29 +26,33 @@ http {
    #gzip on;

    server {
        listen 80;
        server_name localhost;

        # 静的ファイルの提供
        location /static/ {
            alias /app/static/;
        }

        location /media/ {
            alias /app/media/;
        }
        # スーパーバイザー Web アプリケーション


        location / {
            proxy_pass http://app:8000;
            root /usr/share/nginx/html;
            index index.html;
            try_files $uri $uri/ /index.html;
        }

        # Django API プロキシ
        location /api/ {
            proxy_pass http://api:8000;
            proxy_set_header Host $host;
            proxy_set_header X-Real-IP $remote_addr;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
            proxy_set_header X-Forwarded-Proto $scheme;
        }

        error_page 500 502 503 504 /50x.html;
        location = /50x.html {
            root /usr/share/nginx/html;
        }
    }
}
111
q
Normal file
111
q
Normal file
@ -0,0 +1,111 @@
|
|||||||
|
List of relations
|
||||||
|
Schema | Name | Type | Owner
|
||||||
|
----------+----------------------------------------+-------------------+-------
|
||||||
|
public | auth_group | table | admin
|
||||||
|
public | auth_group_id_seq | sequence | admin
|
||||||
|
public | auth_group_permissions | table | admin
|
||||||
|
public | auth_group_permissions_id_seq | sequence | admin
|
||||||
|
public | auth_permission | table | admin
|
||||||
|
public | auth_permission_id_seq | sequence | admin
|
||||||
|
public | authtoken_token | table | admin
|
||||||
|
public | django_admin_log | table | admin
|
||||||
|
public | django_admin_log_id_seq | sequence | admin
|
||||||
|
public | django_content_type | table | admin
|
||||||
|
public | django_content_type_id_seq | sequence | admin
|
||||||
|
public | django_migrations | table | admin
|
||||||
|
public | django_migrations_backup | table | admin
|
||||||
|
public | django_migrations_id_seq | sequence | admin
|
||||||
|
public | django_session | table | admin
|
||||||
|
public | geography_columns | view | admin
|
||||||
|
public | geometry_columns | view | admin
|
||||||
|
public | gifu_areas | table | admin
|
||||||
|
public | gifu_areas_id_seq | sequence | admin
|
||||||
|
public | gps_checkins | table | admin
|
||||||
|
public | gps_checkins_backup | table | admin
|
||||||
|
public | gps_checkins_id_seq | sequence | admin
|
||||||
|
public | jpn_admin_main_perf | table | admin
|
||||||
|
public | jpn_admin_main_perf_id_seq | sequence | admin
|
||||||
|
public | knox_authtoken | table | admin
|
||||||
|
public | mv_entry_details | materialized view | admin
|
||||||
|
public | raster_columns | view | admin
|
||||||
|
public | raster_overviews | view | admin
|
||||||
|
public | rog_category | table | admin
|
||||||
|
public | rog_checkinimages | table | admin
|
||||||
|
public | rog_checkinimages_id_seq | sequence | admin
|
||||||
|
public | rog_customuser | table | admin
|
||||||
|
public | rog_customuser_groups | table | admin
|
||||||
|
public | rog_customuser_groups_id_seq | sequence | admin
|
||||||
|
public | rog_customuser_id_seq | sequence | admin
|
||||||
|
public | rog_customuser_user_permissions | table | admin
|
||||||
|
public | rog_customuser_user_permissions_id_seq | sequence | admin
|
||||||
|
public | rog_entry | table | admin
|
||||||
|
public | rog_entry_id_seq | sequence | admin
|
||||||
|
public | rog_entrymember | table | admin
|
||||||
|
public | rog_entrymember_id_seq | sequence | admin
|
||||||
|
public | rog_event | table | admin
|
||||||
|
public | rog_event_id_seq | sequence | admin
|
||||||
|
public | rog_eventuser | table | admin
|
||||||
|
public | rog_eventuser_id_seq | sequence | admin
|
||||||
|
public | rog_favorite | table | admin
|
||||||
|
public | rog_favorite_id_seq | sequence | admin
|
||||||
|
public | rog_gifurogeregister | table | admin
|
||||||
|
public | rog_gifurogeregister_id_seq | sequence | admin
|
||||||
|
public | rog_goalimages | table | admin
|
||||||
|
public | rog_goalimages_id_seq | sequence | admin
|
||||||
|
public | rog_joinedevent | table | admin
|
||||||
|
public | rog_joinedevent_id_seq | sequence | admin
|
||||||
|
public | rog_location | table | admin
|
||||||
|
public | rog_location_id_seq | sequence | admin
|
||||||
|
public | rog_location_line | table | admin
|
||||||
|
public | rog_location_line_id_seq | sequence | admin
|
||||||
|
public | rog_location_polygon | table | admin
|
||||||
|
public | rog_location_polygon_id_seq | sequence | admin
|
||||||
|
public | rog_member | table | admin
|
||||||
|
public | rog_member_id_seq | sequence | admin
|
||||||
|
public | rog_newcategory | table | admin
|
||||||
|
public | rog_newcategory_id_seq | sequence | admin
|
||||||
|
public | rog_newevent | table | admin
|
||||||
|
public | rog_newevent2 | table | admin
|
||||||
|
public | rog_newevent2_id_seq | sequence | admin
|
||||||
|
public | rog_roguser | table | admin
|
||||||
|
public | rog_roguser_id_seq | sequence | admin
|
||||||
|
public | rog_shapefilelocations | table | admin
|
||||||
|
public | rog_shapefilelocations_id_seq | sequence | admin
|
||||||
|
public | rog_shapelayers | table | admin
|
||||||
|
public | rog_shapelayers_id_seq | sequence | admin
|
||||||
|
public | rog_systemsettings | table | admin
|
||||||
|
public | rog_systemsettings_id_seq | sequence | admin
|
||||||
|
public | rog_team | table | admin
|
||||||
|
public | rog_team_id_seq | sequence | admin
|
||||||
|
public | rog_templocation | table | admin
|
||||||
|
public | rog_templocation_id_seq | sequence | admin
|
||||||
|
public | rog_tempuser | table | admin
|
||||||
|
public | rog_tempuser_id_seq | sequence | admin
|
||||||
|
public | rog_testmodel | table | admin
|
||||||
|
public | rog_testmodel_id_seq | sequence | admin
|
||||||
|
public | rog_travellist | table | admin
|
||||||
|
public | rog_travellist_id_seq | sequence | admin
|
||||||
|
public | rog_travelpoint | table | admin
|
||||||
|
public | rog_travelpoint_id_seq | sequence | admin
|
||||||
|
public | rog_useractions | table | admin
|
||||||
|
public | rog_useractions_id_seq | sequence | admin
|
||||||
|
public | rog_usertracks | table | admin
|
||||||
|
public | rog_usertracks_id_seq | sequence | admin
|
||||||
|
public | rog_userupload | table | admin
|
||||||
|
public | rog_userupload_id_seq | sequence | admin
|
||||||
|
public | rog_useruploaduser | table | admin
|
||||||
|
public | rog_useruploaduser_id_seq | sequence | admin
|
||||||
|
public | spatial_ref_sys | table | admin
|
||||||
|
public | temp_gifuroge_team | table | admin
|
||||||
|
public | tmp_checkin | table | admin
|
||||||
|
public | tmp_checkpoint_table | table | admin
|
||||||
|
public | tmp_goalimage | table | admin
|
||||||
|
public | tmp_point | table | admin
|
||||||
|
public | v_category_rankings | view | admin
|
||||||
|
public | v_checkin_summary | view | admin
|
||||||
|
public | v_checkins_locations | view | admin
|
||||||
|
topology | layer | table | admin
|
||||||
|
topology | topology | table | admin
|
||||||
|
topology | topology_id_seq | sequence | admin
|
||||||
|
(106 rows)
|
||||||
|
|
||||||
@ -65,3 +65,4 @@ django-extra-fields==3.0.2
django-phonenumber-field==6.1.0
django-rest-knox==4.2.0
dj-database-url==2.0.0
django-cors-headers==4.3.0
BIN
rog/.DS_Store
vendored
BIN
rog/.DS_Store
vendored
Binary file not shown.
820
rog/admin.py
820
rog/admin.py
@ -1,16 +1,636 @@
|
|||||||
import email
|
import email
|
||||||
from django.contrib import admin
|
from django.contrib import admin
|
||||||
from django.shortcuts import render
|
from django.shortcuts import render,redirect
|
||||||
from leaflet.admin import LeafletGeoAdmin
|
from leaflet.admin import LeafletGeoAdmin
|
||||||
from leaflet.admin import LeafletGeoAdminMixin
|
from leaflet.admin import LeafletGeoAdminMixin
|
||||||
from leaflet_admin_list.admin import LeafletAdminListMixin
|
from leaflet_admin_list.admin import LeafletAdminListMixin
|
||||||
from .models import RogUser, Location, SystemSettings, JoinedEvent, Favorite, TravelList, TravelPoint, ShapeLayers, Event, Location_line, Location_polygon, JpnAdminMainPerf, Useractions, CustomUser, GifuAreas, UserTracks, templocation, UserUpload, EventUser, GoalImages, CheckinImages
|
from .models import RogUser, Location, SystemSettings, JoinedEvent, Favorite, TravelList, TravelPoint, ShapeLayers, Event, Location_line, Location_polygon, JpnAdminMainPerf, Useractions, CustomUser, GifuAreas, UserTracks, templocation, UserUpload, EventUser, GoalImages, CheckinImages, NewEvent2, Team, NewCategory, Entry, Member, TempUser,GifurogeRegister
|
||||||
from django.contrib.auth.admin import UserAdmin
|
from django.contrib.auth.admin import UserAdmin
|
||||||
from django.urls import path
|
from django.urls import path,reverse
|
||||||
from django.shortcuts import render
|
from django.shortcuts import render
|
||||||
from django import forms;
|
from django import forms;
|
||||||
import requests
|
import requests
|
||||||
|
|
||||||
|
from django.http import HttpResponseRedirect
|
||||||
|
from django.utils.html import format_html
|
||||||
|
from .forms import CSVUploadForm
|
||||||
|
from .views import process_csv_upload
|
||||||
|
|
||||||
|
from django.db.models import F # F式をインポート
|
||||||
|
from django.db import transaction
|
||||||
|
from django.contrib import messages
|
||||||
|
import csv
|
||||||
|
from io import StringIO,TextIOWrapper
|
||||||
|
from datetime import timedelta
|
||||||
|
from django.contrib.auth.hashers import make_password
|
||||||
|
from datetime import datetime, date
|
||||||
|
from django.core.exceptions import ValidationError
|
||||||
|
|
||||||
|
from django.contrib.auth.forms import UserChangeForm, UserCreationForm
|
||||||
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
|
||||||
|
from .services.csv_processor import EntryCSVProcessor
|
||||||
|
|
||||||
|
@admin.register(Entry)
|
||||||
|
class EntryAdmin(admin.ModelAdmin):
|
||||||
|
list_display = ['team', 'event', 'category', 'date', 'is_active']
|
||||||
|
|
||||||
|
def get_urls(self):
|
||||||
|
from django.urls import path
|
||||||
|
urls = super().get_urls()
|
||||||
|
custom_urls = [
|
||||||
|
path('upload-csv/', self.upload_csv_view, name='entry_upload_csv'),
|
||||||
|
]
|
||||||
|
return custom_urls + urls
|
||||||
|
|
||||||
|
def upload_csv_view(self, request):
|
||||||
|
processor = EntryCSVProcessor()
|
||||||
|
return processor.process_upload(request)
|
||||||
|
|
||||||
|
@admin.register(GifurogeRegister)
|
||||||
|
class GifurogeRegisterAdmin(admin.ModelAdmin):
|
||||||
|
list_display = ('event_code', 'time', 'owner_name', 'email', 'team_name', 'department')
|
||||||
|
change_list_template = 'admin/rog/gifurogeregister/change_list.html' # この行を追加
|
||||||
|
|
||||||
|
def find_matching_category(self, time, department):
|
||||||
|
"""
|
||||||
|
時間とdepartmentに基づいて適切なカテゴリを見つける
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
duration = timedelta(hours=time)
|
||||||
|
|
||||||
|
# 検索前の情報出力
|
||||||
|
print(f" Searching for category with parameters:")
|
||||||
|
print(f" - Duration: {duration}")
|
||||||
|
print(f" - Department: {department}")
|
||||||
|
|
||||||
|
# 利用可能なカテゴリの一覧を出力
|
||||||
|
all_categories = NewCategory.objects.all()
|
||||||
|
print(" Available categories:")
|
||||||
|
for cat in all_categories:
|
||||||
|
#print(f" - ID: {cat.id}")
|
||||||
|
print(f" - Name: {cat.category_name}")
|
||||||
|
print(f" - Duration: {cat.duration}")
|
||||||
|
print(f" - Number: {cat.category_number}")
|
||||||
|
|
||||||
|
# カテゴリ検索のクエリをログ出力
|
||||||
|
query = NewCategory.objects.filter(
|
||||||
|
duration=duration,
|
||||||
|
category_name__startswith=department
|
||||||
|
)
|
||||||
|
print(f" Query SQL: {query.query}")
|
||||||
|
|
||||||
|
# 検索結果の取得
|
||||||
|
category = query.first()
|
||||||
|
|
||||||
|
if category:
|
||||||
|
print(f" Found matching category:")
|
||||||
|
print(f" - Name: {category.category_name}")
|
||||||
|
print(f" - Duration: {category.duration}")
|
||||||
|
print(f" - Category Number: {getattr(category, 'category_number', 'N/A')}")
|
||||||
|
|
||||||
|
else:
|
||||||
|
print(" No matching category found with the following filters:")
|
||||||
|
print(f" - Duration equals: {duration}")
|
||||||
|
print(f" - Category name starts with: {department}")
|
||||||
|
|
||||||
|
return category
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
print(f"Error finding category: {e}")
|
||||||
|
print(f"Exception type: {type(e)}")
|
||||||
|
import traceback
|
||||||
|
print(f"Traceback: {traceback.format_exc()}")
|
||||||
|
return None
|
||||||
|
|
||||||
|
def create_entry_with_number(self, team, category, owner, event):
|
||||||
|
"""
|
||||||
|
カテゴリ番号をインクリメントしてエントリーを作成
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
with transaction.atomic():
|
||||||
|
# 事前バリデーション
|
||||||
|
try:
|
||||||
|
# チームメンバーの性別をチェック
|
||||||
|
if category.female:
|
||||||
|
for member in team.members.all():
|
||||||
|
|
||||||
|
print(f" Check existing member {member.user.lastname} {member.user.firstname} female:{member.user.female}")
|
||||||
|
if not member.user.female:
|
||||||
|
raise ValidationError(f"チーム '{team.team_name}' に男性メンバーが含まれているため、"
|
||||||
|
f"カテゴリー '{category.category_name}' には参加できません。")
|
||||||
|
except ValidationError as e:
|
||||||
|
print(f"Pre-validation error: {str(e)}")
|
||||||
|
raise
|
||||||
|
|
||||||
|
# カテゴリを再度ロックして取得
|
||||||
|
category_for_update = NewCategory.objects.select_for_update().get(
|
||||||
|
category_name=category.category_name
|
||||||
|
)
|
||||||
|
|
||||||
|
print(f" Creating entry with following details:")
|
||||||
|
print(f" - Category: {category_for_update.category_name}")
|
||||||
|
print(f" - Current category number: {category_for_update.category_number}")
|
||||||
|
|
||||||
|
# イベントの日付を取得
|
||||||
|
entry_date = event.start_datetime.date()
|
||||||
|
|
||||||
|
# 既存のエントリーをチェック
|
||||||
|
existing_entry = Entry.objects.filter(
|
||||||
|
team=team,
|
||||||
|
event=event,
|
||||||
|
date=entry_date,
|
||||||
|
is_active=True # アクティブなエントリーのみをチェック
|
||||||
|
).first()
|
||||||
|
|
||||||
|
if existing_entry:
|
||||||
|
print(f" Found existing entry for team {team.team_name} on {entry_date}")
|
||||||
|
raise ValidationError(
|
||||||
|
f"Team {team.team_name} already has an entry for event {event.event_name} on {entry_date}"
|
||||||
|
)
|
||||||
|
|
||||||
|
# 現在の番号を取得してインクリメント
|
||||||
|
current_number = category_for_update.category_number
|
||||||
|
zekken_number = current_number
|
||||||
|
|
||||||
|
# カテゴリ番号をインクリメント
|
||||||
|
category_for_update.category_number = F('category_number') + 1
|
||||||
|
category_for_update.save()
|
||||||
|
|
||||||
|
# 変更後の値を取得して表示
|
||||||
|
category_for_update.refresh_from_db()
|
||||||
|
print(f" Updated category number: {category_for_update.category_number}")
|
||||||
|
|
||||||
|
# エントリーの作成
|
||||||
|
try:
|
||||||
|
entry = Entry.objects.create(
|
||||||
|
date=event.start_datetime,
|
||||||
|
team=team,
|
||||||
|
category=category,
|
||||||
|
owner=owner,
|
||||||
|
event=event,
|
||||||
|
zekken_number=zekken_number,
|
||||||
|
is_active=True
|
||||||
|
)
|
||||||
|
# バリデーションを実行
|
||||||
|
entry.full_clean()
|
||||||
|
# 問題なければ保存
|
||||||
|
entry.save()
|
||||||
|
|
||||||
|
print(f" Created entry:")
|
||||||
|
print(f" - Team: {team.team_name}")
|
||||||
|
print(f" - Event: {event.event_name}")
|
||||||
|
print(f" - Category: {category.category_name}")
|
||||||
|
print(f" - Zekken Number: {zekken_number}")
|
||||||
|
|
||||||
|
return entry
|
||||||
|
|
||||||
|
except ValidationError as e:
|
||||||
|
print(f"Entry validation error: {str(e)}")
|
||||||
|
raise
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
print(f"Error creating entry: {e}")
|
||||||
|
print(f"Exception type: {type(e)}")
|
||||||
|
import traceback
|
||||||
|
print(f"Traceback: {traceback.format_exc()}")
|
||||||
|
raise
|
||||||
|
|
||||||
|
def split_full_name(self, full_name):
|
||||||
|
"""
|
||||||
|
フルネームを姓と名に分割
|
||||||
|
半角または全角スペースに対応
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
# 空白文字で分割(半角スペース、全角スペース、タブなど)
|
||||||
|
parts = full_name.replace('　', ' ').split()
|
||||||
|
if len(parts) >= 2:
|
||||||
|
last_name = parts[0]
|
||||||
|
first_name = ' '.join(parts[1:]) # 名が複数単語の場合に対応
|
||||||
|
return last_name, first_name
|
||||||
|
else:
|
||||||
|
# 分割できない場合は全体を姓とする
|
||||||
|
return full_name, ''
|
||||||
|
except Exception as e:
|
||||||
|
print(f"Error splitting name '{full_name}': {e}")
|
||||||
|
return full_name, ''
|
||||||
|
|
||||||
|
def convert_japanese_date(self, date_text):
|
||||||
|
"""
|
||||||
|
日本式の日付テキストをDateField形式に変換
|
||||||
|
例: '1990年1月1日' -> datetime.date(1990, 1, 1)
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
if not date_text or date_text.strip() == '':
|
||||||
|
return None
|
||||||
|
|
||||||
|
# 全角数字を半角数字に変換
|
||||||
|
date_text = date_text.translate(str.maketrans('０１２３４５６７８９', '0123456789'))
|
||||||
|
date_text = date_text.strip()
|
||||||
|
|
||||||
|
# 区切り文字の判定と分割
|
||||||
|
if '年' in date_text:
|
||||||
|
# 年月日形式の場合
|
||||||
|
date_parts = date_text.replace('年', '-').replace('月', '-').replace('日', '').split('-')
|
||||||
|
elif '/' in date_text:
|
||||||
|
# スラッシュ区切りの場合
|
||||||
|
date_parts = date_text.split('/')
|
||||||
|
elif '-' in date_text:
|
||||||
|
date_parts = date_text.split('-')
|
||||||
|
else:
|
||||||
|
print(f"Unsupported date format: {date_text}")
|
||||||
|
return None
|
||||||
|
|
||||||
|
# 部分の数を確認
|
||||||
|
if len(date_parts) != 3:
|
||||||
|
print(f"Invalid date parts count: {len(date_parts)} in '{date_text}'")
|
||||||
|
return None
|
||||||
|
|
||||||
|
year = int(date_parts[0])
|
||||||
|
month = int(date_parts[1])
|
||||||
|
day = int(date_parts[2])
|
||||||
|
|
||||||
|
# 簡単な妥当性チェック
|
||||||
|
if not (1900 <= year <= 2100):
|
||||||
|
print(f"Invalid year: {year}")
|
||||||
|
return None
|
||||||
|
if not (1 <= month <= 12):
|
||||||
|
print(f"Invalid month: {month}")
|
||||||
|
return None
|
||||||
|
if not (1 <= day <= 31): # 月ごとの日数チェックは省略
|
||||||
|
print(f"Invalid day: {day}")
|
||||||
|
return None
|
||||||
|
|
||||||
|
print(f"Converted from {date_text} to year-{year} / month-{month} / day-{day}")
|
||||||
|
|
||||||
|
return date(year, month, day)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
print(f"Error converting date '{date_text}': {str(e)}")
|
||||||
|
return None
|
||||||
|
|
||||||
|
def create_owner_member( self,team,row ):
|
||||||
|
"""
|
||||||
|
オーナーをチームメンバー1として作成
|
||||||
|
既存のメンバーは更新
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
owner_name = row.get('owner_name').strip()
|
||||||
|
# 姓名を分割
|
||||||
|
last_name, first_name = self.split_full_name(owner_name)
|
||||||
|
print(f" Split name - Last: {last_name}, First: {first_name}")
|
||||||
|
# 誕生日の処理
|
||||||
|
birthday = row.get(f'owner_birthday', '').strip()
|
||||||
|
birth_date = self.convert_japanese_date(birthday)
|
||||||
|
print(f" Converted birthday: {birth_date}")
|
||||||
|
|
||||||
|
# 性別の処理
|
||||||
|
sex = row.get(f'owner_sex', '').strip()
|
||||||
|
is_female = sex in ['女性','女','女子','female','girl','lady']
|
||||||
|
print(f" Sex: {sex}, is_female: {is_female}")
|
||||||
|
|
||||||
|
# メンバーを作成
|
||||||
|
member,created = Member.objects.get_or_create(
|
||||||
|
team=team,
|
||||||
|
user=team.owner,
|
||||||
|
defaults={
|
||||||
|
'is_temporary': True # 仮登録
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# 既存メンバーの場合は情報を更新
|
||||||
|
if not created:
|
||||||
|
member.lastname = last_name
|
||||||
|
member.firstname = first_name
|
||||||
|
member.date_of_birth = birth_date
|
||||||
|
member.female = is_female
|
||||||
|
member.is_temporary = True
|
||||||
|
member.save()
|
||||||
|
print(f" Updated existing member {last_name} {first_name}")
|
||||||
|
else:
|
||||||
|
print(f" Created new member {last_name} {first_name}")
|
||||||
|
|
||||||
|
return member
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
print(f"Error creating/updating member: {e}")
|
||||||
|
raise
|
||||||
|
|
||||||
|
def create_members(self, team, row):
|
||||||
|
"""
|
||||||
|
チームのメンバーを作成
|
||||||
|
既存のメンバーは更新
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
created_members = []
|
||||||
|
|
||||||
|
# オーナーをメンバーに登録
|
||||||
|
member = self.create_owner_member(team,row)
|
||||||
|
created_members.append(member)
|
||||||
|
|
||||||
|
# メンバー2から5までを処理
|
||||||
|
for i in range(2, 6):
|
||||||
|
member_name = row.get(f'member{i}', '').strip()
|
||||||
|
if member_name:
|
||||||
|
print(f"===== Processing member: {member_name} =====")
|
||||||
|
|
||||||
|
# 姓名を分割
|
||||||
|
last_name, first_name = self.split_full_name(member_name)
|
||||||
|
print(f" Split name - Last: {last_name}, First: {first_name}")
|
||||||
|
|
||||||
|
# 誕生日の処理
|
||||||
|
birthday = row.get(f'birthday{i}', '').strip()
|
||||||
|
birth_date = self.convert_japanese_date(birthday)
|
||||||
|
print(f" Converted birthday: {birth_date}")
|
||||||
|
|
||||||
|
# 性別の処理
|
||||||
|
sex = row.get(f'sex{i}', '').strip()
|
||||||
|
is_female = sex in ['女性','女','女子','female','girl','lady']
|
||||||
|
print(f" Sex: {sex}, is_female: {is_female}")
|
||||||
|
|
||||||
|
# メンバー用のユーザーを作成
|
||||||
|
email = f"dummy_{team.id}_{i}@gifuai.net".lower()
|
||||||
|
member_user, created = CustomUser.objects.get_or_create(
|
||||||
|
email=email,
|
||||||
|
defaults={
|
||||||
|
'password': make_password('temporary_password'),
|
||||||
|
'lastname': last_name,
|
||||||
|
'firstname': first_name,
|
||||||
|
'date_of_birth': birth_date,
|
||||||
|
'female':is_female
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
# 既存ユーザーの場合も姓名を更新
|
||||||
|
if not created:
|
||||||
|
member_user.lastname = last_name
|
||||||
|
member_user.firstname = first_name
|
||||||
|
member_user.date_of_birth = birth_date
|
||||||
|
member_user.female = is_female
|
||||||
|
member_user.save()
|
||||||
|
|
||||||
|
try:
|
||||||
|
# メンバーを作成
|
||||||
|
member,created = Member.objects.get_or_create(
|
||||||
|
team=team,
|
||||||
|
user=member_user,
|
||||||
|
defaults={
|
||||||
|
'is_temporary': True # 仮登録
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
# 既存メンバーの場合は情報を更新
|
||||||
|
if not created:
|
||||||
|
member.is_temporary = True
|
||||||
|
member.save()
|
||||||
|
print(f" Updated existing member {member_user.lastname} {member_user.firstname}")
|
||||||
|
else:
|
||||||
|
print(f" Created new member {member_user.lastname} {member_user.firstname}")
|
||||||
|
|
||||||
|
created_members.append(member)
|
||||||
|
print(f" - Birthday: {member_user.date_of_birth}")
|
||||||
|
print(f" - Sex: {'Female' if member_user.female else 'Male'}")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
print(f"Error creating/updating member: {e}")
|
||||||
|
raise
|
||||||
|
|
||||||
|
return created_members
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
print(f"Error creating members: {e}")
|
||||||
|
print(f"Exception type: {type(e)}")
|
||||||
|
import traceback
|
||||||
|
print(f"Traceback: {traceback.format_exc()}")
|
||||||
|
raise
|
||||||
|
|
||||||
|
def get_urls(self):
|
||||||
|
urls = super().get_urls()
|
||||||
|
custom_urls = [
|
||||||
|
path('upload-csv/', self.upload_csv, name='gifuroge_register_upload_csv'),
|
||||||
|
]
|
||||||
|
return custom_urls + urls
|
||||||
|
|
||||||
|
def upload_csv(self, request):
|
||||||
|
print("upload_csv")
|
||||||
|
if request.method == 'POST':
|
||||||
|
print("POST")
|
||||||
|
if 'csv_file' not in request.FILES:
|
||||||
|
messages.error(request, 'No file was uploaded.')
|
||||||
|
return redirect('..')
|
||||||
|
|
||||||
|
csv_file = request.FILES['csv_file']
|
||||||
|
print(f"csv_file(1) = {csv_file}")
|
||||||
|
if not csv_file.name.endswith('.csv'):
|
||||||
|
messages.error(request, 'File is not CSV type')
|
||||||
|
return redirect('..')
|
||||||
|
|
||||||
|
try:
|
||||||
|
# BOMを考慮してファイルを読み込む
|
||||||
|
file_content = csv_file.read()
|
||||||
|
# BOMがある場合は除去
|
||||||
|
if file_content.startswith(b'\xef\xbb\xbf'):
|
||||||
|
file_content = file_content[3:]
|
||||||
|
|
||||||
|
# デコード
|
||||||
|
file_content = file_content.decode('utf-8')
|
||||||
|
csv_file = StringIO(file_content)
|
||||||
|
reader = csv.DictReader(csv_file)
|
||||||
|
|
||||||
|
print(f"csv_file(2) = {csv_file}")
|
||||||
|
print(f"reader = {reader}")
|
||||||
|
|
||||||
|
with transaction.atomic():
|
||||||
|
for row in reader:
|
||||||
|
print(f" row={row}")
|
||||||
|
|
||||||
|
# オーナーの姓名を分割
|
||||||
|
owner_lastname, owner_firstname = self.split_full_name(row['owner_name'])
|
||||||
|
|
||||||
|
# パスワードをハッシュ化
|
||||||
|
hashed_password = make_password(row['password'])
|
||||||
|
|
||||||
|
# オーナーの誕生日の処理
|
||||||
|
owner_birthday = row.get('owner_birthday', '').strip()
|
||||||
|
owner_birth_date = self.convert_japanese_date(owner_birthday)
|
||||||
|
print(f" Owner birthday: {owner_birth_date}")
|
||||||
|
|
||||||
|
# オーナーの性別の処理
|
||||||
|
owner_sex = row.get('owner_sex', '').strip()
|
||||||
|
owner_is_female = owner_sex in ['女性','女','女子','female','girl','lady']
|
||||||
|
print(f" Owner sex: {owner_sex}, is_female: {owner_is_female}")
|
||||||
|
|
||||||
|
# ユーザーの取得または作成
|
||||||
|
user, created = CustomUser.objects.get_or_create(
|
||||||
|
email=row['email'],
|
||||||
|
defaults={
|
||||||
|
'password': hashed_password, # make_password(row['password'])
|
||||||
|
'lastname': owner_lastname,
|
||||||
|
'firstname': owner_firstname,
|
||||||
|
'date_of_birth': owner_birth_date,
|
||||||
|
'female': owner_is_female
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
if not created:
|
||||||
|
# 既存ユーザーの場合、空のフィールドがあれば更新
|
||||||
|
should_update = False
|
||||||
|
update_fields = []
|
||||||
|
|
||||||
|
print(f" Checking existing user data for {user.email}:")
|
||||||
|
print(f" - Current lastname: '{user.lastname}'")
|
||||||
|
print(f" - Current firstname: '{user.firstname}'")
|
||||||
|
print(f" - Current birth date: {user.date_of_birth}")
|
||||||
|
print(f" - Current female: {user.female}")
|
||||||
|
|
||||||
|
# 姓が空またはNoneの場合
|
||||||
|
if not user.lastname or user.lastname.strip() == '':
|
||||||
|
user.lastname = owner_lastname
|
||||||
|
should_update = True
|
||||||
|
update_fields.append('lastname')
|
||||||
|
print(f" - Updating lastname to: {owner_lastname}")
|
||||||
|
|
||||||
|
# 名が空またはNoneの場合
|
||||||
|
if not user.firstname or user.firstname.strip() == '':
|
||||||
|
user.firstname = owner_firstname
|
||||||
|
should_update = True
|
||||||
|
update_fields.append('firstname')
|
||||||
|
print(f" - Updating firstname to: {owner_firstname}")
|
||||||
|
|
||||||
|
# 生年月日が空またはNoneの場合
|
||||||
|
if not user.date_of_birth and owner_birth_date:
|
||||||
|
user.date_of_birth = owner_birth_date
|
||||||
|
should_update = True
|
||||||
|
update_fields.append('date_of_birth')
|
||||||
|
print(f" - Updating birth date to: {owner_birth_date}")
|
||||||
|
|
||||||
|
# 性別が空またはNoneの場合
|
||||||
|
# Booleanフィールドなのでis None で判定
|
||||||
|
if user.female is None:
|
||||||
|
user.female = owner_is_female
|
||||||
|
should_update = True
|
||||||
|
update_fields.append('female')
|
||||||
|
print(f" - Updating female to: {owner_is_female}")
|
||||||
|
|
||||||
|
# パスワードが'登録済み'でない場合のみ更新
|
||||||
|
if row['password'] != '登録済み':
|
||||||
|
user.password = hashed_password
|
||||||
|
should_update = True
|
||||||
|
update_fields.append('password')
|
||||||
|
print(f" - Updating password")
|
||||||
|
|
||||||
|
# 変更があった場合のみ保存
|
||||||
|
if should_update:
|
||||||
|
try:
|
||||||
|
# 特定のフィールドのみを更新
|
||||||
|
user.save(update_fields=update_fields)
|
||||||
|
print(f" Updated user {user.email} fields: {', '.join(update_fields)}")
|
||||||
|
except Exception as e:
|
||||||
|
print(f" Error updating user {user.email}: {str(e)}")
|
||||||
|
raise
|
||||||
|
else:
|
||||||
|
print(f" No updates needed for user {user.email}")
|
||||||
|
|
||||||
|
|
||||||
|
print(f" user created...")
|
||||||
|
print(f" Owner member created: {user.lastname} {user.firstname}")
|
||||||
|
print(f" - Birthday: {user.date_of_birth}")
|
||||||
|
print(f" - Sex: {'Female' if user.female else 'Male'}")
|
||||||
|
|
||||||
|
# 適切なカテゴリを見つける
|
||||||
|
category = self.find_matching_category(
|
||||||
|
time=int(row['time']),
|
||||||
|
department=row['department']
|
||||||
|
)
|
||||||
|
|
||||||
|
if not category:
|
||||||
|
raise ValueError(
|
||||||
|
f"No matching category found for time={row['time']} minutes "
|
||||||
|
f"and department={row['department']}"
|
||||||
|
)
|
||||||
|
|
||||||
|
print(f" Using category: {category.category_name}")
|
||||||
|
|
||||||
|
# Teamの作成(既存のチームがある場合は取得)
|
||||||
|
team, created = Team.objects.get_or_create(
|
||||||
|
team_name=row['team_name'],
|
||||||
|
defaults={
|
||||||
|
'owner': user,
|
||||||
|
'category': category
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
# 既存のチームの場合でもカテゴリを更新
|
||||||
|
if not created:
|
||||||
|
team.category = category
|
||||||
|
team.save()
|
||||||
|
|
||||||
|
print(" team created/updated...")
|
||||||
|
|
||||||
|
self.create_members(team, row)
|
||||||
|
|
||||||
|
# イベントの検索
|
||||||
|
try:
|
||||||
|
event_code = row['event_code']
|
||||||
|
event = NewEvent2.objects.get(event_name=event_code)
|
||||||
|
print(f" Found event: {event.event_name}")
|
||||||
|
except NewEvent2.DoesNotExist:
|
||||||
|
raise ValueError(f"Event with code {event_code} does not exist")
|
||||||
|
|
||||||
|
try:
|
||||||
|
# エントリーの作成
|
||||||
|
entry = self.create_entry_with_number(
|
||||||
|
team=team,
|
||||||
|
category=category,
|
||||||
|
owner=user,
|
||||||
|
event=event,
|
||||||
|
)
|
||||||
|
|
||||||
|
print(" entry created...")
|
||||||
|
except ValidationError as e:
|
||||||
|
messages.error(request, str(e))
|
||||||
|
return redirect('..')
|
||||||
|
|
||||||
|
gifuroge_register = GifurogeRegister.objects.create(
|
||||||
|
event_code=row['event_code'],
|
||||||
|
time=int(row['time']),
|
||||||
|
owner_name_kana=row['owner_name_kana'],
|
||||||
|
owner_name=row['owner_name'],
|
||||||
|
owner_birthday=self.convert_japanese_date(row['owner_birthday']),
|
||||||
|
owner_sex=row['owner_sex'],
|
||||||
|
email=row['email'],
|
||||||
|
password=row['password'],
|
||||||
|
team_name=row['team_name'],
|
||||||
|
department=row['department'],
|
||||||
|
members_count=int(row['members_count']),
|
||||||
|
member2=row.get('member2', '') or None,
|
||||||
|
birthday2=self.convert_japanese_date(row.get('birthday2', '') ),
|
||||||
|
sex2=row.get('sex2', '') or None,
|
||||||
|
member3=row.get('member3', '') or None,
|
||||||
|
birthday3=self.convert_japanese_date(row.get('birthday3', '') ),
|
||||||
|
sex3=row.get('sex3', '') or None,
|
||||||
|
member4=row.get('member4', '') or None,
|
||||||
|
birthday4=self.convert_japanese_date(row.get('birthday4', '') ),
|
||||||
|
sex4=row.get('sex4', '') or None,
|
||||||
|
member5=row.get('member5', '') or None,
|
||||||
|
birthday5=self.convert_japanese_date(row.get('birthday5', '') ),
|
||||||
|
sex5=row.get('sex5', '') or None
|
||||||
|
)
|
||||||
|
print(f" saved gifuroge_register...")
|
||||||
|
|
||||||
|
except UnicodeDecodeError:
|
||||||
|
messages.error(request, 'File encoding error. Please ensure the file is UTF-8 encoded.')
|
||||||
|
return redirect('..')
|
||||||
|
except Exception as e:
|
||||||
|
print(f"Error processing row: {e}")
|
||||||
|
raise
|
||||||
|
|
||||||
|
messages.success(request, 'CSV file uploaded successfully')
|
||||||
|
return redirect('..')
|
||||||
|
|
||||||
|
return render(request, 'admin/rog/gifurogeregister/upload-csv.html')
|
||||||
|
|
||||||
class RogAdmin(LeafletAdminListMixin, LeafletGeoAdminMixin, admin.ModelAdmin):
|
class RogAdmin(LeafletAdminListMixin, LeafletGeoAdminMixin, admin.ModelAdmin):
|
||||||
list_display=['title', 'venue', 'at_date',]
|
list_display=['title', 'venue', 'at_date',]
|
||||||
|
|
||||||
@ -53,21 +673,25 @@ class UserAdminConfig(UserAdmin):
|
|||||||
print("-------Event code--------")
|
print("-------Event code--------")
|
||||||
for i in data:
|
for i in data:
|
||||||
_exist = CustomUser.objects.filter(email=i["zekken_number"]).delete()
|
_exist = CustomUser.objects.filter(email=i["zekken_number"]).delete()
|
||||||
|
other_fields.setdefault('zekken_number', i['zekken_number'])
|
||||||
|
other_fields.setdefault('is_staff', True)
|
||||||
|
other_fields.setdefault('is_superuser', False)
|
||||||
|
other_fields.setdefault('is_active', True)
|
||||||
|
other_fields.setdefault('event_code', i['event_code'])
|
||||||
|
other_fields.setdefault('team_name', i['team_name'])
|
||||||
|
other_fields.setdefault('group', '大垣-初心者')
|
||||||
|
|
||||||
usr = CustomUser.objects.create_user(
|
usr = CustomUser.objects.create_user(
|
||||||
email=i["zekken_number"],
|
email=i["zekken_number"],
|
||||||
zekken_number=i['zekken_number'],
|
|
||||||
is_rogaining=True,
|
|
||||||
event_code=i['event_code'],
|
|
||||||
team_name=i['team_name'],
|
|
||||||
group='大垣-初心者',
|
|
||||||
password=i['password'],
|
password=i['password'],
|
||||||
is_staff= True
|
**other_fields
|
||||||
)
|
)
|
||||||
|
|
||||||
form = loadUserForm()
|
form = loadUserForm()
|
||||||
data = {'form': form}
|
data = {'form': form}
|
||||||
return render(request, 'admin/load_users.html', data)
|
return render(request, 'admin/load_users.html', data)
|
||||||
|
|
||||||
|
"""
|
||||||
fieldsets = (
|
fieldsets = (
|
||||||
(None, {'fields':('email', 'group', 'zekken_number', 'event_code', 'team_name',)}),
|
(None, {'fields':('email', 'group', 'zekken_number', 'event_code', 'team_name',)}),
|
||||||
('Permissions', {'fields':('is_staff', 'is_active', 'is_rogaining')}),
|
('Permissions', {'fields':('is_staff', 'is_active', 'is_rogaining')}),
|
||||||
@ -76,6 +700,35 @@ class UserAdminConfig(UserAdmin):
|
|||||||
add_fieldsets = (
|
add_fieldsets = (
|
||||||
(None, {'classes':('wide',), 'fields':('email', 'group','zekken_number', 'event_code', 'team_name', 'password1', 'password2')}),
|
(None, {'classes':('wide',), 'fields':('email', 'group','zekken_number', 'event_code', 'team_name', 'password1', 'password2')}),
|
||||||
)
|
)
|
||||||
|
"""
|
||||||
|
# readonly_fieldsを明示的に設定
|
||||||
|
readonly_fields = ('date_joined',) # 変更不可のフィールドのみを指定=>Personal Infoも編集可能にする。
|
||||||
|
|
||||||
|
fieldsets = (
|
||||||
|
(None, {'fields': ('email', 'password')}),
|
||||||
|
(_('Personal info'), {
|
||||||
|
'fields': ('firstname', 'lastname', 'date_of_birth', 'female'),
|
||||||
|
'classes': ('wide',) # フィールドの表示を広げる
|
||||||
|
}),
|
||||||
|
(_('Permissions'), {'fields': ('is_staff', 'is_active', 'is_rogaining','user_permissions')}),
|
||||||
|
(_('Rogaining info'), {
|
||||||
|
'fields': ('zekken_number', 'event_code', 'team_name', 'group'),
|
||||||
|
'classes': ('wide',)
|
||||||
|
}),
|
||||||
|
(_('Important dates'), {
|
||||||
|
'fields': ('date_joined','last_login'),
|
||||||
|
'classes': ('wide',)
|
||||||
|
}), # 読み取り専用
|
||||||
|
)
|
||||||
|
add_fieldsets = (
|
||||||
|
(None, {
|
||||||
|
'classes': ('wide',),
|
||||||
|
#'fields': ('email', 'password1', 'password2', 'is_staff', 'is_active', 'is_rogaining')}
|
||||||
|
'fields': ('email', 'password1', 'password2', 'lastname','firstname', 'date_of_birth', 'female','is_staff', 'is_active', 'is_rogaining')}
|
||||||
|
),
|
||||||
|
)
|
||||||
|
search_fields = ('email', 'firstname', 'lastname', 'zekken_number', 'team_name')
|
||||||
|
ordering = ('email',)
|
||||||
|
|
||||||
class JpnSubPerfAdmin(LeafletGeoAdmin):
|
class JpnSubPerfAdmin(LeafletGeoAdmin):
|
||||||
search_fields = ('adm0_ja', 'adm1_ja', 'adm2_ja', 'name_modified', 'area_name',)
|
search_fields = ('adm0_ja', 'adm1_ja', 'adm2_ja', 'name_modified', 'area_name',)
|
||||||
@ -194,29 +847,130 @@ class TempLocationAdmin(LeafletGeoAdmin):
|
|||||||
actions = [tranfer_to_location,]
|
actions = [tranfer_to_location,]
|
||||||
|
|
||||||
|
|
||||||
|
@admin.register(NewEvent2)
|
||||||
|
class NewEvent2Admin(admin.ModelAdmin):
|
||||||
|
list_display = ['event_name', 'start_datetime', 'end_datetime', 'csv_upload_button']
|
||||||
|
|
||||||
admin.site.register(Useractions)
|
def get_urls(self):
|
||||||
admin.site.register(RogUser, admin.ModelAdmin)
|
urls = super().get_urls()
|
||||||
admin.site.register(Location, LocationAdmin)
|
my_urls = [
|
||||||
admin.site.register(SystemSettings, admin.ModelAdmin)
|
path('csv-upload/', self.admin_site.admin_view(self.csv_upload_view), name='newevent2_csv_upload'),
|
||||||
admin.site.register(JoinedEvent, admin.ModelAdmin)
|
]
|
||||||
admin.site.register(Favorite, admin.ModelAdmin)
|
return my_urls + urls
|
||||||
admin.site.register(TravelList, admin.ModelAdmin)
|
|
||||||
admin.site.register(TravelPoint, admin.ModelAdmin)
|
|
||||||
admin.site.register(Event, admin.ModelAdmin)
|
|
||||||
admin.site.register(Location_line, LeafletGeoAdmin)
|
|
||||||
admin.site.register(Location_polygon, LeafletGeoAdmin)
|
|
||||||
admin.site.register(JpnAdminMainPerf, LeafletGeoAdmin)
|
|
||||||
admin.site.register(UserTracks, LeafletGeoAdmin);
|
|
||||||
#admin.site.register(JpnAdminPerf, LeafletGeoAdmin)
|
|
||||||
admin.site.register(GifuAreas, LeafletGeoAdmin)
|
|
||||||
admin.site.register(ShapeLayers, admin.ModelAdmin)
|
|
||||||
admin.site.register(UserUpload, admin.ModelAdmin)
|
|
||||||
admin.site.register(EventUser, admin.ModelAdmin)
|
|
||||||
#admin.site.register(UserUploadUser, admin.ModelAdmin)
|
|
||||||
#admin.site.register(ShapeFileLocations, admin.ModelAdmin)
|
|
||||||
|
|
||||||
admin.site.register(CustomUser, UserAdminConfig)
|
def csv_upload_view(self, request):
|
||||||
admin.site.register(templocation, TempLocationAdmin)
|
if request.method == 'POST':
|
||||||
admin.site.register(GoalImages, admin.ModelAdmin)
|
form = CSVUploadForm(request.POST, request.FILES)
|
||||||
admin.site.register(CheckinImages, admin.ModelAdmin)
|
if form.is_valid():
|
||||||
|
csv_file = request.FILES['csv_file']
|
||||||
|
event = form.cleaned_data['event']
|
||||||
|
process_csv_upload(csv_file, event)
|
||||||
|
self.message_user(request, "CSV file has been processed successfully.")
|
||||||
|
return HttpResponseRedirect("../")
|
||||||
|
else:
|
||||||
|
form = CSVUploadForm()
|
||||||
|
|
||||||
|
return render(request, 'admin/csv_upload.html', {'form': form})
|
||||||
|
|
||||||
|
def csv_upload_button(self, obj):
|
||||||
|
url = reverse('admin:newevent2_csv_upload')
|
||||||
|
return format_html('<a class="button" href="{}">CSVアップロード</a>', url)
|
||||||
|
csv_upload_button.short_description = 'CSV Upload'
|
||||||
|
|
||||||
|
def changelist_view(self, request, extra_context=None):
|
||||||
|
extra_context = extra_context or {}
|
||||||
|
extra_context['csv_upload_url'] = reverse('admin:newevent2_csv_upload')
|
||||||
|
return super().changelist_view(request, extra_context=extra_context)
|
||||||
|
|
||||||
|
|
||||||
|
@admin.register(Team)
|
||||||
|
class TeamAdmin(admin.ModelAdmin):
|
||||||
|
list_display = ['team_name', 'owner']
|
||||||
|
search_fields = ['team_name', 'owner__email']
|
||||||
|
|
||||||
|
@admin.register(NewCategory)
|
||||||
|
class NewCategoryAdmin(admin.ModelAdmin):
|
||||||
|
list_display = ['category_name', 'category_number', 'duration', 'num_of_member', 'family', 'female']
|
||||||
|
list_filter = ['family', 'female']
|
||||||
|
search_fields = ['category_name']
|
||||||
|
|
||||||
|
@admin.register(Entry)
|
||||||
|
class EntryAdmin(admin.ModelAdmin):
|
||||||
|
list_display = ['team', 'event', 'category', 'date']
|
||||||
|
list_filter = ['event', 'category']
|
||||||
|
search_fields = ['team__team_name', 'event__event_name']
|
||||||
|
|
||||||
|
@admin.register(Member)
|
||||||
|
class MemberAdmin(admin.ModelAdmin):
|
||||||
|
list_display = ['team', 'user']
|
||||||
|
search_fields = ['team__team_name', 'user__email']
|
||||||
|
|
||||||
|
@admin.register(TempUser)
|
||||||
|
class TempUserAdmin(admin.ModelAdmin):
|
||||||
|
list_display = ['email', 'is_rogaining', 'zekken_number', 'event_code', 'team_name', 'group', 'created_at', 'expires_at']
|
||||||
|
list_filter = ['is_rogaining', 'group']
|
||||||
|
search_fields = ['email', 'zekken_number', 'team_name']
|
||||||
|
|
||||||
|
|
||||||
|
# CustomUserAdmin の修正(既存のものを更新)
|
||||||
|
class CustomUserChangeForm(UserChangeForm):
|
||||||
|
class Meta(UserChangeForm.Meta):
|
||||||
|
model = CustomUser
|
||||||
|
fields = '__all__'
|
||||||
|
|
||||||
|
class CustomUserCreationForm(UserCreationForm):
|
||||||
|
class Meta(UserCreationForm.Meta):
|
||||||
|
model = CustomUser
|
||||||
|
fields = ('email', 'lastname', 'firstname', 'date_of_birth', 'female')
|
||||||
|
|
||||||
|
@admin.register(CustomUser)
|
||||||
|
class CustomUserAdmin(UserAdmin):
|
||||||
|
form = CustomUserChangeForm
|
||||||
|
add_form = CustomUserCreationForm
|
||||||
|
#model = CustomUser
|
||||||
|
|
||||||
|
list_display = ('email', 'is_staff', 'is_active', 'is_rogaining', 'zekken_number', 'event_code', 'team_name', 'group', 'firstname', 'lastname')
|
||||||
|
search_fields = ('email', 'firstname', 'lastname', 'zekken_number')
|
||||||
|
list_filter = ('is_staff', 'is_active', 'is_rogaining', 'group')
|
||||||
|
ordering = ('email',)
|
||||||
|
|
||||||
|
# readonly_fieldsを明示的に設定
|
||||||
|
readonly_fields = ('date_joined',) # 変更不可のフィールドのみを指定=>Personal Infoも編集可能にする。
|
||||||
|
|
||||||
|
fieldsets = (
|
||||||
|
(None, {'fields': ('email', 'password')}),
|
||||||
|
(_('Personal info'), {
|
||||||
|
'fields': ('firstname', 'lastname', 'date_of_birth', 'female'),
|
||||||
|
'classes': ('wide',) # フィールドの表示を広げる
|
||||||
|
}),
|
||||||
|
(_('Permissions'), {'fields': ('is_staff', 'is_active', 'is_rogaining','user_permissions')}),
|
||||||
|
(_('Rogaining info'), {
|
||||||
|
'fields': ('zekken_number', 'event_code', 'team_name', 'group'),
|
||||||
|
'classes': ('wide',)
|
||||||
|
}),
|
||||||
|
(_('Important dates'), {
|
||||||
|
'fields': ('date_joined','last_login'),
|
||||||
|
'classes': ('wide',)
|
||||||
|
}), # 読み取り専用
|
||||||
|
)
|
||||||
|
add_fieldsets = (
|
||||||
|
(None, {
|
||||||
|
'classes': ('wide',),
|
||||||
|
#'fields': ('email', 'password1', 'password2', 'is_staff', 'is_active', 'is_rogaining')}
|
||||||
|
'fields': ('email', 'password1', 'password2', 'lastname','firstname', 'date_of_birth', 'female','is_staff', 'is_active', 'is_rogaining')}
|
||||||
|
),
|
||||||
|
)
|
||||||
|
search_fields = ('email', 'firstname', 'lastname', 'zekken_number', 'team_name')
|
||||||
|
ordering = ('email',)
|
||||||
|
|
||||||
|
def get_readonly_fields_old(self, request, obj=None):
|
||||||
|
# スーパーユーザーの場合は読み取り専用フィールドを最小限に
|
||||||
|
if request.user.is_superuser:
|
||||||
|
return self.readonly_fields
|
||||||
|
# 通常のスタッフユーザーの場合は追加の制限を設定可能
|
||||||
|
return self.readonly_fields + ('is_staff', 'is_superuser')
|
||||||
|
|
||||||
|
def get_readonly_fields(self, request, obj=None):
|
||||||
|
if request.user.is_superuser:
|
||||||
|
return ('date_joined', 'last_login')
|
||||||
|
return ('date_joined', 'last_login', 'is_staff', 'is_superuser')
|
||||||
|
|||||||
@ -3,12 +3,18 @@ from django.conf import settings
from .models import CustomUser
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth import get_user_model
from django.contrib.auth.hashers import check_password
import logging

logger = logging.getLogger(__name__)


class EmailOrUsernameModelBackend(ModelBackend):
    """
    This is a ModelBacked that allows authentication
    with either a username or an email address.

    """
    """
    def authenticate(self, username=None, password=None):
        if '@' in username:
@ -27,3 +33,34 @@ class EmailOrUsernameModelBackend(ModelBackend):
            return CustomUser.objects.get(pk=username)
        except get_user_model().DoesNotExist:
            return None
    """

    def authenticate(self, request, username=None, password=None, **kwargs):
        if '@' in username:
            kwargs = {'email': username}
        else:
            kwargs = {'username': username}
        try:
            user = CustomUser.objects.get(**kwargs)
            if check_password(password, user.password):
                logger.info(f"User authenticated successfully: {username}")
                return user
            else:
                logger.warning(f"Password mismatch for user: {username}")
        except CustomUser.DoesNotExist:
            logger.warning(f"User does not exist: {username}")
        except Exception as e:
            logger.error(f"Authentication error for {username}: {str(e)}")
        return None

    def get_user(self, user_id):
        try:
            user = CustomUser.objects.get(pk=user_id)
            logger.info(f"User retrieved: {user.username or user.email}")
            return user
        except CustomUser.DoesNotExist:
            logger.warning(f"User with id {user_id} does not exist")
            return None
        except Exception as e:
            logger.error(f"Error retrieving user with id {user_id}: {str(e)}")
            return None
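The rewritten backend accepts either an e-mail address or a username and logs every outcome. A minimal sketch of exercising it through Django's standard authenticate() call, assuming the backend is listed in AUTHENTICATION_BACKENDS (that setting is not visible in this diff):

    from django.contrib.auth import authenticate

    def can_log_in(request, email, raw_password):
        user = authenticate(request, username=email, password=raw_password)
        # None covers unknown users, bad passwords and backend errors (all logged above)
        return user is not None and user.is_active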
7
rog/forms.py
Normal file
7
rog/forms.py
Normal file
@ -0,0 +1,7 @@
from django import forms
from .models import NewEvent2

class CSVUploadForm(forms.Form):
    event = forms.ModelChoiceField(queryset=NewEvent2.objects.all(), label="イベント選択")
    csv_file = forms.FileField(label="CSVファイル")
rog/gifuroge_team.csv (new file, 40 lines)
@@ -0,0 +1,40 @@
4019,関ケ原2,Best Wishes,ソロ女子-5時間,pbkdf2_sha256$260000$RPvncicp11ENXxwpcpMXi1$9e/fKcfwaX3sJ91q9S70KWQcrNlraliguiHjF/UCW/I=
4010,関ケ原2,まつげん,ソロ女子-5時間,pbkdf2_sha256$260000$LMvH0KtHeHbCuuUZ5n88VZ$Lnsqs/u45QKoFN6lUdqC79nIMz5LwaKWMpmX/0aEXa8=
4021,大垣3,まつげん,ソロ女子-5時間,pbkdf2_sha256$260000$LMvH0KtHeHbCuuUZ5n88VZ$Lnsqs/u45QKoFN6lUdqC79nIMz5LwaKWMpmX/0aEXa8=
5,関ケ原2,てすとあきら1,ソロ男子-5時間,pbkdf2_sha256$260000$0GY5pt5V127jGd8HkkEort$8ZL0eY2qTZHydyzUUN5LNKZnmmibfu1x3QQ/7rJX1Vc=
3003,関ケ原2,てすとあきら1,ソロ男子-5時間,pbkdf2_sha256$260000$0GY5pt5V127jGd8HkkEort$8ZL0eY2qTZHydyzUUN5LNKZnmmibfu1x3QQ/7rJX1Vc=
3115,関ケ原2,Best Wishes,ソロ男子-5時間,pbkdf2_sha256$260000$tlNrgHyqDtfbM9f3GLv5G1$jRcR/ieTB174TZ9jW7obCBUMpyz86aywqDKw3VmhVQQ=
1010,大垣3,ハヤノテスト,一般-5時間,pbkdf2_sha256$260000$IeGmRkkUkwXXc1zO9oxvCe$ijnJTH7xhwidit+uCggSgjj/7g/vMK539IpOMA5GlnM=
1012,大垣3,てすとあきら1,一般-5時間,pbkdf2_sha256$260000$0GY5pt5V127jGd8HkkEort$8ZL0eY2qTZHydyzUUN5LNKZnmmibfu1x3QQ/7rJX1Vc=
1014,各務原2,てすとあきら1,一般-5時間,pbkdf2_sha256$260000$0GY5pt5V127jGd8HkkEort$8ZL0eY2qTZHydyzUUN5LNKZnmmibfu1x3QQ/7rJX1Vc=
1018,下呂2,てすとあきら1,一般-5時間,pbkdf2_sha256$260000$0GY5pt5V127jGd8HkkEort$8ZL0eY2qTZHydyzUUN5LNKZnmmibfu1x3QQ/7rJX1Vc=
1024,関ケ原2,てすとあきら1,一般-5時間,pbkdf2_sha256$260000$0GY5pt5V127jGd8HkkEort$8ZL0eY2qTZHydyzUUN5LNKZnmmibfu1x3QQ/7rJX1Vc=
1026,美濃加茂2,てすとあきら1,一般-5時間,pbkdf2_sha256$260000$0GY5pt5V127jGd8HkkEort$8ZL0eY2qTZHydyzUUN5LNKZnmmibfu1x3QQ/7rJX1Vc=
1028,多治見2,てすとあきら1,一般-5時間,pbkdf2_sha256$260000$0GY5pt5V127jGd8HkkEort$8ZL0eY2qTZHydyzUUN5LNKZnmmibfu1x3QQ/7rJX1Vc=
3006,関ケ原2,山本哲也,ソロ男子-5時間,pbkdf2_sha256$260000$EkYrRHZwKunjO4jiHvxyB2$kYGN0STzV9c70IKAIxK1Ija3K1y90+ote0HDTP+iSPw=
3009,養老2,山本哲也,ソロ男子-5時間,pbkdf2_sha256$260000$EkYrRHZwKunjO4jiHvxyB2$kYGN0STzV9c70IKAIxK1Ija3K1y90+ote0HDTP+iSPw=
3011,郡上2,山本哲也,ソロ男子-5時間,pbkdf2_sha256$260000$EkYrRHZwKunjO4jiHvxyB2$kYGN0STzV9c70IKAIxK1Ija3K1y90+ote0HDTP+iSPw=
3013,大垣3,山本哲也,ソロ男子-5時間,pbkdf2_sha256$260000$EkYrRHZwKunjO4jiHvxyB2$kYGN0STzV9c70IKAIxK1Ija3K1y90+ote0HDTP+iSPw=
3015,各務原2,山本哲也,ソロ男子-5時間,pbkdf2_sha256$260000$EkYrRHZwKunjO4jiHvxyB2$kYGN0STzV9c70IKAIxK1Ija3K1y90+ote0HDTP+iSPw=
3017,多治見2,山本哲也,ソロ男子-5時間,pbkdf2_sha256$260000$EkYrRHZwKunjO4jiHvxyB2$kYGN0STzV9c70IKAIxK1Ija3K1y90+ote0HDTP+iSPw=
3019,下呂2,山本哲也,ソロ男子-5時間,pbkdf2_sha256$260000$EkYrRHZwKunjO4jiHvxyB2$kYGN0STzV9c70IKAIxK1Ija3K1y90+ote0HDTP+iSPw=
3021,高山2,山本哲也,ソロ男子-5時間,pbkdf2_sha256$260000$EkYrRHZwKunjO4jiHvxyB2$kYGN0STzV9c70IKAIxK1Ija3K1y90+ote0HDTP+iSPw=
3023,美濃加茂2,山本哲也,ソロ男子-5時間,pbkdf2_sha256$260000$EkYrRHZwKunjO4jiHvxyB2$kYGN0STzV9c70IKAIxK1Ija3K1y90+ote0HDTP+iSPw=
4008,下呂2,GO!GO!YOKO,ソロ女子-5時間,pbkdf2_sha256$260000$tuv8ajw2VSmCooIxNHJhdD$m7q0fqPIsAs7L9uubt+PUVsmexwpJPXPCgVs9GjY12c=
3121,関ケ原2,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$sCLRTCAxQIClyDmvfbMDm0$cU3dSGTPwKHl8T3EBZ6R19oZJGkadD48pKqywAhtJOk=
3126,大垣3,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$7KsSngw2Ho719jpXsOrC8v$jfHFxglG/L0htA13t01LAy91dS+FnlAZubg6Lmd/m2Y=
3128,多治見2,MASA,ソロ男子-5時間,pbkdf2_sha256$260000$qpaSbqryD4f5bZaY893Ug4$Gk8XuqsJbSkX9Hxrl/xg9LtjM8JQkpgNkpbbNzTmhzY=
3124,関ケ原2,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$7KsSngw2Ho719jpXsOrC8v$jfHFxglG/L0htA13t01LAy91dS+FnlAZubg6Lmd/m2Y=
3132,各務原2,岐阜市イイとこあるある探検隊,ソロ男子-5時間,pbkdf2_sha256$260000$QWc5BpSBUbkUwP9UlIzyE5$do+VKkH8mNibg6PJDsm6AJ/VMFh3NWdzwZ9IQW/26xA=
3135,大垣3,akira,ソロ男子-5時間,pbkdf2_sha256$260000$mmM2N8sSE84YaNNuDzQKxb$ox9U6rdgZq4ANzi4NizskphZWIrf7o2+JEfvC4wcn7U=
3137,関ケ原2,akira,ソロ男子-5時間,pbkdf2_sha256$260000$mmM2N8sSE84YaNNuDzQKxb$ox9U6rdgZq4ANzi4NizskphZWIrf7o2+JEfvC4wcn7U=
3139,養老2,akira,ソロ男子-5時間,pbkdf2_sha256$260000$mmM2N8sSE84YaNNuDzQKxb$ox9U6rdgZq4ANzi4NizskphZWIrf7o2+JEfvC4wcn7U=
3073,養老2,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$sCLRTCAxQIClyDmvfbMDm0$cU3dSGTPwKHl8T3EBZ6R19oZJGkadD48pKqywAhtJOk=
3075,高山2,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$sCLRTCAxQIClyDmvfbMDm0$cU3dSGTPwKHl8T3EBZ6R19oZJGkadD48pKqywAhtJOk=
3077,郡上2,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$sCLRTCAxQIClyDmvfbMDm0$cU3dSGTPwKHl8T3EBZ6R19oZJGkadD48pKqywAhtJOk=
3081,美濃加茂2,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$sCLRTCAxQIClyDmvfbMDm0$cU3dSGTPwKHl8T3EBZ6R19oZJGkadD48pKqywAhtJOk=
3083,多治見2,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$sCLRTCAxQIClyDmvfbMDm0$cU3dSGTPwKHl8T3EBZ6R19oZJGkadD48pKqywAhtJOk=
3085,各務原2,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$sCLRTCAxQIClyDmvfbMDm0$cU3dSGTPwKHl8T3EBZ6R19oZJGkadD48pKqywAhtJOk=
3079,下呂2,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$sCLRTCAxQIClyDmvfbMDm0$cU3dSGTPwKHl8T3EBZ6R19oZJGkadD48pKqywAhtJOk=
3093,関ケ原2,岐阜愛,ソロ男子-5時間,pbkdf2_sha256$260000$LFOINdd30aKaXoT9CNYY8A$eoAzV10+gp+tufabtcFOx6uoOktZUngzzDJ0WWs/v24=
3099,高山2,岐阜愛,ソロ男子-5時間,pbkdf2_sha256$260000$LFOINdd30aKaXoT9CNYY8A$eoAzV10+gp+tufabtcFOx6uoOktZUngzzDJ0WWs/v24=
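The last column of each row above appears to be a Django pbkdf2_sha256 password hash (format: algorithm, iteration count, salt, digest). If so, it can be verified with Django's standard hashers; a minimal sketch, assuming a configured Django environment (for example `manage.py shell`), with placeholder passwords:

from django.contrib.auth.hashers import check_password, make_password

stored = make_password("test-password")          # produces pbkdf2_sha256$...$salt$digest
print(check_password("test-password", stored))   # True
print(check_password("wrong-password", stored))  # False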
rog/management/commands/cleanup_temp_users.py (new file, 16 lines)
@@ -0,0 +1,16 @@
from django.core.management.base import BaseCommand
from django.utils import timezone
from rog.models import TempUser  # change the app name 'rog' as appropriate


class Command(BaseCommand):
    help = 'Deletes expired temporary user records'

    def handle(self, *args, **options):
        expired_users = TempUser.objects.filter(expires_at__lt=timezone.now())
        count = expired_users.count()
        expired_users.delete()
        self.stdout.write(self.style.SUCCESS(f'Successfully deleted {count} expired temporary user records'))


# cron job configuration:
# 0 3 * * * /path/to/your/python /path/to/your/manage.py cleanup_temp_users
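Besides the crontab line above, the same command can be invoked programmatically, for example from a periodic task. A minimal sketch; the wrapper function name is illustrative:

from django.core.management import call_command

def purge_expired_temp_users():
    # Equivalent to running `python manage.py cleanup_temp_users`.
    call_command("cleanup_temp_users")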
rog/middleware/__init__.py (new file, 3 lines)
@@ -0,0 +1,3 @@
from .ip_blocking import IPBlockingMiddleware

__all__ = ['IPBlockingMiddleware']
rog/middleware/ip_blocking.py (new file, 42 lines)
@@ -0,0 +1,42 @@
from django.core.exceptions import PermissionDenied
from django.core.cache import cache
from django.conf import settings


class IPBlockingMiddleware:
    def __init__(self, get_response):
        self.get_response = get_response
        # List of IP addresses that are blocked up front
        self.blacklisted_ips = getattr(settings, 'BLACKLISTED_IPS', [])

    def __call__(self, request):
        ip = self.get_client_ip(request)

        # Get the block list from the cache
        blocked_ips = cache.get('blocked_ips', set())

        # Check both the pre-configured blacklist and the cached block list
        if ip in self.blacklisted_ips or ip in blocked_ips:
            raise PermissionDenied

        # Abusive-access detection logic goes here
        if self.is_suspicious(ip):
            blocked_ips.add(ip)
            cache.set('blocked_ips', blocked_ips, timeout=3600)  # block for 1 hour
            raise PermissionDenied

        response = self.get_response(request)
        return response

    def is_suspicious(self, ip):
        request_count = cache.get(f'request_count_{ip}', 0)
        cache.set(f'request_count_{ip}', request_count + 1, timeout=60)
        return request_count > 100  # more than 100 requests in one minute is treated as suspicious

    def get_client_ip(self, request):
        x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
        if x_forwarded_for:
            ip = x_forwarded_for.split(',')[0]
        else:
            ip = request.META.get('REMOTE_ADDR')
        return ip
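A sketch of how this middleware might be wired up in settings.py (not part of the diff; the middleware ordering and the example address are assumptions):

# Hypothetical settings.py excerpt: enable IPBlockingMiddleware and the
# BLACKLISTED_IPS setting it reads via getattr().
MIDDLEWARE = [
    "django.middleware.security.SecurityMiddleware",
    # ... the project's other middleware ...
    "rog.middleware.IPBlockingMiddleware",  # re-exported by rog/middleware/__init__.py
]

# Pre-blocked addresses; the middleware defaults to [] when this setting is absent.
BLACKLISTED_IPS = [
    "203.0.113.7",  # documentation address (RFC 5737), shown only as an example
]

Two design notes: the block list and request counters live in Django's cache, so a backend shared across worker processes (e.g. Redis or Memcached) is needed for blocks to apply beyond a single process; and because is_suspicious() rewrites the counter with cache.set() and refreshes its 60-second TTL on every request, the count measures bursts with gaps shorter than a minute rather than a strict per-minute rate, and it can under-count under concurrency (cache.incr() on a backend with atomic increments would avoid the read-modify-write race).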
rog/migration_scripts.py (new file, 148 lines)
@@ -0,0 +1,148 @@
"""
This is a temporary conversion script for merging the 永栄 code base into the Nouffer code base.
Once the migration is fully done and the rankings and run history are complete, it will no longer be needed.
"""
import psycopg2
from PIL import Image
import PIL.ExifTags
from datetime import datetime
import os


def get_gps_from_image(image_path):
    """
    Extract GPS information from an image file.
    Returns: (latitude, longitude), or (None, None) if it cannot be obtained.
    """
    try:
        with Image.open(image_path) as img:
            exif = {
                PIL.ExifTags.TAGS[k]: v
                for k, v in img._getexif().items()
                if k in PIL.ExifTags.TAGS
            }

            if 'GPSInfo' in exif:
                gps_info = exif['GPSInfo']

                # Latitude
                lat = gps_info[2]
                lat = lat[0] + lat[1]/60 + lat[2]/3600
                if gps_info[1] == 'S':
                    lat = -lat

                # Longitude
                lon = gps_info[4]
                lon = lon[0] + lon[1]/60 + lon[2]/3600
                if gps_info[3] == 'W':
                    lon = -lon

                return lat, lon
    except Exception as e:
        print(f"GPS情報の抽出に失敗: {e}")

    return None, None


def migrate_data():
    # Connection settings for the container environment
    source_db = {
        'dbname': 'gifuroge',
        'user': 'admin',            # change to match the environment
        'password': 'admin123456',  # change to match the environment
        'host': 'localhost',        # Docker service name
        'port': '5432'
    }

    target_db = {
        'dbname': 'rogdb',
        'user': 'admin',            # change to match the environment
        'password': 'admin123456',  # change to match the environment
        'host': 'localhost',        # Docker service name
        'port': '5432'
    }

    source_conn = None
    target_conn = None
    source_cur = None
    target_cur = None

    try:
        print("ソースDBへの接続を試みています...")
        source_conn = psycopg2.connect(**source_db)
        source_cur = source_conn.cursor()
        print("ソースDBへの接続が成功しました")

        print("ターゲットDBへの接続を試みています...")
        target_conn = psycopg2.connect(**target_db)
        target_cur = target_conn.cursor()
        print("ターゲットDBへの接続が成功しました")

        print("データの取得を開始します...")
        source_cur.execute("""
            SELECT serial_number, zekken_number, event_code, cp_number, image_address,
                   goal_time, late_point, create_at, create_user,
                   update_at, update_user, buy_flag, colabo_company_memo
            FROM gps_information
        """)

        rows = source_cur.fetchall()
        print(f"取得したレコード数: {len(rows)}")

        processed_count = 0
        for row in rows:
            (serial_number, zekken_number, event_code, cp_number, image_address,
             goal_time, late_point, create_at, create_user,
             update_at, update_user, buy_flag, colabo_company_memo) = row

            latitude, longitude = None, None
            if image_address and os.path.exists(image_address):
                latitude, longitude = get_gps_from_image(image_address)

            target_cur.execute("""
                INSERT INTO gps_checkins (
                    path_order, zekken_number, event_code, cp_number,
                    lattitude, longitude, image_address,
                    image_receipt, image_QR, validate_location,
                    goal_time, late_point, create_at,
                    create_user, update_at, update_user,
                    buy_flag, colabo_company_memo, points
                ) VALUES (
                    %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,
                    %s, %s, %s, %s, %s, %s, %s, %s, %s
                )
            """, (
                serial_number,
                zekken_number, event_code, cp_number,
                latitude, longitude, image_address,
                True, True, True,
                goal_time, late_point, create_at,
                create_user, update_at, update_user,
                buy_flag if buy_flag is not None else False,
                colabo_company_memo if colabo_company_memo else '',
                0
            ))

            processed_count += 1
            if processed_count % 100 == 0:
                print(f"処理済みレコード数: {processed_count}")

        target_conn.commit()
        print(f"移行完了: {processed_count}件のレコードを処理しました")

    except Exception as e:
        print(f"エラーが発生しました: {e}")
        if target_conn:
            target_conn.rollback()

    finally:
        if source_cur:
            source_cur.close()
        if target_cur:
            target_cur.close()
        if source_conn:
            source_conn.close()
        if target_conn:
            target_conn.close()
        print("すべての接続をクローズしました")


if __name__ == "__main__":
    migrate_data()
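The script above turns EXIF degree/minute/second values into decimal degrees before inserting them into gps_checkins. A standalone sketch of just that conversion, with an illustrative coordinate (not data from this project):

# Same DMS-to-decimal arithmetic as get_gps_from_image(), isolated for clarity.
def dms_to_decimal(degrees, minutes, seconds, ref):
    """Convert EXIF-style (deg, min, sec) plus a hemisphere ref ('N'/'S'/'E'/'W')."""
    value = degrees + minutes / 60 + seconds / 3600
    return -value if ref in ("S", "W") else value

# 35° 25' 48" N -> 35.43 (roughly the latitude of Gifu City)
print(dms_to_decimal(35, 25, 48, "N"))  # 35.43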
@ -1,18 +0,0 @@
|
|||||||
# Generated by Django 3.2.9 on 2022-09-09 13:18
|
|
||||||
|
|
||||||
from django.db import migrations, models
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
|
|
||||||
dependencies = [
|
|
||||||
('rog', '0032_alter_location_sub_loc_id'),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.AlterField(
|
|
||||||
model_name='templocation',
|
|
||||||
name='sub_loc_id',
|
|
||||||
field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Sub location id'),
|
|
||||||
),
|
|
||||||
]
|
|
||||||
@ -1,18 +0,0 @@
|
|||||||
# Generated by Django 3.2.9 on 2022-10-06 10:51
|
|
||||||
|
|
||||||
from django.db import migrations, models
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
|
|
||||||
dependencies = [
|
|
||||||
('rog', '0033_alter_templocation_sub_loc_id'),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.AlterField(
|
|
||||||
model_name='customuser',
|
|
||||||
name='email',
|
|
||||||
field=models.CharField(max_length=255, verbose_name='user name'),
|
|
||||||
),
|
|
||||||
]
|
|
||||||
@ -1,18 +0,0 @@
|
|||||||
# Generated by Django 3.2.9 on 2022-10-06 10:52
|
|
||||||
|
|
||||||
from django.db import migrations, models
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
|
|
||||||
dependencies = [
|
|
||||||
('rog', '0034_alter_customuser_email'),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.AlterField(
|
|
||||||
model_name='customuser',
|
|
||||||
name='email',
|
|
||||||
field=models.EmailField(max_length=254, unique=True, verbose_name='user name'),
|
|
||||||
),
|
|
||||||
]
|
|
||||||
@ -1,18 +0,0 @@
|
|||||||
# Generated by Django 3.2.9 on 2022-10-06 11:01
|
|
||||||
|
|
||||||
from django.db import migrations, models
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
|
|
||||||
dependencies = [
|
|
||||||
('rog', '0035_alter_customuser_email'),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.AlterField(
|
|
||||||
model_name='customuser',
|
|
||||||
name='email',
|
|
||||||
field=models.CharField(max_length=255, unique=True, verbose_name='Email'),
|
|
||||||
),
|
|
||||||
]
|
|
||||||
@ -1,18 +0,0 @@
|
|||||||
# Generated by Django 3.2.9 on 2022-10-06 13:42
|
|
||||||
|
|
||||||
from django.db import migrations, models
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
|
|
||||||
dependencies = [
|
|
||||||
('rog', '0036_alter_customuser_email'),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='customuser',
|
|
||||||
name='is_rogaining',
|
|
||||||
field=models.BooleanField(default=False),
|
|
||||||
),
|
|
||||||
]
|
|
||||||
@ -1,28 +0,0 @@
|
|||||||
# Generated by Django 3.2.9 on 2022-10-16 10:50
|
|
||||||
|
|
||||||
from django.db import migrations, models
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
|
|
||||||
dependencies = [
|
|
||||||
('rog', '0037_customuser_is_rogaining'),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='customuser',
|
|
||||||
name='event_code',
|
|
||||||
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Event Code'),
|
|
||||||
),
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='customuser',
|
|
||||||
name='team_name',
|
|
||||||
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Team Name'),
|
|
||||||
),
|
|
||||||
migrations.AddField(
|
|
||||||
model_name='customuser',
|
|
||||||
name='zekken_number',
|
|
||||||
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Zekken Number'),
|
|
||||||
),
|
|
||||||
]
|
|
||||||
@ -1,27 +0,0 @@
|
|||||||
# Generated by Django 3.2.9 on 2022-10-17 11:39
|
|
||||||
|
|
||||||
from django.conf import settings
|
|
||||||
from django.db import migrations, models
|
|
||||||
import django.db.models.deletion
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
|
|
||||||
dependencies = [
|
|
||||||
('rog', '0038_auto_20221016_1950'),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.CreateModel(
|
|
||||||
name='GoalImages',
|
|
||||||
fields=[
|
|
||||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
|
||||||
('goalimage', models.FileField(blank=True, null=True, upload_to='%y%m%d')),
|
|
||||||
('goaltime', models.DateTimeField(verbose_name='Goal time')),
|
|
||||||
('team_name', models.CharField(max_length=255, verbose_name='Team name')),
|
|
||||||
('event_code', models.CharField(max_length=255, verbose_name='event code')),
|
|
||||||
('cp_number', models.IntegerField(verbose_name='CP numner')),
|
|
||||||
('user', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL)),
|
|
||||||
],
|
|
||||||
),
|
|
||||||
]
|
|
||||||
@ -1,32 +0,0 @@
|
|||||||
# Generated by Django 3.2.9 on 2022-11-05 10:39
|
|
||||||
|
|
||||||
from django.conf import settings
|
|
||||||
from django.db import migrations, models
|
|
||||||
import django.db.models.deletion
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
|
|
||||||
dependencies = [
|
|
||||||
('rog', '0039_goalimages'),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.AlterField(
|
|
||||||
model_name='goalimages',
|
|
||||||
name='goalimage',
|
|
||||||
field=models.FileField(blank=True, null=True, upload_to='goals/%y%m%d'),
|
|
||||||
),
|
|
||||||
migrations.CreateModel(
|
|
||||||
name='CheckinImages',
|
|
||||||
fields=[
|
|
||||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
|
||||||
('checkinimage', models.FileField(blank=True, null=True, upload_to='checkin/%y%m%d')),
|
|
||||||
('checkintime', models.DateTimeField(verbose_name='Goal time')),
|
|
||||||
('team_name', models.CharField(max_length=255, verbose_name='Team name')),
|
|
||||||
('event_code', models.CharField(max_length=255, verbose_name='event code')),
|
|
||||||
('cp_number', models.IntegerField(verbose_name='CP numner')),
|
|
||||||
('user', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL)),
|
|
||||||
],
|
|
||||||
),
|
|
||||||
]
|
|
||||||
@ -1,49 +0,0 @@
|
|||||||
# Generated by Django 3.2.9 on 2023-05-26 08:24
|
|
||||||
|
|
||||||
import django.contrib.postgres.indexes
|
|
||||||
from django.db import migrations, models
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
|
|
||||||
dependencies = [
|
|
||||||
('rog', '0040_auto_20221105_1939'),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.DeleteModel(
|
|
||||||
name='JpnAdminPerf',
|
|
||||||
),
|
|
||||||
migrations.DeleteModel(
|
|
||||||
name='JpnSubPerf',
|
|
||||||
),
|
|
||||||
migrations.AlterField(
|
|
||||||
model_name='location',
|
|
||||||
name='category',
|
|
||||||
field=models.CharField(blank=True, db_index=True, max_length=2048, null=True, verbose_name='Category'),
|
|
||||||
),
|
|
||||||
migrations.AlterField(
|
|
||||||
model_name='location',
|
|
||||||
name='event_active',
|
|
||||||
field=models.BooleanField(db_index=True, default=True, verbose_name='Is Event active'),
|
|
||||||
),
|
|
||||||
migrations.AlterField(
|
|
||||||
model_name='location',
|
|
||||||
name='event_name',
|
|
||||||
field=models.CharField(blank=True, db_index=True, max_length=2048, null=True, verbose_name='Event name'),
|
|
||||||
),
|
|
||||||
migrations.AlterField(
|
|
||||||
model_name='location',
|
|
||||||
name='group',
|
|
||||||
field=models.CharField(blank=True, db_index=True, max_length=2048, null=True, verbose_name='Group'),
|
|
||||||
),
|
|
||||||
migrations.AlterField(
|
|
||||||
model_name='location',
|
|
||||||
name='location_id',
|
|
||||||
field=models.IntegerField(blank=True, db_index=True, null=True, verbose_name='Location id'),
|
|
||||||
),
|
|
||||||
migrations.AddIndex(
|
|
||||||
model_name='location',
|
|
||||||
index=django.contrib.postgres.indexes.GistIndex(fields=['geom'], name='rog_locatio_geom_4793cc_gist'),
|
|
||||||
),
|
|
||||||
]
|
|
||||||
274
rog/migrations_backup/0033_auto_20240724_1431.py
Normal file
274
rog/migrations_backup/0033_auto_20240724_1431.py
Normal file
@ -0,0 +1,274 @@
|
|||||||
|
# Generated by Django 3.2.9 on 2024-07-24 05:31
|
||||||
|
|
||||||
|
import datetime
|
||||||
|
from django.conf import settings
|
||||||
|
import django.contrib.postgres.indexes
|
||||||
|
from django.db import migrations, models
|
||||||
|
import django.db.models.deletion
|
||||||
|
import django.utils.timezone
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('rog', '0032_alter_location_sub_loc_id'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='Category',
|
||||||
|
fields=[
|
||||||
|
('category_name', models.CharField(max_length=255, primary_key=True, serialize=False)),
|
||||||
|
('category_number', models.IntegerField(unique=True)),
|
||||||
|
('duration', models.DurationField(default=datetime.timedelta(seconds=18000))),
|
||||||
|
('num_of_member', models.IntegerField(default=1)),
|
||||||
|
('family', models.BooleanField(default=False)),
|
||||||
|
('female', models.BooleanField(default=False)),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='CheckinImages',
|
||||||
|
fields=[
|
||||||
|
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||||
|
('checkinimage', models.FileField(blank=True, null=True, upload_to='checkin/%y%m%d')),
|
||||||
|
('checkintime', models.DateTimeField(verbose_name='Goal time')),
|
||||||
|
('team_name', models.CharField(max_length=255, verbose_name='Team name')),
|
||||||
|
('event_code', models.CharField(max_length=255, verbose_name='event code')),
|
||||||
|
('cp_number', models.IntegerField(verbose_name='CP numner')),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='Entry',
|
||||||
|
fields=[
|
||||||
|
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||||
|
('date', models.DateTimeField()),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='EntryMember',
|
||||||
|
fields=[
|
||||||
|
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||||
|
('is_temporary', models.BooleanField(default=False)),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='GoalImages',
|
||||||
|
fields=[
|
||||||
|
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||||
|
('goalimage', models.FileField(blank=True, null=True, upload_to='goals/%y%m%d')),
|
||||||
|
('goaltime', models.DateTimeField(verbose_name='Goal time')),
|
||||||
|
('team_name', models.CharField(max_length=255, verbose_name='Team name')),
|
||||||
|
('event_code', models.CharField(max_length=255, verbose_name='event code')),
|
||||||
|
('cp_number', models.IntegerField(verbose_name='CP numner')),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='Member',
|
||||||
|
fields=[
|
||||||
|
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||||
|
('is_temporary', models.BooleanField(default=False)),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='NewEvent',
|
||||||
|
fields=[
|
||||||
|
('event_name', models.CharField(max_length=255, primary_key=True, serialize=False)),
|
||||||
|
('start_datetime', models.DateTimeField(default=django.utils.timezone.now)),
|
||||||
|
('end_datetime', models.DateTimeField()),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='Team',
|
||||||
|
fields=[
|
||||||
|
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||||
|
('zekken_number', models.CharField(max_length=255, unique=True)),
|
||||||
|
('team_name', models.CharField(max_length=255)),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='TempUser',
|
||||||
|
fields=[
|
||||||
|
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||||
|
('email', models.EmailField(max_length=254, unique=True)),
|
||||||
|
('password', models.CharField(max_length=128)),
|
||||||
|
('is_rogaining', models.BooleanField(default=False)),
|
||||||
|
('zekken_number', models.CharField(blank=True, max_length=255, null=True)),
|
||||||
|
('event_code', models.CharField(blank=True, max_length=255, null=True)),
|
||||||
|
('team_name', models.CharField(blank=True, max_length=255, null=True)),
|
||||||
|
('group', models.CharField(max_length=255)),
|
||||||
|
('firstname', models.CharField(blank=True, max_length=255, null=True)),
|
||||||
|
('lastname', models.CharField(blank=True, max_length=255, null=True)),
|
||||||
|
('date_of_birth', models.DateField(blank=True, null=True)),
|
||||||
|
('female', models.BooleanField(default=False)),
|
||||||
|
('verification_code', models.UUIDField(default=uuid.uuid4, editable=False)),
|
||||||
|
('created_at', models.DateTimeField(auto_now_add=True)),
|
||||||
|
('expires_at', models.DateTimeField()),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.DeleteModel(
|
||||||
|
name='JpnAdminPerf',
|
||||||
|
),
|
||||||
|
migrations.DeleteModel(
|
||||||
|
name='JpnSubPerf',
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='customuser',
|
||||||
|
name='date_of_birth',
|
||||||
|
field=models.DateField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='customuser',
|
||||||
|
name='event_code',
|
||||||
|
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Event Code'),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='customuser',
|
||||||
|
name='female',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='customuser',
|
||||||
|
name='firstname',
|
||||||
|
field=models.CharField(blank=True, max_length=255, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='customuser',
|
||||||
|
name='is_rogaining',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='customuser',
|
||||||
|
name='lastname',
|
||||||
|
field=models.CharField(blank=True, max_length=255, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='customuser',
|
||||||
|
name='team_name',
|
||||||
|
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Team Name'),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='customuser',
|
||||||
|
name='zekken_number',
|
||||||
|
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Zekken Number'),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='customuser',
|
||||||
|
name='email',
|
||||||
|
field=models.CharField(max_length=255, unique=True, verbose_name='Email'),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='location',
|
||||||
|
name='category',
|
||||||
|
field=models.CharField(blank=True, db_index=True, max_length=2048, null=True, verbose_name='Category'),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='location',
|
||||||
|
name='event_active',
|
||||||
|
field=models.BooleanField(db_index=True, default=True, verbose_name='Is Event active'),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='location',
|
||||||
|
name='event_name',
|
||||||
|
field=models.CharField(blank=True, db_index=True, max_length=2048, null=True, verbose_name='Event name'),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='location',
|
||||||
|
name='group',
|
||||||
|
field=models.CharField(blank=True, db_index=True, max_length=2048, null=True, verbose_name='Group'),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='location',
|
||||||
|
name='location_id',
|
||||||
|
field=models.IntegerField(blank=True, db_index=True, null=True, verbose_name='Location id'),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='templocation',
|
||||||
|
name='sub_loc_id',
|
||||||
|
field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Sub location id'),
|
||||||
|
),
|
||||||
|
migrations.AddIndex(
|
||||||
|
model_name='location',
|
||||||
|
index=django.contrib.postgres.indexes.GistIndex(fields=['geom'], name='rog_locatio_geom_4793cc_gist'),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='team',
|
||||||
|
name='category',
|
||||||
|
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='rog.category'),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='team',
|
||||||
|
name='owner',
|
||||||
|
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='owned_teams', to=settings.AUTH_USER_MODEL),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='member',
|
||||||
|
name='team',
|
||||||
|
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rog.team'),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='member',
|
||||||
|
name='user',
|
||||||
|
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='goalimages',
|
||||||
|
name='user',
|
||||||
|
field=models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='entrymember',
|
||||||
|
name='entry',
|
||||||
|
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rog.entry'),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='entrymember',
|
||||||
|
name='member',
|
||||||
|
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rog.member'),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='entry',
|
||||||
|
name='category',
|
||||||
|
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rog.category'),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='entry',
|
||||||
|
name='event',
|
||||||
|
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rog.newevent'),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='entry',
|
||||||
|
name='owner',
|
||||||
|
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='entry',
|
||||||
|
name='team',
|
||||||
|
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rog.team'),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='checkinimages',
|
||||||
|
name='user',
|
||||||
|
field=models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL),
|
||||||
|
),
|
||||||
|
migrations.AlterUniqueTogether(
|
||||||
|
name='category',
|
||||||
|
unique_together={('category_name', 'category_number')},
|
||||||
|
),
|
||||||
|
migrations.AlterUniqueTogether(
|
||||||
|
name='team',
|
||||||
|
unique_together={('zekken_number', 'category')},
|
||||||
|
),
|
||||||
|
migrations.AlterUniqueTogether(
|
||||||
|
name='member',
|
||||||
|
unique_together={('team', 'user')},
|
||||||
|
),
|
||||||
|
migrations.AlterUniqueTogether(
|
||||||
|
name='entrymember',
|
||||||
|
unique_together={('entry', 'member')},
|
||||||
|
),
|
||||||
|
migrations.AlterUniqueTogether(
|
||||||
|
name='entry',
|
||||||
|
unique_together={('team', 'event', 'date', 'owner')},
|
||||||
|
),
|
||||||
|
]
|
||||||
34
rog/migrations_backup/0034_auto_20240724_1522.py
Normal file
34
rog/migrations_backup/0034_auto_20240724_1522.py
Normal file
@ -0,0 +1,34 @@
|
|||||||
|
# Generated by Django 3.2.9 on 2024-07-24 06:22
|
||||||
|
|
||||||
|
import datetime
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('rog', '0033_auto_20240724_1431'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='category',
|
||||||
|
name='category_number',
|
||||||
|
field=models.IntegerField(default=0),
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='NewCategory',
|
||||||
|
fields=[
|
||||||
|
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||||
|
('category_name', models.CharField(max_length=255, unique=True)),
|
||||||
|
('category_number', models.IntegerField(default=0)),
|
||||||
|
('duration', models.DurationField(default=datetime.timedelta(seconds=18000))),
|
||||||
|
('num_of_member', models.IntegerField(default=1)),
|
||||||
|
('family', models.BooleanField(default=False)),
|
||||||
|
('female', models.BooleanField(default=False)),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'unique_together': {('category_name', 'category_number')},
|
||||||
|
},
|
||||||
|
),
|
||||||
|
]
|
||||||
29
rog/migrations_backup/0035_auto_20240724_1529.py
Normal file
29
rog/migrations_backup/0035_auto_20240724_1529.py
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
# Generated by Django 3.2.9 on 2024-07-24 06:29
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
import django.db.models.deletion
|
||||||
|
import django.utils.timezone
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('rog', '0034_auto_20240724_1522'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='NewEvent2',
|
||||||
|
fields=[
|
||||||
|
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||||
|
('event_name', models.CharField(max_length=255, unique=True)),
|
||||||
|
('start_datetime', models.DateTimeField(default=django.utils.timezone.now)),
|
||||||
|
('end_datetime', models.DateTimeField()),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='entry',
|
||||||
|
name='event',
|
||||||
|
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rog.newevent2'),
|
||||||
|
),
|
||||||
|
]
|
||||||
20
rog/migrations_backup/0036_alter_team_category.py
Normal file
20
rog/migrations_backup/0036_alter_team_category.py
Normal file
@ -0,0 +1,20 @@
|
|||||||
|
# Generated by Django 3.2.9 on 2024-07-24 06:58
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
import django.db.models.deletion
|
||||||
|
import rog.models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('rog', '0035_auto_20240724_1529'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='team',
|
||||||
|
name='category',
|
||||||
|
field=models.ForeignKey(default=rog.models.get_default_category, on_delete=django.db.models.deletion.SET_DEFAULT, to='rog.newcategory'),
|
||||||
|
),
|
||||||
|
]
|
||||||
19
rog/migrations_backup/0037_alter_member_team.py
Normal file
19
rog/migrations_backup/0037_alter_member_team.py
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
# Generated by Django 3.2.9 on 2024-07-24 15:20
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
import django.db.models.deletion
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('rog', '0036_alter_team_category'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='member',
|
||||||
|
name='team',
|
||||||
|
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='members', to='rog.team'),
|
||||||
|
),
|
||||||
|
]
|
||||||
19
rog/migrations_backup/0038_alter_entry_category.py
Normal file
19
rog/migrations_backup/0038_alter_entry_category.py
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
# Generated by Django 3.2.9 on 2024-07-25 01:21
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
import django.db.models.deletion
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('rog', '0037_alter_member_team'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='entry',
|
||||||
|
name='category',
|
||||||
|
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rog.newcategory'),
|
||||||
|
),
|
||||||
|
]
|
||||||
29
rog/migrations_backup/0039_auto_20240726_1508.py
Normal file
29
rog/migrations_backup/0039_auto_20240726_1508.py
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
# Generated by Django 3.2.9 on 2024-07-26 06:08
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
import django.utils.timezone
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('rog', '0038_alter_entry_category'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='customuser',
|
||||||
|
name='date_joined',
|
||||||
|
field=models.DateTimeField(default=django.utils.timezone.now),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='customuser',
|
||||||
|
name='email',
|
||||||
|
field=models.EmailField(max_length=254, unique=True),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='customuser',
|
||||||
|
name='group',
|
||||||
|
field=models.CharField(blank=True, max_length=255),
|
||||||
|
),
|
||||||
|
]
|
||||||
33
rog/migrations_backup/0040_auto_20240801_1729.py
Normal file
33
rog/migrations_backup/0040_auto_20240801_1729.py
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
# Generated by Django 3.2.9 on 2024-08-01 08:29
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('rog', '0039_auto_20240726_1508'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='member',
|
||||||
|
name='date_of_birth',
|
||||||
|
field=models.DateField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='member',
|
||||||
|
name='female',
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='member',
|
||||||
|
name='firstname',
|
||||||
|
field=models.CharField(blank=True, max_length=255, null=True),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='member',
|
||||||
|
name='lastname',
|
||||||
|
field=models.CharField(blank=True, max_length=255, null=True),
|
||||||
|
),
|
||||||
|
]
|
||||||
37
rog/migrations_backup/0041_jpnsubperf.py
Normal file
37
rog/migrations_backup/0041_jpnsubperf.py
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
# Generated by Django 3.2.9 on 2024-08-02 15:11
|
||||||
|
|
||||||
|
import django.contrib.gis.db.models.fields
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('rog', '0040_auto_20240801_1729'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='JpnSubPerf',
|
||||||
|
fields=[
|
||||||
|
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||||
|
('geom', django.contrib.gis.db.models.fields.MultiPolygonField(blank=True, null=True, srid=4326)),
|
||||||
|
('adm0_en', models.CharField(blank=True, max_length=254, null=True)),
|
||||||
|
('adm0_ja', models.CharField(blank=True, max_length=254, null=True)),
|
||||||
|
('adm0_pcode', models.CharField(blank=True, max_length=254, null=True)),
|
||||||
|
('adm1_en', models.CharField(blank=True, max_length=254, null=True)),
|
||||||
|
('adm1_ja', models.CharField(blank=True, max_length=254, null=True)),
|
||||||
|
('adm1_pcode', models.CharField(blank=True, max_length=254, null=True)),
|
||||||
|
('adm2_ja', models.CharField(blank=True, max_length=254, null=True)),
|
||||||
|
('adm2_en', models.CharField(blank=True, max_length=254, null=True)),
|
||||||
|
('adm2_pcode', models.CharField(blank=True, max_length=254, null=True)),
|
||||||
|
('name_modified', models.CharField(blank=True, max_length=254, null=True)),
|
||||||
|
('area_name', models.CharField(blank=True, max_length=254, null=True)),
|
||||||
|
('list_order', models.IntegerField(default=0)),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'db_table': 'jpn_sub_perf',
|
||||||
|
'managed': False,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
]
|
||||||
0
rog/migrations_backup/__init__.py
Normal file
0
rog/migrations_backup/__init__.py
Normal file
692
rog/models.py
692
rog/models.py
@ -1,3 +1,4 @@
|
|||||||
|
from django.contrib.auth.hashers import make_password
|
||||||
from dataclasses import field
|
from dataclasses import field
|
||||||
import email
|
import email
|
||||||
from enum import unique
|
from enum import unique
|
||||||
@ -23,17 +24,27 @@ from django.apps import apps
|
|||||||
from django.db import transaction
|
from django.db import transaction
|
||||||
from django.contrib.auth.models import AbstractBaseUser, PermissionsMixin, BaseUserManager
|
from django.contrib.auth.models import AbstractBaseUser, PermissionsMixin, BaseUserManager
|
||||||
from django.contrib.postgres.indexes import GistIndex
|
from django.contrib.postgres.indexes import GistIndex
|
||||||
|
from django.utils import timezone
|
||||||
|
from datetime import timedelta,date
|
||||||
|
|
||||||
|
from django.contrib.gis.geos import Point,MultiPoint
|
||||||
|
|
||||||
|
#from django.db import models
|
||||||
|
from django.core.exceptions import ValidationError
|
||||||
|
|
||||||
import csv
|
import csv
|
||||||
import codecs
|
import codecs
|
||||||
import sys
|
import sys
|
||||||
import time
|
import time
|
||||||
import uuid
|
import uuid
|
||||||
|
import logging
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
env = environ.Env(DEBUG=(bool, False))
|
env = environ.Env(DEBUG=(bool, False))
|
||||||
environ.Env.read_env(env_file=".env")
|
environ.Env.read_env(env_file=".env")
|
||||||
|
|
||||||
db = Db(dbname=env("POSTGRES_DBNAME"), user=env("POSTGRES_USER"), password=env("POSTGRES_PASS"), host="postgres-db", port=env("PG_PORT"))
|
db = Db(dbname=env("POSTGRES_DBNAME"), user=env("POSTGRES_USER"), password=env("POSTGRES_PASS"), host=env("PG_HOST"), port=env("PG_PORT"))
|
||||||
|
|
||||||
|
|
||||||
def get_file_path(instance, filename):
|
def get_file_path(instance, filename):
|
||||||
@ -61,16 +72,41 @@ def remove_bom_inplace(path):
|
|||||||
fp.seek(-bom_length, os.SEEK_CUR)
|
fp.seek(-bom_length, os.SEEK_CUR)
|
||||||
fp.truncate()
|
fp.truncate()
|
||||||
|
|
||||||
|
class GifurogeRegister(models.Model):
|
||||||
|
event_code = models.CharField(max_length=100)
|
||||||
|
time = models.IntegerField(choices=[(3, '3時間'), (5, '5時間')])
|
||||||
|
owner_name_kana = models.CharField(max_length=100)
|
||||||
|
owner_name = models.CharField(max_length=100)
|
||||||
|
email = models.EmailField()
|
||||||
|
password = models.CharField(max_length=100)
|
||||||
|
owner_birthday = models.DateField(blank=True,null=True)
|
||||||
|
owner_sex = models.CharField(max_length=10,blank=True,null=True)
|
||||||
|
team_name = models.CharField(max_length=100)
|
||||||
|
department = models.CharField(max_length=100)
|
||||||
|
members_count = models.IntegerField()
|
||||||
|
member2 = models.CharField(max_length=100, blank=True, null=True)
|
||||||
|
birthday2 = models.DateField(blank=True,null=True)
|
||||||
|
sex2 = models.CharField(max_length=10,blank=True,null=True)
|
||||||
|
member3 = models.CharField(max_length=100, blank=True, null=True)
|
||||||
|
birthday3 = models.DateField(blank=True,null=True)
|
||||||
|
sex3 = models.CharField(max_length=10,blank=True,null=True)
|
||||||
|
member4 = models.CharField(max_length=100, blank=True, null=True)
|
||||||
|
birthday4 = models.DateField(blank=True,null=True)
|
||||||
|
sex4 = models.CharField(max_length=10,blank=True,null=True)
|
||||||
|
member5 = models.CharField(max_length=100, blank=True, null=True)
|
||||||
|
birthday5 = models.DateField(blank=True,null=True)
|
||||||
|
sex5 = models.CharField(max_length=10,blank=True,null=True)
|
||||||
|
|
||||||
|
|
||||||
class CustomUserManager(BaseUserManager):
|
class CustomUserManager(BaseUserManager):
|
||||||
|
|
||||||
def create_user(self, email, password, group, event_code, team_name, **other_fields):
|
def create_user(self, email, password=None, **other_fields):
|
||||||
if not email:
|
if not email:
|
||||||
raise ValueError(_("You must provide an email address"))
|
raise ValueError(_("You must provide an email address"))
|
||||||
|
email = self.normalize_email(email)
|
||||||
user = self.model(email=email, group=group, event_code=event_code, team_name=team_name, zekken_number=email, is_rogaining=True, **other_fields)
|
user = self.model(email=email, **other_fields)
|
||||||
user.set_password(password)
|
user.set_password(password)
|
||||||
user.save()
|
user.save(using=self._db)
|
||||||
|
|
||||||
return user
|
return user
|
||||||
|
|
||||||
@ -90,7 +126,7 @@ class CustomUserManager(BaseUserManager):
|
|||||||
if other_fields.get('is_superuser') is not True:
|
if other_fields.get('is_superuser') is not True:
|
||||||
raise ValueError(_('Superuser must have is_superuser set to True'))
|
raise ValueError(_('Superuser must have is_superuser set to True'))
|
||||||
|
|
||||||
return self.create_user(email, password, group, event_code, team_name, **other_fields)
|
return self.create_user(email, password, **other_fields)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@ -136,28 +172,28 @@ class JpnAdminMainPerf(models.Model):
|
|||||||
# ###
|
# ###
|
||||||
# ### Cities
|
# ### Cities
|
||||||
# ###
|
# ###
|
||||||
# class JpnSubPerf(models.Model):
|
class JpnSubPerf(models.Model):
|
||||||
# geom = models.MultiPolygonField(blank=True, null=True)
|
geom = models.MultiPolygonField(blank=True, null=True)
|
||||||
# adm0_en = models.CharField(max_length=254, blank=True, null=True)
|
adm0_en = models.CharField(max_length=254, blank=True, null=True)
|
||||||
# adm0_ja = models.CharField(max_length=254, blank=True, null=True)
|
adm0_ja = models.CharField(max_length=254, blank=True, null=True)
|
||||||
# adm0_pcode = models.CharField(max_length=254, blank=True, null=True)
|
adm0_pcode = models.CharField(max_length=254, blank=True, null=True)
|
||||||
# adm1_en = models.CharField(max_length=254, blank=True, null=True)
|
adm1_en = models.CharField(max_length=254, blank=True, null=True)
|
||||||
# adm1_ja = models.CharField(max_length=254, blank=True, null=True)
|
adm1_ja = models.CharField(max_length=254, blank=True, null=True)
|
||||||
# adm1_pcode = models.CharField(max_length=254, blank=True, null=True)
|
adm1_pcode = models.CharField(max_length=254, blank=True, null=True)
|
||||||
# adm2_ja = models.CharField(max_length=254, blank=True, null=True)
|
adm2_ja = models.CharField(max_length=254, blank=True, null=True)
|
||||||
# adm2_en = models.CharField(max_length=254, blank=True, null=True)
|
adm2_en = models.CharField(max_length=254, blank=True, null=True)
|
||||||
# adm2_pcode = models.CharField(max_length=254, blank=True, null=True)
|
adm2_pcode = models.CharField(max_length=254, blank=True, null=True)
|
||||||
# name_modified = models.CharField(max_length=254, blank=True, null=True)
|
name_modified = models.CharField(max_length=254, blank=True, null=True)
|
||||||
# area_name = models.CharField(max_length=254, blank=True, null=True)
|
area_name = models.CharField(max_length=254, blank=True, null=True)
|
||||||
# list_order =models.IntegerField(default=0)
|
list_order =models.IntegerField(default=0)
|
||||||
|
|
||||||
# class Meta:
|
class Meta:
|
||||||
# managed = False
|
managed = False
|
||||||
# db_table = 'jpn_sub_perf'
|
db_table = 'jpn_sub_perf'
|
||||||
# indexes = [
|
indexes = [
|
||||||
# models.Index(fields=['geom'], name='jpn_sub_perf_geom_idx'),
|
models.Index(fields=['geom'], name='jpn_sub_perf_geom_idx'),
|
||||||
# # Add other fields for indexing as per the requirements
|
# Add other fields for indexing as per the requirements
|
||||||
# ]
|
]
|
||||||
|
|
||||||
###
|
###
|
||||||
### Gifu Areas
|
### Gifu Areas
|
||||||
@ -203,31 +239,295 @@ class CustomUser(AbstractBaseUser, PermissionsMixin):
|
|||||||
GB2 = '大垣-3時間', '大垣-3時間'
|
GB2 = '大垣-3時間', '大垣-3時間'
|
||||||
GB3 = '大垣-5時間', '大垣-5時間'
|
GB3 = '大垣-5時間', '大垣-5時間'
|
||||||
|
|
||||||
email = models.CharField(_("Email"), max_length=255, unique=True)
|
email = models.EmailField(unique=True)
|
||||||
is_staff = models.BooleanField(default=False)
|
firstname = models.CharField(max_length=255,blank=True, null=True)
|
||||||
|
lastname = models.CharField(max_length=255, blank=True, null=True)
|
||||||
|
date_of_birth = models.DateField(blank=True, null=True)
|
||||||
|
female = models.BooleanField(default=False)
|
||||||
|
group = models.CharField(max_length=255,blank=True)
|
||||||
is_active = models.BooleanField(default=True)
|
is_active = models.BooleanField(default=True)
|
||||||
|
is_staff = models.BooleanField(default=False)
|
||||||
|
date_joined = models.DateTimeField(default=timezone.now)
|
||||||
|
|
||||||
is_rogaining = models.BooleanField(default=False)
|
is_rogaining = models.BooleanField(default=False)
|
||||||
zekken_number = models.CharField(_("Zekken Number"), max_length=255, blank=True, null=True)
|
zekken_number = models.CharField(_("Zekken Number"), max_length=255, blank=True, null=True)
|
||||||
event_code = models.CharField(_("Event Code"), max_length=255, blank=True, null=True)
|
event_code = models.CharField(_("Event Code"), max_length=255, blank=True, null=True)
|
||||||
team_name = models.CharField(_("Team Name"), max_length=255, blank=True, null=True)
|
team_name = models.CharField(_("Team Name"), max_length=255, blank=True, null=True)
|
||||||
group = models.CharField(max_length=255,
|
|
||||||
choices=Groups.choices,
|
|
||||||
default=Groups.GB1)
|
|
||||||
USERNAME_FIELD = 'email'
|
USERNAME_FIELD = 'email'
|
||||||
REQUIRED_FIELDS = ['group',]
|
REQUIRED_FIELDS = []
|
||||||
|
|
||||||
objects = CustomUserManager()
|
objects = CustomUserManager()
|
||||||
|
|
||||||
|
def set_password(self, raw_password):
|
||||||
|
self.password = make_password(raw_password)
|
||||||
|
self._password = raw_password
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return self.email
|
return self.email
|
||||||
|
|
||||||
|
class TempUser(models.Model):
|
||||||
|
email = models.EmailField(unique=True)
|
||||||
|
password = models.CharField(max_length=128)
|
||||||
|
is_rogaining = models.BooleanField(default=False)
|
||||||
|
zekken_number = models.CharField(max_length=255, blank=True, null=True)
|
||||||
|
event_code = models.CharField(max_length=255, blank=True, null=True)
|
||||||
|
team_name = models.CharField(max_length=255, blank=True, null=True)
|
||||||
|
group = models.CharField(max_length=255)
|
||||||
|
firstname = models.CharField(max_length=255,blank=True, null=True)
|
||||||
|
lastname = models.CharField(max_length=255, blank=True, null=True)
|
||||||
|
date_of_birth = models.DateField(blank=True, null=True)
|
||||||
|
female = models.BooleanField(default=False)
|
||||||
|
verification_code = models.UUIDField(default=uuid.uuid4, editable=False)
|
||||||
|
created_at = models.DateTimeField(auto_now_add=True)
|
||||||
|
expires_at = models.DateTimeField()
|
||||||
|
|
||||||
|
def set_password(self, raw_password):
|
||||||
|
self.password = make_password(raw_password)
|
||||||
|
|
||||||
|
def check_password(self, raw_password):
|
||||||
|
return check_password(raw_password, self.password)
|
||||||
|
|
||||||
|
# TempUserの作成時にこのメソッドを使用
|
||||||
|
@classmethod
|
||||||
|
def create_temp_user(cls, email, password, **kwargs):
|
||||||
|
temp_user = cls(email=email, **kwargs)
|
||||||
|
temp_user.set_password(password)
|
||||||
|
temp_user.save()
|
||||||
|
return temp_user
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return self.email
|
||||||
|
|
||||||
|
def save(self, *args, **kwargs):
|
||||||
|
if not self.expires_at:
|
||||||
|
self.expires_at = timezone.now() + timedelta(hours=24) # 24時間の有効期限
|
||||||
|
super().save(*args, **kwargs)
|
||||||
|
|
||||||
|
def is_valid(self):
|
||||||
|
return timezone.now() <= self.expires_at
|
||||||
|
|
||||||
|
class NewEvent2(models.Model):
|
||||||
|
event_name = models.CharField(max_length=255, unique=True)
|
||||||
|
event_description=models.TextField(max_length=255,blank=True, null=True)
|
||||||
|
start_datetime = models.DateTimeField(default=timezone.now)
|
||||||
|
end_datetime = models.DateTimeField()
|
||||||
|
deadlineDateTime = models.DateTimeField(null=True, blank=True)
|
||||||
|
|
||||||
|
#// Added @2024-10-21
|
||||||
|
public = models.BooleanField(default=False)
|
||||||
|
hour_3 = models.BooleanField(default=False)
|
||||||
|
hour_5 = models.BooleanField(default=True)
|
||||||
|
class_general = models.BooleanField(default=True)
|
||||||
|
class_family = models.BooleanField(default=True)
|
||||||
|
class_solo_male = models.BooleanField(default=True)
|
||||||
|
class_solo_female = models.BooleanField(default=True)
|
||||||
|
|
||||||
|
self_rogaining = models.BooleanField(default=False)
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return f"{self.event_name} - From:{self.start_datetime} To:{self.end_datetime}"
|
||||||
|
|
||||||
|
def save(self, *args, **kwargs):
|
||||||
|
if not self.deadlineDateTime:
|
||||||
|
self.deadlineDateTime = self.end_datetime #- timedelta(days=7)
|
||||||
|
super().save(*args, **kwargs)
|
||||||
|
|
||||||
|
class NewEvent(models.Model):
|
||||||
|
event_name = models.CharField(max_length=255, primary_key=True)
|
||||||
|
start_datetime = models.DateTimeField(default=timezone.now)
|
||||||
|
end_datetime = models.DateTimeField()
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return f"{self.event_name} - From:{self.start_datetime} To:{self.end_datetime}"
|
||||||
|
|
||||||
|
def get_default_category():
|
||||||
|
return NewCategory.objects.get_or_create(category_name="Default Category", category_number=1)[0].id
|
||||||
|
|
||||||
|
|
||||||
|
class Team(models.Model):
|
||||||
|
    # zekken_number = models.CharField(max_length=255, unique=True)
    team_name = models.CharField(max_length=255)
    owner = models.ForeignKey(CustomUser, on_delete=models.CASCADE, related_name='owned_teams', blank=True, null=True)
    category = models.ForeignKey('NewCategory', on_delete=models.SET_DEFAULT, default=get_default_category)

    # class Meta:
    #     unique_together = ('zekken_number', 'category')

    def __str__(self):
        return f"{self.team_name}, owner:{self.owner.lastname} {self.owner.firstname}"


class Member(models.Model):
    team = models.ForeignKey(Team, on_delete=models.CASCADE, related_name='members')
    user = models.ForeignKey(CustomUser, on_delete=models.CASCADE)
    firstname = models.CharField(max_length=255, blank=True, null=True)
    lastname = models.CharField(max_length=255, blank=True, null=True)
    date_of_birth = models.DateField(null=True, blank=True)
    female = models.BooleanField(default=False)

    is_temporary = models.BooleanField(default=False)  # Akira 2024-7-24

    class Meta:
        unique_together = ('team', 'user')

    def __str__(self):
        return f"{self.team.team_name} - {self.user.lastname} {self.user.firstname}"


#
class Category(models.Model):
    category_name = models.CharField(max_length=255, primary_key=True)
    category_number = models.IntegerField(default=0)
    duration = models.DurationField(default=timedelta(hours=5))
    num_of_member = models.IntegerField(default=1)
    family = models.BooleanField(default=False)
    female = models.BooleanField(default=False)

    class Meta:
        unique_together = ('category_name', 'category_number')

    def __str__(self):
        hours = self.duration.total_seconds() // 3600
        return f"{self.category_name} - {self.category_number} ({int(hours)}時間)"

    @property
    def hours(self):
        return self.duration.total_seconds() // 3600


class NewCategory(models.Model):
    category_name = models.CharField(max_length=255, unique=True)
    category_number = models.IntegerField(default=0)
    duration = models.DurationField(default=timedelta(hours=5))
    num_of_member = models.IntegerField(default=1)
    family = models.BooleanField(default=False)
    female = models.BooleanField(default=False)

    class Meta:
        unique_together = ('category_name', 'category_number')

    def __str__(self):
        hours = self.duration.total_seconds() // 3600
        return f"{self.category_name} - {self.category_number} ({int(hours)}時間)"

    @property
    def hours(self):
        return self.duration.total_seconds() // 3600


class Entry(models.Model):
    team = models.ForeignKey(Team, on_delete=models.CASCADE)
    event = models.ForeignKey(NewEvent2, on_delete=models.CASCADE)
    category = models.ForeignKey(NewCategory, on_delete=models.CASCADE)
    date = models.DateTimeField()
    owner = models.ForeignKey(CustomUser, on_delete=models.CASCADE, blank=True, null=True)  # Akira 2024-7-24
    zekken_number = models.IntegerField(default=0)
    is_active = models.BooleanField(default=True)  # 新しく追加
    hasParticipated = models.BooleanField(default=False)  # 新しく追加
    hasGoaled = models.BooleanField(default=False)  # 新しく追加

    class Meta:
        unique_together = ('zekken_number', 'event', 'date')

    def __str__(self):
        return f"{self.zekken_number} - {self.team.team_name} - {self.event.event_name} - {self.date}"

    def clean(self):
        super().clean()
        if self.event and self.category and self.date:
            start = self.event.start_datetime
            end = self.event.end_datetime  # - self.category.duration
            if not (start.date() <= self.date.date() <= end.date()):
                raise ValidationError({
                    'date': f'日時{self.date}は{start.date()}から{end.date()}の間である必要があります。'
                })

        # メンバーの年齢と性別をチェック
        if self.team:  # and not self.team.members.exists():
            members = self.team.members.all()  # membersを適切に取得
            if not members.exists():
                raise ValidationError({'team': 'チームにメンバーが登録されていません。'})

            #members = Member.objects.filter(team=self.team)
            #if not members.exists():
            #    # ここで、owner をMemberに登録する。 Akira 2024-7-24
            #    raise ValidationError("チームにメンバーが登録されていません。")

            adults = [m for m in members if self.is_adult(m.user.date_of_birth)]
            children = [m for m in members if self.is_child(m.user.date_of_birth)]
            teenagers = [m for m in members if self.is_teenager(m.user.date_of_birth)]

            if self.category.family:
                if not (adults and children):
                    raise ValidationError("ファミリーカテゴリーには、18歳以上のメンバーと小学生以下のメンバーが各1名以上必要です。")
            else:
                if not adults:
                    raise ValidationError("18歳以上のメンバーが1名以上必要です。")
                if children:
                    raise ValidationError("ファミリーカテゴリー以外では、小学生以下のメンバーは参加できません。")

            if self.category.num_of_member == 1:
                if len(members) != 1:
                    raise ValidationError("このカテゴリーはソロ参加のみ可能です。")
                if not adults:
                    raise ValidationError("ソロ参加は18歳以上のみ可能です。")
                if self.category.female and not members[0].user.female:
                    raise ValidationError("このカテゴリーは女性のみ参加可能です。")
                if not self.category.female and members[0].user.female:
                    raise ValidationError("このカテゴリーは男性のみ参加可能です。")

            if len(members) > self.category.num_of_member:
                raise ValidationError(f"このカテゴリーは{self.category.num_of_member}名までの参加が必要です。")

    def save(self, *args, **kwargs):
        self.full_clean()
        super().save(*args, **kwargs)

    @staticmethod
    def is_adult(birth_date):
        today = date.today()
        age = today.year - birth_date.year - ((today.month, today.day) < (birth_date.month, birth_date.day))
        return age >= 18

    @staticmethod
    def is_child(birth_date):
        today = date.today()
        age = today.year - birth_date.year - ((today.month, today.day) < (birth_date.month, birth_date.day))
        return age <= 12

    @staticmethod
    def is_teenager(birth_date):
        today = date.today()
        age = today.year - birth_date.year - ((today.month, today.day) < (birth_date.month, birth_date.day))
        return 13 <= age <= 17


class EntryMember(models.Model):
    entry = models.ForeignKey(Entry, on_delete=models.CASCADE)
    member = models.ForeignKey(Member, on_delete=models.CASCADE)
    is_temporary = models.BooleanField(default=False)  # Akira 2024-7-24

    class Meta:
        unique_together = ('entry', 'member')

    def __str__(self):
        return f"{self.entry.team.zekken_number} - {self.member.user.lastname} {self.member.user.firstname}"

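For reference, a minimal sketch of how the Entry validation above is exercised: full_clean() runs Entry.clean(), so the date-window and member checks fire both from forms and from Entry.save(). The helper name and the placeholder arguments are illustrative assumptions, not code from this diff.

from django.core.exceptions import ValidationError

def try_register_entry(team, event, category, entry_date, owner):
    # Hypothetical helper: builds an Entry and surfaces the validation
    # messages raised in Entry.clean().
    entry = Entry(team=team, event=event, category=category,
                  date=entry_date, owner=owner, zekken_number=0)
    try:
        entry.full_clean()   # date window, member presence, age/gender, team size
        entry.save()         # save() calls full_clean() again before writing
        return entry
    except ValidationError as exc:
        print(exc.messages)  # e.g. 'チームにメンバーが登録されていません。'
        return None
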
class GoalImages(models.Model):
    user = models.ForeignKey(CustomUser, on_delete=models.DO_NOTHING)
    goalimage = models.FileField(upload_to='goals/%y%m%d', blank=True, null=True)
    goaltime = models.DateTimeField(_("Goal time"), blank=True, null=True, auto_now=False, auto_now_add=False)
    team_name = models.CharField(_("Team name"), max_length=255)
    event_code = models.CharField(_("event code"), max_length=255)
    cp_number = models.IntegerField(_("CP number"))
    zekken_number = models.TextField(
        null=True,   # False にする
        blank=True,  # False にする
        help_text="ゼッケン番号"
    )

class CheckinImages(models.Model):
    user = models.ForeignKey(CustomUser, on_delete=models.DO_NOTHING)

@@ -237,6 +537,122 @@ class CheckinImages(models.Model):

    event_code = models.CharField(_("event code"), max_length=255)
    cp_number = models.IntegerField(_("CP number"))

class GpsCheckin(models.Model):
    id = models.AutoField(primary_key=True)  # 明示的にidフィールドを追加
    path_order = models.IntegerField(
        null=False,
        help_text="チェックポイントの順序番号"
    )
    zekken_number = models.TextField(
        null=False,
        help_text="ゼッケン番号"
    )
    event_id = models.IntegerField(
        null=True,
        blank=True,
        help_text="イベントID"
    )
    event_code = models.TextField(
        null=False,
        help_text="イベントコード"
    )
    cp_number = models.IntegerField(
        null=True,
        blank=True,
        help_text="チェックポイント番号"
    )
    lattitude = models.FloatField(
        null=True,
        blank=True,
        help_text="緯度:写真から取得"
    )
    longitude = models.FloatField(
        null=True,
        blank=True,
        help_text="経度:写真から取得"
    )
    image_address = models.TextField(
        null=True,
        blank=True,
        help_text="チェックイン画像のパス"
    )
    image_receipt = models.TextField(
        null=True,
        blank=True,
        default=False,
        help_text="レシート画像のパス"
    )
    image_qr = models.BooleanField(
        default=False,
        help_text="QRコードスキャンフラグ"
    )
    validate_location = models.BooleanField(
        default=False,
        help_text="位置情報検証フラグ:画像認識で検証した結果"
    )
    goal_time = models.TextField(
        null=True,
        blank=True,
        help_text="ゴール時刻=ゴール時のみ使用される。画像から時刻を読み取り設定する。"
    )
    late_point = models.IntegerField(
        null=True,
        blank=True,
        help_text="遅刻ポイント:ゴールの時刻が制限時間を超えた場合、1分につき-50点が加算。"
    )
    create_at = models.DateTimeField(
        null=True,
        blank=True,
        help_text="作成日時:データの作成日時"
    )
    create_user = models.TextField(
        null=True,
        blank=True,
        help_text="作成ユーザー"
    )
    update_at = models.DateTimeField(
        null=True,
        blank=True,
        help_text="更新日時"
    )
    update_user = models.TextField(
        null=True,
        blank=True,
        help_text="更新ユーザー"
    )
    buy_flag = models.BooleanField(
        default=False,
        help_text="購入フラグ:協賛店で購入した場合、無条件でTRUEにする。"
    )
    colabo_company_memo = models.TextField(
        null=False,
        default='',
        help_text="グループコード:複数のイベントで合算する場合に使用する"
    )
    points = models.IntegerField(
        null=True,
        blank=True,
        help_text="ポイント:このチェックインによる獲得ポイント。通常ポイントと買い物ポイントは分離される。ゴールの場合には減点なども含む。"
    )

    class Meta:
        db_table = 'gps_checkins'
        indexes = [
            models.Index(fields=['zekken_number', 'event_code', 'path_order'], name='idx_zekken_event'),
            models.Index(fields=['create_at'], name='idx_create_at'),
        ]

    def __str__(self):
        return f"{self.event_code}-{self.zekken_number}-{self.path_order}-buy:{self.buy_flag}-valid:{self.validate_location}-point:{self.points}"

    def save(self, *args, **kwargs):
        # 作成時・更新時のタイムスタンプを自動設定
        from django.utils import timezone
        if not self.create_at:
            self.create_at = timezone.now()
        self.update_at = timezone.now()
        super().save(*args, **kwargs)

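For reference, a minimal sketch of how the gps_checkins table and its idx_zekken_event index might be queried when scoring a team; the helper name and the bare Sum aggregation are assumptions for illustration, not code from this commit.

from django.db.models import Sum

def team_checkin_total(zekken_number, event_code):
    # Hypothetical helper: walk one team's checkins for one event in course order.
    checkins = (GpsCheckin.objects
                .filter(zekken_number=zekken_number, event_code=event_code)
                .order_by('path_order'))                      # covered by idx_zekken_event
    total = checkins.aggregate(total=Sum('points'))['total'] or 0
    return total, list(checkins)
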
class RogUser(models.Model):
    user = models.OneToOneField(CustomUser, on_delete=models.CASCADE)

@@ -611,7 +1027,7 @@ class ShapeFileLocations(models.Model):

@receiver(pre_save, sender=Location)
def location_presave(sender, instance, *args, **kwargs):
    print("------############------------", instance.location_id)
    templocation.objects.filter(location_id=instance.location_id).delete()


@@ -649,6 +1065,8 @@ def deleteShapelocation(sender,instance,*args,**kwargs):

@receiver(post_save, sender=ShapeLayers)
def publish_data(sender, instance, created, **kwargs):
    logger.info(f"Processing ShapeLayer: {instance.name}")

    file = instance.file.path
    file_format = os.path.basename(file).split('.')[-1]
    file_name = os.path.basename(file).split('.')[0]

@@ -662,42 +1080,58 @@ def publish_data(sender, instance, created, **kwargs):

    os.remove(file)

    try:
        #logger.debug("Attempting to read shape file")
        # print("before reading the file")
        shp = glob.glob(r'{}/**/*.shp'.format(file_path), recursive=True)[0]
        #logger.info(f"Shape file read: {shp}")
        # print("this is the read file", shp)
        gdf = gpd.read_file(shp)
        crs_name = str(gdf.crs.srs)
        #logger.debug(f"CRS name: {crs_name}")
        # print(crs_name, 'crs - name')
        epsg = int(crs_name.replace('epsg:', ''))
        if epsg is None:
            epsg = 4326

        lm2 = getTempMappingforModel(instance.layerof, shp)
        #logger.info("Saving to temporary table")
        # print("### shape file is ###")
        lm2.save(strict=True, verbose=True)
        #logger.info("Save to temporary table completed")
        os.remove(shp)
    except Exception as e:
        print('######## shape file##########', e)

    try:
        logger.debug("Attempting to read CSV file")
        csv_f = glob.glob(r'{}/**/*.csv'.format(file_path), recursive=True)[0]
        remove_bom_inplace(csv_f)
        mdl = apps.get_model(app_label="rog", model_name=LAYER_CHOICES[instance.layerof - 1][1])
        print(mdl)
        print(f"#### instance.layerof - {instance.layerof}")
        #logger.debug(f"Model for layer: {mdl}")
        with open(csv_f, mode="r", encoding="utf-8") as txt_file:
            #heading = next(txt_file)
            reader = csv.reader(txt_file, delimiter=",")
            for fields in reader:
                logger.debug(f"Processing row: {fields[0]}")
                print("@@@@@@@@@@@@")
                print(fields[0])
                print("@@@@@@@@@@@@")
                if instance.layerof == 1:
                    #insertShapeLayerLocation(instance.name, fields)
                    #updateLocation(mdl, fields)
                    update_or_create_location(mdl, fields)
                if instance.layerof == 2:
                    updateLineTable(mdl, fields)
                if instance.layerof == 3:

@@ -706,27 +1140,102 @@ def publish_data(sender, instance, created, **kwargs):

        with open(csv_f, mode="r", encoding="utf-8") as txt_file:
            reader_2 = csv.reader(txt_file, delimiter=",")
            for fields in reader_2:
                logger.debug(f"Inserting ShapeLayerLocation: {fields[0]}")
                print("@@@@@@@@@@@@")
                print(fields[0])
                print("@@@@@@@@@@@@")
                if instance.layerof == 1:
                    insertShapeLayerLocation(instance.name, fields)

        logger.info("CSV processing completed")

    except Exception as e:
        print('######## csv file ##########', e)

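For context, the receiver above runs whenever a ShapeLayers row is saved, so an admin upload and a programmatic save both start the shapefile/CSV import. A minimal sketch of a programmatic trigger, assuming ShapeLayers exposes the name, file and layerof fields that publish_data reads; the path and name below are placeholders.

from django.core.files import File

with open('/tmp/locations.zip', 'rb') as fh:                   # placeholder archive
    layer = ShapeLayers(name='spring-event-locations',         # placeholder name
                        layerof=1)                             # 1 routes rows to update_or_create_location
    layer.file.save('locations.zip', File(fh), save=True)      # save=True emits post_save -> publish_data
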
def insertShapeLayerLocation(name, fields):
    logger.info(f"Attempting to insert ShapeFileLocations for file: {name}, location_id: {fields[0]}")
    try:
        sll = UserUploadUser(userfile=name, email=fields[0])
        sll.save()
    except Exception as e:
        logger.error(f"Error inserting ShapeFileLocations: {e}", exc_info=True)

def insertUserUploadUser(name, fields):
    try:
        with transaction.atomic():
            sll = UserUploadUser(userfile=name, email=fields[0])
            sll.save()
    except Exception as e:
        logger.error(f"Error inserting UserUploadUser: {e}", exc_info=True)

def update_or_create_location(mdl, fields):
    try:
        with transaction.atomic():
            latitude = float(fields[11]) if fields[11] and len(fields[11]) > 0 else None
            longitude = float(fields[12]) if fields[12] and len(fields[12]) > 0 else None

            geom = MultiPoint(Point(longitude, latitude)) if latitude is not None and longitude is not None else None

            defaults = {
                'sub_loc_id': fields[1] if len(fields[1]) > 0 else '',
                'cp': fields[2] if len(fields[2]) > 0 else 0,
                # その他のフィールド...
                'location_name': fields[3] if len(fields[3]) > 0 else '',
                'category': fields[4] if len(fields[4]) > 0 else '',
                'subcategory': fields[5] if len(fields[5]) > 0 else '',
                'zip': fields[6] if len(fields[6]) > 0 else '',
                'address': fields[7] if len(fields[7]) > 0 else '',
                'prefecture': fields[8] if len(fields[8]) > 0 else '',
                'area': fields[9] if len(fields[9]) > 0 else '',
                'city': fields[10] if len(fields[10]) > 0 else '',
                'latitude': latitude,
                'longitude': longitude,
                'photos': fields[13] if len(fields[13]) > 0 else '',
                'videos': fields[14] if len(fields[14]) > 0 else '',
                'webcontents': fields[15] if len(fields[15]) > 0 else '',
                'status': fields[16] if len(fields[16]) > 0 else '',
                'portal': fields[17] if len(fields[17]) > 0 else '',
                'group': fields[18] if len(fields[18]) > 0 else '',
                'phone': fields[19] if len(fields[19]) > 0 else '',
                'fax': fields[20] if len(fields[20]) > 0 else '',
                'email': fields[21] if len(fields[21]) > 0 else '',
                'facility': fields[22] if len(fields[22]) > 0 else '',
                'remark': fields[23] if len(fields[23]) > 0 else '',
                'tags': fields[24] if len(fields[24]) > 0 else '',
                'hidden_location': fields[25] if len(fields[25]) > 0 else False,
                'auto_checkin': fields[26] if len(fields[26]) > 0 else False,
                'checkin_radius': fields[27] if len(fields[27]) > 0 else 15,
                'checkin_point': fields[28] if len(fields[28]) > 0 else 10,
                'buy_point': fields[29] if len(fields[29]) > 0 else 0,
                'evaluation_value': fields[30] if len(fields[30]) > 0 else '',
                'shop_closed': fields[31] if len(fields[31]) > 0 else False,
                'shop_shutdown': fields[32] if len(fields[32]) > 0 else False,
                'opening_hours_mon': fields[33] if len(fields[33]) > 0 else '',
                'opening_hours_tue': fields[34] if len(fields[34]) > 0 else '',
                'opening_hours_wed': fields[35] if len(fields[35]) > 0 else '',
                'opening_hours_thu': fields[36] if len(fields[36]) > 0 else '',
                'opening_hours_fri': fields[37] if len(fields[37]) > 0 else '',
                'opening_hours_sat': fields[38] if len(fields[38]) > 0 else '',
                'opening_hours_sun': fields[39] if len(fields[39]) > 0 else ''
            }
            if geom:
                defaults['geom'] = geom

            obj, created = mdl.objects.update_or_create(
                location_id=int(fields[0]),
                defaults=defaults
            )

            if created:
                logger.info(f"New location created with id: {obj.location_id}")
            else:
                logger.info(f"Location updated with id: {obj.location_id}")

    except Exception as e:
        logger.error(f"Error updating or creating location: {e}", exc_info=True)

def updateLocation(mdl, fields):

@@ -734,48 +1243,54 @@ def updateLocation(mdl, fields):

    print(mdl.objects.filter(location_id=int(fields[0])))
    print(f"--- ${fields} ----")

    try:
        with transaction.atomic():
            updated = mdl.objects.filter(location_id=int(fields[0])).update(
                sub_loc_id = fields[1] if len(fields[1]) > 0 else '',
                cp = fields[2] if len(fields[2]) > 0 else 0,
                location_name = fields[3] if len(fields[3]) > 0 else '',
                category = fields[4] if len(fields[4]) > 0 else '',
                subcategory = fields[5] if len(fields[5]) > 0 else '',
                zip = fields[6] if len(fields[6]) > 0 else '',
                address = fields[7] if len(fields[7]) > 0 else '',
                prefecture = fields[8] if len(fields[8]) > 0 else '',
                area = fields[9] if len(fields[9]) > 0 else '',
                city = fields[10] if len(fields[10]) > 0 else '',
                latitude = fields[11] if len(fields[11]) > 0 else '',
                longitude = fields[12] if len(fields[12]) > 0 else '',
                photos = fields[13] if len(fields[13]) > 0 else '',
                videos = fields[14] if len(fields[14]) > 0 else '',
                webcontents = fields[15] if len(fields[15]) > 0 else '',
                status = fields[16] if len(fields[16]) > 0 else '',
                portal = fields[17] if len(fields[17]) > 0 else '',
                group = fields[18] if len(fields[18]) > 0 else '',
                phone = fields[19] if len(fields[19]) > 0 else '',
                fax = fields[20] if len(fields[20]) > 0 else '',
                email = fields[21] if len(fields[21]) > 0 else '',
                facility = fields[22] if len(fields[22]) > 0 else '',
                remark = fields[23] if len(fields[23]) > 0 else '',
                tags = fields[24] if len(fields[24]) > 0 else '',
                hidden_location = fields[25] if len(fields[25]) > 0 else False,
                auto_checkin = fields[26] if len(fields[26]) > 0 else False,
                checkin_radius = fields[27] if len(fields[27]) > 0 else 15,
                checkin_point = fields[28] if len(fields[28]) > 0 else 10,
                buy_point = fields[29] if len(fields[29]) > 0 else 0,
                evaluation_value = fields[30] if len(fields[30]) > 0 else '',
                shop_closed = fields[31] if len(fields[31]) > 0 else False,
                shop_shutdown = fields[32] if len(fields[32]) > 0 else False,
                opening_hours_mon = fields[33] if len(fields[33]) > 0 else '',
                opening_hours_tue = fields[34] if len(fields[34]) > 0 else '',
                opening_hours_wed = fields[35] if len(fields[35]) > 0 else '',
                opening_hours_thu = fields[36] if len(fields[36]) > 0 else '',
                opening_hours_fri = fields[37] if len(fields[37]) > 0 else '',
                opening_hours_sat = fields[38] if len(fields[38]) > 0 else '',
                opening_hours_sun = fields[39] if len(fields[39]) > 0 else ''
            )
            logger.info(f"TempLocation updated successfully. Rows affected: {updated}")

    except Exception as e:
        logger.error(f"Error updating TempLocation: {e}", exc_info=True)

def updateLineTable(mdl, fields):
    print(f"Updating {fields[0]} - {fields[1]}")

@@ -835,7 +1350,10 @@ def updatePolygonTable(mdl, fields):

def createUser(fields):
    with transaction.atomic():
        other_fields = {}  # extra kwargs passed through to create_user
        other_fields.setdefault('event_code', fields[1])
        other_fields.setdefault('team_name', fields[2])
        other_fields.setdefault('group', fields[3])
        user = CustomUser.objects.create_user(email=fields[0], password=fields[4], **other_fields)
        user.is_superuser = False
        user.is_staff = False
        user.save()

@@ -855,6 +1373,8 @@ def deleteUserUploadUser(sender,instance,*args,**kwargs):

@receiver(post_save, sender=UserUpload)
def publish_data(sender, instance, created, **kwargs):
    logger.info(f"Processing UserUpload: {instance.name}")

    file = instance.file.path
    #os.remove(file)

25  rog/permissions.py  (new file)

@@ -0,0 +1,25 @@

from rest_framework import permissions
from .models import Team, Member


class IsMemberOrTeamOwner(permissions.BasePermission):
    def has_object_permission(self, request, view, obj):
        # Check if user is a member of the team or the team owner
        return obj.team.members.filter(user=request.user).exists() or request.user == obj.team.owner


class IsTeamOwner(permissions.BasePermission):
    def has_object_permission(self, request, view, obj):
        if isinstance(obj, Team):
            return obj.owner == request.user
        elif isinstance(obj, Member):
            return obj.team.owner == request.user
        return False


class IsTeamOwnerOrMember(permissions.BasePermission):
    def has_permission(self, request, view):
        team_id = view.kwargs.get('team_id')
        if not team_id:
            return False
        team = Team.objects.get(id=team_id)
        return team.owner == request.user or team.members.filter(user=request.user).exists()

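For reference, a minimal sketch of wiring one of these permission classes into a DRF view; the view and serializer below are placeholders, only permission_classes and IsTeamOwner come from this diff.

from rest_framework import generics, serializers
from .models import Team
from .permissions import IsTeamOwner

class TeamSerializer(serializers.ModelSerializer):            # placeholder serializer
    class Meta:
        model = Team
        fields = ['id', 'team_name', 'category']

class TeamDetailView(generics.RetrieveUpdateDestroyAPIView):  # placeholder view
    queryset = Team.objects.all()
    serializer_class = TeamSerializer
    permission_classes = [IsTeamOwner]  # object-level check: only the team owner may modify
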
Some files were not shown because too many files have changed in this diff.