Compare commits

2 Commits

Author SHA1 Message Date
1c7a6a1f5c Added APIs for team and members 2024-07-20 11:15:33 +09:00
f0114ef33c setup for Akiras Laptop 2024-04-22 00:08:34 +09:00
353 changed files with 998 additions and 122182 deletions

.DS_Store (vendored): binary file not shown.

.gitignore (vendored): 5 changes

@ -157,10 +157,6 @@ dmypy.json
# Cython debug symbols
cython_debug/
# migration files
rog/migrations/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
@ -169,4 +165,3 @@ rog/migrations/
#.idea/
# End of https://www.toptal.com/developers/gitignore/api/django
.DS_Store


@ -1,5 +1,8 @@
# FROM python:3.9.9-slim-buster
FROM osgeo/gdal:ubuntu-small-3.4.0
# Akira
FROM python:3.10
FROM ubuntu:latest
WORKDIR /app
@ -14,6 +17,13 @@ ARG TZ Asia/Tokyo \
RUN apt-get update -y
# Install the required libraries (by akira)
RUN apt-get update && \
apt-get install -y software-properties-common && \
add-apt-repository ppa:ubuntugis/ppa && \
apt-get update && \
apt-get install -y gdal-bin libgdal-dev python3-gdal
# Install GDAL dependencies
RUN apt-get install -y libgdal-dev g++ --no-install-recommends && \
apt-get clean -y
@ -23,7 +33,7 @@ ENV CPLUS_INCLUDE_PATH=/usr/include/gdal
ENV C_INCLUDE_PATH=/usr/include/gdal
RUN apt-get update \
&& apt-get -y install netcat gcc postgresql \
&& apt-get -y install netcat-openbsd gcc postgresql \
&& apt-get clean
RUN apt-get update \
@ -39,7 +49,6 @@ RUN apt-get update && apt-get install -y \
python3-pip
RUN pip install --upgrade pip
RUN pip install -e ./SumasenLibs/excel_lib
RUN apt-get update
@ -52,15 +61,14 @@ RUN pip install gunicorn
#RUN ["chmod", "+x", "wait-for.sh"]
# Add xlsxwriter
RUN pip install -r requirements.txt \
&& pip install django-cors-headers \
&& pip install xlsxwriter gunicorn
RUN pip install -r requirements.txt
COPY . /app
# Collect static files
RUN python manage.py collectstatic --noinput
RUN python3 manage.py collectstatic --noinput
# Use Gunicorn as the entrypoint
CMD ["gunicorn", "config.wsgi:application", "--bind", "0.0.0.0:8000"]


@ -1,35 +0,0 @@
FROM nginx:alpine
# Create necessary directories and set permissions
RUN mkdir -p /usr/share/nginx/html \
&& mkdir -p /var/log/nginx \
&& mkdir -p /var/cache/nginx \
&& chown -R nginx:nginx /usr/share/nginx/html \
&& chown -R nginx:nginx /var/log/nginx \
&& chown -R nginx:nginx /var/cache/nginx \
&& chmod -R 755 /usr/share/nginx/html
# Copy files - notice the change in the source path
COPY supervisor/html/* /usr/share/nginx/html/
COPY supervisor/nginx/default.conf /etc/nginx/conf.d/default.conf
# Create the media directory
RUN mkdir -p /app/media && chmod 755 /app/media
# Copy static files
#COPY ./static /usr/share/nginx/html/static
# Set permissions
RUN chown -R nginx:nginx /app/media
# Set final permissions
RUN chown -R nginx:nginx /usr/share/nginx/html \
&& chmod -R 755 /usr/share/nginx/html \
&& touch /var/log/nginx/access.log \
&& touch /var/log/nginx/error.log \
&& chown -R nginx:nginx /var/log/nginx \
&& chown -R nginx:nginx /etc/nginx/conf.d
EXPOSE 80
CMD ["nginx", "-g", "daemon off;"]

File diff suppressed because it is too large.

File diff suppressed because it is too large.

README.jpn (new file): 6 additions

@ -0,0 +1,6 @@
Deployment:
You can deploy it by just running:
docker-compose up -d
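To check that the containers came up, you can then run (the api service name is taken from docker-compose.yml in this repository):
docker-compose ps
docker-compose logs -f api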


@ -1,19 +0,0 @@
# SumasenExcel Library
A simple Python library for working with Excel files.
## Installation
```bash
pip install -e .
```
## Usage
```python
from sumaexcel import SumasenExcel
excel = SumasenExcel("path/to/file.xlsx")
data = excel.read_excel()
```
## License
MIT License
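Note: the `SumasenExcel` class defined in `sumaexcel/excel.py` in this diff is initialized with a document name, a variables dictionary, and a docbase directory rather than a file path. A minimal sketch based on `testdata/test.py` (the credentials are the sample values used there):

```python
from sumaexcel import SumasenExcel

variables = {
    "zekken_number": "5033",
    "db": "rogdb",
    "username": "admin",
    "password": "admin123456",
    "host": "localhost",
    "port": "5432",
}
excel = SumasenExcel(document="test", variables=variables, docbase="./testdata")
ret = excel.make_report(variables=variables)
if ret["status"]:
    print("Report written to", ret["filepath"])
```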


@ -1,20 +0,0 @@
version: '3.8'
services:
python:
build:
context: ..
dockerfile: docker/python/Dockerfile
volumes:
- ..:/app
environment:
- PYTHONPATH=/app
- POSTGRES_DB=rogdb
- POSTGRES_USER=admin
- POSTGRES_PASSWORD=admin123456
- POSTGRES_HOST=localhost
- POSTGRES_PORT=5432
network_mode: "host"
tty: true
container_name: python_container # explicitly specify the container name


@ -1,26 +0,0 @@
FROM python:3.9-slim
WORKDIR /app
# Update GPG keys and install packages
RUN apt-get update --allow-insecure-repositories && \
apt-get install -y --allow-unauthenticated python3-dev libpq-dev postgresql-client && \
rm -rf /var/lib/apt/lists/*
# Install Python packages
COPY requirements.txt .
COPY setup.py .
COPY README.md .
COPY . .
RUN pip install --no-cache-dir -r requirements.txt
# Install development packages
RUN pip install --no-cache-dir --upgrade pip \
pytest \
pytest-cov \
flake8
# Install the package
RUN pip install -e .


@ -1,5 +0,0 @@
openpyxl>=3.0.0
pandas>=1.0.0
pillow>=8.0.0
configparser>=5.0.0
psycopg2-binary==2.9.9


@ -1,25 +0,0 @@
# setup.py
from setuptools import setup, find_packages
setup(
name="sumaexcel",
version="0.1.0",
packages=find_packages(),
install_requires=[
"openpyxl>=3.0.0",
"pandas>=1.0.0"
],
author="Akira Miyata",
author_email="akira.miyata@sumasen.net",
description="Excel handling library",
long_description=open("README.md").read(),
long_description_content_type="text/markdown",
url="https://github.com/akiramiyata/sumaexcel",
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires=">=3.6",
)


@ -1,4 +0,0 @@
from .sumaexcel import SumasenExcel
__version__ = "0.1.0"
__all__ = ["SumasenExcel"]


@ -1,102 +0,0 @@
# sumaexcel/conditional.py
from typing import Dict, Any, List, Union
from openpyxl.formatting.rule import Rule, ColorScaleRule, DataBarRule, IconSetRule
from openpyxl.styles import PatternFill, Font, Border, Side
from openpyxl.styles.differential import DifferentialStyle
from openpyxl.worksheet.worksheet import Worksheet
class ConditionalFormatManager:
"""Handle conditional formatting in Excel"""
def __init__(self, worksheet: Worksheet):
self.worksheet = worksheet
def add_color_scale(
self,
cell_range: str,
min_color: str = "00FF0000", # Red
mid_color: str = "00FFFF00", # Yellow
max_color: str = "0000FF00" # Green
) -> None:
"""Add color scale conditional formatting"""
rule = ColorScaleRule(
start_type='min',
start_color=min_color,
mid_type='percentile',
mid_value=50,
mid_color=mid_color,
end_type='max',
end_color=max_color
)
self.worksheet.conditional_formatting.add(cell_range, rule)
def add_data_bar(
self,
cell_range: str,
color: str = "000000FF", # Blue
show_value: bool = True
) -> None:
"""Add data bar conditional formatting"""
rule = DataBarRule(
start_type='min',
end_type='max',
color=color,
showValue=show_value
)
self.worksheet.conditional_formatting.add(cell_range, rule)
def add_icon_set(
self,
cell_range: str,
icon_style: str = '3Arrows', # '3Arrows', '3TrafficLights', '3Signs'
reverse_icons: bool = False
) -> None:
"""Add icon set conditional formatting"""
rule = IconSetRule(
icon_style=icon_style,
type='percent',
values=[0, 33, 67],
reverse=reverse_icons
)
self.worksheet.conditional_formatting.add(cell_range, rule)
def add_custom_rule(
self,
cell_range: str,
rule_type: str,
formula: str,
fill_color: str = None,
font_color: str = None,
bold: bool = None,
border_style: str = None,
border_color: str = None
) -> None:
"""Add custom conditional formatting rule"""
# Build a DifferentialStyle; openpyxl's Rule expects a DifferentialStyle, not a plain dict
fill = PatternFill(start_color=fill_color, end_color=fill_color) if fill_color else None
font = Font(color=font_color, bold=bold) if (font_color or bold is not None) else None
border = None
if border_style and border_color:
side = Side(style=border_style, color=border_color)
border = Border(left=side, right=side, top=side, bottom=side)
dxf = DifferentialStyle(fill=fill, font=font, border=border)
rule = Rule(type=rule_type, formula=[formula], dxf=dxf)
self.worksheet.conditional_formatting.add(cell_range, rule)
def copy_conditional_format(
self,
source_range: str,
target_range: str
) -> None:
"""Copy conditional formatting from one range to another"""
source_rules = self.worksheet.conditional_formatting.get(source_range)
if source_rules:
for rule in source_rules:
self.worksheet.conditional_formatting.add(target_range, rule)
def clear_conditional_format(
self,
cell_range: str
) -> None:
"""Clear conditional formatting from specified range"""
self.worksheet.conditional_formatting.delete(cell_range)
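A minimal usage sketch for the class above (assumed names: ws is an already-loaded openpyxl worksheet; the cell ranges are placeholders):
cfm = ConditionalFormatManager(ws)
cfm.add_color_scale("B2:B20")                  # red/yellow/green colour scale with the default colours
cfm.add_data_bar("C2:C20", color="000000FF")   # blue data bars, values still shown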


@ -1,166 +0,0 @@
# config_handler.py
#
import configparser
import os
from typing import Any, Dict, Optional
import configparser
import os
import re
from typing import Any, Dict, Optional
class ConfigHandler:
"""変数置換機能付きの設定ファイル管理クラス"""
def __init__(self, ini_file_path: str, variables: Dict[str, str] = None):
"""
Args:
ini_file_path (str): Path to the INI file
variables (Dict[str, str], optional): Dictionary of variables used for substitution
"""
self.ini_file_path = ini_file_path
self.variables = variables or {}
self.config = configparser.ConfigParser()
self.load_config()
def _substitute_variables(self, text: str) -> str:
"""
テキスト内の変数を置換する
Args:
text (str): 置換対象のテキスト
Returns:
str: 置換後のテキスト
"""
# ${var}形式の変数を置換
pattern1 = r'\${([^}]+)}'
# [var]形式の変数を置換
pattern2 = r'\[([^\]]+)\]'
def replace_var(match):
var_name = match.group(1)
return self.variables.get(var_name, match.group(0))
# Run the substitution for both patterns
text = re.sub(pattern1, replace_var, text)
text = re.sub(pattern2, replace_var, text)
return text
def load_config(self) -> None:
"""設定ファイルを読み込み、変数を置換する"""
if not os.path.exists(self.ini_file_path):
raise FileNotFoundError(f"設定ファイルが見つかりません: {self.ini_file_path}")
# まず生のテキストとして読み込む
with open(self.ini_file_path, 'r', encoding='utf-8') as f:
content = f.read()
# 変数を置換
substituted_content = self._substitute_variables(content)
# 置換済みの内容を StringIO 経由で configparser に読み込ませる
from io import StringIO
self.config.read_file(StringIO(substituted_content))
def get_value(self, section: str, key: str, default: Any = None) -> Optional[str]:
"""
Get the value of the given key in the given section
Args:
section (str): Section name
key (str): Key name
default (Any): Default value (optional)
Returns:
Optional[str]: The value, or the default if it does not exist
"""
try:
return self.config[section][key]
except KeyError:
return default
def get_section(self, section: str) -> Dict[str, str]:
"""
Get all settings in the specified section
Args:
section (str): Section name
Returns:
Dict[str, str]: The section's settings as a dictionary
"""
try:
return dict(self.config[section])
except KeyError:
return {}
def get_all_sections(self) -> Dict[str, Dict[str, str]]:
"""
Get the settings of all sections
Returns:
Dict[str, Dict[str, str]]: All sections' settings as a nested dictionary
"""
return {section: dict(self.config[section]) for section in self.config.sections()}
# Usage example
if __name__ == "__main__":
# Create a sample INI file
sample_ini = """
[Database]
host = localhost
port = 5432
database = mydb
user = admin
password = secret
[Application]
debug = true
log_level = INFO
max_connections = 100
[Paths]
data_dir = /var/data
log_file = /var/log/app.log
"""
# Write the sample INI file
with open('config.ini', 'w', encoding='utf-8') as f:
f.write(sample_ini)
# Load and use the configuration
config = ConfigHandler('config.ini')
# Get specific values
db_host = config.get_value('Database', 'host')
db_port = config.get_value('Database', 'port')
print(f"Database connection: {db_host}:{db_port}")
# Get an entire section
db_config = config.get_section('Database')
print("Database configuration:", db_config)
# Get all settings
all_config = config.get_all_sections()
print("All configurations:", all_config)
# Sample:
# # Get values from the configuration file
# config = ConfigHandler('config.ini')
#
# # Get the database settings
# db_host = config.get_value('Database', 'host')
# db_port = config.get_value('Database', 'port')
# db_name = config.get_value('Database', 'database')
#
# # Get the application settings
# debug_mode = config.get_value('Application', 'debug')
# log_level = config.get_value('Application', 'log_level')
#


@ -1,77 +0,0 @@
# sumaexcel/image.py
from typing import Optional, Tuple, Union
from pathlib import Path
import os
from PIL import Image
from openpyxl.drawing.image import Image as XLImage
from openpyxl.worksheet.worksheet import Worksheet
class ImageManager:
"""Handle image operations in Excel"""
def __init__(self, worksheet: Worksheet):
self.worksheet = worksheet
self.temp_dir = Path("/tmp/sumaexcel_images")
self.temp_dir.mkdir(parents=True, exist_ok=True)
def add_image(
self,
image_path: Union[str, Path],
cell_coordinates: Tuple[int, int],
size: Optional[Tuple[int, int]] = None,
keep_aspect_ratio: bool = True,
anchor_type: str = 'absolute'
) -> None:
"""Add image to worksheet at specified position"""
# Convert path to Path object
image_path = Path(image_path)
# Open and process image
with Image.open(image_path) as img:
# Get original size
orig_width, orig_height = img.size
# Calculate new size if specified
if size:
target_width, target_height = size
if keep_aspect_ratio:
ratio = min(target_width/orig_width, target_height/orig_height)
target_width = int(orig_width * ratio)
target_height = int(orig_height * ratio)
# Resize image
img = img.resize((target_width, target_height), Image.LANCZOS)
# Save temporary resized image
temp_path = self.temp_dir / f"temp_{image_path.name}"
img.save(temp_path)
image_path = temp_path
# Create Excel image object
excel_image = XLImage(str(image_path))
# Add to worksheet
self.worksheet.add_image(excel_image, anchor=f'{cell_coordinates[0]}{cell_coordinates[1]}')
def add_image_absolute(
self,
image_path: Union[str, Path],
position: Tuple[int, int],
size: Optional[Tuple[int, int]] = None
) -> None:
"""Add image with absolute positioning"""
excel_image = XLImage(str(image_path))
if size:
excel_image.width, excel_image.height = size
excel_image.anchor = 'absolute'
excel_image.top, excel_image.left = position
self.worksheet.add_image(excel_image)
def cleanup(self) -> None:
"""Clean up temporary files"""
for file in self.temp_dir.glob("temp_*"):
file.unlink()
def __del__(self):
"""Cleanup on object destruction"""
self.cleanup()


@ -1,96 +0,0 @@
# sumaexcel/merge.py
from typing import List, Tuple, Dict
from openpyxl.worksheet.worksheet import Worksheet
from openpyxl.worksheet.merge import MergedCellRange
class MergeManager:
"""Handle merge cell operations"""
def __init__(self, worksheet: Worksheet):
self.worksheet = worksheet
self._merged_ranges: List[MergedCellRange] = []
self._load_merged_ranges()
def _load_merged_ranges(self) -> None:
"""Load existing merged ranges from worksheet"""
self._merged_ranges = list(self.worksheet.merged_cells.ranges)
def merge_cells(
self,
start_row: int,
start_col: int,
end_row: int,
end_col: int
) -> None:
"""Merge cells in specified range"""
self.worksheet.merge_cells(
start_row=start_row,
start_column=start_col,
end_row=end_row,
end_column=end_col
)
self._load_merged_ranges()
def unmerge_cells(
self,
start_row: int,
start_col: int,
end_row: int,
end_col: int
) -> None:
"""Unmerge cells in specified range"""
self.worksheet.unmerge_cells(
start_row=start_row,
start_column=start_col,
end_row=end_row,
end_column=end_col
)
self._load_merged_ranges()
def copy_merged_cells(
self,
source_range: Tuple[int, int, int, int],
target_start_row: int,
target_start_col: int
) -> None:
"""Copy merged cells from source range to target position"""
src_row1, src_col1, src_row2, src_col2 = source_range
row_offset = target_start_row - src_row1
col_offset = target_start_col - src_col1
for merged_range in self._merged_ranges:
if (src_row1 <= merged_range.min_row <= src_row2 and
src_col1 <= merged_range.min_col <= src_col2):
new_row1 = merged_range.min_row + row_offset
new_col1 = merged_range.min_col + col_offset
new_row2 = merged_range.max_row + row_offset
new_col2 = merged_range.max_col + col_offset
self.merge_cells(new_row1, new_col1, new_row2, new_col2)
def shift_merged_cells(
self,
start_row: int,
rows: int = 0,
cols: int = 0
) -> None:
"""Shift merged cells by specified number of rows and columns"""
new_ranges = []
for merged_range in self._merged_ranges:
if merged_range.min_row >= start_row:
new_row1 = merged_range.min_row + rows
new_col1 = merged_range.min_col + cols
new_row2 = merged_range.max_row + rows
new_col2 = merged_range.max_col + cols
self.worksheet.unmerge_cells(
start_row=merged_range.min_row,
start_column=merged_range.min_col,
end_row=merged_range.max_row,
end_column=merged_range.max_col
)
new_ranges.append((new_row1, new_col1, new_row2, new_col2))
for new_range in new_ranges:
self.merge_cells(*new_range)
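A minimal usage sketch for the class above (assumed names: ws is an already-loaded openpyxl worksheet; the coordinates are placeholders):
mm = MergeManager(ws)
mm.merge_cells(start_row=1, start_col=1, end_row=1, end_col=4)               # merge A1:D1
mm.copy_merged_cells((1, 1, 1, 4), target_start_row=10, target_start_col=1)  # repeat the same merge starting at row 10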


@ -1,148 +0,0 @@
# sumaexcel/page.py
from typing import Optional, Dict, Any, Union
from openpyxl.worksheet.worksheet import Worksheet
from openpyxl.worksheet.page import PageMargins, PrintPageSetup
# sumaexcel/page.py (continued)
class PageManager:
"""Handle page setup and header/footer settings"""
def __init__(self, worksheet: Worksheet):
self.worksheet = worksheet
def set_page_setup(
self,
orientation: str = 'portrait',
paper_size: int = 9, # A4
fit_to_height: Optional[int] = None,
fit_to_width: Optional[int] = None,
scale: Optional[int] = None
) -> None:
"""Configure page setup
Args:
orientation: 'portrait' or 'landscape'
paper_size: paper size (e.g., 9 for A4)
fit_to_height: number of pages tall
fit_to_width: number of pages wide
scale: zoom scale (1-400)
"""
setup = PrintPageSetup(
orientation=orientation,
paperSize=paper_size,
scale=scale,
fitToHeight=fit_to_height,
fitToWidth=fit_to_width
)
self.worksheet.page_setup = setup
def set_margins(
self,
left: float = 0.7,
right: float = 0.7,
top: float = 0.75,
bottom: float = 0.75,
header: float = 0.3,
footer: float = 0.3
) -> None:
"""Set page margins in inches"""
margins = PageMargins(
left=left,
right=right,
top=top,
bottom=bottom,
header=header,
footer=footer
)
self.worksheet.page_margins = margins
def set_header_footer(
self,
odd_header: Optional[str] = None,
odd_footer: Optional[str] = None,
even_header: Optional[str] = None,
even_footer: Optional[str] = None,
first_header: Optional[str] = None,
first_footer: Optional[str] = None,
different_first: bool = False,
different_odd_even: bool = False
) -> None:
"""Set headers and footers
Format codes:
- &P: Page number
- &N: Total pages
- &D: Date
- &T: Time
- &[Tab]: Sheet name
- &[Path]: File path
- &[File]: File name
"""
self.worksheet.oddHeader.left = odd_header or ""
self.worksheet.oddFooter.left = odd_footer or ""
if different_odd_even:
self.worksheet.evenHeader.left = even_header or ""
self.worksheet.evenFooter.left = even_footer or ""
if different_first:
self.worksheet.firstHeader.left = first_header or ""
self.worksheet.firstFooter.left = first_footer or ""
self.worksheet.differentFirst = different_first
self.worksheet.differentOddEven = different_odd_even
def set_print_area(self, range_string: str) -> None:
"""Set print area
Args:
range_string: Cell range in A1 notation (e.g., 'A1:H42')
"""
self.worksheet.print_area = range_string
def set_print_title_rows(self, rows: str) -> None:
"""Set rows to repeat at top of each page
Args:
rows: Row range (e.g., '1:3')
"""
self.worksheet.print_title_rows = rows
def set_print_title_columns(self, cols: str) -> None:
"""Set columns to repeat at left of each page
Args:
cols: Column range (e.g., 'A:B')
"""
self.worksheet.print_title_cols = cols
def set_print_options(
self,
grid_lines: bool = False,
horizontal_centered: bool = False,
vertical_centered: bool = False,
headers: bool = False
) -> None:
"""Set print options"""
self.worksheet.print_gridlines = grid_lines
self.worksheet.print_options.horizontalCentered = horizontal_centered
self.worksheet.print_options.verticalCentered = vertical_centered
self.worksheet.print_options.headers = headers
class PaperSizes:
"""Standard paper size constants"""
LETTER = 1
LETTER_SMALL = 2
TABLOID = 3
LEDGER = 4
LEGAL = 5
STATEMENT = 6
EXECUTIVE = 7
A3 = 8
A4 = 9
A4_SMALL = 10
A5 = 11
B4 = 12
B5 = 13
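A minimal usage sketch for the two classes above (assumed names: ws is an already-loaded openpyxl worksheet; the ranges and header text are placeholders):
pm = PageManager(ws)
pm.set_page_setup(orientation='landscape', paper_size=PaperSizes.A4)
pm.set_header_footer(odd_header="&[Tab]", odd_footer="Page &P of &N")
pm.set_print_area('A1:H42')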


@ -1,115 +0,0 @@
# sumaexcel/styles.py
from typing import Dict, Any, Optional, Union
from openpyxl.styles import (
Font, PatternFill, Alignment, Border, Side,
NamedStyle, Protection, Color
)
from openpyxl.styles.differential import DifferentialStyle
from openpyxl.formatting.rule import Rule
from openpyxl.worksheet.worksheet import Worksheet
class StyleManager:
"""Excel style management class"""
@staticmethod
def create_font(
name: str = "Arial",
size: int = 11,
bold: bool = False,
italic: bool = False,
color: str = "000000",
underline: str = None,
strike: bool = False
) -> Font:
"""Create a Font object with specified parameters"""
return Font(
name=name,
size=size,
bold=bold,
italic=italic,
color=color,
underline=underline,
strike=strike
)
@staticmethod
def create_fill(
fill_type: str = "solid",
start_color: str = "FFFFFF",
end_color: str = None
) -> PatternFill:
"""Create a PatternFill object"""
return PatternFill(
fill_type=fill_type,
start_color=start_color,
end_color=end_color or start_color
)
@staticmethod
def create_border(
style: str = "thin",
color: str = "000000"
) -> Border:
"""Create a Border object"""
side = Side(style=style, color=color)
return Border(
left=side,
right=side,
top=side,
bottom=side
)
@staticmethod
def create_alignment(
horizontal: str = "general",
vertical: str = "bottom",
wrap_text: bool = False,
shrink_to_fit: bool = False,
indent: int = 0
) -> Alignment:
"""Create an Alignment object"""
return Alignment(
horizontal=horizontal,
vertical=vertical,
wrap_text=wrap_text,
shrink_to_fit=shrink_to_fit,
indent=indent
)
@staticmethod
def copy_style(source_cell: Any, target_cell: Any) -> None:
"""Copy all style properties from source cell to target cell"""
target_cell.font = Font(
name=source_cell.font.name,
size=source_cell.font.size,
bold=source_cell.font.bold,
italic=source_cell.font.italic,
color=source_cell.font.color,
underline=source_cell.font.underline,
strike=source_cell.font.strike
)
if source_cell.fill.patternType is not None:
target_cell.fill = PatternFill(
fill_type=source_cell.fill.patternType,
start_color=source_cell.fill.start_color.rgb,
end_color=source_cell.fill.end_color.rgb
)
target_cell.border = Border(
left=source_cell.border.left,
right=source_cell.border.right,
top=source_cell.border.top,
bottom=source_cell.border.bottom
)
target_cell.alignment = Alignment(
horizontal=source_cell.alignment.horizontal,
vertical=source_cell.alignment.vertical,
wrap_text=source_cell.alignment.wrap_text,
shrink_to_fit=source_cell.alignment.shrink_to_fit,
indent=source_cell.alignment.indent
)
if source_cell.number_format:
target_cell.number_format = source_cell.number_format
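A minimal usage sketch for the class above (assumed names: ws is an already-loaded openpyxl worksheet; cell A1 and the colours are placeholders):
cell = ws["A1"]
cell.font = StyleManager.create_font(bold=True, size=12)
cell.fill = StyleManager.create_fill(start_color="FFFF00")
cell.border = StyleManager.create_border(style="thin")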


@ -1,628 +0,0 @@
# sumaexcel/excel.py
import openpyxl
from openpyxl.styles import Font, PatternFill, Alignment, Border, Side
from openpyxl.utils import get_column_letter
import pandas as pd
from typing import Optional, Dict, List, Union, Any, Tuple
import os
import shutil
from datetime import datetime
from pathlib import Path
# add the psycopg2 imports
import psycopg2
import psycopg2.extras
from copy import copy
from openpyxl.utils import range_boundaries
from .config_handler import ConfigHandler # loads the ini file
#from .styles import StyleManager
#from .merge import MergeManager
#from .image import ImageManager
#from .conditional import ConditionalFormatManager
from .page import PageManager, PaperSizes
import logging
logging.basicConfig(
level=logging.INFO, # show logs at INFO level and above
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
class SumasenExcel:
"""Enhanced Excel handling class with extended functionality"""
def __init__(self, document: str, variables: Dict[str, Any],
docbase: str = "./docbase") -> None:
"""Initialize the Excel document with basic settings
Args:
document: Document name
variables: Variables for the document
docbase: Base directory for documents (default: "./docbase")
Note:
__init__ cannot return a status dict; errors are logged instead
"""
self.debug = True
self.workbook = None
self.template_filepath = None
self.output_filepath = None
self.current_sheet = None
self.dbname = None
self.user = None
self.password = None
self.host = None
self.port = None
self._style_manager = None
self._merge_manager = None
self._image_manager = None
self._conditional_manager = None
self._page_manager = None
try:
# document base を設定
self.docpath = docbase
if not os.path.exists(docbase):
logging.error(f"Document base directory not found: {docbase}")
# ini fileをロード
self.ini_file_path = f"{docbase}/{document}.ini"
self.conf = ConfigHandler(self.ini_file_path, variables)
if not os.path.exists(self.ini_file_path):
logging.error(f"INI file not found: {self.ini_file_path}")
# basic section をロード
basic = self.conf.get_section("basic")
if not basic:
logging.error(f"Basic section not found in INI file: {self.ini_file_path}")
self.basic = basic
# basicセクションから必要なパラメータを取得
template_file = basic.get("template_file")
if not template_file:
logging.error("template_file not found in basic section")
doc_file = basic.get("doc_file")
if not doc_file:
logging.error("doc_file not found in basic section")
self.maxcol = basic.get("maxcol")
if not self.maxcol:
self.maxcol = 100 # デフォルト値を設定
logging.warning("maxcol not found in basic section. Defaulting to 100")
else:
self.maxcol = int(self.maxcol)
sections = basic.get("sections")
if not sections:
logging.error("sections not found in basic section")
# セクションをリストに変換
self.section_list = [s.strip() for s in sections.split(",")]
if not self.section_list:
logging.error("sections not found in basic section")
# 出力ファイルパスを設定
self.output_filepath = f"{self.docpath}/{doc_file}"
# 新規のExcelワークブックを作成
self.workbook = openpyxl.Workbook()
# デフォルトで作成されるシートを削除
self.workbook.remove(self.workbook.active)
# テンプレートワークブックをロード
self.template_filepath = f"{self.docpath}/{template_file}"
if not os.path.exists(self.template_filepath):
logging.error(f"Template file not found: {self.template_filepath}")
self.template_workbook = openpyxl.load_workbook(self.template_filepath)
self.template_sheet = self.template_workbook.active
except Exception as e:
logging.error(f"Error initializing Excel document: {str(e)}")
def make_report(self,variables: Dict[str, Any]):
"""レポートを生成する"""
logging.info("make_report step-1")
try:
ret_status = True
# セクションごとに処理を実行(ワークシート単位)
for section in self.section_list:
ret = self.proceed_section(section,variables)
if ret["status"] == False:
message = ret.get("message", "No message provided")
return {"status": False, "message": f"Fail generating section: {section}...{message}"}
# 生成したワークブックを保存
self.workbook.save(self.output_filepath)
return {"status": True, "message": f"Report generated successfully : {self.output_filepath}", "filepath":self.output_filepath}
except Exception as e:
return {"status": False, "message": f"Exception in make_report: Error generating report: {str(e)}"}
def proceed_section(self, section: str, variables: Dict[str, Any]):
print(f"make_report.proceed_section step-1:section={section}")
try:
# セクションの設定を取得
section_config = self.conf.get_section(section)
# セクションが存在しない場合はスキップ
if not section_config:
return {"status": False, "message": f"Error no section found: {section}"}
# テンプレートシートをコピー
template_sheet_name = section_config.get("template_sheet")
if not template_sheet_name or template_sheet_name not in self.template_workbook.sheetnames:
return {"status": False, "message": f"Error no template sheet found: {template_sheet_name}"}
# シートの名前を設定
new_sheet = self.workbook.create_sheet(title=section_config.get("sheet_name", section))
self.worksheet = new_sheet
self.dbname=variables.get('db')
self.user=variables.get('username')
self.password=variables.get('password')
self.host=variables.get('host','postgres')
self.port=variables.get('port','5432')
if not self.dbname or not self.user or not self.password or not self.host or not self.port:
return {"status": False, "message": f"Error no database connection information"}
print(f"db={self.dbname},user={self.user},pass={self.password},host={self.host},port={self.port}")
# PostgreSQLに接続
self.conn = psycopg2.connect(
dbname=self.dbname,
user=self.user,
password=self.password,
host=self.host,
port=self.port
)
#self._style_manager = StyleManager()
#self._merge_manager = MergeManager(self.current_sheet)
#self._image_manager = ImageManager(self.current_sheet)
#self._conditional_manager = ConditionalFormatManager(self.current_sheet)
#self._page_manager = PageManager(self.current_sheet)
# シートの幅を設定
fit_to_width = section_config.get("fit_to_width")
if fit_to_width:
new_sheet.sheet_view.zoomScaleNormal = float(fit_to_width)
# シートの向きを設定
orientation = section_config.get("orientation")
new_sheet.sheet_view.orientation = orientation if orientation else "portrait"
self.current_worksheet = new_sheet
# グループ定義を取得
groups = section_config.get("groups")
if not groups:
return {"status": False, "message": f"Error no group definitions found: {section}"}
# グループをリストに変換
group_list = [g.strip() for g in groups.split(",")]
if not group_list:
return {"status": False, "message": f"Error invalid group definitions found: {section}"}
# 各グループの設定を取得
for group in group_list:
section_group = f"{section}.{group}"
ret = self.proceed_group(section_group,variables)
if ret["status"] == False:
return ret
return {"status": True, "message": f"Success generating section: {section}"}
except Exception as e:
return {"status": False, "message": f"Exception in proceed_section : Error generating report: {str(e)}"}
def proceed_group(self,group:str,variables: Dict[str, Any]):
logging.info(f"make_report.proceed_group step-1:section={group}")
try:
group_config = self.conf.get_section(group)
if not group_config:
return {"status": False, "message": f"Error no group section found: {group}"}
# グループの処理パラメータを取得
group_range = group_config.get("group_range")
table = group_config.get("table_name")
where = group_config.get("where")
if not where or not table or not group_range:
return {"status": False, "message": f"Error invalid group parameters: {group_config}"}
sort = group_config.get("sort")
if not sort:
ret = self.proceed_one_record(table,where,group_range,variables)
if ret.get("status") == True:
return {"status": True, "message": f"Success generating group: {group}"}
else:
ret = self.proceed_all_records(table,where,sort,group_range,variables)
if ret.get("status") == True:
return {"status": True, "message": f"Success generating group: {group}"}
except Exception as e:
logging.error(f"Error in proceed_group: {str(e)}")
return {"status": False, "message": f"Exception in proceed_group : Error generating report: {str(e)}"}
def proceed_one_record(self,table:str,where:str,group_range:str,variables: Dict[str, Any]):
"""1レコードのデータを取得してシートの値を置き換える
Args:
table: テーブル名
where: WHERE句
group_range: 処理対象範囲
variables: DB接続情報を含む変数辞書
"""
try:
print(f"make_report.proceed_one_record step-1:table={table},where={where},group_range={group_range}")
# まずself.template_sheetの指定範囲のセルをself.current_sheetにコピーする。
self.copy_template_to_current(group_range,group_range)
print(f"step-1")
cursor = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
# SQLクエリを実行
query = f"SELECT * FROM {table} WHERE {where} LIMIT 1"
cursor.execute(query)
record = cursor.fetchone()
print(f"query={query}")
print(f"record={record}")
if record:
# group_rangeの範囲内のセルを走査
for row in self.current_worksheet:
for cell in row:
if cell.value and isinstance(cell.value, str):
# [field_name]形式の文字列を検索
import re
matches = re.findall(r'\[(.*?)\]', cell.value)
# マッチした場合、フィールド値で置換
if matches:
new_value = cell.value
for field_name in matches:
if field_name in record:
new_value = new_value.replace(
f'[{field_name}]',
str(record[field_name])
)
cell.value = new_value
cursor.close()
return {"status": True, "message": f"Success generating group: "}
except Exception as e:
logging.error(f"Error in proceed_one_record: {str(e)}")
return {"status": False, "message": f"Exception in proceed_one_record:Error generating report: {str(e)}"}
def get_column_letter(self, cell_reference):
"""
Get the column letters from a cell reference (e.g. 'A12')
"""
if isinstance(cell_reference, str):
# アルファベット部分を抽出
column = ''.join(c for c in cell_reference if c.isalpha())
if column:
return column
return 'A' # デフォルト値
def get_row_number(self,cell_reference):
"""
Get the row number from a cell reference (e.g. 'A12')
"""
if isinstance(cell_reference, str):
# 数字部分のみを抽出
digits = ''.join(c for c in cell_reference if c.isdigit())
if digits:
return int(digits)
return int(cell_reference)
def proceed_all_records(self, table: str, where: str, sort: str, group_range: str, variables: Dict[str, Any]):
"""複数レコードを取得してシートの値を置き換える
Args:
table: テーブル名
where: WHERE句
sort: ORDER BY句
group_range: 処理対象範囲
variables: DB接続情報を含む変数辞書
"""
print(f"make_report.proceed_all_record step-1:table={table},where={where},group_range={group_range}")
try:
# グループ範囲の行数を取得(セル参照対応)
if not group_range or ':' not in group_range:
raise ValueError(f"Invalid group_range format: {group_range}")
# グループ範囲の行数を取得
range_parts = group_range.split(':')
logging.info(f"Processing range_parts: {range_parts}") # デバッグ用ログ
start_row = self.get_row_number(range_parts[0].strip())
start_col = self.get_column_letter(range_parts[0].strip())
end_row = self.get_row_number(range_parts[1].strip())
end_col = self.get_column_letter(range_parts[1].strip())
if start_row > end_row:
raise ValueError(f"Invalid row range: start_row ({start_row}) > end_row ({end_row})")
template_rows = end_row - start_row + 1
cursor = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
# SQLクエリを実行
query = f"SELECT * FROM {table} WHERE {where} ORDER BY {sort}"
cursor.execute(query)
records = cursor.fetchall()
print(f"query={query}, records={len(records)}")
current_row = start_row
for record in records:
# テンプレート範囲をコピー
self.copy_template_to_current(f"{start_col}{start_row}:{end_col}{end_row}",
f"{start_col}{current_row}:{end_col}{current_row + template_rows - 1}"
)
# コピーした範囲内のセルを走査して値を置換
for row in range(current_row, current_row + template_rows):
for cell in self.current_worksheet[row]:
if cell.value and isinstance(cell.value, str):
# [field_name]形式の文字列を検索
import re
matches = re.findall(r'\[(.*?)\]', cell.value)
# マッチした場合、フィールド値で置換
if matches:
new_value = cell.value
for field_name in matches:
if field_name in record:
new_value = new_value.replace(
f'[{field_name}]',
str(record[field_name])
)
cell.value = new_value
current_row += template_rows
cursor.close()
self.conn.close()
return {"status": True, "message": "Success processing all records"}
except Exception as e:
logging.error(f"Error in proceed_all_record: {str(e)}")
return {"status": False, "message": f"Exception in proceed_all_record:Error processing records: {str(e)}"}
def copy_template_to_current(self, orig_range, target_range):
try:
print(f"orig_rage={orig_range},target_range={target_range}")
# 範囲をパースする
orig_min_col, orig_min_row, orig_max_col, orig_max_row = range_boundaries(orig_range)
target_min_col, target_min_row, target_max_col, target_max_row = range_boundaries(target_range)
print(f"min_col, min_row, max_col, max_row = {orig_min_col}, {orig_min_row}, {orig_max_col}, {orig_max_row}")
print(f"min_col, min_row, max_col, max_row = {target_min_col}, {target_min_row}, {target_max_col}, {target_max_row}")
# Get template sheet name from ini file
section_config = self.conf.get_section(self.section_list[0]) # 現在の処理中のセクション
template_sheet_name = section_config.get("template_sheet")
if not template_sheet_name:
raise ValueError("Template sheet name not found in configuration")
# Create new sheet with template name if it doesn't exist
if template_sheet_name not in self.workbook.sheetnames:
self.current_sheet = self.workbook.create_sheet(template_sheet_name)
else:
self.current_sheet = self.workbook[template_sheet_name]
# Remove default sheet if it exists
if 'Sheet' in self.workbook.sheetnames:
del self.workbook['Sheet']
# Copy column widths
for col in range(orig_min_col, orig_max_col + 1):
col_letter = get_column_letter(col)
if col_letter in self.template_sheet.column_dimensions:
self.current_sheet.column_dimensions[col_letter].width = \
self.template_sheet.column_dimensions[col_letter].width
# Copy row heights
for row in range(orig_min_row, orig_max_row + 1):
target_row = row - orig_min_row + target_min_row
if row in self.template_sheet.row_dimensions:
source_height = self.template_sheet.row_dimensions[row].height
if source_height is not None:
if target_row not in self.current_sheet.row_dimensions:
self.current_sheet.row_dimensions[target_row] = openpyxl.worksheet.dimensions.RowDimension(target_row)
self.current_sheet.row_dimensions[target_row].height = source_height
# Copy merged cells
for merged_range in self.template_sheet.merged_cells:
min_col, min_row, max_col, max_row = range_boundaries(str(merged_range))
# Check if merge range intersects with our copy range
if (min_col >= orig_min_col and max_col <= orig_max_col and
min_row >= orig_min_row and max_row <= orig_max_row):
# Calculate target merge range
target_merge_min_row = target_min_row + (min_row - orig_min_row)
target_merge_max_row = target_min_row + (max_row - orig_min_row)
target_merge_range = f"{get_column_letter(min_col)}{target_merge_min_row}:" \
f"{get_column_letter(max_col)}{target_merge_max_row}"
self.current_sheet.merge_cells(target_merge_range)
# Copy cell contents and styles
row_offset = target_min_row - orig_min_row
for row in range(orig_min_row, orig_max_row + 1):
for col in range(orig_min_col, orig_max_col + 1):
source_cell = self.template_sheet.cell(row=row, column=col)
target_cell = self.current_sheet.cell(row=row+row_offset, column=col)
# Copy value
target_cell.value = source_cell.value
# Copy styles
if source_cell.has_style:
target_cell.font = copy(source_cell.font)
target_cell.border = copy(source_cell.border)
target_cell.fill = copy(source_cell.fill)
target_cell.number_format = source_cell.number_format
target_cell.protection = copy(source_cell.protection)
target_cell.alignment = copy(source_cell.alignment)
# Copy page setup
target_page_setup = self.current_sheet.page_setup
source_page_setup = self.template_sheet.page_setup
# Copy supported page setup attributes
copyable_attrs = [
'paperSize',
'orientation',
'fitToHeight',
'fitToWidth',
'scale'
]
for attr in copyable_attrs:
try:
if hasattr(source_page_setup, attr):
setattr(target_page_setup, attr, getattr(source_page_setup, attr))
except Exception as e:
logging.warning(f"Could not copy page setup attribute {attr}: {str(e)}")
# Copy margins
target_margins = self.current_sheet.page_margins
source_margins = self.template_sheet.page_margins
margin_attrs = ['left', 'right', 'top', 'bottom', 'header', 'footer']
for attr in margin_attrs:
try:
if hasattr(source_margins, attr):
setattr(target_margins, attr, getattr(source_margins, attr))
except Exception as e:
logging.warning(f"Could not copy margin attribute {attr}: {str(e)}")
# Copy print options
target_print = self.current_sheet.print_options
source_print = self.template_sheet.print_options
print_attrs = [
'horizontalCentered',
'verticalCentered',
'gridLines',
'gridLinesSet'
]
for attr in print_attrs:
try:
if hasattr(source_print, attr):
setattr(target_print, attr, getattr(source_print, attr))
except Exception as e:
logging.warning(f"Could not copy print option {attr}: {str(e)}")
return {"status": True, "message": "Successfully copied template range"}
except Exception as e:
logging.error(f"Error in copy_template_to_current: {str(e)}")
return {"status": False, "message": f"Exception in copy_template_to_current: {str(e)}"}
# Style operations
def apply_style(
self,
cell_range: str,
font: Dict[str, Any] = None,
fill: Dict[str, Any] = None,
border: Dict[str, Any] = None,
alignment: Dict[str, Any] = None
) -> None:
"""Apply styles to cell range"""
for row in self.current_sheet[cell_range]:
for cell in row:
if font:
cell.font = self._style_manager.create_font(**font)
if fill:
cell.fill = self._style_manager.create_fill(**fill)
if border:
cell.border = self._style_manager.create_border(**border)
if alignment:
cell.alignment = self._style_manager.create_alignment(**alignment)
# Merge operations
def merge_range(
self,
start_row: int,
start_col: int,
end_row: int,
end_col: int
) -> None:
"""Merge cell range"""
self._merge_manager.merge_cells(start_row, start_col, end_row, end_col)
# Image operations
def add_image(
self,
image_path: Union[str, Path],
position: Tuple[int, int],
size: Optional[Tuple[int, int]] = None
) -> None:
"""Add image to worksheet"""
self._image_manager.add_image(image_path, position, size)
# Conditional formatting
def add_conditional_format(
self,
cell_range: str,
format_type: str,
**kwargs
) -> None:
"""Add conditional formatting"""
if format_type == 'color_scale':
self._conditional_manager.add_color_scale(cell_range, **kwargs)
elif format_type == 'data_bar':
self._conditional_manager.add_data_bar(cell_range, **kwargs)
elif format_type == 'icon_set':
self._conditional_manager.add_icon_set(cell_range, **kwargs)
elif format_type == 'custom':
self._conditional_manager.add_custom_rule(cell_range, **kwargs)
# Page setup
def setup_page(
self,
orientation: str = 'portrait',
paper_size: int = PaperSizes.A4,
margins: Dict[str, float] = None,
header_footer: Dict[str, Any] = None
) -> None:
"""Configure page setup"""
self._page_manager.set_page_setup(
orientation=orientation,
paper_size=paper_size
)
if margins:
self._page_manager.set_margins(**margins)
if header_footer:
self._page_manager.set_header_footer(**header_footer)
def cleanup(self) -> None:
"""Cleanup temporary files"""
if self._image_manager:
self._image_manager.cleanup()
def __del__(self):
"""Destructor"""
self.cleanup()


@ -1,28 +0,0 @@
from sumaexcel import SumasenExcel
import logging
# Initialization
variables = {
"zekken_number":"5033",
"event_code":"FC岐阜",
"db":"rogdb",
"username":"admin",
"password":"admin123456",
"host":"localhost",
"port":"5432"
}
excel = SumasenExcel(document="test", variables=variables, docbase="./testdata")
logging.info("Excelファイル作成 step-1")
# Initialize the sheets
ret = excel.make_report(variables=variables)
logging.info(f"Excelファイル作成 step-2 : ret={ret}")
if ret["status"]==True:
filepath=ret["filepath"]
logging.info(f"Excelファイル作成 : ret.filepath={filepath}")
else:
message = ret.get("message", "No message provided")
logging.error(f"Excelファイル作成失敗 : ret.message={message}")


@ -1,24 +0,0 @@
[basic]
template_file=certificate_template.xlsx
doc_file=certificate_[zekken_number].xlsx
sections=section1
maxcol=8
[section1]
template_sheet=certificate
groups=group1,group2
fit_to_width=1
orientation=portrait
[section1.group1]
table_name=mv_entry_details
where=zekken_number='[zekken_number]' and event_name='[event_code]'
group_range=A1:H11
[section1.group2]
table_name=gps_checkins
where=zekken_number='[zekken_number]' and event_code='[event_code]'
sort=path_order
group_range=A12:H12
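For reference, the [zekken_number]/[event_code] placeholders above are expanded by ConfigHandler, and the same [field] syntax inside template cells is replaced per database record in sumaexcel/excel.py; a simplified sketch of that substitution (the field name and values here are illustrative only):
import re

def substitute(cell_value, record):
    # replace every "[field_name]" token with the matching column value from the record
    for field_name in re.findall(r'\[(.*?)\]', cell_value):
        if field_name in record:
            cell_value = cell_value.replace(f'[{field_name}]', str(record[field_name]))
    return cell_value

print(substitute("Zekken [zekken_number]", {"zekken_number": "5033"}))  # -> "Zekken 5033"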


@ -53,14 +53,10 @@ INSTALLED_APPS = [
'leaflet',
'leaflet_admin_list',
'rog.apps.RogConfig',
'corsheaders', # added
'django_filters'
]
MIDDLEWARE = [
'corsheaders.middleware.CorsMiddleware', # keep this as close to the top as possible
'django.middleware.common.CommonMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
@ -72,47 +68,10 @@ MIDDLEWARE = [
ROOT_URLCONF = 'config.urls'
CORS_ALLOW_ALL_ORIGINS = True # development environment only
CORS_ALLOW_CREDENTIALS = True
CORS_ALLOWED_METHODS = [
'GET',
'POST',
'PUT',
'PATCH',
'DELETE',
'OPTIONS'
]
CORS_ALLOWED_HEADERS = [
'accept',
'accept-encoding',
'authorization',
'content-type',
'dnt',
'origin',
'user-agent',
'x-csrftoken',
'x-requested-with',
]
# In production, restrict the allowed origins as follows
CORS_ALLOWED_ORIGINS = [
"https://rogaining.sumasen.net",
"http://rogaining.sumasen.net",
]
# CSRF settings
CSRF_TRUSTED_ORIGINS = [
"http://rogaining.sumasen.net",
"https://rogaining.sumasen.net",
]
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'DIRS': [BASE_DIR / 'templates'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
@ -179,12 +138,10 @@ USE_TZ = True
STATIC_URL = '/static/'
#STATIC_URL = '/static2/'
#STATIC_ROOT = BASE_DIR / "static"
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATIC_ROOT = BASE_DIR / "static"
MEDIA_URL = '/media/'
#MEDIA_ROOT = BASE_DIR / "media/"
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_ROOT = BASE_DIR / "media/"
#STATICFILES_DIRS = (os.path.join(BASE_DIR, "static2"),os.path.join(BASE_DIR, "media"))
@ -219,85 +176,14 @@ REST_FRAMEWORK = {
}
#FRONTEND_URL = 'https://rogaining.intranet.sumasen.net' # change to your frontend URL as appropriate
FRONTEND_URL = 'https://rogaining.sumasen.net' # change to your frontend URL as appropriate
# With this setting, mail is not actually sent; it is written to the console instead.
# Email settings
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.outlook.com'
EMAIL_HOST = 'smtp-mail.outlook.com'
EMAIL_PORT = 587
EMAIL_USE_TLS = True
EMAIL_HOST_USER = 'rogaining@gifuai.net'
EMAIL_HOST_PASSWORD = 'ctcpy9823"x~'
DEFAULT_FROM_EMAIL = 'rogaining@gifuai.net'
EMAIL_HOST_USER = 'akira.miyata@gifuai.net'
EMAIL_HOST_PASSWORD = 'SachikoMiyata@123'
APP_DOWNLOAD_LINK = 'https://apps.apple.com/jp/app/%E5%B2%90%E9%98%9C%E3%83%8A%E3%83%93/id6444221792'
ANDROID_DOWNLOAD_LINK = 'https://play.google.com/store/apps/details?id=com.dvox.gifunavi&hl=ja'
SERVICE_NAME = '岐阜ナビ(岐阜ロゲのアプリ)'
# settings.py
DEFAULT_CHARSET = 'utf-8'
#REST_FRAMEWORK = {
# 'DEFAULT_RENDERER_CLASSES': [
# 'rest_framework.renderers.JSONRenderer',
# ],
# 'JSON_UNICODE_ESCAPE': False,
#}
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': '{levelname} {asctime} {module} {message}',
'style': '{',
},
},
'handlers': {
#'file': {
# 'level': 'DEBUG',
# 'class': 'logging.FileHandler',
# 'filename': os.path.join(BASE_DIR, 'logs/debug.log'),
# 'formatter': 'verbose',
#},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'verbose',
},
},
'root': {
'handlers': ['console'],
'level': 'DEBUG',
},
'loggers': {
'django': {
'handlers': ['console'],
'level': 'INFO',
'propagate': False,
},
'django.request': {
'handlers': ['console'],
'level': 'DEBUG',
},
'rog': {
#'handlers': ['file','console'],
'handlers': ['console'],
'level': 'DEBUG',
'propagate': True,
},
},
}
PASSWORD_HASHERS = [
'django.contrib.auth.hashers.PBKDF2PasswordHasher',
'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
'django.contrib.auth.hashers.Argon2PasswordHasher',
'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
]
BLACKLISTED_IPS = ['44.230.58.114'] # add IP addresses you want to block to this list
# Optional: Set a default "from" address
DEFAULT_FROM_EMAIL = 'info@gifuai.net'


@ -18,21 +18,6 @@ from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
# import the debug_urls view from the rog application's views
from rog import views as rog_views
DEBUG = True
ALLOWED_HOSTS = ['rogaining.sumasen.net', 'localhost', '127.0.0.1']
# CORS settings
CORS_ALLOW_ALL_ORIGINS = True
CORS_ALLOWED_ORIGINS = [
"http://rogaining.sumasen.net",
"http://localhost",
"http://127.0.0.1",
]
urlpatterns = [
path('admin/', admin.site.urls),
path('auth/', include('knox.urls')),
@ -42,8 +27,3 @@ urlpatterns = [
admin.site.site_header = "ROGANING"
admin.site.site_title = "Roganing Admin Portal"
admin.site.index_title = "Welcome to Roganing Portal"
# serve static files in the development environment
if settings.DEBUG:
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)

db.sql: 0 changes


@ -1,77 +1,46 @@
version: "3.9"
services:
# postgres-db:
# image: kartoza/postgis:12.0
# ports:
# - 5432:5432
# volumes:
# - postgres_data:/var/lib/postgresql
# - ./custom-postgresql.conf:/etc/postgresql/12/main/postgresql.conf
# environment:
# - POSTGRES_USER=${POSTGRES_USER}
# - POSTGRES_PASS=${POSTGRES_PASS}
# - POSTGRES_DBNAME=${POSTGRES_DBNAME}
# - POSTGRES_MAX_CONNECTIONS=600
# restart: "on-failure"
# networks:
# - rog-api
api:
build:
context: .
dockerfile: Dockerfile.gdal
command: python3 manage.py runserver 0.0.0.0:8100
volumes:
- .:/app
ports:
- 8100:8100
env_file:
- .env
restart: "on-failure"
networks:
- rog-api
supervisor-web:
build:
context: .
dockerfile: Dockerfile.supervisor
volumes:
- type: bind
source: ./supervisor/html
target: /usr/share/nginx/html/supervisor
read_only: true
- type: bind
source: ./supervisor/nginx/default.conf
target: /etc/nginx/conf.d/default.conf
read_only: true
- type: volume
source: static_volume
target: /app/static
read_only: true
- type: volume
source: nginx_logs
target: /var/log/nginx
- type: bind
source: ./media
target: /usr/share/nginx/html/media
postgres-db:
image: kartoza/postgis:12.0
ports:
- "80:80"
depends_on:
- api
- 5432:5432
volumes:
- postgres_data:/var/lib/postgresql
- ./custom-postgresql.conf:/etc/postgresql/12/main/postgresql.conf
environment:
- POSTGRES_USER=${POSTGRES_USER}
- POSTGRES_PASS=${POSTGRES_PASS}
- POSTGRES_DBNAME=${POSTGRES_DBNAME}
- POSTGRES_MAX_CONNECTIONS=600
restart: "on-failure"
networks:
- rog-api
restart: always
- rog-api
api:
build:
context: .
dockerfile: Dockerfile.gdal
command: python3 manage.py runserver 0.0.0.0:8100
volumes:
- .:/app
ports:
- 8100:8100
env_file:
- .env
restart: "on-failure"
depends_on:
- postgres-db
networks:
- rog-api
#entrypoint: ["/app/wait-for.sh", "postgres-db:5432", "--", ""]
#command: python3 manage.py runserver 0.0.0.0:8100
networks:
rog-api:
driver: bridge
rog-api:
driver: bridge
volumes:
postgres_data:
geoserver-data:
static_volume:
nginx_logs:
postgres_data:
geoserver-data:


@ -1,81 +0,0 @@
version: "3.9"
services:
# postgres-db:
# image: kartoza/postgis:12.0
# ports:
# - 5432:5432
# volumes:
# - postgres_data:/var/lib/postgresql
# - ./custom-postgresql.conf:/etc/postgresql/12/main/postgresql.conf
# environment:
# - POSTGRES_USER=${POSTGRES_USER}
# - POSTGRES_PASS=${POSTGRES_PASS}
# - POSTGRES_DBNAME=${POSTGRES_DBNAME}
# - POSTGRES_MAX_CONNECTIONS=600
# restart: "on-failure"
# networks:
# - rog-api
api:
build:
context: .
dockerfile: Dockerfile.gdal
command: python3 manage.py runserver 0.0.0.0:8100
volumes:
- .:/app
ports:
- 8100:8100
env_file:
- .env
restart: "on-failure"
# depends_on:
# - postgres-db
networks:
- rog-api
#entrypoint: ["/app/wait-for.sh", "postgres-db:5432", "--", ""]
#command: python3 manage.py runserver 0.0.0.0:8100
supervisor-web:
build:
context: .
dockerfile: Dockerfile.supervisor
volumes:
- type: bind
source: /etc/letsencrypt
target: /etc/nginx/ssl
read_only: true
- type: bind
source: ./supervisor/html
target: /usr/share/nginx/html
read_only: true
- type: bind
source: ./supervisor/nginx/default.conf
target: /etc/nginx/conf.d/default.conf
read_only: true
- type: volume
source: static_volume
target: /app/static
read_only: true
- type: volume
source: nginx_logs
target: /var/log/nginx
ports:
- "80:80"
depends_on:
- api
networks:
- rog-api
restart: always
networks:
rog-api:
driver: bridge
volumes:
postgres_data:
geoserver-data:
static_volume:
nginx_logs:


@ -52,5 +52,4 @@ releasenotes/build
hosts
*.retry
#Vagrantfiles, since we are using docker
Vagrantfile.*
media/
Vagrantfile.*


@ -1 +0,0 @@
pg_dump: error: connection to database "rogdb" failed: FATAL: Peer authentication failed for user "postgres"


@ -1,10 +0,0 @@
#!/bin/sh
# Collect static files
python manage.py collectstatic --noinput
# Apply database migrations
python manage.py migrate
# Start Gunicorn
exec "$@"


@ -26,33 +26,29 @@ http {
#gzip on;
server {
listen 80;
server_name localhost;
# serve static files
listen 80;
server_name localhost;
location /static/ {
alias /app/static/;
alias /app/static/;
}
# supervisor web application
location /media/ {
alias /app/media/;
}
location / {
root /usr/share/nginx/html;
index index.html;
try_files $uri $uri/ /index.html;
}
# Django API proxy
location /api/ {
proxy_pass http://api:8000;
proxy_pass http://app:8000;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
}
error_page 500 502 503 504 /50x.html;
error_page 500 502 503 504 /50x.html;
location = /50x.html {
root /usr/share/nginx/html;
root /usr/share/nginx/html;
}
}
}


@ -31,7 +31,7 @@ matplotlib==3.5.0
mccabe==0.6.1
munch==2.5.0
mypy-extensions==0.4.3
numpy==1.21.4
numpy==1.26.2
packaging==21.3
pandas==1.3.4
pathspec==0.9.0
@ -46,11 +46,13 @@ pyparsing==3.0.6
pyproj==3.3.0
python-dateutil==2.8.2
pytz==2021.3
rasterio==1.2.10
#rasterio==1.2.10 Akira
rasterio==1.3.10
regex==2021.11.10
requests==2.26.0
Rtree==0.9.7
scipy==1.7.3
#scipy==1.7.3
scipy==1.10.1
seaborn==0.11.2
setuptools-scm==6.3.2
Shapely==1.8.0
@ -65,4 +67,3 @@ django-extra-fields==3.0.2
django-phonenumber-field==6.1.0
django-rest-knox==4.2.0
dj-database-url==2.0.0
django-cors-headers==4.3.0

rog/.DS_Store (vendored): binary file not shown.

File diff suppressed because it is too large.


@ -2,19 +2,12 @@ from django.conf import settings
#from django.contrib.auth import get_user_model
from .models import CustomUser
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth import get_user_model
from django.contrib.auth.hashers import check_password
import logging
logger = logging.getLogger(__name__)
class EmailOrUsernameModelBackend(ModelBackend):
"""
This is a ModelBacked that allows authentication
with either a username or an email address.
"""
"""
def authenticate(self, username=None, password=None):
if '@' in username:
@ -32,35 +25,4 @@ class EmailOrUsernameModelBackend(ModelBackend):
try:
return CustomUser.objects.get(pk=username)
except get_user_model().DoesNotExist:
return None
"""
def authenticate(self, request, username=None, password=None, **kwargs):
if '@' in username:
kwargs = {'email': username}
else:
kwargs = {'username': username}
try:
user = CustomUser.objects.get(**kwargs)
if check_password(password, user.password):
logger.info(f"User authenticated successfully: {username}")
return user
else:
logger.warning(f"Password mismatch for user: {username}")
except CustomUser.DoesNotExist:
logger.warning(f"User does not exist: {username}")
except Exception as e:
logger.error(f"Authentication error for {username}: {str(e)}")
return None
def get_user(self, user_id):
try:
user = CustomUser.objects.get(pk=user_id)
logger.info(f"User retrieved: {user.username or user.email}")
return user
except CustomUser.DoesNotExist:
logger.warning(f"User with id {user_id} does not exist")
return None
except Exception as e:
logger.error(f"Error retrieving user with id {user_id}: {str(e)}")
return None
return None
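For the backend above to take effect it would normally be listed in AUTHENTICATION_BACKENDS in settings.py; the module path below is an assumption (adjust it to wherever the class actually lives in this project):
AUTHENTICATION_BACKENDS = [
    'rog.backends.EmailOrUsernameModelBackend',
    'django.contrib.auth.backends.ModelBackend',
]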


@ -1,7 +0,0 @@
from django import forms
from .models import NewEvent2
class CSVUploadForm(forms.Form):
event = forms.ModelChoiceField(queryset=NewEvent2.objects.all(), label="イベント選択")
csv_file = forms.FileField(label="CSVファイル")


@ -1,40 +0,0 @@
4019,関ケ原2,Best Wishes,ソロ女子-5時間,pbkdf2_sha256$260000$RPvncicp11ENXxwpcpMXi1$9e/fKcfwaX3sJ91q9S70KWQcrNlraliguiHjF/UCW/I=
4010,関ケ原2,まつげん,ソロ女子-5時間,pbkdf2_sha256$260000$LMvH0KtHeHbCuuUZ5n88VZ$Lnsqs/u45QKoFN6lUdqC79nIMz5LwaKWMpmX/0aEXa8=
4021,大垣3,まつげん,ソロ女子-5時間,pbkdf2_sha256$260000$LMvH0KtHeHbCuuUZ5n88VZ$Lnsqs/u45QKoFN6lUdqC79nIMz5LwaKWMpmX/0aEXa8=
5,関ケ原2,てすとあきら1,ソロ男子-5時間,pbkdf2_sha256$260000$0GY5pt5V127jGd8HkkEort$8ZL0eY2qTZHydyzUUN5LNKZnmmibfu1x3QQ/7rJX1Vc=
3003,関ケ原2,てすとあきら1,ソロ男子-5時間,pbkdf2_sha256$260000$0GY5pt5V127jGd8HkkEort$8ZL0eY2qTZHydyzUUN5LNKZnmmibfu1x3QQ/7rJX1Vc=
3115,関ケ原2,Best Wishes,ソロ男子-5時間,pbkdf2_sha256$260000$tlNrgHyqDtfbM9f3GLv5G1$jRcR/ieTB174TZ9jW7obCBUMpyz86aywqDKw3VmhVQQ=
1010,大垣3,ハヤノテスト,一般-5時間,pbkdf2_sha256$260000$IeGmRkkUkwXXc1zO9oxvCe$ijnJTH7xhwidit+uCggSgjj/7g/vMK539IpOMA5GlnM=
1012,大垣3,てすとあきら1,一般-5時間,pbkdf2_sha256$260000$0GY5pt5V127jGd8HkkEort$8ZL0eY2qTZHydyzUUN5LNKZnmmibfu1x3QQ/7rJX1Vc=
1014,各務原2,てすとあきら1,一般-5時間,pbkdf2_sha256$260000$0GY5pt5V127jGd8HkkEort$8ZL0eY2qTZHydyzUUN5LNKZnmmibfu1x3QQ/7rJX1Vc=
1018,下呂2,てすとあきら1,一般-5時間,pbkdf2_sha256$260000$0GY5pt5V127jGd8HkkEort$8ZL0eY2qTZHydyzUUN5LNKZnmmibfu1x3QQ/7rJX1Vc=
1024,関ケ原2,てすとあきら1,一般-5時間,pbkdf2_sha256$260000$0GY5pt5V127jGd8HkkEort$8ZL0eY2qTZHydyzUUN5LNKZnmmibfu1x3QQ/7rJX1Vc=
1026,美濃加茂2,てすとあきら1,一般-5時間,pbkdf2_sha256$260000$0GY5pt5V127jGd8HkkEort$8ZL0eY2qTZHydyzUUN5LNKZnmmibfu1x3QQ/7rJX1Vc=
1028,多治見2,てすとあきら1,一般-5時間,pbkdf2_sha256$260000$0GY5pt5V127jGd8HkkEort$8ZL0eY2qTZHydyzUUN5LNKZnmmibfu1x3QQ/7rJX1Vc=
3006,関ケ原2,山本哲也,ソロ男子-5時間,pbkdf2_sha256$260000$EkYrRHZwKunjO4jiHvxyB2$kYGN0STzV9c70IKAIxK1Ija3K1y90+ote0HDTP+iSPw=
3009,養老2,山本哲也,ソロ男子-5時間,pbkdf2_sha256$260000$EkYrRHZwKunjO4jiHvxyB2$kYGN0STzV9c70IKAIxK1Ija3K1y90+ote0HDTP+iSPw=
3011,郡上2,山本哲也,ソロ男子-5時間,pbkdf2_sha256$260000$EkYrRHZwKunjO4jiHvxyB2$kYGN0STzV9c70IKAIxK1Ija3K1y90+ote0HDTP+iSPw=
3013,大垣3,山本哲也,ソロ男子-5時間,pbkdf2_sha256$260000$EkYrRHZwKunjO4jiHvxyB2$kYGN0STzV9c70IKAIxK1Ija3K1y90+ote0HDTP+iSPw=
3015,各務原2,山本哲也,ソロ男子-5時間,pbkdf2_sha256$260000$EkYrRHZwKunjO4jiHvxyB2$kYGN0STzV9c70IKAIxK1Ija3K1y90+ote0HDTP+iSPw=
3017,多治見2,山本哲也,ソロ男子-5時間,pbkdf2_sha256$260000$EkYrRHZwKunjO4jiHvxyB2$kYGN0STzV9c70IKAIxK1Ija3K1y90+ote0HDTP+iSPw=
3019,下呂2,山本哲也,ソロ男子-5時間,pbkdf2_sha256$260000$EkYrRHZwKunjO4jiHvxyB2$kYGN0STzV9c70IKAIxK1Ija3K1y90+ote0HDTP+iSPw=
3021,高山2,山本哲也,ソロ男子-5時間,pbkdf2_sha256$260000$EkYrRHZwKunjO4jiHvxyB2$kYGN0STzV9c70IKAIxK1Ija3K1y90+ote0HDTP+iSPw=
3023,美濃加茂2,山本哲也,ソロ男子-5時間,pbkdf2_sha256$260000$EkYrRHZwKunjO4jiHvxyB2$kYGN0STzV9c70IKAIxK1Ija3K1y90+ote0HDTP+iSPw=
4008,下呂2,GO!GO!YOKO,ソロ女子-5時間,pbkdf2_sha256$260000$tuv8ajw2VSmCooIxNHJhdD$m7q0fqPIsAs7L9uubt+PUVsmexwpJPXPCgVs9GjY12c=
3121,関ケ原2,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$sCLRTCAxQIClyDmvfbMDm0$cU3dSGTPwKHl8T3EBZ6R19oZJGkadD48pKqywAhtJOk=
3126,大垣3,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$7KsSngw2Ho719jpXsOrC8v$jfHFxglG/L0htA13t01LAy91dS+FnlAZubg6Lmd/m2Y=
3128,多治見2,MASA,ソロ男子-5時間,pbkdf2_sha256$260000$qpaSbqryD4f5bZaY893Ug4$Gk8XuqsJbSkX9Hxrl/xg9LtjM8JQkpgNkpbbNzTmhzY=
3124,関ケ原2,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$7KsSngw2Ho719jpXsOrC8v$jfHFxglG/L0htA13t01LAy91dS+FnlAZubg6Lmd/m2Y=
3132,各務原2,岐阜市イイとこあるある探検隊,ソロ男子-5時間,pbkdf2_sha256$260000$QWc5BpSBUbkUwP9UlIzyE5$do+VKkH8mNibg6PJDsm6AJ/VMFh3NWdzwZ9IQW/26xA=
3135,大垣3,akira,ソロ男子-5時間,pbkdf2_sha256$260000$mmM2N8sSE84YaNNuDzQKxb$ox9U6rdgZq4ANzi4NizskphZWIrf7o2+JEfvC4wcn7U=
3137,関ケ原2,akira,ソロ男子-5時間,pbkdf2_sha256$260000$mmM2N8sSE84YaNNuDzQKxb$ox9U6rdgZq4ANzi4NizskphZWIrf7o2+JEfvC4wcn7U=
3139,養老2,akira,ソロ男子-5時間,pbkdf2_sha256$260000$mmM2N8sSE84YaNNuDzQKxb$ox9U6rdgZq4ANzi4NizskphZWIrf7o2+JEfvC4wcn7U=
3073,養老2,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$sCLRTCAxQIClyDmvfbMDm0$cU3dSGTPwKHl8T3EBZ6R19oZJGkadD48pKqywAhtJOk=
3075,高山2,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$sCLRTCAxQIClyDmvfbMDm0$cU3dSGTPwKHl8T3EBZ6R19oZJGkadD48pKqywAhtJOk=
3077,郡上2,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$sCLRTCAxQIClyDmvfbMDm0$cU3dSGTPwKHl8T3EBZ6R19oZJGkadD48pKqywAhtJOk=
3081,美濃加茂2,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$sCLRTCAxQIClyDmvfbMDm0$cU3dSGTPwKHl8T3EBZ6R19oZJGkadD48pKqywAhtJOk=
3083,多治見2,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$sCLRTCAxQIClyDmvfbMDm0$cU3dSGTPwKHl8T3EBZ6R19oZJGkadD48pKqywAhtJOk=
3085,各務原2,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$sCLRTCAxQIClyDmvfbMDm0$cU3dSGTPwKHl8T3EBZ6R19oZJGkadD48pKqywAhtJOk=
3079,下呂2,yamadeus,ソロ男子-5時間,pbkdf2_sha256$260000$sCLRTCAxQIClyDmvfbMDm0$cU3dSGTPwKHl8T3EBZ6R19oZJGkadD48pKqywAhtJOk=
3093,関ケ原2,岐阜愛,ソロ男子-5時間,pbkdf2_sha256$260000$LFOINdd30aKaXoT9CNYY8A$eoAzV10+gp+tufabtcFOx6uoOktZUngzzDJ0WWs/v24=
3099,高山2,岐阜愛,ソロ男子-5時間,pbkdf2_sha256$260000$LFOINdd30aKaXoT9CNYY8A$eoAzV10+gp+tufabtcFOx6uoOktZUngzzDJ0WWs/v24=

View File

@ -1,3 +0,0 @@
from .ip_blocking import IPBlockingMiddleware
__all__ = ['IPBlockingMiddleware']

View File

@ -1,42 +0,0 @@
from django.core.exceptions import PermissionDenied
from django.core.cache import cache
from django.conf import settings
class IPBlockingMiddleware:
def __init__(self, get_response):
self.get_response = get_response
# IP addresses blacklisted up front, from settings
self.blacklisted_ips = getattr(settings, 'BLACKLISTED_IPS', [])
def __call__(self, request):
ip = self.get_client_ip(request)
# Fetch the current block list from the cache
blocked_ips = cache.get('blocked_ips', set())
# Check both the static blacklist and the cached block list
if ip in self.blacklisted_ips or ip in blocked_ips:
raise PermissionDenied
# Abuse-detection logic goes here
if self.is_suspicious(ip):
blocked_ips.add(ip)
cache.set('blocked_ips', blocked_ips, timeout=3600) # block for one hour
raise PermissionDenied
response = self.get_response(request)
return response
def is_suspicious(self, ip):
request_count = cache.get(f'request_count_{ip}', 0)
cache.set(f'request_count_{ip}', request_count + 1, timeout=60)
return request_count > 100 # more than 100 requests per minute is treated as suspicious
def get_client_ip(self, request):
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[0]
else:
ip = request.META.get('REMOTE_ADDR')
return ip
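
To take effect, the middleware has to be registered in settings. Note that the request counters and the one-hour block list use the Django cache framework, so a shared backend (e.g. Redis or Memcached) is needed for blocks to be visible across all workers. A minimal sketch, with an assumed import path:

```python
# settings.py -- hedged sketch; the dotted path below is an assumption.
MIDDLEWARE = [
    # ... Django's default middleware ...
    'middleware.IPBlockingMiddleware',            # assumed path for the class above
]

# Optional static blacklist read by the middleware at start-up.
BLACKLISTED_IPS = ['203.0.113.10', '198.51.100.42']
```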

View File

@ -1,148 +0,0 @@
"""
Temporary conversion script for integrating the 永栄 data into the Nouffer code base.
Once the migration is complete and the rankings and run history are in place, this script is no longer needed.
"""
import psycopg2
from PIL import Image
import PIL.ExifTags
from datetime import datetime
import os
def get_gps_from_image(image_path):
"""
Extract GPS information from an image file.
Returns: (latitude, longitude), or (None, None) if it cannot be obtained.
"""
try:
with Image.open(image_path) as img:
exif = {
PIL.ExifTags.TAGS[k]: v
for k, v in img._getexif().items()
if k in PIL.ExifTags.TAGS
}
if 'GPSInfo' in exif:
gps_info = exif['GPSInfo']
# Compute the latitude
lat = gps_info[2]
lat = lat[0] + lat[1]/60 + lat[2]/3600
if gps_info[1] == 'S':
lat = -lat
# Compute the longitude
lon = gps_info[4]
lon = lon[0] + lon[1]/60 + lon[2]/3600
if gps_info[3] == 'W':
lon = -lon
return lat, lon
except Exception as e:
print(f"GPS情報の抽出に失敗: {e}")
return None, None
def migrate_data():
# Connection settings for the container environment
source_db = {
'dbname': 'gifuroge',
'user': 'admin', # adjust to your environment
'password': 'admin123456', # adjust to your environment
'host': 'localhost', # Docker service name
'port': '5432'
}
target_db = {
'dbname': 'rogdb',
'user': 'admin', # adjust to your environment
'password': 'admin123456', # adjust to your environment
'host': 'localhost', # Docker service name
'port': '5432'
}
source_conn = None
target_conn = None
source_cur = None
target_cur = None
try:
print("ソースDBへの接続を試みています...")
source_conn = psycopg2.connect(**source_db)
source_cur = source_conn.cursor()
print("ソースDBへの接続が成功しました")
print("ターゲットDBへの接続を試みています...")
target_conn = psycopg2.connect(**target_db)
target_cur = target_conn.cursor()
print("ターゲットDBへの接続が成功しました")
print("データの取得を開始します...")
source_cur.execute("""
SELECT serial_number, zekken_number, event_code, cp_number, image_address,
goal_time, late_point, create_at, create_user,
update_at, update_user, buy_flag, colabo_company_memo
FROM gps_information
""")
rows = source_cur.fetchall()
print(f"取得したレコード数: {len(rows)}")
processed_count = 0
for row in rows:
(serial_number, zekken_number, event_code, cp_number, image_address,
goal_time, late_point, create_at, create_user,
update_at, update_user, buy_flag, colabo_company_memo) = row
latitude, longitude = None, None
if image_address and os.path.exists(image_address):
latitude, longitude = get_gps_from_image(image_address)
target_cur.execute("""
INSERT INTO gps_checkins (
path_order, zekken_number, event_code, cp_number,
lattitude, longitude, image_address,
image_receipt, image_QR, validate_location,
goal_time, late_point, create_at,
create_user, update_at, update_user,
buy_flag, colabo_company_memo, points
) VALUES (
%s, %s, %s, %s, %s, %s, %s, %s, %s, %s,
%s, %s, %s, %s, %s, %s, %s, %s, %s
)
""", (
serial_number,
zekken_number, event_code, cp_number,
latitude, longitude, image_address,
True, True, True,
goal_time, late_point, create_at,
create_user, update_at, update_user,
buy_flag if buy_flag is not None else False,
colabo_company_memo if colabo_company_memo else '',
0
))
processed_count += 1
if processed_count % 100 == 0:
print(f"処理済みレコード数: {processed_count}")
target_conn.commit()
print(f"移行完了: {processed_count}件のレコードを処理しました")
except Exception as e:
print(f"エラーが発生しました: {e}")
if target_conn:
target_conn.rollback()
finally:
if source_cur:
source_cur.close()
if target_cur:
target_cur.close()
if source_conn:
source_conn.close()
if target_conn:
target_conn.close()
print("すべての接続をクローズしました")
if __name__ == "__main__":
migrate_data()

View File

@ -0,0 +1,18 @@
# Generated by Django 3.2.9 on 2022-09-09 13:18
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('rog', '0032_alter_location_sub_loc_id'),
]
operations = [
migrations.AlterField(
model_name='templocation',
name='sub_loc_id',
field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Sub location id'),
),
]

View File

@ -0,0 +1,18 @@
# Generated by Django 3.2.9 on 2022-10-06 10:51
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('rog', '0033_alter_templocation_sub_loc_id'),
]
operations = [
migrations.AlterField(
model_name='customuser',
name='email',
field=models.CharField(max_length=255, verbose_name='user name'),
),
]

View File

@ -0,0 +1,18 @@
# Generated by Django 3.2.9 on 2022-10-06 10:52
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('rog', '0034_alter_customuser_email'),
]
operations = [
migrations.AlterField(
model_name='customuser',
name='email',
field=models.EmailField(max_length=254, unique=True, verbose_name='user name'),
),
]

View File

@ -0,0 +1,18 @@
# Generated by Django 3.2.9 on 2022-10-06 11:01
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('rog', '0035_alter_customuser_email'),
]
operations = [
migrations.AlterField(
model_name='customuser',
name='email',
field=models.CharField(max_length=255, unique=True, verbose_name='Email'),
),
]

View File

@ -0,0 +1,18 @@
# Generated by Django 3.2.9 on 2022-10-06 13:42
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('rog', '0036_alter_customuser_email'),
]
operations = [
migrations.AddField(
model_name='customuser',
name='is_rogaining',
field=models.BooleanField(default=False),
),
]

View File

@ -0,0 +1,28 @@
# Generated by Django 3.2.9 on 2022-10-16 10:50
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('rog', '0037_customuser_is_rogaining'),
]
operations = [
migrations.AddField(
model_name='customuser',
name='event_code',
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Event Code'),
),
migrations.AddField(
model_name='customuser',
name='team_name',
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Team Name'),
),
migrations.AddField(
model_name='customuser',
name='zekken_number',
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Zekken Number'),
),
]

View File

@ -0,0 +1,27 @@
# Generated by Django 3.2.9 on 2022-10-17 11:39
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('rog', '0038_auto_20221016_1950'),
]
operations = [
migrations.CreateModel(
name='GoalImages',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('goalimage', models.FileField(blank=True, null=True, upload_to='%y%m%d')),
('goaltime', models.DateTimeField(verbose_name='Goal time')),
('team_name', models.CharField(max_length=255, verbose_name='Team name')),
('event_code', models.CharField(max_length=255, verbose_name='event code')),
('cp_number', models.IntegerField(verbose_name='CP numner')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL)),
],
),
]

View File

@ -0,0 +1,32 @@
# Generated by Django 3.2.9 on 2022-11-05 10:39
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('rog', '0039_goalimages'),
]
operations = [
migrations.AlterField(
model_name='goalimages',
name='goalimage',
field=models.FileField(blank=True, null=True, upload_to='goals/%y%m%d'),
),
migrations.CreateModel(
name='CheckinImages',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('checkinimage', models.FileField(blank=True, null=True, upload_to='checkin/%y%m%d')),
('checkintime', models.DateTimeField(verbose_name='Goal time')),
('team_name', models.CharField(max_length=255, verbose_name='Team name')),
('event_code', models.CharField(max_length=255, verbose_name='event code')),
('cp_number', models.IntegerField(verbose_name='CP numner')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL)),
],
),
]

View File

@ -0,0 +1,49 @@
# Generated by Django 3.2.9 on 2023-05-26 08:24
import django.contrib.postgres.indexes
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('rog', '0040_auto_20221105_1939'),
]
operations = [
migrations.DeleteModel(
name='JpnAdminPerf',
),
migrations.DeleteModel(
name='JpnSubPerf',
),
migrations.AlterField(
model_name='location',
name='category',
field=models.CharField(blank=True, db_index=True, max_length=2048, null=True, verbose_name='Category'),
),
migrations.AlterField(
model_name='location',
name='event_active',
field=models.BooleanField(db_index=True, default=True, verbose_name='Is Event active'),
),
migrations.AlterField(
model_name='location',
name='event_name',
field=models.CharField(blank=True, db_index=True, max_length=2048, null=True, verbose_name='Event name'),
),
migrations.AlterField(
model_name='location',
name='group',
field=models.CharField(blank=True, db_index=True, max_length=2048, null=True, verbose_name='Group'),
),
migrations.AlterField(
model_name='location',
name='location_id',
field=models.IntegerField(blank=True, db_index=True, null=True, verbose_name='Location id'),
),
migrations.AddIndex(
model_name='location',
index=django.contrib.postgres.indexes.GistIndex(fields=['geom'], name='rog_locatio_geom_4793cc_gist'),
),
]

View File

@ -1,274 +0,0 @@
# Generated by Django 3.2.9 on 2024-07-24 05:31
import datetime
from django.conf import settings
import django.contrib.postgres.indexes
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import uuid
class Migration(migrations.Migration):
dependencies = [
('rog', '0032_alter_location_sub_loc_id'),
]
operations = [
migrations.CreateModel(
name='Category',
fields=[
('category_name', models.CharField(max_length=255, primary_key=True, serialize=False)),
('category_number', models.IntegerField(unique=True)),
('duration', models.DurationField(default=datetime.timedelta(seconds=18000))),
('num_of_member', models.IntegerField(default=1)),
('family', models.BooleanField(default=False)),
('female', models.BooleanField(default=False)),
],
),
migrations.CreateModel(
name='CheckinImages',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('checkinimage', models.FileField(blank=True, null=True, upload_to='checkin/%y%m%d')),
('checkintime', models.DateTimeField(verbose_name='Goal time')),
('team_name', models.CharField(max_length=255, verbose_name='Team name')),
('event_code', models.CharField(max_length=255, verbose_name='event code')),
('cp_number', models.IntegerField(verbose_name='CP numner')),
],
),
migrations.CreateModel(
name='Entry',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateTimeField()),
],
),
migrations.CreateModel(
name='EntryMember',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('is_temporary', models.BooleanField(default=False)),
],
),
migrations.CreateModel(
name='GoalImages',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('goalimage', models.FileField(blank=True, null=True, upload_to='goals/%y%m%d')),
('goaltime', models.DateTimeField(verbose_name='Goal time')),
('team_name', models.CharField(max_length=255, verbose_name='Team name')),
('event_code', models.CharField(max_length=255, verbose_name='event code')),
('cp_number', models.IntegerField(verbose_name='CP numner')),
],
),
migrations.CreateModel(
name='Member',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('is_temporary', models.BooleanField(default=False)),
],
),
migrations.CreateModel(
name='NewEvent',
fields=[
('event_name', models.CharField(max_length=255, primary_key=True, serialize=False)),
('start_datetime', models.DateTimeField(default=django.utils.timezone.now)),
('end_datetime', models.DateTimeField()),
],
),
migrations.CreateModel(
name='Team',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('zekken_number', models.CharField(max_length=255, unique=True)),
('team_name', models.CharField(max_length=255)),
],
),
migrations.CreateModel(
name='TempUser',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('email', models.EmailField(max_length=254, unique=True)),
('password', models.CharField(max_length=128)),
('is_rogaining', models.BooleanField(default=False)),
('zekken_number', models.CharField(blank=True, max_length=255, null=True)),
('event_code', models.CharField(blank=True, max_length=255, null=True)),
('team_name', models.CharField(blank=True, max_length=255, null=True)),
('group', models.CharField(max_length=255)),
('firstname', models.CharField(blank=True, max_length=255, null=True)),
('lastname', models.CharField(blank=True, max_length=255, null=True)),
('date_of_birth', models.DateField(blank=True, null=True)),
('female', models.BooleanField(default=False)),
('verification_code', models.UUIDField(default=uuid.uuid4, editable=False)),
('created_at', models.DateTimeField(auto_now_add=True)),
('expires_at', models.DateTimeField()),
],
),
migrations.DeleteModel(
name='JpnAdminPerf',
),
migrations.DeleteModel(
name='JpnSubPerf',
),
migrations.AddField(
model_name='customuser',
name='date_of_birth',
field=models.DateField(blank=True, null=True),
),
migrations.AddField(
model_name='customuser',
name='event_code',
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Event Code'),
),
migrations.AddField(
model_name='customuser',
name='female',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='customuser',
name='firstname',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='customuser',
name='is_rogaining',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='customuser',
name='lastname',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='customuser',
name='team_name',
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Team Name'),
),
migrations.AddField(
model_name='customuser',
name='zekken_number',
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Zekken Number'),
),
migrations.AlterField(
model_name='customuser',
name='email',
field=models.CharField(max_length=255, unique=True, verbose_name='Email'),
),
migrations.AlterField(
model_name='location',
name='category',
field=models.CharField(blank=True, db_index=True, max_length=2048, null=True, verbose_name='Category'),
),
migrations.AlterField(
model_name='location',
name='event_active',
field=models.BooleanField(db_index=True, default=True, verbose_name='Is Event active'),
),
migrations.AlterField(
model_name='location',
name='event_name',
field=models.CharField(blank=True, db_index=True, max_length=2048, null=True, verbose_name='Event name'),
),
migrations.AlterField(
model_name='location',
name='group',
field=models.CharField(blank=True, db_index=True, max_length=2048, null=True, verbose_name='Group'),
),
migrations.AlterField(
model_name='location',
name='location_id',
field=models.IntegerField(blank=True, db_index=True, null=True, verbose_name='Location id'),
),
migrations.AlterField(
model_name='templocation',
name='sub_loc_id',
field=models.CharField(blank=True, max_length=2048, null=True, verbose_name='Sub location id'),
),
migrations.AddIndex(
model_name='location',
index=django.contrib.postgres.indexes.GistIndex(fields=['geom'], name='rog_locatio_geom_4793cc_gist'),
),
migrations.AddField(
model_name='team',
name='category',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='rog.category'),
),
migrations.AddField(
model_name='team',
name='owner',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='owned_teams', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='member',
name='team',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rog.team'),
),
migrations.AddField(
model_name='member',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='goalimages',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='entrymember',
name='entry',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rog.entry'),
),
migrations.AddField(
model_name='entrymember',
name='member',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rog.member'),
),
migrations.AddField(
model_name='entry',
name='category',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rog.category'),
),
migrations.AddField(
model_name='entry',
name='event',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rog.newevent'),
),
migrations.AddField(
model_name='entry',
name='owner',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='entry',
name='team',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rog.team'),
),
migrations.AddField(
model_name='checkinimages',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL),
),
migrations.AlterUniqueTogether(
name='category',
unique_together={('category_name', 'category_number')},
),
migrations.AlterUniqueTogether(
name='team',
unique_together={('zekken_number', 'category')},
),
migrations.AlterUniqueTogether(
name='member',
unique_together={('team', 'user')},
),
migrations.AlterUniqueTogether(
name='entrymember',
unique_together={('entry', 'member')},
),
migrations.AlterUniqueTogether(
name='entry',
unique_together={('team', 'event', 'date', 'owner')},
),
]

View File

@ -1,34 +0,0 @@
# Generated by Django 3.2.9 on 2024-07-24 06:22
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('rog', '0033_auto_20240724_1431'),
]
operations = [
migrations.AlterField(
model_name='category',
name='category_number',
field=models.IntegerField(default=0),
),
migrations.CreateModel(
name='NewCategory',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('category_name', models.CharField(max_length=255, unique=True)),
('category_number', models.IntegerField(default=0)),
('duration', models.DurationField(default=datetime.timedelta(seconds=18000))),
('num_of_member', models.IntegerField(default=1)),
('family', models.BooleanField(default=False)),
('female', models.BooleanField(default=False)),
],
options={
'unique_together': {('category_name', 'category_number')},
},
),
]

View File

@ -1,29 +0,0 @@
# Generated by Django 3.2.9 on 2024-07-24 06:29
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('rog', '0034_auto_20240724_1522'),
]
operations = [
migrations.CreateModel(
name='NewEvent2',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('event_name', models.CharField(max_length=255, unique=True)),
('start_datetime', models.DateTimeField(default=django.utils.timezone.now)),
('end_datetime', models.DateTimeField()),
],
),
migrations.AlterField(
model_name='entry',
name='event',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rog.newevent2'),
),
]

View File

@ -1,20 +0,0 @@
# Generated by Django 3.2.9 on 2024-07-24 06:58
from django.db import migrations, models
import django.db.models.deletion
import rog.models
class Migration(migrations.Migration):
dependencies = [
('rog', '0035_auto_20240724_1529'),
]
operations = [
migrations.AlterField(
model_name='team',
name='category',
field=models.ForeignKey(default=rog.models.get_default_category, on_delete=django.db.models.deletion.SET_DEFAULT, to='rog.newcategory'),
),
]

View File

@ -1,19 +0,0 @@
# Generated by Django 3.2.9 on 2024-07-24 15:20
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('rog', '0036_alter_team_category'),
]
operations = [
migrations.AlterField(
model_name='member',
name='team',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='members', to='rog.team'),
),
]

View File

@ -1,19 +0,0 @@
# Generated by Django 3.2.9 on 2024-07-25 01:21
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('rog', '0037_alter_member_team'),
]
operations = [
migrations.AlterField(
model_name='entry',
name='category',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rog.newcategory'),
),
]

View File

@ -1,29 +0,0 @@
# Generated by Django 3.2.9 on 2024-07-26 06:08
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('rog', '0038_alter_entry_category'),
]
operations = [
migrations.AddField(
model_name='customuser',
name='date_joined',
field=models.DateTimeField(default=django.utils.timezone.now),
),
migrations.AlterField(
model_name='customuser',
name='email',
field=models.EmailField(max_length=254, unique=True),
),
migrations.AlterField(
model_name='customuser',
name='group',
field=models.CharField(blank=True, max_length=255),
),
]

View File

@ -1,33 +0,0 @@
# Generated by Django 3.2.9 on 2024-08-01 08:29
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('rog', '0039_auto_20240726_1508'),
]
operations = [
migrations.AddField(
model_name='member',
name='date_of_birth',
field=models.DateField(blank=True, null=True),
),
migrations.AddField(
model_name='member',
name='female',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='member',
name='firstname',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='member',
name='lastname',
field=models.CharField(blank=True, max_length=255, null=True),
),
]

View File

@ -1,37 +0,0 @@
# Generated by Django 3.2.9 on 2024-08-02 15:11
import django.contrib.gis.db.models.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('rog', '0040_auto_20240801_1729'),
]
operations = [
migrations.CreateModel(
name='JpnSubPerf',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('geom', django.contrib.gis.db.models.fields.MultiPolygonField(blank=True, null=True, srid=4326)),
('adm0_en', models.CharField(blank=True, max_length=254, null=True)),
('adm0_ja', models.CharField(blank=True, max_length=254, null=True)),
('adm0_pcode', models.CharField(blank=True, max_length=254, null=True)),
('adm1_en', models.CharField(blank=True, max_length=254, null=True)),
('adm1_ja', models.CharField(blank=True, max_length=254, null=True)),
('adm1_pcode', models.CharField(blank=True, max_length=254, null=True)),
('adm2_ja', models.CharField(blank=True, max_length=254, null=True)),
('adm2_en', models.CharField(blank=True, max_length=254, null=True)),
('adm2_pcode', models.CharField(blank=True, max_length=254, null=True)),
('name_modified', models.CharField(blank=True, max_length=254, null=True)),
('area_name', models.CharField(blank=True, max_length=254, null=True)),
('list_order', models.IntegerField(default=0)),
],
options={
'db_table': 'jpn_sub_perf',
'managed': False,
},
),
]

View File

@ -1,4 +1,3 @@
from django.contrib.auth.hashers import make_password
from dataclasses import field
import email
from enum import unique
@ -24,27 +23,22 @@ from django.apps import apps
from django.db import transaction
from django.contrib.auth.models import AbstractBaseUser, PermissionsMixin, BaseUserManager
from django.contrib.postgres.indexes import GistIndex
from django.db import models
from django.contrib.auth.hashers import make_password
from django.utils import timezone
from datetime import timedelta,date
from django.contrib.gis.geos import Point,MultiPoint
#from django.db import models
from django.core.exceptions import ValidationError
from datetime import timedelta
import csv
import codecs
import sys
import time
import uuid
import logging
logger = logging.getLogger(__name__)
env = environ.Env(DEBUG=(bool, False))
environ.Env.read_env(env_file=".env")
db = Db(dbname=env("POSTGRES_DBNAME"), user=env("POSTGRES_USER"), password=env("POSTGRES_PASS"), host=env("PG_HOST"), port=env("PG_PORT"))
db = Db(dbname=env("POSTGRES_DBNAME"), user=env("POSTGRES_USER"), password=env("POSTGRES_PASS"), host="postgres-db", port=env("PG_PORT"))
def get_file_path(instance, filename):
@ -72,63 +66,123 @@ def remove_bom_inplace(path):
fp.seek(-bom_length, os.SEEK_CUR)
fp.truncate()
class GifurogeRegister(models.Model):
event_code = models.CharField(max_length=100)
time = models.IntegerField(choices=[(3, '3時間'), (5, '5時間')])
owner_name_kana = models.CharField(max_length=100)
owner_name = models.CharField(max_length=100)
email = models.EmailField()
password = models.CharField(max_length=100)
owner_birthday = models.DateField(blank=True,null=True)
owner_sex = models.CharField(max_length=10,blank=True,null=True)
team_name = models.CharField(max_length=100)
department = models.CharField(max_length=100)
members_count = models.IntegerField()
member2 = models.CharField(max_length=100, blank=True, null=True)
birthday2 = models.DateField(blank=True,null=True)
sex2 = models.CharField(max_length=10,blank=True,null=True)
member3 = models.CharField(max_length=100, blank=True, null=True)
birthday3 = models.DateField(blank=True,null=True)
sex3 = models.CharField(max_length=10,blank=True,null=True)
member4 = models.CharField(max_length=100, blank=True, null=True)
birthday4 = models.DateField(blank=True,null=True)
sex4 = models.CharField(max_length=10,blank=True,null=True)
member5 = models.CharField(max_length=100, blank=True, null=True)
birthday5 = models.DateField(blank=True,null=True)
sex5 = models.CharField(max_length=10,blank=True,null=True)
#========== Akira: begin
class TempUser(models.Model):
email = models.EmailField(unique=True)
password = models.CharField(max_length=128)
is_rogaining = models.BooleanField(default=False)
zekken_number = models.CharField(max_length=255, blank=True, null=True)
event_code = models.CharField(max_length=255, blank=True, null=True)
team_name = models.CharField(max_length=255, blank=True, null=True)
group = models.CharField(max_length=255)
verification_code = models.UUIDField(default=uuid.uuid4, editable=False)
created_at = models.DateTimeField(auto_now_add=True)
expires_at = models.DateTimeField()
def __str__(self):
return self.email
def save(self, *args, **kwargs):
if not self.expires_at:
self.expires_at = timezone.now() + timedelta(hours=24) # 24-hour expiry window
super().save(*args, **kwargs)
def is_valid(self):
return timezone.now() <= self.expires_at
class Team(models.Model):
zekken_number = models.CharField(max_length=255, primary_key=True)
team_name = models.CharField(max_length=255)
password = models.CharField(max_length=128)
def __str__(self):
return f"{self.zekken_number} - {self.team_name}"
class Member(models.Model):
zekken_number = models.ForeignKey(Team, on_delete=models.CASCADE)
userid = models.ForeignKey(CustomUser, on_delete=models.CASCADE)
class Meta:
unique_together = ('zekken_number', 'userid')
def __str__(self):
return f"{self.zekken_number} - {self.userid}"
class Entry(models.Model):
zekken_number = models.ForeignKey(Team, on_delete=models.CASCADE)
event_code = models.CharField(max_length=255)
date = models.DateField()
class Meta:
unique_together = ('zekken_number', 'event_code', 'date')
def __str__(self):
return f"{self.zekken_number} - {self.event_code} - {self.date}"
#============= Akira: end
class CustomUserManager(BaseUserManager):
def create_user(self, email, password=None, **other_fields):
def create_user(self, email, firstname, lastname, date_of_birth, password=None):
if not email:
raise ValueError(_("You must provide an email address"))
email = self.normalize_email(email)
user = self.model(email=email, **other_fields)
# Generate a unique userid
userid = str(uuid.uuid4())
user = self.model(
email=self.normalize_email(email),
firstname=firstname,
lastname=lastname,
userid=userid,
date_of_birth=date_of_birth,
)
user.set_password(password)
user.save(using=self._db)
return user
def create_superuser(self, email, password, group, event_code=None, team_name=None, **other_fields):
# Providing default values for event_code and team_name if they are not provided
if event_code is None:
event_code = 'test' # replace this with some default value
if team_name is None:
team_name = 'test' # replace this with some default value
other_fields.setdefault('is_staff', True)
other_fields.setdefault('is_superuser', True)
other_fields.setdefault('is_active', True)
def create_superuser(self, email, firstname, lastname, date_of_birth, password):
user = self.create_user(
email,
firstname=firstname,
lastname=lastname,
date_of_birth=date_of_birth,
password=password,
)
user.is_staff = True
user.is_superuser = True
user.is_active = True
user.save(using=self._db)
return user
if other_fields.get('is_staff') is not True:
raise ValueError(_('Superuser must be assigned to staff'))
if other_fields.get('is_superuser') is not True:
raise ValueError(_('Superuser must have is_superuser set to True'))
class CustomUser(AbstractBaseUser, PermissionsMixin):
email = models.EmailField(unique=True)
firstname = models.CharField(max_length=255)
lastname = models.CharField(max_length=255)
userid = models.CharField(max_length=255, unique=True)
date_of_birth = models.DateField()
is_active = models.BooleanField(default=True)
is_staff = models.BooleanField(default=False)
event_code = models.CharField(max_length=255, blank=True, null=True)
team_name = models.CharField(max_length=255, blank=True, null=True)
zekken_number = models.CharField(max_length=255, blank=True, null=True)
return self.create_user(email, password, **other_fields)
objects = CustomUserManager()
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = ['firstname', 'lastname', 'userid', 'date_of_birth']
def __str__(self):
return self.email
class JpnAdminMainPerf(models.Model):
geom = models.MultiPolygonField(blank=True, null=True)
@ -172,28 +226,28 @@ class JpnAdminMainPerf(models.Model):
# ###
# ### Cities
# ###
class JpnSubPerf(models.Model):
geom = models.MultiPolygonField(blank=True, null=True)
adm0_en = models.CharField(max_length=254, blank=True, null=True)
adm0_ja = models.CharField(max_length=254, blank=True, null=True)
adm0_pcode = models.CharField(max_length=254, blank=True, null=True)
adm1_en = models.CharField(max_length=254, blank=True, null=True)
adm1_ja = models.CharField(max_length=254, blank=True, null=True)
adm1_pcode = models.CharField(max_length=254, blank=True, null=True)
adm2_ja = models.CharField(max_length=254, blank=True, null=True)
adm2_en = models.CharField(max_length=254, blank=True, null=True)
adm2_pcode = models.CharField(max_length=254, blank=True, null=True)
name_modified = models.CharField(max_length=254, blank=True, null=True)
area_name = models.CharField(max_length=254, blank=True, null=True)
list_order =models.IntegerField(default=0)
# class JpnSubPerf(models.Model):
# geom = models.MultiPolygonField(blank=True, null=True)
# adm0_en = models.CharField(max_length=254, blank=True, null=True)
# adm0_ja = models.CharField(max_length=254, blank=True, null=True)
# adm0_pcode = models.CharField(max_length=254, blank=True, null=True)
# adm1_en = models.CharField(max_length=254, blank=True, null=True)
# adm1_ja = models.CharField(max_length=254, blank=True, null=True)
# adm1_pcode = models.CharField(max_length=254, blank=True, null=True)
# adm2_ja = models.CharField(max_length=254, blank=True, null=True)
# adm2_en = models.CharField(max_length=254, blank=True, null=True)
# adm2_pcode = models.CharField(max_length=254, blank=True, null=True)
# name_modified = models.CharField(max_length=254, blank=True, null=True)
# area_name = models.CharField(max_length=254, blank=True, null=True)
# list_order =models.IntegerField(default=0)
class Meta:
managed = False
db_table = 'jpn_sub_perf'
indexes = [
models.Index(fields=['geom'], name='jpn_sub_perf_geom_idx'),
# Add other fields for indexing as per the requirements
]
# class Meta:
# managed = False
# db_table = 'jpn_sub_perf'
# indexes = [
# models.Index(fields=['geom'], name='jpn_sub_perf_geom_idx'),
# # Add other fields for indexing as per the requirements
# ]
###
### Gifu Areas
@ -234,287 +288,49 @@ class UserUploadUser(models.Model):
class CustomUser(AbstractBaseUser, PermissionsMixin):
email = models.EmailField(unique=True)
firstname = models.CharField(max_length=255)
lastname = models.CharField(max_length=255)
userid = models.CharField(max_length=255, unique=True, editable=False)
date_of_birth = models.DateField()
is_active = models.BooleanField(default=True)
is_staff = models.BooleanField(default=False)
event_code = models.CharField(max_length=255, blank=True, null=True)
team_name = models.CharField(max_length=255, blank=True, null=True)
zekken_number = models.CharField(max_length=255, blank=True, null=True)
objects = CustomUserManager()
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = ['firstname', 'lastname', 'date_of_birth']
def __str__(self):
return self.email
class CustomUser_old(AbstractBaseUser, PermissionsMixin):
class Groups(models.TextChoices):
GB1 = '大垣-初心者', '大垣-初心者'
GB2 = '大垣-3時間', '大垣-3時間'
GB3 = '大垣-5時間', '大垣-5時間'
email = models.EmailField(unique=True)
firstname = models.CharField(max_length=255,blank=True, null=True)
lastname = models.CharField(max_length=255, blank=True, null=True)
date_of_birth = models.DateField(blank=True, null=True)
female = models.BooleanField(default=False)
group = models.CharField(max_length=255,blank=True)
is_active = models.BooleanField(default=True)
email = models.CharField(_("Email"), max_length=255, unique=True)
is_staff = models.BooleanField(default=False)
date_joined = models.DateTimeField(default=timezone.now)
is_active = models.BooleanField(default=True)
is_rogaining = models.BooleanField(default=False)
zekken_number = models.CharField(_("Zekken Number"), max_length=255, blank=True, null=True)
event_code = models.CharField(_("Event Code"), max_length=255, blank=True, null=True)
team_name = models.CharField(_("Team Name"), max_length=255, blank=True, null=True)
group = models.CharField(max_length=255,
choices=Groups.choices,
default=Groups.GB1)
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = []
REQUIRED_FIELDS = ['group',]
objects = CustomUserManager()
def set_password(self, raw_password):
self.password = make_password(raw_password)
self._password = raw_password
def __str__(self):
return self.email
class TempUser(models.Model):
email = models.EmailField(unique=True)
password = models.CharField(max_length=128)
is_rogaining = models.BooleanField(default=False)
zekken_number = models.CharField(max_length=255, blank=True, null=True)
event_code = models.CharField(max_length=255, blank=True, null=True)
team_name = models.CharField(max_length=255, blank=True, null=True)
group = models.CharField(max_length=255)
firstname = models.CharField(max_length=255,blank=True, null=True)
lastname = models.CharField(max_length=255, blank=True, null=True)
date_of_birth = models.DateField(blank=True, null=True)
female = models.BooleanField(default=False)
verification_code = models.UUIDField(default=uuid.uuid4, editable=False)
created_at = models.DateTimeField(auto_now_add=True)
expires_at = models.DateTimeField()
def set_password(self, raw_password):
self.password = make_password(raw_password)
def check_password(self, raw_password):
return check_password(raw_password, self.password)
# Use this method when creating a TempUser
@classmethod
def create_temp_user(cls, email, password, **kwargs):
temp_user = cls(email=email, **kwargs)
temp_user.set_password(password)
temp_user.save()
return temp_user
def __str__(self):
return self.email
def save(self, *args, **kwargs):
if not self.expires_at:
self.expires_at = timezone.now() + timedelta(hours=24) # 24-hour expiry window
super().save(*args, **kwargs)
def is_valid(self):
return timezone.now() <= self.expires_at
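
For context, `TempUser` is a short-lived record for the 24-hour e-mail verification flow. A minimal sketch of that flow; only `create_temp_user()`, `is_valid()` and `verification_code` come from the model above, the helper functions are assumptions:

```python
# Hedged sketch -- illustrative values and assumed helper functions.
temp = TempUser.create_temp_user(
    email='runner@example.com',
    password='s3cret',
    group='一般-5時間',
)
send_verification_mail(temp.email, temp.verification_code)   # assumed helper

candidate = TempUser.objects.get(verification_code=code_from_link)
if candidate.is_valid():                  # still inside the 24-hour window
    promote_to_custom_user(candidate)     # assumed helper: create the real CustomUser
    candidate.delete()
```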
class NewEvent2(models.Model):
event_name = models.CharField(max_length=255, unique=True)
start_datetime = models.DateTimeField(default=timezone.now)
end_datetime = models.DateTimeField()
deadlineDateTime = models.DateTimeField(null=True, blank=True)
#// Added @2024-10-21
public = models.BooleanField(default=False)
hour_3 = models.BooleanField(default=False)
hour_5 = models.BooleanField(default=True)
class_general = models.BooleanField(default=True)
class_family = models.BooleanField(default=True)
class_solo_male = models.BooleanField(default=True)
class_solo_female = models.BooleanField(default=True)
self_rogaining = models.BooleanField(default=False)
def __str__(self):
return f"{self.event_name} - From:{self.start_datetime} To:{self.end_datetime}"
def save(self, *args, **kwargs):
if not self.deadlineDateTime:
self.deadlineDateTime = self.end_datetime #- timedelta(days=7)
super().save(*args, **kwargs)
class NewEvent(models.Model):
event_name = models.CharField(max_length=255, primary_key=True)
start_datetime = models.DateTimeField(default=timezone.now)
end_datetime = models.DateTimeField()
def __str__(self):
return f"{self.event_name} - From:{self.start_datetime} To:{self.end_datetime}"
def get_default_category():
return NewCategory.objects.get_or_create(category_name="Default Category", category_number=1)[0].id
class Team(models.Model):
# zekken_number = models.CharField(max_length=255, unique=True)
team_name = models.CharField(max_length=255)
owner = models.ForeignKey(CustomUser, on_delete=models.CASCADE, related_name='owned_teams', blank=True, null=True)
category = models.ForeignKey('NewCategory', on_delete=models.SET_DEFAULT, default=get_default_category)
# class Meta:
# unique_together = ('zekken_number', 'category')
def __str__(self):
return f"{self.team_name}, owner:{self.owner.lastname} {self.owner.firstname}"
class Member(models.Model):
team = models.ForeignKey(Team, on_delete=models.CASCADE, related_name='members')
user = models.ForeignKey(CustomUser, on_delete=models.CASCADE)
firstname = models.CharField(max_length=255, blank=True, null=True)
lastname = models.CharField(max_length=255, blank=True, null=True)
date_of_birth = models.DateField(null=True, blank=True)
female = models.BooleanField(default=False)
is_temporary = models.BooleanField(default=False) # Akira 2024-7-24
class Meta:
unique_together = ('team', 'user')
def __str__(self):
return f"{self.team.team_name} - {self.user.lastname} {self.user.firstname}"
#
class Category(models.Model):
category_name = models.CharField(max_length=255, primary_key=True)
category_number = models.IntegerField(default=0)
duration = models.DurationField(default=timedelta(hours=5))
num_of_member = models.IntegerField(default=1)
family = models.BooleanField(default=False)
female = models.BooleanField(default=False)
class Meta:
unique_together = ('category_name','category_number')
def __str__(self):
hours = self.duration.total_seconds() // 3600
return f"{self.category_name} - {self.category_number} ({int(hours)}時間)"
@property
def hours(self):
return self.duration.total_seconds() // 3600
class NewCategory(models.Model):
category_name = models.CharField(max_length=255, unique=True)
category_number = models.IntegerField(default=0)
duration = models.DurationField(default=timedelta(hours=5))
num_of_member = models.IntegerField(default=1)
family = models.BooleanField(default=False)
female = models.BooleanField(default=False)
class Meta:
unique_together = ('category_name','category_number')
def __str__(self):
hours = self.duration.total_seconds() // 3600
return f"{self.category_name} - {self.category_number} ({int(hours)}時間)"
@property
def hours(self):
return self.duration.total_seconds() // 3600
class Entry(models.Model):
team = models.ForeignKey(Team, on_delete=models.CASCADE)
event = models.ForeignKey(NewEvent2, on_delete=models.CASCADE)
category = models.ForeignKey(NewCategory, on_delete=models.CASCADE)
date = models.DateTimeField()
owner = models.ForeignKey(CustomUser, on_delete=models.CASCADE,blank=True, null=True) # Akira 2024-7-24
zekken_number = models.IntegerField(default=0)
is_active = models.BooleanField(default=True) # 新しく追加
hasParticipated = models.BooleanField(default=False) # 新しく追加
hasGoaled = models.BooleanField(default=False) # 新しく追加
class Meta:
unique_together = ('zekken_number', 'event', 'date')
def __str__(self):
return f"{self.zekken_number} - {self.team.team_name} - {self.event.event_name} - {self.date}"
def clean(self):
super().clean()
if self.event and self.category and self.date:
start = self.event.start_datetime
end = self.event.end_datetime #- self.category.duration
if not (start.date() <= self.date.date() <= end.date()):
raise ValidationError({
'date': f'日時{self.date}{start.date()}から{end.date()}の間である必要があります。'
})
# Check the members' ages and genders
if self.team: # and not self.team.members.exists():
members = self.team.members.all() # fetch the team's registered members
if not members.exists():
raise ValidationError({'team': 'チームにメンバーが登録されていません。'})
#members = Member.objects.filter(team=self.team)
#if not members.exists():
# # ここで、owner をMemberに登録する。 Akira 2024-7-24
# raise ValidationError("チームにメンバーが登録されていません。")
adults = [m for m in members if self.is_adult(m.user.date_of_birth)]
children = [m for m in members if self.is_child(m.user.date_of_birth)]
teenagers = [m for m in members if self.is_teenager(m.user.date_of_birth)]
if self.category.family:
if not (adults and children):
raise ValidationError("ファミリーカテゴリーには、18歳以上のメンバーと小学生以下のメンバーが各1名以上必要です。")
else:
if not adults:
raise ValidationError("18歳以上のメンバーが1名以上必要です。")
if children:
raise ValidationError("ファミリーカテゴリー以外では、小学生以下のメンバーは参加できません。")
if self.category.num_of_member == 1:
if len(members) != 1:
raise ValidationError("このカテゴリーはソロ参加のみ可能です。")
if not adults:
raise ValidationError("ソロ参加は18歳以上のみ可能です。")
if self.category.female and not members[0].user.female:
raise ValidationError("このカテゴリーは女性のみ参加可能です。")
if not self.category.female and members[0].user.female:
raise ValidationError("このカテゴリーは男性のみ参加可能です。")
if len(members) > self.category.num_of_member:
raise ValidationError(f"このカテゴリーは{self.category.num_of_member}名までの参加が必要です。")
def save(self, *args, **kwargs):
self.full_clean()
super().save(*args, **kwargs)
@staticmethod
def is_adult(birth_date):
today = date.today()
age = today.year - birth_date.year - ((today.month, today.day) < (birth_date.month, birth_date.day))
return age >= 18
@staticmethod
def is_child(birth_date):
today = date.today()
age = today.year - birth_date.year - ((today.month, today.day) < (birth_date.month, birth_date.day))
return age <= 12
@staticmethod
def is_teenager(birth_date):
today = date.today()
age = today.year - birth_date.year - ((today.month, today.day) < (birth_date.month, birth_date.day))
return 13 <= age <= 17
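
Since `save()` runs `full_clean()`, the event-window and category/member checks above are enforced on every save. A minimal sketch with illustrative objects (values are not from the repository):

```python
# Hedged sketch -- illustrative values only.
event = NewEvent2.objects.get(event_name='大垣3')
category = NewCategory.objects.get(category_name='ソロ男子')
entry = Entry(
    team=team,                     # a Team whose Members are already registered
    event=event,
    category=category,
    date=event.start_datetime,     # must fall inside the event window
    owner=team.owner,
    zekken_number=3135,
)
entry.save()   # raises ValidationError if the team violates the category rules
```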
class EntryMember(models.Model):
entry = models.ForeignKey(Entry, on_delete=models.CASCADE)
member = models.ForeignKey(Member, on_delete=models.CASCADE)
is_temporary = models.BooleanField(default=False) # Akira 2024-7-24
class Meta:
unique_together = ('entry', 'member')
def __str__(self):
return f"{self.entry.team.zekken_number} - {self.member.user.lastname} {self.member.user.firstname}"
class GoalImages(models.Model):
user=models.ForeignKey(CustomUser, on_delete=models.DO_NOTHING)
goalimage = models.FileField(upload_to='goals/%y%m%d', blank=True, null=True)
@ -522,11 +338,6 @@ class GoalImages(models.Model):
team_name = models.CharField(_("Team name"), max_length=255)
event_code = models.CharField(_("event code"), max_length=255)
cp_number = models.IntegerField(_("CP numner"))
zekken_number = models.TextField(
null=True, # False にする
blank=True, # False にする
help_text="ゼッケン番号"
)
class CheckinImages(models.Model):
user=models.ForeignKey(CustomUser, on_delete=models.DO_NOTHING)
@ -536,122 +347,6 @@ class CheckinImages(models.Model):
event_code = models.CharField(_("event code"), max_length=255)
cp_number = models.IntegerField(_("CP numner"))
class GpsCheckin(models.Model):
id = models.AutoField(primary_key=True) # 明示的にidフィールドを追加
path_order = models.IntegerField(
null=False,
help_text="チェックポイントの順序番号"
)
zekken_number = models.TextField(
null=False,
help_text="ゼッケン番号"
)
event_code = models.TextField(
null=False,
help_text="イベントコード"
)
cp_number = models.IntegerField(
null=True,
blank=True,
help_text="チェックポイント番号"
)
lattitude = models.FloatField(
null=True,
blank=True,
help_text="緯度:写真から取得"
)
longitude = models.FloatField(
null=True,
blank=True,
help_text="経度:写真から取得"
)
image_address = models.TextField(
null=True,
blank=True,
help_text="チェックイン画像のパス"
)
image_receipt = models.TextField(
null=True,
blank=True,
default=False,
help_text="レシート画像のパス"
)
image_qr = models.BooleanField(
default=False,
help_text="QRコードスキャンフラグ"
)
validate_location = models.BooleanField(
default=False,
help_text="位置情報検証フラグ:画像認識で検証した結果"
)
goal_time = models.TextField(
null=True,
blank=True,
help_text="ゴール時刻=ゴール時のみ使用される。画像から時刻を読み取り設定する。"
)
late_point = models.IntegerField(
null=True,
blank=True,
help_text="遅刻ポイント:ゴールの時刻が制限時間を超えた場合、1分につき-50点が加算。"
)
create_at = models.DateTimeField(
null=True,
blank=True,
help_text="作成日時:データの作成日時"
)
create_user = models.TextField(
null=True,
blank=True,
help_text="作成ユーザー"
)
update_at = models.DateTimeField(
null=True,
blank=True,
help_text="更新日時"
)
update_user = models.TextField(
null=True,
blank=True,
help_text="更新ユーザー"
)
buy_flag = models.BooleanField(
default=False,
help_text="購入フラグ協賛店で購入した場合、無条件でTRUEにする。"
)
colabo_company_memo = models.TextField(
null=False,
default='',
help_text="グループコード:複数のイベントで合算する場合に使用する"
)
points = models.IntegerField(
null=True,
blank=True,
help_text="ポイント:このチェックインによる獲得ポイント。通常ポイントと買い物ポイントは分離される。ゴールの場合には減点なども含む。"
)
class Meta:
db_table = 'gps_checkins'
constraints = [
models.UniqueConstraint(
fields=['zekken_number', 'event_code', 'path_order'],
name='unique_gps_checkin'
)
]
indexes = [
models.Index(fields=['zekken_number', 'event_code','path_order'], name='idx_zekken_event'),
models.Index(fields=['create_at'], name='idx_create_at'),
]
def __str__(self):
return f"{self.event_code}-{self.zekken_number}-{self.path_order}-buy:{self.buy_flag}-valid:{self.validate_location}-point:{self.points}"
def save(self, *args, **kwargs):
# Automatically stamp the creation/update times
from django.utils import timezone
if not self.create_at:
self.create_at = timezone.now()
self.update_at = timezone.now()
super().save(*args, **kwargs)
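
A minimal sketch of writing a goal record through this model; `create()` goes through `save()`, so `create_at`/`update_at` are stamped automatically (field values are illustrative only):

```python
# Hedged sketch -- field values are illustrative only.
GpsCheckin.objects.create(
    path_order=1,
    zekken_number='3135',
    event_code='大垣3',
    cp_number=0,
    goal_time='13:05',
    late_point=-100,       # e.g. two minutes late at -50 points per minute
    points=-100,
    create_user='akira',
)
```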
class RogUser(models.Model):
user=models.OneToOneField(CustomUser, on_delete=models.CASCADE)
@ -1026,7 +721,7 @@ class ShapeFileLocations(models.Model):
@receiver(pre_save, sender=Location)
def location_presave(sender, instance, *args, **kwargs):
print("------############------------", instance.location_id)
#print("------############------------", instance.location_id)
templocation.objects.filter(location_id = instance.location_id).delete()
@ -1064,8 +759,6 @@ def deleteShapelocation(sender,instance,*args,**kwargs):
@receiver(post_save, sender=ShapeLayers)
def publish_data(sender, instance, created, **kwargs):
logger.info(f"Processing ShapeLayer: {instance.name}")
file = instance.file.path
file_format = os.path.basename(file).split('.')[-1]
file_name = os.path.basename(file).split('.')[0]
@ -1079,58 +772,42 @@ def publish_data(sender, instance, created, **kwargs):
os.remove(file)
try:
#logger.debug("Attempting to read shape file")
# print("before reading the file")
shp = glob.glob(r'{}/**/*.shp'.format(file_path), recursive=True)[0]
#logger.info(f"Shape file read: {shp}")
# print("this is the read file",shp)
gdf = gpd.read_file(shp)
crs_name = str(gdf.crs.srs)
#logger.debug(f"CRS name: {crs_name}")
# print(crs_name, 'crs - name')
epsg = int(crs_name.replace('epsg:',''))
if epsg is None:
epsg=4326
lm2 = getTempMappingforModel(instance.layerof, shp)
#logger.info("Saving to temporary table")
# print("### shape file is ###")
lm2.save(strict=True, verbose=True)
#logger.info("Save to temporary table completed")
os.remove(shp)
except Exception as e:
print('######## shape file##########',e)
try:
logger.debug("Attempting to read CSV file")
csv_f = glob.glob(r'{}/**/*.csv'.format(file_path), recursive=True)[0]
remove_bom_inplace(csv_f)
mdl = apps.get_model(app_label="rog", model_name=LAYER_CHOICES[instance.layerof -1][1])
print(mdl)
print(f"#### instance.layerof - {instance.layerof}")
#logger.debug(f"Model for layer: {mdl}")
# print(mdl)
# print(f"#### instance.layerof - {instance.layerof}")
with open(csv_f, mode="r", encoding="utf-8") as txt_file:
#heading = next(txt_file)
reader = csv.reader(txt_file, delimiter=",")
for fields in reader:
logger.debug(f"Processing row: {fields[0]}")
print("@@@@@@@@@@@@")
print(fields[0])
print("@@@@@@@@@@@@")
if instance.layerof == 1:
#insertShapeLayerLocation(instance.name, fields)
#updateLocation(mdl, fields)
update_or_create_location(mdl, fields)
updateLocation(mdl, fields)
if instance.layerof == 2:
updateLineTable(mdl, fields)
if instance.layerof == 3:
@ -1139,157 +816,76 @@ def publish_data(sender, instance, created, **kwargs):
with open(csv_f, mode="r", encoding="utf-8") as txt_file:
reader_2 = csv.reader(txt_file, delimiter=",")
for fields in reader_2:
logger.debug(f"Inserting ShapeLayerLocation: {fields[0]}")
print("@@@@@@@@@@@@")
print(fields[0])
print("@@@@@@@@@@@@")
if instance.layerof == 1:
insertShapeLayerLocation(instance.name, fields)
logger.info("CSV processing completed")
except Exception as e:
print('######## csv file ##########',e)
def insertShapeLayerLocation(name, fields):
logger.info(f"Attempting to insert ShapeFileLocations for file: {name}, location_id: {fields[0]}")
try:
sll = UserUploadUser(userfile=name, email=fields[0])
sll.save();
except Exception as e:
logger.error(f"Error inserting ShapeFileLocations: {e}", exc_info=True)
sll = UserUploadUser(userfile=name, email=fields[0])
sll.save();
def insertUserUploadUser(name, fields):
try:
with transaction.atomic():
sll = UserUploadUser(userfile=name, email=fields[0])
sll.save()
except Exception as e:
logger.error(f"Error updating TempLocation: {e}", exc_info=True)
def update_or_create_location(mdl, fields):
try:
with transaction.atomic():
latitude = float(fields[11]) if fields[11] and len(fields[11]) > 0 else None
longitude = float(fields[12]) if fields[12] and len(fields[12]) > 0 else None
geom = MultiPoint(Point(longitude, latitude)) if latitude is not None and longitude is not None else None
defaults={
'sub_loc_id': fields[1] if len(fields[1]) > 0 else '',
'cp': fields[2] if len(fields[2]) > 0 else 0,
# その他のフィールド...
'location_name': fields[3] if len(fields[3]) > 0 else '',
'category': fields[4] if len(fields[4]) > 0 else '',
'subcategory': fields[5] if len(fields[5]) > 0 else '',
'zip': fields[6] if len(fields[6]) > 0 else '',
'address': fields[7] if len(fields[7]) > 0 else '',
'prefecture': fields[8] if len(fields[8]) > 0 else '',
'area': fields[9] if len(fields[9]) > 0 else '',
'city': fields[10] if len(fields[10]) > 0 else '',
'latitude': latitude,
'longitude': longitude,
'photos': fields[13] if len(fields[13]) > 0 else '',
'videos': fields[14] if len(fields[14]) > 0 else '',
'webcontents': fields[15] if len(fields[15]) > 0 else '',
'status': fields[16] if len(fields[16]) > 0 else '',
'portal': fields[17] if len(fields[17]) > 0 else '',
'group': fields[18] if len(fields[18]) > 0 else '',
'phone': fields[19] if len(fields[19]) > 0 else '',
'fax': fields[20] if len(fields[20]) > 0 else '',
'email': fields[21] if len(fields[21]) > 0 else '',
'facility': fields[22] if len(fields[22]) > 0 else '',
'remark': fields[23] if len(fields[23]) > 0 else '',
'tags': fields[24] if len(fields[24]) > 0 else '',
'hidden_location': fields[25] if len(fields[25]) > 0 else False,
'auto_checkin': fields[26] if len(fields[26]) > 0 else False,
'checkin_radius': fields[27] if len(fields[27]) > 0 else 15,
'checkin_point': fields[28] if len(fields[28]) > 0 else 10,
'buy_point': fields[29] if len(fields[29]) > 0 else 0,
'evaluation_value': fields[30] if len(fields[30]) > 0 else '',
'shop_closed': fields[31] if len(fields[31]) > 0 else False,
'shop_shutdown': fields[32] if len(fields[32]) > 0 else False,
'opening_hours_mon': fields[33] if len(fields[33]) > 0 else '',
'opening_hours_tue': fields[34] if len(fields[34]) > 0 else '',
'opening_hours_wed': fields[35] if len(fields[35]) > 0 else '',
'opening_hours_thu': fields[36] if len(fields[36]) > 0 else '',
'opening_hours_fri': fields[37] if len(fields[37]) > 0 else '',
'opening_hours_sat': fields[38] if len(fields[38]) > 0 else '',
'opening_hours_sun': fields[39] if len(fields[39]) > 0 else ''
}
if geom:
defaults['geom'] = geom
obj, created = mdl.objects.update_or_create(
location_id=int(fields[0]),
defaults=defaults
)
if created:
logger.info(f"New location created with id: {obj.location_id}")
else:
logger.info(f"Location updated with id: {obj.location_id}")
except Exception as e:
logger.error(f"Error updating or creating location: {e}", exc_info=True)
with transaction.atomic():
sll = UserUploadUser(userfile=name, email=fields[0])
sll.save()
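For reference, a minimal sketch of feeding one 40-column CSV row (the column positions used as fields[0]..fields[39] above) to update_or_create_location; the Location model is assumed to be the layerof == 1 model and every value is illustrative.

```python
# Sketch only: values are illustrative; column indexes follow the mapping above.
from rog.models import Location

row = [""] * 40
row[0] = "12"            # location_id, the lookup key for update_or_create
row[3] = "Ogaki Castle"  # location_name
row[11] = "35.3625"      # latitude
row[12] = "136.6129"     # longitude
row[28] = "10"           # checkin_point

update_or_create_location(Location, row)   # creates the row, or updates it if location_id 12 exists
```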
def updateLocation(mdl, fields):
print(f"Updating {fields[0]} - {fields[1]}")
print(mdl.objects.filter(location_id = int(fields[0])))
print(f"--- ${fields} ----")
try:
with transaction.atomic():
updated = mdl.objects.filter(location_id = int(fields[0])).update(
sub_loc_id = fields[1] if len(fields[1]) > 0 else '',
cp = fields[2] if len(fields[2]) > 0 else 0,
location_name = fields[3] if len(fields[3]) > 0 else '',
category = fields[4] if len(fields[4]) > 0 else '',
subcategory = fields[5] if len(fields[5]) > 0 else '',
zip = fields[6] if len(fields[6]) > 0 else '',
address = fields[7] if len(fields[7]) > 0 else '',
prefecture = fields[8] if len(fields[8]) > 0 else '',
area = fields[9] if len(fields[9]) > 0 else '',
city = fields[10] if len(fields[10]) > 0 else '',
latitude = fields[11] if len(fields[11]) > 0 else '',
longitude = fields[12] if len(fields[12]) > 0 else '',
photos = fields[13] if len(fields[13]) > 0 else '',
videos = fields[14] if len(fields[14]) > 0 else '',
webcontents = fields[15] if len(fields[15]) > 0 else '',
status = fields[16] if len(fields[16]) > 0 else '',
portal = fields[17] if len(fields[17]) > 0 else '',
group = fields[18] if len(fields[18]) > 0 else '',
phone = fields[19] if len(fields[19]) > 0 else '',
fax = fields[20] if len(fields[20]) > 0 else '',
email = fields[21] if len(fields[21]) > 0 else '',
facility = fields[22] if len(fields[22]) > 0 else '',
remark = fields[23] if len(fields[23]) > 0 else '',
tags = fields[24] if len(fields[24]) > 0 else '',
hidden_location = fields[25] if len(fields[25]) > 0 else False,
auto_checkin = fields[26] if len(fields[26]) > 0 else False,
checkin_radius = fields[27] if len(fields[27]) > 0 else 15,
checkin_point = fields[28] if len(fields[28]) > 0 else 10,
buy_point = fields[29] if len(fields[29]) > 0 else 0,
evaluation_value = fields[30] if len(fields[30]) > 0 else '',
shop_closed = fields[31] if len(fields[31]) > 0 else False,
shop_shutdown = fields[32] if len(fields[32]) > 0 else False,
opening_hours_mon = fields[33] if len(fields[33]) > 0 else '',
opening_hours_tue = fields[34] if len(fields[34]) > 0 else '',
opening_hours_wed = fields[35] if len(fields[35]) > 0 else '',
opening_hours_thu = fields[36] if len(fields[36]) > 0 else '',
opening_hours_fri = fields[37] if len(fields[37]) > 0 else '',
opening_hours_sat = fields[38] if len(fields[38]) > 0 else '',
opening_hours_sun = fields[39] if len(fields[39]) > 0 else ''
)
logger.info(f"TempLocation updated successfully. Rows affected: {updated}")
except Exception as e:
logger.error(f"Error updating TempLocation: {e}", exc_info=True)
with transaction.atomic():
mdl.objects.filter(location_id = int(fields[0])).update(
sub_loc_id = fields[1] if len(fields[1]) > 0 else '',
cp = fields[2] if len(fields[2]) > 0 else 0,
location_name = fields[3] if len(fields[3]) > 0 else '',
category = fields[4] if len(fields[4]) > 0 else '',
subcategory = fields[5] if len(fields[5]) > 0 else '',
zip = fields[6] if len(fields[6]) > 0 else '',
address = fields[7] if len(fields[7]) > 0 else '',
prefecture = fields[8] if len(fields[8]) > 0 else '',
area = fields[9] if len(fields[9]) > 0 else '',
city = fields[10] if len(fields[10]) > 0 else '',
latitude = fields[11] if len(fields[11]) > 0 else '',
longitude = fields[12] if len(fields[12]) > 0 else '',
photos = fields[13] if len(fields[13]) > 0 else '',
videos = fields[14] if len(fields[14]) > 0 else '',
webcontents = fields[15] if len(fields[15]) > 0 else '',
status = fields[16] if len(fields[16]) > 0 else '',
portal = fields[17] if len(fields[17]) > 0 else '',
group = fields[18] if len(fields[18]) > 0 else '',
phone = fields[19] if len(fields[19]) > 0 else '',
fax = fields[20] if len(fields[20]) > 0 else '',
email = fields[21] if len(fields[21]) > 0 else '',
facility = fields[22] if len(fields[22]) > 0 else '',
remark = fields[23] if len(fields[23]) > 0 else '',
tags = fields[24] if len(fields[24]) > 0 else '',
hidden_location = fields[25] if len(fields[25]) > 0 else False,
auto_checkin = fields[26] if len(fields[26]) > 0 else False,
checkin_radius = fields[27] if len(fields[27]) > 0 else 15,
checkin_point = fields[28] if len(fields[28]) > 0 else 10,
buy_point = fields[29] if len(fields[29]) > 0 else 0,
evaluation_value = fields[30] if len(fields[30]) > 0 else '',
shop_closed = fields[31] if len(fields[31]) > 0 else False,
shop_shutdown = fields[32] if len(fields[32]) > 0 else False,
opening_hours_mon = fields[33] if len(fields[33]) > 0 else '',
opening_hours_tue = fields[34] if len(fields[34]) > 0 else '',
opening_hours_wed = fields[35] if len(fields[35]) > 0 else '',
opening_hours_thu = fields[36] if len(fields[36]) > 0 else '',
opening_hours_fri = fields[37] if len(fields[37]) > 0 else '',
opening_hours_sat = fields[38] if len(fields[38]) > 0 else '',
opening_hours_sun = fields[39] if len(fields[39]) > 0 else ''
)
def updateLineTable(mdl, fields):
print(f"Updating {fields[0]} - {fields[1]}")
@ -1349,10 +945,7 @@ def updatePolygonTable(mdl, fields):
def createUser(fields):
with transaction.atomic():
other_fields.setdefault('event_code',fields[1])
other_fields.setdefault('team_name',fields[1])
other_fields.setdefault('group',fields[1])
user = CustomUser.objects.create_user(email=fields[0], password=fields[4], **other_fields)
user = CustomUser.objects.create_user(email=fields[0], event_code=fields[1], team_name=fields[2], group=fields[3], password=fields[4])
user.is_superuser = False
user.is_staff = False
user.save()
@ -1372,8 +965,6 @@ def deleteUserUploadUser(sender,instance,*args,**kwargs):
@receiver(post_save, sender=UserUpload)
def publish_data(sender, instance, created, **kwargs):
logger.info(f"Processing ShapeLayer: {instance.name}")
file = instance.file.path
#os.remove(file)

View File

@ -1,25 +0,0 @@
from rest_framework import permissions
from .models import Team,Member
class IsMemberOrTeamOwner(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
# Check if user is a member of the team or the team owner
return request.user in obj.team.members.all() or request.user == obj.team.owner
class IsTeamOwner(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
if isinstance(obj, Team):
return obj.owner == request.user
elif isinstance(obj, Member):
return obj.team.owner == request.user
return False
class IsTeamOwnerOrMember(permissions.BasePermission):
def has_permission(self, request, view):
team_id = view.kwargs.get('team_id')
if not team_id:
return False
team = Team.objects.get(id=team_id)
return team.owner == request.user or team.members.filter(user=request.user).exists()
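These permission classes are removed by this change; for reference, a minimal sketch of how they would typically be attached to a DRF viewset. The viewset name and the team_id URL kwarg are illustrative assumptions, not part of this codebase.

```python
# Sketch only: TeamMemberViewSet and the /teams/<int:team_id>/members/ route are assumptions.
from rest_framework import viewsets
from rest_framework.permissions import IsAuthenticated

from .models import Member
from .permissions import IsTeamOwnerOrMember
from .serializers import MemberSerializer


class TeamMemberViewSet(viewsets.ModelViewSet):
    serializer_class = MemberSerializer
    # IsTeamOwnerOrMember.has_permission() reads view.kwargs['team_id'],
    # so the route must expose that kwarg, e.g. /teams/<int:team_id>/members/
    permission_classes = [IsAuthenticated, IsTeamOwnerOrMember]

    def get_queryset(self):
        return Member.objects.filter(team_id=self.kwargs["team_id"])
```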

View File

@ -1,189 +0,0 @@
-- まず既存のビューをすべて削除
DROP MATERIALIZED VIEW IF EXISTS mv_entry_details CASCADE;
DROP VIEW IF EXISTS v_category_rankings CASCADE;
DROP VIEW IF EXISTS v_checkin_summary CASCADE;
-- チェックポイントの集計用ビュー
CREATE VIEW v_checkin_summary AS
SELECT
event_code,
zekken_number, -- 文字列として保持
COUNT(*) as total_checkins,
COUNT(CASE WHEN buy_flag THEN 1 END) as purchase_count,
SUM(points) as total_points,
SUM(CASE WHEN buy_flag THEN points ELSE 0 END) as bonus_points,
SUM(CASE WHEN NOT buy_flag THEN points ELSE 0 END) as normal_points,
SUM(COALESCE(late_point, 0)) as penalty_points,
MAX(create_at) as last_checkin
FROM
gps_checkins
GROUP BY
event_code, zekken_number;
-- カテゴリー内ランキング計算用ビュー
CREATE VIEW v_category_rankings AS
SELECT
e.id,
e.event_id,
ev.event_name,
e.category_id,
CAST(e.zekken_number AS TEXT) as zekken_number, -- 数値を文字列に変換
COALESCE(cs.total_points, 0) as total_score,
RANK() OVER (PARTITION BY e.event_id, e.category_id
ORDER BY COALESCE(cs.total_points, 0) DESC) as ranking,
COUNT(*) OVER (PARTITION BY e.event_id, e.category_id) as total_participants
FROM
rog_entry e
JOIN rog_newevent2 ev ON e.event_id = ev.id
LEFT JOIN v_checkin_summary cs ON ev.event_name = cs.event_code
AND CAST(e.zekken_number AS TEXT) = cs.zekken_number
WHERE
e.is_active = true;
-- マテリアライズドビューの作成
-- マテリアライズドビューの再作成
CREATE MATERIALIZED VIEW mv_entry_details AS
SELECT
-- 既存のフィールド
e.id,
CAST(e.zekken_number AS TEXT) as zekken_number,
e.is_active,
e."hasParticipated",
e."hasGoaled",
e.date as entry_date,
-- イベント情報
ev.event_name,
ev.start_datetime,
ev.end_datetime,
ev."deadlineDateTime",
-- カテゴリー情報
nc.category_name,
nc.category_number,
nc.duration,
nc.num_of_member,
nc.family as is_family_category,
nc.female as is_female_category,
-- チーム情報
t.team_name,
-- オーナー情報
cu.email as owner_email,
cu.firstname as owner_firstname,
cu.lastname as owner_lastname,
cu.date_of_birth as owner_birth_date,
cu.female as owner_is_female,
-- スコア情報
COALESCE(cs.total_points, 0) as total_points,
COALESCE(cs.normal_points, 0) as normal_points,
COALESCE(cs.bonus_points, 0) as bonus_points,
COALESCE(cs.penalty_points, 0) as penalty_points,
COALESCE(cs.total_checkins, 0) as checkin_count,
COALESCE(cs.purchase_count, 0) as purchase_count,
-- ゴール情報
gi.goalimage as goal_image,
gi.goaltime as goal_time,
-- 完走状態の判定を追加
CASE
WHEN gi.goaltime IS NULL THEN '棄権'
WHEN gi.goaltime <= ev.end_datetime THEN '完走'
WHEN gi.goaltime > ev.end_datetime AND
gi.goaltime <= ev.end_datetime + INTERVAL '15 minutes' THEN '完走(遅刻)'
ELSE '失格'
END as validation,
-- ランキング情報
cr.ranking as category_rank,
cr.total_participants,
-- チームメンバー情報JSON形式で格納
jsonb_agg(
jsonb_build_object(
'email', m.user_id,
'firstname', m.firstname,
'lastname', m.lastname,
'birth_date', m.date_of_birth,
'is_female', m.female,
'is_temporary', m.is_temporary,
'status', CASE
WHEN m.is_temporary THEN 'TEMPORARY'
WHEN m.date_of_birth IS NULL THEN 'PENDING'
ELSE 'ACTIVE'
END,
'member_type', CASE
WHEN m.user_id = e.owner_id THEN 'OWNER'
ELSE 'MEMBER'
END
) ORDER BY
CASE WHEN m.user_id = e.owner_id THEN 0 ELSE 1 END, -- オーナーを最初に
m.id
) FILTER (WHERE m.id IS NOT NULL) as team_members
FROM
rog_entry e
INNER JOIN rog_newevent2 ev ON e.event_id = ev.id
INNER JOIN rog_newcategory nc ON e.category_id = nc.id
INNER JOIN rog_team t ON e.team_id = t.id
LEFT JOIN rog_customuser cu ON e.owner_id = cu.id
LEFT JOIN v_checkin_summary cs ON ev.event_name = cs.event_code
AND CAST(e.zekken_number AS TEXT) = cs.zekken_number
LEFT JOIN v_category_rankings cr ON e.id = cr.id
LEFT JOIN rog_member m ON t.id = m.team_id
LEFT JOIN rog_goalimages gi ON ev.event_name = gi.event_code
AND CAST(e.zekken_number AS TEXT) = gi.zekken_number
GROUP BY
e.id, e.zekken_number, e.is_active, e."hasParticipated", e."hasGoaled", e.date,
ev.event_name, ev.start_datetime, ev.end_datetime, ev."deadlineDateTime",
nc.category_name, nc.category_number, nc.duration, nc.num_of_member,
nc.family, nc.female,
t.team_name,
cu.email, cu.firstname, cu.lastname, cu.date_of_birth, cu.female,
cs.total_points, cs.normal_points, cs.bonus_points, cs.penalty_points,
cs.total_checkins, cs.purchase_count, cs.last_checkin,
cr.ranking, cr.total_participants,
gi.goalimage, gi.goaltime,
e.owner_id;
-- インデックスの再作成
CREATE UNIQUE INDEX idx_mv_entry_details_event_zekken
ON mv_entry_details(event_name, zekken_number);
-- ビューの更新
REFRESH MATERIALIZED VIEW mv_entry_details;
-- チェックインと位置情報を結合したビューを作成
DROP VIEW IF EXISTS v_checkins_locations CASCADE;
CREATE OR REPLACE VIEW v_checkins_locations AS
SELECT
g.event_code,
g.zekken_number,
g.path_order,
g.cp_number,
l.sub_loc_id,
l.location_name,
l.photos,
g.image_address,
g.create_at,
g.buy_flag,
g.validate_location,
g.points
FROM
gps_checkins g
LEFT JOIN rog_location l ON g.cp_number = l.cp
ORDER BY
g.event_code,
g.zekken_number,
g.path_order;
-- インデックスのサジェスチョン(実際のテーブルに適用する必要があります)
/*
CREATE INDEX idx_gps_checkins_cp_number ON gps_checkins(cp_number);
CREATE INDEX idx_rog_location_cp ON rog_location(cp);
*/
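A minimal sketch of how these views might be consumed from the Django side, assuming the default database connection; since mv_entry_details is a materialized view, it has to be refreshed explicitly after new check-ins arrive.

```python
# Sketch only: the helper names are illustrative; the view and column names are defined above.
from django.db import connection

def refresh_entry_details():
    with connection.cursor() as cursor:
        # the unique index idx_mv_entry_details_event_zekken also permits
        # "REFRESH MATERIALIZED VIEW CONCURRENTLY mv_entry_details"
        cursor.execute("REFRESH MATERIALIZED VIEW mv_entry_details;")

def category_ranking(event_name, zekken_number):
    with connection.cursor() as cursor:
        cursor.execute(
            """
            SELECT total_points, category_rank, total_participants, validation
            FROM mv_entry_details
            WHERE event_name = %s AND zekken_number = %s
            """,
            [event_name, zekken_number],
        )
        return cursor.fetchone()
```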

View File

@ -1,33 +1,36 @@
from django.contrib.auth.hashers import make_password, check_password
from django.contrib.auth import get_user_model
User = get_user_model()
import uuid
from django.db import IntegrityError
from django.conf import settings
from django.urls import reverse
from django.contrib.auth.password_validation import validate_password
from django.core.exceptions import ValidationError
from django.db import transaction
from rest_framework import serializers
from rest_framework_gis.serializers import GeoFeatureModelSerializer
from sqlalchemy.sql.functions import mode
from .models import Location, Location_line, Location_polygon, JpnAdminMainPerf, Useractions, GifuAreas, RogUser, UserTracks, GoalImages, CheckinImages,CustomUser,NewEvent,NewEvent2, Team, NewCategory, Category, Entry, Member, TempUser,EntryMember
from .models import Location, Location_line, Location_polygon, JpnAdminMainPerf, Useractions, GifuAreas, RogUser, UserTracks, GoalImages, CheckinImages,CustomUser,Team, Member, Entry
from drf_extra_fields.fields import Base64ImageField
from django.contrib.auth.hashers import make_password
#from django.contrib.auth.models import User
from .models import CustomUser
from django.contrib.auth import authenticate
from .models import TestModel
import logging
from django.shortcuts import get_object_or_404
from django.utils import timezone
from datetime import datetime, date
from .models import TempUser
class RegistrationSerializer(serializers.ModelSerializer):
password2 = serializers.CharField(style={"input_type": "password"}, write_only=True)
class Meta:
model = TempUser
fields = ['email', 'is_rogaining', 'zekken_number', 'event_code', 'team_name', 'group', 'password', 'password2']
extra_kwargs = {
'password': {'write_only': True}
}
def validate(self, attrs):
if attrs['password'] != attrs['password2']:
raise serializers.ValidationError({"password": "Password fields didn't match."})
return attrs
def create(self, validated_data):
validated_data.pop('password2')
return TempUser.objects.create(**validated_data)
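A minimal sketch of driving this serializer from a view or shell; the payload values are illustrative. Note that create() stores the password as received, so hashing is expected to happen in the view, as the later TempUser serializer comments note.

```python
# Sketch only: field values are illustrative.
payload = {
    "email": "runner@example.com",
    "is_rogaining": True,
    "zekken_number": "101",
    "event_code": "ogaki-2024",
    "team_name": "Team A",
    "group": "general",
    "password": "s3cret-pass",
    "password2": "s3cret-pass",
}
serializer = RegistrationSerializer(data=payload)
serializer.is_valid(raise_exception=True)   # fails when password != password2
temp_user = serializer.save()               # create() drops password2 and stores a TempUser
```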
logger = logging.getLogger(__name__)
class LocationCatSerializer(serializers.ModelSerializer):
class Meta:
@ -71,106 +74,55 @@ class JPN_main_perfSerializer(serializers.ModelSerializer):
# model=JpnAdminPerf
# fields=['id','et_id', 'et_right', 'et_left', 'adm2_l', 'adm1_l', 'adm0_l', 'adm0_r', 'adm1_r', 'adm2_r', 'admlevel']
#============= Akira ここから
class TeamSerializer(serializers.ModelSerializer):
class Meta:
model = Team
fields = ['zekken_number', 'team_name', 'password']
extra_kwargs = {'password': {'write_only': True}}
class MemberSerializer(serializers.ModelSerializer):
class Meta:
model = Member
fields = ['zekken_number', 'userid']
class EntrySerializer(serializers.ModelSerializer):
class Meta:
model = Entry
fields = ['zekken_number', 'event_code', 'date']
class CustomUserSerializer(serializers.ModelSerializer):
class Meta:
model = CustomUser
fields = ['email', 'firstname', 'lastname', 'userid', 'date_of_birth', 'password']
extra_kwargs = {
'password': {'write_only': True},
'userid': {'read_only': True}
}
def create(self, validated_data):
validated_data['password'] = make_password(validated_data.get('password'))
return super(CustomUserSerializer, self).create(validated_data)
#============= Akira ここまで
class GifuAreaSerializer(serializers.ModelSerializer):
class Meta:
model= GifuAreas
fields=['id', 'adm0_ja', 'adm0_pcode', 'adm1_en', 'adm1_ja', 'adm1_pcode', 'adm2_ja', 'adm2_en', 'adm2_pcode', 'area_nm']
class UserRegistrationSerializer(serializers.ModelSerializer):
password = serializers.CharField(write_only=True, required=True, validators=[validate_password])
password2 = serializers.CharField(write_only=True, required=True, validators=[validate_password])
class CreateUserSerializer(serializers.ModelSerializer):
class Meta:
model = CustomUser
fields = ('email', 'password', 'password2', 'firstname', 'lastname', 'date_of_birth', 'female')
extra_kwargs = {
'email': {'required': True},
'firstname': {'required': True},
'lastname': {'required': True},
'date_of_birth': {'required': True},
}
def validate(self, attrs):
if attrs['password'] != attrs['password2']:
raise serializers.ValidationError({"password": "Password fields didn't match."})
try:
validate_password(attrs['password'])
except ValidationError as e:
raise serializers.ValidationError({"password": list(e.messages)})
return attrs
def validate_email(self, value):
if CustomUser.objects.filter(email=value).exists() or TempUser.objects.filter(email=value).exists():
raise serializers.ValidationError("この電子メールアドレスは既に使用されています。")
return value
fields = ('email', 'password')
extra_kwargs = {'password': {'write_only': True}}
def create(self, validated_data):
raw_password = validated_data.get('password')
# デバッグコード
hashed_password = make_password(raw_password)
print(f"Hashed password during registration: {hashed_password}")
is_valid = check_password(raw_password, hashed_password)
print(f"Password is valid during registration: {is_valid}")
validated_data['password'] = hashed_password
return super(UserRegistrationSerializer, self).create(validated_data)
#validated_data['password'] = make_password(validated_data.get('password'))
#return super(UserRegistrationSerializer, self).create(validated_data)
# try:
# with transaction.atomic():
# password = validated_data['password'].encode('utf-8').decode('utf-8')
#
# user = CustomUser.objects.create_user(
# email=validated_data['email'],
# password=password, # validated_data['password'],
# firstname=validated_data['firstname'],
# lastname=validated_data['lastname'],
# date_of_birth=validated_data['date_of_birth'],
# female=validated_data.get('female', False),
# group='' # この値は必要に応じて変更してください
# )
# logger.debug(f"Creating user with data: {validated_data}")
# user.set_password(validated_data['password'])
# user.save()
#
# return user
# except ValidationError as e:
# raise serializers.ValidationError({"password": list(e.messages)})
#class CreateUserSerializer(serializers.ModelSerializer):
# class Meta:
# model = CustomUser
# fields = ('email', 'password')
# extra_kwargs = {'password': {'write_only': True}}
#
# def create(self, validated_data):
# user = CustomUser.objects.create_user(validated_data['email'],validated_data['password'], '大垣-初心者','','')
# return user
class TempUserRegistrationSerializer(serializers.ModelSerializer):
password = serializers.CharField(write_only=True)
class Meta:
model = TempUser
fields = ('email', 'password', 'firstname', 'lastname', 'date_of_birth', 'female')
def create(self, validated_data):
# パスワードのハッシュ化はviewで行うので、ここではそのまま保存
return TempUser.objects.create(**validated_data)
#validated_data['verification_code'] = str(uuid.uuid4())
#raw_password = validated_data.get('password')
#hashed_password = make_password(raw_password)
#validated_data['password'] = hashed_password
#return TempUser.objects.create(**validated_data)
user = CustomUser.objects.create_user(validated_data['email'],validated_data['password'], '大垣-初心者','','')
return user
class UserSerializer(serializers.ModelSerializer):
class Meta:
@ -179,37 +131,15 @@ class UserSerializer(serializers.ModelSerializer):
class GolaImageSerializer(serializers.ModelSerializer):
goalimage = Base64ImageField(max_length=None, use_url=True)
class Meta:
model = GoalImages
fields="__all__"
def get_goalimage_url_old(self, car):
def get_goalimage_url(self, car):
request = self.context.get('request')
photo_url = GoalImages.goalimage.url
return request.build_absolute_uri(photo_url)
def get_goalimage_url(self, obj):
request = self.context.get('request')
if request is None:
logger.warning("Request not found in serializer context")
return None
try:
photo_url = obj.goalimage.url
absolute_url = request.build_absolute_uri(photo_url)
logger.info(f"Generated URL for goalimage: {absolute_url}")
return absolute_url
except AttributeError as e:
logger.error(f"Error generating URL for goalimage: {str(e)}")
return None
def to_representation(self, instance):
representation = super().to_representation(instance)
representation['goalimage_url'] = self.get_goalimage_url(instance)
logger.debug(f"Serialized data: {representation}")
return representation
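The absolute goalimage_url is only produced when the request is present in the serializer context, so callers should pass it explicitly; a minimal sketch, where the lookup and the resulting URL are illustrative.

```python
# Sketch only: the queryset lookup, request object and URL are illustrative.
goal_image = GoalImages.objects.latest("goaltime")
data = GolaImageSerializer(goal_image, context={"request": request}).data
data["goalimage_url"]   # e.g. "https://example.com/media/goal_101.jpg"; None when no request or image
```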
class CheckinImageSerializer(serializers.ModelSerializer):
checkinimage = Base64ImageField(max_length=None, use_url=True)
class Meta:
@ -229,32 +159,14 @@ class RogUserSerializer(serializers.ModelSerializer):
class LoginUserSerializer(serializers.Serializer):
#email = serializers.CharField()
email = serializers.EmailField()
email = serializers.CharField()
password = serializers.CharField()
def validate(self, data):
email = data.get('email')
password = data.get('password')
if email and password:
user = authenticate(username=email, password=password)
if user:
if user.is_active:
return user
raise serializers.ValidationError("User account is disabled.")
else:
# Check if the user exists
try:
user_obj = User.objects.get(email=email)
raise serializers.ValidationError("Incorrect password.")
except User.DoesNotExist:
raise serializers.ValidationError("User with this email does not exist.")
else:
raise serializers.ValidationError("Must include 'email' and 'password'.")
user = authenticate(**data)
if user and user.is_active:
return user
raise serializers.ValidationError("Invalid Details.")
class UseractionsSerializer(serializers.ModelSerializer):
@ -334,545 +246,3 @@ class RegistrationSerializer(serializers.ModelSerializer):
user.set_password(password)
user.save()
return user
class NewCategorySerializer(serializers.ModelSerializer):
class Meta:
model = NewCategory
fields = ['id','category_name', 'category_number', 'duration', 'num_of_member', 'family', 'female']
#fields = ['id','category_name', 'category_number']
class NewEvent2Serializer(serializers.ModelSerializer):
class Meta:
model = NewEvent2
fields = ['id','event_name', 'start_datetime', 'end_datetime', 'deadlineDateTime', 'public', 'hour_3', 'hour_5', 'class_general','class_family','class_solo_male','class_solo_female']
class NewEventSerializer(serializers.ModelSerializer):
class Meta:
model = NewEvent
fields = ['event_name', 'start_datetime', 'end_datetime']
class TeamSerializer(serializers.ModelSerializer):
category = serializers.PrimaryKeyRelatedField(queryset=NewCategory.objects.all())
#category = serializers.IntegerField()
#category = NewCategorySerializer(read_only=True)
#category_id = serializers.PrimaryKeyRelatedField(
# queryset=NewCategory.objects.all(),
# source='category',
# write_only=True
#)
owner = serializers.PrimaryKeyRelatedField(read_only=True)
class Meta:
model = Team
fields = ['id','team_name', 'category', 'owner']
read_only_fields = ['id', 'owner']
def to_representation(self, instance):
ret = super().to_representation(instance)
if instance.category:
ret['category'] = {
'id': instance.category.id,
'category_name': instance.category.category_name,
'category_number': instance.category.category_number,
'duration': instance.category.duration,
'num_of_member':instance.category.num_of_member,
'family':instance.category.family,
'female':instance.category.female
}
else:
ret['category'] = None
ret['owner'] = CustomUserSerializer(instance.owner).data
return ret
def validate_category(self, value):
if not isinstance(value, NewCategory):
raise serializers.ValidationError("Invalid category")
return value
#if not NewCategory.objects.filter(id=value).exists():
# raise serializers.ValidationError("Invalid category ID")
#return value
def create(self, validated_data):
return Team.objects.create(**validated_data)
#category_id = validated_data.pop('category')
#category = get_object_or_404(NewCategory, id=category_id)
#team = Team.objects.create(category=category, **validated_data)
#team.category = category
#return team
#category = validated_data.pop('category')
#team = Team.objects.create(category=category, **validated_data)
#return team
#logger.debug(f"Creating team with data: {validated_data}")
#validated_data['owner'] = self.context['request'].user
#return super().create(validated_data)
def update(self, instance, validated_data):
for attr, value in validated_data.items():
setattr(instance, attr, value)
instance.save()
return instance
#if 'category' in validated_data:
# category_id = validated_data.pop('category')
# category = get_object_or_404(NewCategory, id=category_id)
# instance.category = category
#return super().update(instance, validated_data)
#if 'category' in validated_data:
# instance.category = validated_data.pop('category')
#return super().update(instance, validated_data)
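A minimal sketch of the TeamSerializer round trip: category is written as a primary key but read back as a nested object through to_representation(). The pk value and request are illustrative, and because owner is read-only the view injects it at save time.

```python
# Sketch only: the category pk is illustrative and `request` is the incoming DRF request inside a view.
serializer = TeamSerializer(data={"team_name": "Team A", "category": 3})
serializer.is_valid(raise_exception=True)
team = serializer.save(owner=request.user)   # owner is read_only, so it is supplied here

TeamSerializer(team).data
# => {"id": ..., "team_name": "Team A",
#     "category": {"id": 3, "category_name": ..., "duration": ..., ...},
#     "owner": {...}}   # owner rendered with CustomUserSerializer
```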
class CategorySerializer(serializers.ModelSerializer):
class Meta:
model = Category
fields = ['category_name', 'category_number', 'duration', 'num_of_member', 'family', 'female']
class EntrySerializer(serializers.ModelSerializer):
team = serializers.PrimaryKeyRelatedField(queryset=Team.objects.all())
event = serializers.PrimaryKeyRelatedField(queryset=NewEvent2.objects.all())
category = serializers.PrimaryKeyRelatedField(queryset=NewCategory.objects.all())
owner = serializers.PrimaryKeyRelatedField(read_only=True)
date = serializers.DateTimeField(input_formats=['%Y-%m-%d'])
zekken_number = serializers.IntegerField()
#date = serializers.DateTimeField(default_timezone=timezone.get_current_timezone())
class Meta:
model = Entry
fields = ['id','team', 'event', 'category', 'date','zekken_number','owner','is_active', 'hasParticipated', 'hasGoaled']
read_only_fields = ['id','owner']
def validate_date(self, value):
if isinstance(value, str):
try:
value = datetime.strptime(value, "%Y-%m-%d")
except ValueError:
raise serializers.ValidationError("Invalid date format. Use YYYY-MM-DD.")
if isinstance(value, date):
value = datetime.combine(value, datetime.min.time())
if timezone.is_naive(value):
return timezone.make_aware(value, timezone.get_current_timezone())
return value
#if isinstance(value, date):
# # dateオブジェクトをdatetimeオブジェクトに変換
# value = datetime.combine(value, datetime.min.time())
#if timezone.is_naive(value):
# return timezone.make_aware(value, timezone.get_current_timezone())
#return value
def validate_team(self, value):
if not value.members.exists():
raise serializers.ValidationError("チームにメンバーが登録されていません。")
return value
def validate_date(self, value):
if isinstance(value, datetime):
return value.date()
return value
def validate(self, data):
team = data.get('team')
event = data.get('event')
category = data.get('category')
entry_date = data.get('date')
if isinstance(entry_date, datetime):
entry_date = entry_date.date()
elif isinstance(entry_date, str):
entry_date = datetime.strptime(entry_date, "%Y-%m-%d").date()
logger.debug("test-0")
logger.debug(f"==== start:{event.start_datetime.date()} <= entry_date : {entry_date} <= end:{event.end_datetime.date()} ?? ====")
if entry_date < event.start_datetime.date() or entry_date > event.end_datetime.date():
raise serializers.ValidationError(f"日付は{event.start_datetime.date()}から{event.end_datetime.date()}の間である必要があります。")
logger.debug("test-1")
try:
logger.debug(f"Parsed data: team={team}, event={event}, category={category}, ")
owner = self.context['request'].user
zekken_number = data.get('zekken_number')
logger.debug(f"entry_date={entry_date}, owner={owner}, zekken_number={zekken_number}")
except Exception:
raise serializers.ValidationError(f"何らかのエラーが発生しました")
# Check if team, event, and category exist
if not Team.objects.filter(id=team.id).exists():
raise serializers.ValidationError("指定されたチームは存在しません。")
if not NewEvent2.objects.filter(id=event.id).exists():
raise serializers.ValidationError("指定されたイベントは存在しません。")
if not NewCategory.objects.filter(id=category.id).exists():
raise serializers.ValidationError("指定されたカテゴリーは存在しません。")
# Check for unique constraint
if Entry.objects.filter(team=team, event=event, date__date=entry_date, owner=owner).exists():
raise serializers.ValidationError("既に登録済みです。")
# Validate zekken_number
if zekken_number is not None:
if zekken_number <= 0:
raise serializers.ValidationError("ゼッケン番号は正の整数である必要があります。")
# if Entry.objects.filter(event=event, zekken_number=zekken_number).exists():
# raise serializers.ValidationError("このゼッケン番号は既に使用されています。")
return data
def to_internal_value(self, data):
# dateフィールドが文字列で来た場合の処理
if 'date' in data and isinstance(data['date'], str):
try:
# 文字列をdatetimeオブジェクトに変換
data['date'] = datetime.strptime(data['date'], "%Y-%m-%d")
except ValueError:
raise serializers.ValidationError({"date": "無効な日付形式です。YYYY-MM-DD形式を使用してください。"})
return super().to_internal_value(data)
def to_representation(self, instance):
ret = super().to_representation(instance)
ret['team'] = TeamSerializer(instance.team).data
ret['event'] = NewEvent2Serializer(instance.event).data
ret['category'] = NewCategorySerializer(instance.category).data
ret['owner'] = CustomUserSerializer(instance.owner).data
if isinstance(ret['date'], datetime):
ret['date'] = ret['date'].date().isoformat()
elif isinstance(ret['date'], date):
ret['date'] = ret['date'].isoformat()
return ret
#if isinstance(ret['date'], datetime):
# ret['date'] = ret['date'].date().isoformat()
#return ret
#def to_representation(self, instance):
# ret = super().to_representation(instance)
# ret['team'] = instance.team.team_name
# ret['event'] = instance.event.event_name
# ret['category'] = instance.category.category_name
# ret['owner'] = instance.owner.email
# return ret
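A minimal sketch of creating an entry through EntrySerializer inside a view; the primary keys are illustrative. The date must be YYYY-MM-DD and fall inside the event window, and the request must be in the context because validate() reads the current user from it.

```python
# Sketch only: pk values are illustrative and `request` is the incoming DRF request inside a view.
payload = {
    "team": 1,
    "event": 2,
    "category": 3,
    "date": "2024-10-20",
    "zekken_number": 101,
}
serializer = EntrySerializer(data=payload, context={"request": request})
serializer.is_valid(raise_exception=True)    # also rejects teams without members and duplicate entries
entry = serializer.save(owner=request.user)  # owner is read-only, injected by the view
```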
class CustomUserSerializer(serializers.ModelSerializer):
class Meta:
model = CustomUser
fields = ['id','email', 'firstname', 'lastname', 'date_of_birth', 'female']
read_only_fields = ['id','email']
class TeamDetailSerializer(serializers.ModelSerializer):
category = NewCategorySerializer(read_only=True)
class Meta:
model = Team
fields = ['id', 'zekken_number', 'team_name', 'category']
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = CustomUser
fields = ['id','email', 'firstname', 'lastname', 'date_of_birth', 'female', 'is_rogaining', 'zekken_number', 'event_code', 'team_name', 'group']
read_only_fields = ('id', 'email')
class UserUpdateSerializer(serializers.ModelSerializer):
class Meta:
model = CustomUser
fields = ['firstname', 'lastname', 'date_of_birth', 'female']
extra_kwargs = {'email': {'read_only': True}}
def update(self, instance, validated_data):
for attr, value in validated_data.items():
setattr(instance, attr, value)
instance.save()
return instance
class MemberCreationSerializer(serializers.Serializer):
#email = serializers.EmailField()
email = serializers.EmailField(allow_blank=True, required=False)
firstname = serializers.CharField(required=False, allow_blank=True)
lastname = serializers.CharField(required=False, allow_blank=True)
date_of_birth = serializers.DateField(required=False)
female = serializers.BooleanField(required=False)
class MemberWithUserSerializer(serializers.ModelSerializer):
user = UserSerializer(read_only=True)
class Meta:
model = Member
fields = ['user', 'team']
class MemberSerializer(serializers.ModelSerializer):
email = serializers.EmailField(write_only=True)
firstname = serializers.CharField(required=False, allow_blank=True, allow_null=True)
lastname = serializers.CharField(required=False, allow_blank=True, allow_null=True)
date_of_birth = serializers.DateField(required=False, allow_null=True)
female = serializers.BooleanField(required=False)
class Meta:
model = Member
fields = ['id', 'email', 'firstname', 'lastname', 'date_of_birth', 'female']
def validate_firstname(self, value):
return value or None
def validate_lastname(self, value):
return value or None
def create(self, validated_data):
email = validated_data.pop('email')
team = self.context['team']
# 既存のユーザーを探すか、新しいユーザーを作成
user, created = CustomUser.objects.get_or_create(email=email)
# ユーザーが新しく作成された場合のみ、追加情報を更新
if created:
user.firstname = validated_data.get('firstname', '')
user.lastname = validated_data.get('lastname', '')
user.date_of_birth = validated_data.get('date_of_birth')
user.female = validated_data.get('female', False)
user.save()
# メンバーを作成
member = Member.objects.create(
user=user,
team=team,
firstname=validated_data.get('firstname'),
lastname=validated_data.get('lastname'),
date_of_birth=validated_data.get('date_of_birth'),
female=validated_data.get('female', False)
)
return member
# メンバーを作成して返す
# class MemberCreationSerializer
# return Member.objects.create(user=user, team=team)
def update(self, instance, validated_data):
user_data = validated_data.pop('user', {})
user = instance.user
for attr, value in user_data.items():
setattr(user, attr, value)
user.save()
return super().update(instance, validated_data)
def to_representation(self, instance):
representation = super().to_representation(instance)
representation['email'] = instance.user.email
representation['firstname'] = instance.user.firstname
representation['lastname'] = instance.user.lastname
representation['date_of_birth'] = instance.user.date_of_birth
representation['female'] = instance.user.female
return representation
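A minimal sketch of adding a member through MemberSerializer; the serializer expects the target team in its context (normally supplied by the view from the URL), and the values are illustrative.

```python
# Sketch only: the team lookup and member data are illustrative.
team = Team.objects.get(pk=1)
serializer = MemberSerializer(
    data={"email": "member@example.com", "firstname": "Hanako", "lastname": "Yamada"},
    context={"team": team},
)
serializer.is_valid(raise_exception=True)
member = serializer.save()   # reuses an existing CustomUser for that email, or creates one
```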
class MemberSerializerOld(serializers.ModelSerializer):
user = CustomUserSerializer(read_only=True)
firstname = serializers.CharField(required=False, allow_blank=True, allow_null=True)
lastname = serializers.CharField(required=False, allow_blank=True, allow_null=True)
date_of_birth = serializers.DateField(required=False, allow_null=True)
female = serializers.BooleanField(required=False)
#team = TeamDetailSerializer(read_only=True)
#email = serializers.EmailField(write_only=True, required=False)
#firstname = serializers.CharField(write_only=True, required=False)
#lastname = serializers.CharField(write_only=True, required=False)
#date_of_birth = serializers.DateField(write_only=True, required=False)
#female = serializers.BooleanField(write_only=True, required=False)
class Meta:
model = Member
fields = ['id','email','firstname','lastname','date_of_birth','female']
#read_only_fields = ['id', 'team']
'''
def create(self, validated_data):
team = validated_data['team']
email = validated_data.get('email')
if email.startswith('dummy_'):
user, _ = CustomUser.objects.get_or_create(
email=email,
defaults={**user_data, 'is_active': True}
)
else:
user, _ = CustomUser.objects.get_or_create(
email=email,
defaults={**user_data, 'is_active': False}
)
member = Member.objects.create(user=user, **validated_data)
return member
'''
def create(self, validated_data):
email = validated_data.pop('email')
team = self.context['team']
# 既存のユーザーを探すか、新しいユーザーを作成
user, created = CustomUser.objects.get_or_create(email=email)
# メンバーを作成
member = Member.objects.create(
user=user,
team=team,
firstname=validated_data.get('firstname', ''),
lastname=validated_data.get('lastname', ''),
date_of_birth=validated_data.get('date_of_birth'),
female=validated_data.get('female', False)
)
return member
def update(self, instance, validated_data):
user_data = validated_data.pop('user', {})
user = instance.user
for attr, value in user_data.items():
setattr(user, attr, value)
user.save()
return super().update(instance, validated_data)
#if user.email.startswith('dummy_'): # dummy_ で始まるメールアドレスの場合のみ更新
# for attr, value in user_data.items():
# setattr(user, attr, value)
# user.save()
#else:
# raise serializers.ValidationError("このユーザーの情報は更新できません。")
#return super().update(instance, validated_data)
def to_representation(self, instance):
representation = super().to_representation(instance)
representation['email'] = instance.user.email
return representation
'''
def to_representation(self, instance):
representation = super().to_representation(instance)
user_data = representation['user']
return {
'id': representation['id'],
'email': user_data['email'],
'firstname': user_data['firstname'],
'lastname': user_data['lastname'],
'date_of_birth': user_data['date_of_birth'],
'female': user_data['female'],
'team': representation['team']
}
'''
class EntryMemberSerializer(serializers.ModelSerializer):
class Meta:
model = EntryMember
fields = ['id', 'entry', 'member', 'is_temporary']
class TempUserSerializer(serializers.ModelSerializer):
class Meta:
model = TempUser
#fields = ['id','email', 'password', 'is_rogaining', 'zekken_number', 'event_code', 'team_name', 'group', 'firstname', 'lastname', 'date_of_birth', 'female', 'verification_code', 'created_at', 'expires_at']
fields = ['email', 'password', 'firstname', 'lastname', 'date_of_birth', 'female', 'verification_code']
class EntryCreationSerializer(serializers.Serializer):
owner_email = serializers.EmailField()
event_name = serializers.CharField()
category_name = serializers.CharField()
team_name = serializers.CharField()
zekken_number = serializers.CharField()
date = serializers.DateField()
members = serializers.ListField(child=serializers.DictField())
def create(self, validated_data):
owner = CustomUser.objects.get(email=validated_data['owner_email'])
event = NewEvent2.objects.get(event_name=validated_data['event_name'])
category = NewCategory.objects.get(category_name=validated_data['category_name'])
# Create or get team
team, _ = Team.objects.get_or_create(
zekken_number=validated_data['zekken_number'],
category=category,
defaults={'team_name': validated_data['team_name'], 'owner': owner}
)
# Create or update entry
entry, _ = Entry.objects.update_or_create(
owner=owner,
team=team,
event=event,
date=validated_data['date'],
defaults={'category': category}
)
# Process members
for member_data in validated_data['members']:
user, created = CustomUser.objects.get_or_create(
email=member_data.get('email'),
defaults={
'firstname': member_data['firstname'],
'lastname': member_data['lastname'],
'date_of_birth': member_data['date_of_birth']
}
)
if created:
TempUser.objects.create(
email=user.email,
firstname=user.firstname,
lastname=user.lastname,
date_of_birth=user.date_of_birth
)
# Send invitation email here
member, _ = Member.objects.get_or_create(
user=user,
team=team,
defaults={'is_temporary': created}
)
EntryMember.objects.get_or_create(entry=entry, member=member)
return entry
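A minimal sketch of the nested payload EntryCreationSerializer.create() consumes; all values are illustrative, and the referenced owner, event and category rows must already exist because create() looks them up by email and name.

```python
# Sketch only: all values are illustrative.
payload = {
    "owner_email": "owner@example.com",
    "event_name": "ogaki-2024",
    "category_name": "general-3h",
    "team_name": "Team A",
    "zekken_number": "101",
    "date": "2024-10-20",
    "members": [
        {"email": "member@example.com", "firstname": "Taro", "lastname": "Sato",
         "date_of_birth": "1990-04-01"},
    ],
}
serializer = EntryCreationSerializer(data=payload)
serializer.is_valid(raise_exception=True)
entry = serializer.save()   # creates or updates the Team, the Entry, and the member links
```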
class PasswordResetRequestSerializer(serializers.Serializer):
email = serializers.EmailField()
class PasswordResetConfirmSerializer(serializers.Serializer):
new_password = serializers.CharField(write_only=True)
confirm_password = serializers.CharField(write_only=True)
def validate(self, data):
if data['new_password'] != data['confirm_password']:
raise serializers.ValidationError("Passwords do not match")
validate_password(data['new_password'])
return data
class UserLastGoalTimeSerializer(serializers.Serializer):
user_email = serializers.EmailField()
last_goal_time = serializers.DateTimeField()

View File

@ -1,32 +0,0 @@
<!DOCTYPE html>
<html lang="ja">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>アクティベーション成功</title>
<style>
body {
font-family: Arial, sans-serif;
display: flex;
justify-content: center;
align-items: center;
height: 100vh;
margin: 0;
background-color: #f0f0f0;
}
.message {
background-color: white;
padding: 20px;
border-radius: 5px;
box-shadow: 0 2px 10px rgba(0,0,0,0.1);
text-align: center;
}
</style>
</head>
<body>
<div class="message">
<h1>アクティベーション成功</h1>
<p>{{ message }}</p>
</div>
</body>
</html>

Some files were not shown because too many files have changed in this diff.