Initial commit
commit ab0b4cfef1

.devcontainer/devcontainer.json (Executable file, 50 lines)
@@ -0,0 +1,50 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
// README at: https://github.com/devcontainers/templates/tree/main/src/docker-existing-docker-compose
{
    "name": "Existing Docker Compose (Extend)",

    // Update the 'dockerComposeFile' list if you have more compose files or use different names.
    // The .devcontainer/docker-compose.yml file contains any overrides you need/want to make.
    "dockerComposeFile": [
        "../docker-compose.yml",
        "docker-compose.yml"
    ],

    // The 'service' property is the name of the service for the container that VS Code should
    // use. Update this value and .devcontainer/docker-compose.yml to the real service name.
    "service": "app",

    // The optional 'workspaceFolder' property is the path VS Code should open by default when
    // connected. This is typically a file mount in .devcontainer/docker-compose.yml
    "workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}",

    // Features to add to the dev container. More info: https://containers.dev/features.
    // "features": {},

    // Use 'forwardPorts' to make a list of ports inside the container available locally.
    // "forwardPorts": [],

    // Uncomment the next line if you want to start specific services in your Docker Compose config.
    // "runServices": [],

    // Uncomment the next line if you want to keep your containers running after VS Code shuts down.
    // "shutdownAction": "none",

    // Uncomment the next line to run commands after the container is created.
    // "postCreateCommand": "cat /etc/os-release",

    // Configure tool-specific properties.
    "customizations": {
        "vscode": {
            "extensions": [
                "ms-python.python",
                "ms-python.debugpy",
                "codeium.codeium",
                "ms-azuretools.vscode-docker"
            ]
        }
    }

    // Uncomment to connect as an existing user other than the container default. More info: https://aka.ms/dev-containers-non-root.
    // "remoteUser": "devcontainer"
}

.devcontainer/docker-compose.yml (Executable file, 26 lines)
@@ -0,0 +1,26 @@
version: '3.8'
services:
  # Update this to the name of the service you want to work with in your docker-compose.yml file
  app:
    # Uncomment if you want to override the service's Dockerfile to one in the .devcontainer
    # folder. Note that the path of the Dockerfile and context is relative to the *primary*
    # docker-compose.yml file (the first in the devcontainer.json "dockerComposeFile"
    # array). The sample below assumes your primary file is in the root of your project.
    #
    # build:
    #   context: .
    #   dockerfile: .devcontainer/Dockerfile

    volumes:
      # Update this to wherever you want VS Code to mount the folder of your project
      - ..:/workspaces:cached

    # Uncomment the next four lines if you will use a ptrace-based debugger like C++, Go, and Rust.
    # cap_add:
    #   - SYS_PTRACE
    # security_opt:
    #   - seccomp:unconfined

    # Overrides default command so things don't shut down after the process ends.
    command: sleep infinity

.dockerignore (Executable file, 1 line)
@@ -0,0 +1 @@
data/

.gitignore (Normal file, vendored, 4 lines)
@@ -0,0 +1,4 @@
.env
__pycache__/
**/__pycache__/
data/

.vscode/launch.json (Normal file, vendored, 25 lines)
@@ -0,0 +1,25 @@
{
    // Use IntelliSense to learn about possible attributes.
    // Hover to view descriptions of existing attributes.
    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
    "version": "0.2.0",
    "configurations": [
        {
            "name": "Python Debugger: Flask",
            "type": "debugpy",
            "request": "launch",
            "module": "flask",
            "env": {
                "FLASK_APP": "app.py",
                "FLASK_DEBUG": "1"
            },
            "args": [
                "run",
                "--no-debugger",
                "--no-reload"
            ],
            "jinja": true,
            "autoStartBrowser": false
        }
    ]
}

Dockerfile (Executable file, 6 lines)
@@ -0,0 +1,6 @@
FROM python:3.12-bookworm
WORKDIR /app
COPY . /app
RUN pip install -r requirements.txt
EXPOSE 80
CMD ["python", "app.py"]

app.py (Executable file, 15 lines)
@@ -0,0 +1,15 @@
from flask import Flask
import os


def create_app():
    app = Flask(__name__)
    if "SQLALCHEMY_DATABASE_URI" in os.environ.keys():
        app.config['SQLALCHEMY_DATABASE_URI'] = os.environ['SQLALCHEMY_DATABASE_URI']
    from models import db
    db.init_app(app)
    from flask_migrate import Migrate
    migrate = Migrate(app, db)
    from flask_migrate import upgrade
    with app.app_context():
        upgrade(directory="migrations")
    return app
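
Note: this factory is what the VS Code launch configuration drives ("module": "flask" with FLASK_APP=app.py; flask run auto-discovers a create_app() factory), but the Dockerfile's CMD ["python", "app.py"] only imports the module and never starts a server. A minimal entry point along the following lines is assumed; it is a sketch, not part of the committed file, and the host/port simply mirror the Dockerfile's EXPOSE 80:

# Hypothetical addition to app.py (not in this commit): makes `python app.py`,
# as invoked by the Dockerfile CMD, actually serve the app on the exposed port.
if __name__ == "__main__":
    create_app().run(host="0.0.0.0", port=80)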

docker-compose.yml (Executable file, 21 lines)
@@ -0,0 +1,21 @@
services:
  app:
    container_name: staffportal-app
    build: .
    ports:
      - "8000:80"
    depends_on:
      - db
    environment:
      - SQLALCHEMY_DATABASE_URI=mysql+pymysql://${MYSQL_USER}:${MYSQL_PASSWORD}@${MYSQL_HOST}:3306/${MYSQL_DATABASE}

  db:
    image: mysql:9
    container_name: staffportal-db
    environment:
      - MYSQL_USER=${MYSQL_USER}
      - MYSQL_PASSWORD=${MYSQL_PASSWORD}
      - MYSQL_DATABASE=${MYSQL_DATABASE}
      - MYSQL_ROOT_PASSWORD=${MYSQL_ROOT_PASSWORD}
    volumes:
      - ./data:/var/lib/mysql
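
Note: both services read their credentials from environment variables, typically supplied via a .env file next to docker-compose.yml (.env is listed in .gitignore, so it is not part of this commit). A sketch of what such a file might contain, with placeholder values only; MYSQL_HOST would be the compose service name "db" so the app container can reach MySQL over the compose network, after which `docker compose up --build` publishes the app on port 8000:

# .env (example only; values are placeholders, the real file is gitignored)
MYSQL_USER=staffportal
MYSQL_PASSWORD=change-me
MYSQL_ROOT_PASSWORD=change-me-too
MYSQL_DATABASE=staffportal
MYSQL_HOST=db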

migrations/README (Normal file, 1 line)
@@ -0,0 +1 @@
Single-database configuration for Flask.

migrations/alembic.ini (Normal file, 50 lines)
@@ -0,0 +1,50 @@
# A generic, single database configuration.

[alembic]
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false


# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic,flask_migrate

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[logger_flask_migrate]
level = INFO
handlers =
qualname = flask_migrate

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

migrations/env.py (Normal file, 113 lines)
@@ -0,0 +1,113 @@
import logging
from logging.config import fileConfig

from flask import current_app

from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')


def get_engine():
    try:
        # this works with Flask-SQLAlchemy<3 and Alchemical
        return current_app.extensions['migrate'].db.get_engine()
    except (TypeError, AttributeError):
        # this works with Flask-SQLAlchemy>=3
        return current_app.extensions['migrate'].db.engine


def get_engine_url():
    try:
        return get_engine().url.render_as_string(hide_password=False).replace(
            '%', '%%')
    except AttributeError:
        return str(get_engine().url).replace('%', '%%')


# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
config.set_main_option('sqlalchemy.url', get_engine_url())
target_db = current_app.extensions['migrate'].db

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def get_metadata():
    if hasattr(target_db, 'metadatas'):
        return target_db.metadatas[None]
    return target_db.metadata


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url, target_metadata=get_metadata(), literal_binds=True
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """

    # this callback is used to prevent an auto-migration from being generated
    # when there are no changes to the schema
    # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
    def process_revision_directives(context, revision, directives):
        if getattr(config.cmd_opts, 'autogenerate', False):
            script = directives[0]
            if script.upgrade_ops.is_empty():
                directives[:] = []
                logger.info('No changes in schema detected.')

    conf_args = current_app.extensions['migrate'].configure_args
    if conf_args.get("process_revision_directives") is None:
        conf_args["process_revision_directives"] = process_revision_directives

    connectable = get_engine()

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=get_metadata(),
            **conf_args
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

migrations/script.py.mako (Normal file, 24 lines)
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}

migrations/versions/936d19bcafbf_initial_migration.py (Normal file, 72 lines)
@@ -0,0 +1,72 @@
"""initial migration

Revision ID: 936d19bcafbf
Revises:
Create Date: 2024-10-06 17:53:11.262950

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '936d19bcafbf'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('roles',
        sa.Column('id', sa.Integer(), autoincrement=False, nullable=False),
        sa.Column('name', sa.String(length=100), nullable=False),
        sa.Column('createdAt', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updatedAt', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name')
    )
    op.create_table('staff',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('firstName', sa.String(length=40), nullable=False),
        sa.Column('lastName', sa.String(length=40), nullable=False),
        sa.Column('uid', sa.String(length=100), nullable=False),
        sa.Column('email', sa.String(length=100), nullable=False),
        sa.Column('password', sa.String(length=200), nullable=True),
        sa.Column('dateOfBirth', sa.DateTime(timezone=True), nullable=True),
        sa.Column('roleId', sa.Integer(), nullable=False),
        sa.Column('revoked', sa.Boolean(), nullable=False),
        sa.Column('createdAt', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updatedAt', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.ForeignKeyConstraint(['roleId'], ['roles.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email'),
        sa.UniqueConstraint('uid')
    )
    with op.batch_alter_table('staff', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_staff_firstName'), ['firstName'], unique=False)
        batch_op.create_index(batch_op.f('ix_staff_lastName'), ['lastName'], unique=False)

    op.create_table('email_aliases',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('email', sa.String(length=100), nullable=False),
        sa.Column('staffId', sa.Integer(), nullable=False),
        sa.Column('createdAt', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updatedAt', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.ForeignKeyConstraint(['staffId'], ['staff.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email')
    )
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('email_aliases')
    with op.batch_alter_table('staff', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_staff_lastName'))
        batch_op.drop_index(batch_op.f('ix_staff_firstName'))

    op.drop_table('staff')
    op.drop_table('roles')
    # ### end Alembic commands ###

models.py (Normal file, 43 lines)
@@ -0,0 +1,43 @@
from typing import List
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy.orm import Mapped, mapped_column, relationship
from sqlalchemy import String, ForeignKey, types, func
from datetime import datetime

db = SQLAlchemy()


class RoleModel(db.Model):
    __tablename__ = "roles"
    id: Mapped[int] = mapped_column(primary_key=True, autoincrement=False)
    name: Mapped[str] = mapped_column(String(100), unique=True, nullable=False)
    createdAt: Mapped[datetime] = mapped_column(types.DateTime(timezone=True), nullable=False, server_default=func.now())
    updatedAt: Mapped[datetime] = mapped_column(types.DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now())

    staff: Mapped[List["StaffModel"]] = relationship(back_populates="role")


class StaffModel(db.Model):
    __tablename__ = "staff"
    id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
    firstName: Mapped[str] = mapped_column(String(40), nullable=False, index=True)
    lastName: Mapped[str] = mapped_column(String(40), nullable=False, index=True)
    uid: Mapped[str] = mapped_column(String(100), unique=True, nullable=False)
    email: Mapped[str] = mapped_column(String(100), unique=True, nullable=False)
    password: Mapped[str] = mapped_column(String(200), nullable=True)
    dateOfBirth: Mapped[datetime] = mapped_column(types.DateTime(timezone=True), nullable=True)
    roleId: Mapped[int] = mapped_column(ForeignKey("roles.id"))
    revoked: Mapped[bool] = mapped_column(default=False)
    createdAt: Mapped[datetime] = mapped_column(types.DateTime(timezone=True), nullable=False, server_default=func.now())
    updatedAt: Mapped[datetime] = mapped_column(types.DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now())

    role: Mapped["RoleModel"] = relationship(back_populates="staff")
    email_aliases: Mapped[List["EmailAliasModel"]] = relationship(back_populates="staff")


class EmailAliasModel(db.Model):
    __tablename__ = "email_aliases"
    id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
    email: Mapped[str] = mapped_column(String(100), unique=True, nullable=False)
    staffId: Mapped[int] = mapped_column(ForeignKey("staff.id"))
    createdAt: Mapped[datetime] = mapped_column(types.DateTime(timezone=True), nullable=False, server_default=func.now())
    updatedAt: Mapped[datetime] = mapped_column(types.DateTime(timezone=True), nullable=False, server_default=func.now(), onupdate=func.now())

    staff: Mapped["StaffModel"] = relationship(back_populates="email_aliases")
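
Note: a short usage sketch for these models, assuming the application factory from app.py and a reachable database (for example the db service from docker-compose.yml). It uses the regular Flask-SQLAlchemy session API; the role and staff values are made-up placeholders, not data from this commit.

# usage_sketch.py (illustrative only, not part of the commit)
from app import create_app
from models import db, RoleModel, StaffModel

app = create_app()
with app.app_context():
    # RoleModel ids are not auto-incrementing, so an explicit id is required.
    admin = RoleModel(id=1, name="admin")
    db.session.add(admin)

    # revoked defaults to False; createdAt/updatedAt are filled by server defaults.
    alice = StaffModel(firstName="Alice", lastName="Example", uid="alice01",
                       email="alice@example.com", role=admin)
    db.session.add(alice)
    db.session.commit()

    print(alice.role.name, [a.email for a in alice.email_aliases])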

requirements.txt (Executable file, 19 lines)
@@ -0,0 +1,19 @@
alembic==1.13.3
Authlib==1.3.2
blinker==1.8.2
cffi==1.17.1
click==8.1.7
cryptography==43.0.1
Flask==3.0.3
Flask-Migrate==4.0.7
Flask-SQLAlchemy==3.1.1
greenlet==3.1.1
itsdangerous==2.2.0
Jinja2==3.1.4
Mako==1.3.5
MarkupSafe==2.1.5
pycparser==2.22
SQLAlchemy==2.0.35
typing_extensions==4.12.2
Werkzeug==3.0.4
PyMySQL==1.1.1