Initial commit with authentication and register/login routes already set up
61  .gitignore  vendored  Normal file
@@ -0,0 +1,61 @@
# Python specific ignores
__pycache__/
*.py[cod]
*.so
*.egg
*.egg-info/
*.pyd
*.whl
*.log
*.pot
*.mo
*.pyo

# Virtual environment directories
env/
venv/
.venv/

# Python packaging
*.manifest
*.spec
pip-log.txt
pip-delete-this-directory.txt

# Cache files
*.log
*.sqlite3
*.db
.DS_Store
*.bak
*.swp

# Test output
htmlcov/
.tox/
.nox/
.coverage
coverage.*
*.cover
*.py,cover
.cache
nosetests.xml
coverage.xml
*.pid

# FastAPI specific
*.env
alembic/versions/* # Ignore migration files if autogenerated
instance/

# VS Code settings (specific to SSH)
.vscode/
.vscode-server/

# Ignore SSH settings files
.ssh/
*.pem
*.key

# pyright cache (VSCode Python language server)
.pyright/
117  alembic.ini  Normal file
@@ -0,0 +1,117 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
# Use forward slashes (/) also on windows to provide an os agnostic path
script_location = alembic

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = /home/nextjs/fastapi/app

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =

# max length of characters to apply to the "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
# version_path_separator = newline
version_path_separator = os  # Use os.pathsep. Default configuration used for new projects.

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = postgresql+psycopg://postgres:postgresbro@10.0.0.124/scrap


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
1  alembic/README  Normal file
@@ -0,0 +1 @@
Generic single-database configuration.
84  alembic/env.py  Normal file
@@ -0,0 +1,84 @@
import sys
import os

from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'app'))

from models import Base
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
26  alembic/script.py.mako  Normal file
@@ -0,0 +1,26 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
@@ -0,0 +1,56 @@
"""Add relation between bid and user through userId and added nullable false to all foreign keys

Revision ID: 21ac26bca176
Revises: 22e9331f3fbd
Create Date: 2024-10-13 20:21:05.124526

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '21ac26bca176'
down_revision: Union[str, None] = '22e9331f3fbd'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('accounts', 'userId',
               existing_type=sa.INTEGER(),
               nullable=False)
    op.alter_column('auctions', 'vehicleId',
               existing_type=sa.INTEGER(),
               nullable=False)
    op.alter_column('auctions', 'userId',
               existing_type=sa.INTEGER(),
               nullable=False)
    op.add_column('bids', sa.Column('userId', sa.Integer(), nullable=False))
    op.alter_column('bids', 'auctionId',
               existing_type=sa.INTEGER(),
               nullable=False)
    op.create_foreign_key(None, 'bids', 'users', ['userId'], ['id'])
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint(None, 'bids', type_='foreignkey')
    op.alter_column('bids', 'auctionId',
               existing_type=sa.INTEGER(),
               nullable=True)
    op.drop_column('bids', 'userId')
    op.alter_column('auctions', 'userId',
               existing_type=sa.INTEGER(),
               nullable=True)
    op.alter_column('auctions', 'vehicleId',
               existing_type=sa.INTEGER(),
               nullable=True)
    op.alter_column('accounts', 'userId',
               existing_type=sa.INTEGER(),
               nullable=True)
    # ### end Alembic commands ###
@@ -0,0 +1,30 @@
"""Add relation between Bid and User

Revision ID: 22e9331f3fbd
Revises: 3ab355a9d3b1
Create Date: 2024-10-13 19:49:18.378918

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '22e9331f3fbd'
down_revision: Union[str, None] = '3ab355a9d3b1'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
32  alembic/versions/2ea50aba1814_added_postcode_and_city.py  Normal file
@@ -0,0 +1,32 @@
"""Added postcode and city

Revision ID: 2ea50aba1814
Revises: f0915dd7eb8b
Create Date: 2024-10-14 04:23:57.393072

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '2ea50aba1814'
down_revision: Union[str, None] = 'f0915dd7eb8b'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('users', sa.Column('postcode', sa.Integer(), nullable=False))
    op.add_column('users', sa.Column('city', sa.String(), nullable=False))
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('users', 'city')
    op.drop_column('users', 'postcode')
    # ### end Alembic commands ###
@@ -0,0 +1,30 @@
"""Add relation between Bid and User

Revision ID: 3ab355a9d3b1
Revises: 88d9df7768b2
Create Date: 2024-10-13 19:44:39.230332

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '3ab355a9d3b1'
down_revision: Union[str, None] = '88d9df7768b2'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
138  alembic/versions/88d9df7768b2_initial_migration.py  Normal file
@@ -0,0 +1,138 @@
"""Initial migration

Revision ID: 88d9df7768b2
Revises:
Create Date: 2024-10-12 15:40:52.735580

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '88d9df7768b2'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('equipment',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('name', sa.String(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('users',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('name', sa.String(), nullable=False),
    sa.Column('company', sa.String(), nullable=True),
    sa.Column('address', sa.String(), nullable=False),
    sa.Column('latitude', sa.String(), nullable=True),
    sa.Column('longitude', sa.String(), nullable=True),
    sa.Column('phone', sa.String(), nullable=False),
    sa.Column('privatePhone', sa.String(), nullable=True),
    sa.Column('email', sa.String(), nullable=False),
    sa.Column('cvr', sa.String(), nullable=True),
    sa.Column('password', sa.String(), nullable=False),
    sa.Column('username', sa.String(), nullable=False),
    sa.Column('role', sa.Enum('PRIVATE', 'BUSINESS', name='userrole'), nullable=True),
    sa.Column('updatedAt', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.Column('createdAt', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('email'),
    sa.UniqueConstraint('username')
    )
    op.create_table('vehicles',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('brand', sa.String(), nullable=False),
    sa.Column('model', sa.String(), nullable=False),
    sa.Column('variant', sa.String(), nullable=True),
    sa.Column('year', sa.Integer(), nullable=False),
    sa.Column('kilometers', sa.Integer(), nullable=False),
    sa.Column('condition', sa.String(), nullable=False),
    sa.Column('location', sa.String(), nullable=False),
    sa.Column('latitude', sa.String(), nullable=False),
    sa.Column('longitude', sa.String(), nullable=False),
    sa.Column('gasType', sa.String(), nullable=False),
    sa.Column('images', sa.Text(), nullable=False),
    sa.Column('description', sa.String(), nullable=False),
    sa.Column('service', sa.String(), nullable=False),
    sa.Column('inspectedAt', sa.DateTime(timezone=True), nullable=True),
    sa.Column('updatedAt', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.Column('createdAt', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('verification_tokens',
    sa.Column('identifier', sa.String(), nullable=False),
    sa.Column('token', sa.String(), nullable=False),
    sa.Column('expires', sa.DateTime(), nullable=False),
    sa.PrimaryKeyConstraint('identifier'),
    sa.UniqueConstraint('token')
    )
    op.create_table('accounts',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('userId', sa.Integer(), nullable=True),
    sa.Column('type', sa.String(), nullable=False),
    sa.Column('provider', sa.String(), nullable=False),
    sa.Column('providerAccountId', sa.String(), nullable=False),
    sa.Column('refresh_token', sa.String(), nullable=True),
    sa.Column('access_token', sa.String(), nullable=True),
    sa.Column('expires_at', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['userId'], ['users.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('auctions',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('vehicleId', sa.Integer(), nullable=True),
    sa.Column('userId', sa.Integer(), nullable=True),
    sa.Column('askingPrice', sa.Float(), nullable=False),
    sa.Column('description', sa.String(), nullable=True),
    sa.Column('updatedAt', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.Column('createdAt', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.ForeignKeyConstraint(['userId'], ['users.id'], ),
    sa.ForeignKeyConstraint(['vehicleId'], ['vehicles.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('sessions',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('sessionToken', sa.String(), nullable=False),
    sa.Column('userId', sa.Integer(), nullable=True),
    sa.Column('expires', sa.DateTime(), nullable=False),
    sa.ForeignKeyConstraint(['userId'], ['users.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('sessionToken')
    )
    op.create_table('vehicle_equipment',
    sa.Column('vehicle_id', sa.Integer(), nullable=False),
    sa.Column('equipment_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['equipment_id'], ['equipment.id'], ),
    sa.ForeignKeyConstraint(['vehicle_id'], ['vehicles.id'], ),
    sa.PrimaryKeyConstraint('vehicle_id', 'equipment_id')
    )
    op.create_table('bids',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('auctionId', sa.Integer(), nullable=True),
    sa.Column('bid', sa.Float(), nullable=False),
    sa.Column('updatedAt', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.Column('createdAt', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.ForeignKeyConstraint(['auctionId'], ['auctions.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('bids')
    op.drop_table('vehicle_equipment')
    op.drop_table('sessions')
    op.drop_table('auctions')
    op.drop_table('accounts')
    op.drop_table('verification_tokens')
    op.drop_table('vehicles')
    op.drop_table('users')
    op.drop_table('equipment')
    # ### end Alembic commands ###
32  alembic/versions/bc6a303851bc_removed_username.py  Normal file
@@ -0,0 +1,32 @@
"""Removed username

Revision ID: bc6a303851bc
Revises: 21ac26bca176
Create Date: 2024-10-14 03:48:16.528692

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = 'bc6a303851bc'
down_revision: Union[str, None] = '21ac26bca176'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint('users_username_key', 'users', type_='unique')
    op.drop_column('users', 'username')
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('users', sa.Column('username', sa.VARCHAR(), autoincrement=False, nullable=False))
    op.create_unique_constraint('users_username_key', 'users', ['username'])
    # ### end Alembic commands ###
@@ -0,0 +1,30 @@
"""changed user_input to email in UserLogin BaseModel

Revision ID: f0915dd7eb8b
Revises: bc6a303851bc
Create Date: 2024-10-14 03:52:56.985970

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = 'f0915dd7eb8b'
down_revision: Union[str, None] = 'bc6a303851bc'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
0  app/__init__.py  Normal file
25  app/database.py  Normal file
@@ -0,0 +1,25 @@
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
import os

# Set up the database URL (adjust to your psycopg3 format)
DATABASE_URL = os.getenv("DATABASE_URL", "postgresql+psycopg://postgres:postgresbro@10.0.0.124/scrap")

# Create the async engine with psycopg3
engine = create_async_engine(DATABASE_URL, echo=True)

# Create the sessionmaker for async sessions
SessionLocal = sessionmaker(
    bind=engine,
    class_=AsyncSession,
    autocommit=False,
    autoflush=False,
)

Base = declarative_base()

# Dependency for getting the database session
async def get_db():
    async with SessionLocal() as session:
        yield session
29  app/main.py  Normal file
@@ -0,0 +1,29 @@
from fastapi import FastAPI
from sqlalchemy.ext.asyncio import AsyncEngine
from contextlib import asynccontextmanager
from app.routers import auth  # Assuming you have a router for auth logic
from app.database import engine
from app.models import Base

# Define a lifespan context manager to handle startup and shutdown events
@asynccontextmanager
async def lifespan(app: FastAPI):
    # Run code before the app starts
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)

    # Yield control to run the app
    yield

    # Run code before the app shuts down (if needed)
    await engine.dispose()

# Initialize the FastAPI app with the lifespan event handler
app = FastAPI(lifespan=lifespan)

# Register your API routes
app.include_router(auth.router)

@app.get("/")
async def root():
    return {"message": "Hello World"}
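
For local development the app can be served with Uvicorn, which is already pinned in requirements2.txt/requirements3.txt. A minimal sketch, assuming the package layout above (app.main:app) and a reachable PostgreSQL instance for the lifespan hook; the run.py filename is just an illustrative choice:

# run.py - hypothetical development entry point, not part of this commit
import uvicorn

if __name__ == "__main__":
    # Serve the FastAPI instance defined in app/main.py with auto-reload for local work
    uvicorn.run("app.main:app", host="127.0.0.1", port=8000, reload=True)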
170  app/models.py  Normal file
@@ -0,0 +1,170 @@
from sqlalchemy import Column, Integer, String, Float, DateTime, ForeignKey, Text
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import as_declarative, declared_attr
from sqlalchemy.sql import func
from pydantic import BaseModel
from typing import Optional

@as_declarative()
class Base:
    @declared_attr
    def __tablename__(cls):
        return cls.__name__.lower()

    # Automatically apply nullable=False to ForeignKey columns unless explicitly set
    @staticmethod
    def Column(*args, **kwargs):
        if any(isinstance(arg, ForeignKey) for arg in args) and 'nullable' not in kwargs:
            kwargs['nullable'] = False
        return Column(*args, **kwargs)

# Enum for User Role
from sqlalchemy import Enum
from enum import Enum as PyEnum

class UserRole(PyEnum):
    PRIVATE = "PRIVATE"
    BUSINESS = "BUSINESS"

# User model
class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(String, nullable=False)
    company = Column(String, nullable=True)
    address = Column(String, nullable=False)
    postcode = Column(Integer, nullable=False)
    city = Column(String, nullable=False)
    latitude = Column(String, nullable=True)
    longitude = Column(String, nullable=True)
    phone = Column(String, nullable=False)
    privatePhone = Column(String, nullable=True)
    email = Column(String, unique=True, nullable=False)
    cvr = Column(String, nullable=True)
    password = Column(String, nullable=False)
    role = Column(Enum(UserRole), default=UserRole.PRIVATE)
    updatedAt = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())
    createdAt = Column(DateTime(timezone=True), server_default=func.now())

    auctions = relationship('Auction', back_populates='user')
    accounts = relationship('Account', back_populates='user')
    sessions = relationship('Session', back_populates='user')
    bids = relationship('Bid', back_populates='user')

# Vehicle model
class Vehicle(Base):
    __tablename__ = 'vehicles'
    id = Column(Integer, primary_key=True, autoincrement=True)
    brand = Column(String, nullable=False)
    model = Column(String, nullable=False)
    variant = Column(String, nullable=True)
    year = Column(Integer, nullable=False)
    kilometers = Column(Integer, nullable=False)
    condition = Column(String, nullable=False)
    location = Column(String, nullable=False)
    latitude = Column(String, nullable=False)
    longitude = Column(String, nullable=False)
    gasType = Column(String, nullable=False)
    images = Column(Text, nullable=False)  # Store image paths or references
    description = Column(String, nullable=False)
    service = Column(String, nullable=False)
    inspectedAt = Column(DateTime(timezone=True), nullable=True)
    updatedAt = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())
    createdAt = Column(DateTime(timezone=True), server_default=func.now())

    auctions = relationship('Auction', back_populates='vehicle')
    equipment = relationship('VehicleEquipment', back_populates='vehicle')

# Equipment model
class Equipment(Base):
    __tablename__ = 'equipment'
    id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(String, nullable=False)

    vehicles = relationship('VehicleEquipment', back_populates='equipment')

# Vehicle-Equipment association table
class VehicleEquipment(Base):
    __tablename__ = 'vehicle_equipment'
    vehicle_id = Column(Integer, ForeignKey('vehicles.id'), primary_key=True)
    equipment_id = Column(Integer, ForeignKey('equipment.id'), primary_key=True)

    vehicle = relationship('Vehicle', back_populates='equipment')
    equipment = relationship('Equipment', back_populates='vehicles')

# Auction model
class Auction(Base):
    __tablename__ = 'auctions'
    id = Column(Integer, primary_key=True, autoincrement=True)
    vehicleId = Column(Integer, ForeignKey('vehicles.id'), nullable=False)
    userId = Column(Integer, ForeignKey('users.id'), nullable=False)
    askingPrice = Column(Float, nullable=False)
    description = Column(String, nullable=True)
    updatedAt = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())
    createdAt = Column(DateTime(timezone=True), server_default=func.now())

    user = relationship('User', back_populates='auctions')
    vehicle = relationship('Vehicle', back_populates='auctions')
    bids = relationship('Bid', back_populates='auction')

# Bid model
class Bid(Base):
    __tablename__ = 'bids'
    id = Column(Integer, primary_key=True, autoincrement=True)
    auctionId = Column(Integer, ForeignKey('auctions.id'), nullable=False)
    userId = Column(Integer, ForeignKey('users.id'), nullable=False)
    bid = Column(Float, nullable=False)
    updatedAt = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())
    createdAt = Column(DateTime(timezone=True), server_default=func.now())

    auction = relationship('Auction', back_populates='bids')
    user = relationship('User', back_populates='bids')

# Account model
class Account(Base):
    __tablename__ = 'accounts'
    id = Column(Integer, primary_key=True, autoincrement=True)
    userId = Column(Integer, ForeignKey('users.id'), nullable=False)
    type = Column(String, nullable=False)
    provider = Column(String, nullable=False)
    providerAccountId = Column(String, nullable=False)
    refresh_token = Column(String, nullable=True)
    access_token = Column(String, nullable=True)
    expires_at = Column(Integer, nullable=True)

    user = relationship('User', back_populates='accounts')

# Session model
class Session(Base):
    __tablename__ = 'sessions'
    id = Column(Integer, primary_key=True, autoincrement=True)
    sessionToken = Column(String, unique=True, nullable=False)
    userId = Column(Integer, ForeignKey('users.id'))
    expires = Column(DateTime, nullable=False)

    user = relationship('User', back_populates='sessions')

# VerificationToken model
class VerificationToken(Base):
    __tablename__ = 'verification_tokens'
    identifier = Column(String, primary_key=True)
    token = Column(String, unique=True, nullable=False)
    expires = Column(DateTime, nullable=False)

# Pydantic model for user registration
class UserCreate(BaseModel):
    email: str
    password: str
    name: str
    phone: str
    address: str
    postcode: int
    city: str
    role: str
    company: Optional[str] = None
    privatePhone: Optional[str] = None
    cvr: Optional[str] = None

class UserLogin(BaseModel):
    email: str
    password: str
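
A minimal sketch of how the back_populates pairs above are meant to be traversed, assuming the async SessionLocal from app/database.py; every field value is a placeholder, not project data:

# sketch: stage a User, Vehicle and Auction and walk the relationship wiring
from app.database import SessionLocal
from app.models import User, Vehicle, Auction

async def demo():
    async with SessionLocal() as db:
        user = User(name="Jane", address="Somewhere 1", postcode=8000, city="Aarhus",
                    phone="12345678", email="jane@example.com", password="<hashed>")
        vehicle = Vehicle(brand="VW", model="Golf", year=2018, kilometers=90000,
                          condition="good", location="Aarhus", latitude="56.1",
                          longitude="10.2", gasType="benzin", images="[]",
                          description="placeholder", service="ok")
        auction = Auction(user=user, vehicle=vehicle, askingPrice=45000.0)
        db.add(auction)  # the default save-update cascade also stages user and vehicle
        print(auction.user.name, auction.vehicle.brand)  # back_populates, still in memory
        await db.commit()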
0  app/routers/__init__.py  Normal file
82  app/routers/auth.py  Normal file
@@ -0,0 +1,82 @@
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from ..database import get_db
from ..models import User, UserCreate, UserLogin
from ..security import create_access_token, verify_access_token
from fastapi.security import OAuth2PasswordBearer
import bcrypt

oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")

router = APIRouter()

# Register a new user
@router.post("/api/v1/register")
async def register(user_data: UserCreate, db: AsyncSession = Depends(get_db)):
    async with db.begin():
        # Check if email already exists
        query = select(User).filter(User.email == user_data.email)
        result = await db.execute(query)
        user = result.scalars().first()

        if user:
            raise HTTPException(status_code=400, detail="Email already exists")

        # If user is registering as business, require company, privatePhone, and cvr
        if user_data.role == 'BUSINESS':
            if not user_data.company or not user_data.privatePhone or not user_data.cvr:
                raise HTTPException(status_code=400, detail="Company, Private Phone, and CVR are required for business users.")

        # Hash the password using bcrypt
        hashed_password = bcrypt.hashpw(user_data.password.encode('utf-8'), bcrypt.gensalt()).decode('utf-8')

        # Create a new user with the provided information
        new_user = User(
            email=user_data.email,
            password=hashed_password,
            name=user_data.name,
            role=user_data.role,
            phone=user_data.phone,
            address=user_data.address,
            postcode=user_data.postcode,
            city=user_data.city,
            company=user_data.company,
            privatePhone=user_data.privatePhone,
            cvr=user_data.cvr
        )

        db.add(new_user)
    await db.commit()

    return {"message": "User created successfully"}

# User login
@router.post("/api/v1/login")
async def login(login_data: UserLogin, db: AsyncSession = Depends(get_db)):
    async with db.begin():
        # Look up the user by email
        query = select(User).filter(User.email == login_data.email)

        result = await db.execute(query)
        user = result.scalars().first()

        if not user or not bcrypt.checkpw(login_data.password.encode('utf-8'), user.password.encode('utf-8')):
            raise HTTPException(status_code=400, detail="Invalid credentials")

        access_token = create_access_token(data={"user_id": user.id})
        return {"access_token": access_token, "token_type": "bearer"}

# Protected route example
@router.get("/api/v1/protected")
async def protected_route(token: str = Depends(oauth2_scheme), db: AsyncSession = Depends(get_db)):
    user_id = verify_access_token(token)
    async with db.begin():
        query = select(User).filter(User.id == user_id)
        result = await db.execute(query)
        user = result.scalars().first()

        if not user:
            raise HTTPException(status_code=401, detail="User not found")

    return {"message": f"Hello, {user.name}"}
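
A rough sketch of exercising the three routes above with httpx (already listed in requirements2.txt/requirements3.txt), assuming the server is running locally on port 8000; every payload value is a placeholder:

import httpx

BASE = "http://127.0.0.1:8000/api/v1"

payload = {
    "email": "jane@example.com", "password": "secret", "name": "Jane",
    "phone": "12345678", "address": "Somewhere 1", "postcode": 8000,
    "city": "Aarhus", "role": "PRIVATE",
}
print(httpx.post(f"{BASE}/register", json=payload).json())

token = httpx.post(f"{BASE}/login",
                   json={"email": "jane@example.com", "password": "secret"}).json()["access_token"]
# OAuth2PasswordBearer reads the token from the Authorization: Bearer header
print(httpx.get(f"{BASE}/protected",
                headers={"Authorization": f"Bearer {token}"}).json())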
54  app/security.py  Normal file
@@ -0,0 +1,54 @@
import bcrypt
from jose import jwt, JWTError
from datetime import datetime, timedelta
from fastapi import HTTPException, status
import os

# Secret and algorithm for JWT
SECRET_KEY = os.getenv('SECRET_KEY', 'your_jwt_secret_key')  # Ensure this is set in your environment
ALGORITHM = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES = 30

# Hash password using bcrypt directly
def get_password_hash(password: str) -> str:
    """Hashes the password using bcrypt."""
    salt = bcrypt.gensalt()  # Generate a salt
    hashed_password = bcrypt.hashpw(password.encode('utf-8'), salt)  # Hash the password
    return hashed_password.decode('utf-8')  # Return as a string

# Verify password using bcrypt directly
def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Verifies if the plain password matches the hashed password."""
    return bcrypt.checkpw(plain_password.encode('utf-8'), hashed_password.encode('utf-8'))

# Create JWT token
def create_access_token(data: dict, expires_delta: timedelta = None):
    """Creates a JWT token with expiration time."""
    to_encode = data.copy()
    if expires_delta:
        expire = datetime.utcnow() + expires_delta
    else:
        expire = datetime.utcnow() + timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
    to_encode.update({"exp": expire})
    encoded_jwt = jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)
    return encoded_jwt

# Verify JWT token
def verify_access_token(token: str):
    """Verifies the JWT token and returns the user_id if valid."""
    try:
        payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
        user_id: str = payload.get("user_id")
        if user_id is None:
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Invalid token",
                headers={"WWW-Authenticate": "Bearer"},
            )
        return user_id
    except JWTError:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid token",
            headers={"WWW-Authenticate": "Bearer"},
        )
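
A quick sanity-check sketch for the helpers above; it only assumes the module path app.security and needs no database:

from datetime import timedelta
from app.security import (create_access_token, verify_access_token,
                          get_password_hash, verify_password)

hashed = get_password_hash("secret")
assert verify_password("secret", hashed)

token = create_access_token({"user_id": 42}, expires_delta=timedelta(minutes=5))
assert verify_access_token(token) == 42  # the "user_id" claim round-trips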
161  app/seed.py  Normal file
@@ -0,0 +1,161 @@
# seed.py
from sqlalchemy.orm import Session
from database import SessionLocal, init_db
from models import Equipment

# List of equipment data
equipmentData = [
    "aut.",
    "aut.gear/tiptronic",
    "ratgearskifte",
    "alu.",
    "15\" Alufælge",
    "16\" Alufælge",
    "17\" Alufælge",
    "18\" Alufælge",
    "19\" Alufælge",
    "20\" Alufælge",
    "21\" Alufælge",
    "22\" Alufælge",
    "vinterhjul",
    "varme i rat",
    "airc.",
    "fuldaut. klima",
    "2 zone klima",
    "3 zone klima",
    "4 zone klima",
    "køl i handskerum",
    "elektrisk",
    "kabinevarmer",
    "motorkabinevarmer",
    "alarm",
    "el-klapbare sidespejle",
    "el-klapbare sidespejle m. varme",
    "el-spejle m/varme",
    "automatisk parkerings system",
    "360° kamera",
    "bakkamera",
    "parkeringssensor (bag)",
    "parkeringssensor (for)",
    "adaptiv fartpilot",
    "automatisk",
    "start/stop",
    "el betjent bagklap",
    "dæktryksmåler",
    "adaptiv undervogn",
    "elektrisk parkeringsbremse",
    "træthedsregistrering",
    "skiltegenkendelse",
    "CD",
    "CD/radio",
    "radio med CDboks",
    "el komfortsæder",
    "sportssæder",
    "integrerede børnesæder",
    "3 individuelle sæder i bag",
    "lygtevasker",
    "tågelygter",
    "bi-xenon",
    "xenonlys",
    "automatisk lys",
    "fjernlysassistent",
    "kurvelys",
    "LED kørelys",
    "fuld LED forlygter",
    "airbag",
    "db. airbags",
    "4 airbags",
    "6 airbags",
    "7 airbags",
    "8 airbags",
    "9 airbags",
    "10 airbags",
    "ABS",
    "antispin",
    "c.lås",
    "fjernb. c.lås",
    "nøglefri betjening",
    "fartpilot",
    "kørecomputer",
    "infocenter",
    "startspærre",
    "varme i forrude",
    "auto. nedbl. Bakspejl",
    "udv. temp. måler",
    "regnsensor",
    "sædevarme",
    "højdejust. forsæder",
    "højdejust. førersæde",
    "el indst. forsæder",
    "el indst. førersæde m. memory",
    "soltag",
    "el-soltag",
    "glastag",
    "el-ruder",
    "4x el-ruder",
    "el-spejle",
    "DAB radio",
    "DAB+ radio",
    "navigation",
    "multifunktionsrat",
    "håndfrit til mobil",
    "bluetooth",
    "musikstreaming via bluetooth",
    "nightvision",
    "digitalt cockpit",
    "headup display",
    "Android Auto",
    "Apple CarPlay",
    "Internet",
    "trådløs",
    "mobilopladning",
    "SD kortlæser",
    "USB tilslutning",
    "AUX tilslutning",
    "armlæn",
    "isofix",
    "bagagerumsdækken",
    "kopholder",
    "stofindtræk",
    "dellæder",
    "læderindtræk",
    "kunstlæder",
    "splitbagsæde",
    "læderrat",
    "ESP",
    "servo",
    "vognbaneassistent",
    "blindvinkelsassistent",
    "automatisk nødbremsesystem",
    "sænket",
    "tagræling",
    "tonede ruder",
    "mørktonede ruder i bag",
    "1 ejer",
    "ikke ryger",
    "service ok",
    "brugtbilsattest",
    "træk",
    "aftag. træk",
    "svingbart træk (manuel)",
    "svingbart træk (elektrisk)",
    "diesel partikel filter",
    "undervognsbehandlet"
]

def seed_equipment_data(db: Session):
    for equipment in equipmentData:
        db.add(Equipment(name=equipment))
    db.commit()

def main():
    init_db()  # Ensure all tables are created
    db = SessionLocal()
    try:
        seed_equipment_data(db)
        print("Equipment data seeded.")
    finally:
        db.close()

if __name__ == "__main__":
    main()
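
Note that seed.py as committed imports an init_db helper that app/database.py does not define, and the SessionLocal there is bound to the async engine. A minimal async variant of the seeding flow, under those assumptions and reusing the equipmentData list defined above, might look like this sketch:

# sketch: async seeding, assuming the async SessionLocal/engine from app/database.py
# and the Base metadata from app/models.py; equipmentData is the list defined above
import asyncio

from database import SessionLocal, engine
from models import Base, Equipment

async def seed():
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)  # stand-in for the missing init_db()
    async with SessionLocal() as db:
        db.add_all([Equipment(name=name) for name in equipmentData])
        await db.commit()
        print("Equipment data seeded.")

if __name__ == "__main__":
    asyncio.run(seed())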
8  requirements.txt  Normal file
@@ -0,0 +1,8 @@
fastapi[standard]==0.115.0
pydantic==2.9.0
SQLAlchemy==2.0.35
Alembic==1.13.3
psycopg[binary,async]==3.2.3
python-jose==3.3.0
PyJWT==2.9.0
bcrypt
40  requirements2.txt  Normal file
@@ -0,0 +1,40 @@
alembic==1.13.3
annotated-types==0.7.0
anyio==4.6.0
certifi==2024.8.30
click==8.1.7
dnspython==2.7.0
email_validator==2.2.0
fastapi==0.115.0
fastapi-cli==0.0.5
greenlet==3.1.1
h11==0.14.0
httpcore==1.0.6
httptools==0.6.1
httpx==0.27.2
idna==3.10
Jinja2==3.1.4
Mako==1.3.5
markdown-it-py==3.0.0
MarkupSafe==3.0.1
mdurl==0.1.2
psycopg==3.2.3
psycopg-binary==3.2.3
pydantic==2.9.0
pydantic_core==2.23.2
Pygments==2.18.0
python-dotenv==1.0.1
python-multipart==0.0.12
PyYAML==6.0.2
rich==13.9.2
shellingham==1.5.4
sniffio==1.3.1
SQLAlchemy==2.0.35
starlette==0.38.6
typer==0.12.5
typing_extensions==4.12.2
tzdata==2024.2
uvicorn==0.31.1
uvloop==0.20.0
watchfiles==0.24.0
websockets==13.1
47  requirements3.txt  Normal file
@@ -0,0 +1,47 @@
alembic==1.13.3
annotated-types==0.7.0
anyio==4.6.0
bcrypt==4.2.0
certifi==2024.8.30
click==8.1.7
dnspython==2.7.0
ecdsa==0.19.0
email_validator==2.2.0
fastapi==0.115.0
fastapi-cli==0.0.5
greenlet==3.1.1
h11==0.14.0
httpcore==1.0.6
httptools==0.6.1
httpx==0.27.2
idna==3.10
Jinja2==3.1.4
Mako==1.3.5
markdown-it-py==3.0.0
MarkupSafe==3.0.1
mdurl==0.1.2
psycopg==3.2.3
psycopg-binary==3.2.3
pyasn1==0.6.1
pydantic==2.9.0
pydantic_core==2.23.2
Pygments==2.18.0
PyJWT==2.9.0
python-dotenv==1.0.1
python-jose==3.3.0
python-multipart==0.0.12
PyYAML==6.0.2
rich==13.9.2
rsa==4.9
shellingham==1.5.4
six==1.16.0
sniffio==1.3.1
SQLAlchemy==2.0.35
starlette==0.38.6
typer==0.12.5
typing_extensions==4.12.2
tzdata==2024.2
uvicorn==0.31.1
uvloop==0.20.0
watchfiles==0.24.0
websockets==13.1