Commit 0f25813: Initial commit (root commit, 0 parents)


43 files changed, +1188 −0 lines

.gitignore

+2

.idea*
venv*

ReadMe.md

+29

# FastAPI Template Repo
## Based on the backend in [@tiangolo's](https://github.com/tiangolo) [full-stack-fastapi-postgresql](https://github.com/tiangolo/full-stack-fastapi-postgresql)


### Setup
- Create and activate a virtual environment.
- Set up the env.ps1 script with the desired variables.
- In the virtual environment:
  - run the ```env.ps1``` script to set the environment variables.
  - install the packages listed in ```req.txt```
  - run ```python ./bootstrap.py```

If you want the Alembic-generated model, use:
- run ```alembic revision --autogenerate -m "Setup initial user table"```
- run ```alembic upgrade head```
- run ```python ./initial_data.py```

Lastly...
- run ```uvicorn app.main:app --reload``` to start the application

### Usage
- Once the application is running, go to ```localhost:8000/docs``` (or whatever you configured in env.ps1) to log in and try out the endpoints.


### Change Log:
#### 1-Aug-2019
Changing this repo from one used for demoing an issue to a template for making an API service using Postgres and FastAPI.

- Adding an archive with the old readme detailing the error.
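
As a rough guide to what ```env.ps1``` needs to export: the database settings the template reads can be seen in ```get_url()``` inside ```alembic/env.py``` further down in this commit. Below is a minimal Python sketch of the same lookup; the defaults shown are that function's fallbacks, not recommendations. Any additional settings (for example ```POSTGRES_SCHEMA``` and ```PUBLIC_TABLES```) are read from ```app/core/config.py```, which is not visible in this excerpt.

```python
# Environment variables consumed when the Postgres URL is assembled
# (names and fallback values taken from get_url() in alembic/env.py below).
import os

user = os.getenv("POSTGRES_USER", "postgres")
password = os.getenv("POSTGRES_PASSWORD", "")
server = os.getenv("POSTGRES_SERVER", "db")
db = os.getenv("POSTGRES_DB", "app")
port = os.getenv("POSTGRES_PORT", 5432)

print(f"postgresql://{user}:{password}@{server}:{port}/{db}")
```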

alembic.ini

+71

# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = alembic

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# timezone to use when rendering the date
# within the migration file as well as the filename.
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
#truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; this defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat alembic/versions

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

alembic/README

+1

Generic single-database configuration.

alembic/env.py

+167

from __future__ import with_statement

import os

from alembic import context
from sqlalchemy import (
    engine_from_config,
    pool,
    MetaData,
    Table,
    ForeignKeyConstraint,
    Index,
)
from logging.config import fileConfig
from app.core.config import POSTGRES_SCHEMA, PUBLIC_TABLES, SCHEMA_QUERY

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
# target_metadata = None

from app.db.base import Base  # noqa

target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def get_url():
    user = os.getenv("POSTGRES_USER", "postgres")
    password = os.getenv("POSTGRES_PASSWORD", "")
    server = os.getenv("POSTGRES_SERVER", "db")
    db = os.getenv("POSTGRES_DB", "app")
    port = os.getenv("POSTGRES_PORT", 5432)
    return f"postgresql://{user}:{password}@{server}:{port}/{db}"


def include_schemas(names):
    # produce an include object function that filters on the given schemas
    def include_object(object, name, type_, reflected, compare_to):
        if type_ == "table":
            return object.schema in names
        return True

    return include_object


def lookup_correct_schema(name):
    if name in PUBLIC_TABLES:
        return "public"
    else:
        return POSTGRES_SCHEMA


def _get_table_key(name, schema):
    if schema is None:
        return name
    else:
        return schema + "." + name


def tometadata(table, metadata, schema):
    key = _get_table_key(table.name, schema)
    if key in metadata.tables:
        return metadata.tables[key]

    args = []
    for c in table.columns:
        args.append(c.copy(schema=schema))
    new_table = Table(table.name, metadata, schema=schema, *args, **table.kwargs)
    for c in table.constraints:
        if isinstance(c, ForeignKeyConstraint):
            constraint_schema = lookup_correct_schema(c.elements[0].column.table.name)
        else:
            constraint_schema = schema
        new_table.append_constraint(
            c.copy(schema=constraint_schema, target_table=new_table)
        )

    for index in table.indexes:
        # skip indexes that would be generated
        # by the 'index' flag on Column
        if len(index.columns) == 1 and list(index.columns)[0].index:
            continue
        Index(
            index.name,
            unique=index.unique,
            *[new_table.c[col] for col in index.columns.keys()],
            **index.kwargs,
        )
    return table._schema_item_copy(new_table)


meta_schemax = MetaData()
for table in target_metadata.tables.values():
    tometadata(table, meta_schemax, lookup_correct_schema(table.name))
target_metadata = meta_schemax


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = get_url()
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        compare_type=True,
        version_table_schema="schema_case",
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    configuration = config.get_section(config.config_ini_section)
    configuration["sqlalchemy.url"] = get_url()
    connectable = engine_from_config(
        configuration, prefix="sqlalchemy.", poolclass=pool.NullPool
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            compare_type=True,
            include_schemas=True,  # schemas,
            version_table_schema=POSTGRES_SCHEMA,
            include_object=include_schemas([None, POSTGRES_SCHEMA])
        )
        with context.begin_transaction():
            context.execute(f"SET search_path TO {POSTGRES_SCHEMA}")
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
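
The notable part of this env.py is the schema handling: every model table is copied into a fresh MetaData with its schema chosen by lookup_correct_schema(), and autogenerate is then restricted to those schemas through the include_object hook. As a small standalone illustration (not part of the commit; "schema_case" stands in for POSTGRES_SCHEMA), the filter returned by include_schemas() behaves like this:

```python
# Illustrative only: what the include_object callback produced by
# include_schemas() returns for a few objects during autogenerate.
from types import SimpleNamespace


def include_schemas(names):
    # same logic as in env.py above
    def include_object(object, name, type_, reflected, compare_to):
        if type_ == "table":
            return object.schema in names
        return True
    return include_object


flt = include_schemas([None, "schema_case"])

print(flt(SimpleNamespace(schema="schema_case"), "user", "table", True, None))  # True: kept
print(flt(SimpleNamespace(schema="audit"), "log", "table", True, None))         # False: skipped
print(flt(SimpleNamespace(schema="audit"), "ix_log_id", "index", True, None))   # True: non-tables pass through
```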

alembic/script.py.mako

+24

"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}

alembic/versions/.keep

Whitespace-only changes.
alembic/versions/ (autogenerated migration module; file name not shown in this view)

+43

"""My Test

Revision ID: 0dd362c5b5bf
Revises:
Create Date: 2019-07-31 10:20:33.478780

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '0dd362c5b5bf'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('user',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('full_name', sa.String(), nullable=True),
        sa.Column('email', sa.String(), nullable=True),
        sa.Column('hashed_password', sa.String(), nullable=True),
        sa.Column('is_active', sa.Boolean(), nullable=True),
        sa.Column('is_superuser', sa.Boolean(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        schema='schema_case'
    )
    op.create_index(op.f('ix_schema_case_user_email'), 'user', ['email'], unique=True, schema='schema_case')
    op.create_index(op.f('ix_schema_case_user_full_name'), 'user', ['full_name'], unique=False, schema='schema_case')
    op.create_index(op.f('ix_schema_case_user_id'), 'user', ['id'], unique=False, schema='schema_case')
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_schema_case_user_id'), table_name='user', schema='schema_case')
    op.drop_index(op.f('ix_schema_case_user_full_name'), table_name='user', schema='schema_case')
    op.drop_index(op.f('ix_schema_case_user_email'), table_name='user', schema='schema_case')
    op.drop_table('user', schema='schema_case')
    # ### end Alembic commands ###
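
For context, the SQLAlchemy model that this autogenerated revision implies would look roughly like the sketch below. The actual model module (pulled in via app.db.base in alembic/env.py) is not visible in this view, and in this repo the schema is normally assigned by env.py's tometadata() remapping rather than hard-coded, so treat every name here as a reconstruction from the migration, not the real source:

```python
# Hypothetical reconstruction of the 'user' model implied by the migration above;
# the real definition lives in the app.db_models package and may differ.
from sqlalchemy import Boolean, Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()


class User(Base):
    __tablename__ = "user"
    __table_args__ = {"schema": "schema_case"}  # normally applied by env.py's remapping

    id = Column(Integer, primary_key=True, index=True)
    full_name = Column(String, index=True)
    email = Column(String, unique=True, index=True)
    hashed_password = Column(String)
    is_active = Column(Boolean)
    is_superuser = Column(Boolean)
```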

app/__init__.py

+1

from . import api, core, crud, db, db_models, models

app/api/__init__.py

Whitespace-only changes.

app/api/api_v1/__init__.py

Whitespace-only changes.

app/api/api_v1/api.py

+9

from fastapi import APIRouter

from app.api.api_v1.endpoints import health, login, users

api_router = APIRouter()
api_router.include_router(login.router, tags=["login"])
api_router.include_router(health.router, prefix="/health", tags=["health"])
api_router.include_router(users.router, prefix="/users", tags=["users"])
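
app/main.py, which the README's ```uvicorn app.main:app --reload``` command points at, is part of this commit but not visible in this excerpt. A plausible wiring, offered purely as an assumption (the real prefix and settings presumably come from app/core/config.py), would be:

```python
# Hypothetical sketch of app/main.py; the actual file is hidden in this view,
# so the title and the "/api/v1" prefix are assumptions, not the repo's code.
from fastapi import FastAPI

from app.api.api_v1.api import api_router

app = FastAPI(title="FastAPI Template Repo")
app.include_router(api_router, prefix="/api/v1")
```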

app/api/api_v1/endpoints/__init__.py

Whitespace-only changes.

app/api/api_v1/endpoints/health.py

+12

from fastapi import APIRouter


router = APIRouter()


@router.get("/")
def get_health():
    """
    Return service health status.
    """
    return {"health": "ok"}
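
A quick way to exercise an endpoint like this without a database is FastAPI's TestClient. The sketch below redefines the route inline so it stays standalone (importing app.api.api_v1.endpoints.health would trigger app/__init__.py's project-wide imports), and mounts it under the same "/health" prefix that api.py uses:

```python
# Smoke test mirroring the health endpoint above (illustrative; not part of the commit).
from fastapi import APIRouter, FastAPI
from fastapi.testclient import TestClient

router = APIRouter()


@router.get("/")
def get_health():
    return {"health": "ok"}


app = FastAPI()
app.include_router(router, prefix="/health")

client = TestClient(app)
assert client.get("/health/").json() == {"health": "ok"}
print("health endpoint ok")
```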
