Shri committed on
Commit
973406c
·
1 Parent(s): cc8f808

feat: added asyncpg to endpoints

Browse files
alembic.ini ADDED
@@ -0,0 +1,147 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # A generic, single database configuration.
2
+
3
+ [alembic]
4
+ # path to migration scripts.
5
+ # this is typically a path given in POSIX (e.g. forward slashes)
6
+ # format, relative to the token %(here)s which refers to the location of this
7
+ # ini file
8
+ script_location = %(here)s/alembic
9
+
10
+ # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
11
+ # Uncomment the line below if you want the files to be prepended with date and time
12
+ # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
13
+ # for all available tokens
14
+ # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
15
+
16
+ # sys.path path, will be prepended to sys.path if present.
17
+ # defaults to the current working directory. for multiple paths, the path separator
18
+ # is defined by "path_separator" below.
19
+ prepend_sys_path = .
20
+
21
+
22
+ # timezone to use when rendering the date within the migration file
23
+ # as well as the filename.
24
+ # If specified, requires the tzdata library which can be installed by adding
25
+ # `alembic[tz]` to the pip requirements.
26
+ # string value is passed to ZoneInfo()
27
+ # leave blank for localtime
28
+ # timezone =
29
+
30
+ # max length of characters to apply to the "slug" field
31
+ # truncate_slug_length = 40
32
+
33
+ # set to 'true' to run the environment during
34
+ # the 'revision' command, regardless of autogenerate
35
+ # revision_environment = false
36
+
37
+ # set to 'true' to allow .pyc and .pyo files without
38
+ # a source .py file to be detected as revisions in the
39
+ # versions/ directory
40
+ # sourceless = false
41
+
42
+ # version location specification; This defaults
43
+ # to <script_location>/versions. When using multiple version
44
+ # directories, initial revisions must be specified with --version-path.
45
+ # The path separator used here should be the separator specified by "path_separator"
46
+ # below.
47
+ # version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions
48
+
49
+ # path_separator; This indicates what character is used to split lists of file
50
+ # paths, including version_locations and prepend_sys_path within configparser
51
+ # files such as alembic.ini.
52
+ # The default rendered in new alembic.ini files is "os", which uses os.pathsep
53
+ # to provide os-dependent path splitting.
54
+ #
55
+ # Note that in order to support legacy alembic.ini files, this default does NOT
56
+ # take place if path_separator is not present in alembic.ini. If this
57
+ # option is omitted entirely, fallback logic is as follows:
58
+ #
59
+ # 1. Parsing of the version_locations option falls back to using the legacy
60
+ # "version_path_separator" key, which if absent then falls back to the legacy
61
+ # behavior of splitting on spaces and/or commas.
62
+ # 2. Parsing of the prepend_sys_path option falls back to the legacy
63
+ # behavior of splitting on spaces, commas, or colons.
64
+ #
65
+ # Valid values for path_separator are:
66
+ #
67
+ # path_separator = :
68
+ # path_separator = ;
69
+ # path_separator = space
70
+ # path_separator = newline
71
+ #
72
+ # Use os.pathsep. Default configuration used for new projects.
73
+ path_separator = os
74
+
75
+ # set to 'true' to search source files recursively
76
+ # in each "version_locations" directory
77
+ # new in Alembic version 1.10
78
+ # recursive_version_locations = false
79
+
80
+ # the output encoding used when revision files
81
+ # are written from script.py.mako
82
+ # output_encoding = utf-8
83
+
84
+ # database URL. This is consumed by the user-maintained env.py script only.
85
+ # other means of configuring database URLs may be customized within the env.py
86
+ # file.
87
+ sqlalchemy.url = driver://user:pass@localhost/dbname
88
+
89
+
90
+ [post_write_hooks]
91
+ # post_write_hooks defines scripts or Python functions that are run
92
+ # on newly generated revision scripts. See the documentation for further
93
+ # detail and examples
94
+
95
+ # format using "black" - use the console_scripts runner, against the "black" entrypoint
96
+ # hooks = black
97
+ # black.type = console_scripts
98
+ # black.entrypoint = black
99
+ # black.options = -l 79 REVISION_SCRIPT_FILENAME
100
+
101
+ # lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module
102
+ # hooks = ruff
103
+ # ruff.type = module
104
+ # ruff.module = ruff
105
+ # ruff.options = check --fix REVISION_SCRIPT_FILENAME
106
+
107
+ # Alternatively, use the exec runner to execute a binary found on your PATH
108
+ # hooks = ruff
109
+ # ruff.type = exec
110
+ # ruff.executable = ruff
111
+ # ruff.options = check --fix REVISION_SCRIPT_FILENAME
112
+
113
+ # Logging configuration. This is also consumed by the user-maintained
114
+ # env.py script only.
115
+ [loggers]
116
+ keys = root,sqlalchemy,alembic
117
+
118
+ [handlers]
119
+ keys = console
120
+
121
+ [formatters]
122
+ keys = generic
123
+
124
+ [logger_root]
125
+ level = WARNING
126
+ handlers = console
127
+ qualname =
128
+
129
+ [logger_sqlalchemy]
130
+ level = WARNING
131
+ handlers =
132
+ qualname = sqlalchemy.engine
133
+
134
+ [logger_alembic]
135
+ level = INFO
136
+ handlers =
137
+ qualname = alembic
138
+
139
+ [handler_console]
140
+ class = StreamHandler
141
+ args = (sys.stderr,)
142
+ level = NOTSET
143
+ formatter = generic
144
+
145
+ [formatter_generic]
146
+ format = %(levelname)-5.5s [%(name)s] %(message)s
147
+ datefmt = %H:%M:%S
alembic/README ADDED
@@ -0,0 +1 @@
 
 
1
+ Generic single-database configuration.
alembic/env.py ADDED
@@ -0,0 +1,78 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from logging.config import fileConfig
2
+
3
+ from sqlalchemy import engine_from_config, pool
4
+ from sqlmodel import SQLModel
5
+
6
+ from alembic import context
7
+ from src.core import *
8
+ from src.core.config import settings
9
+
10
+ # this is the Alembic Config object, which provides
11
+ # access to the values within the .ini file in use.
12
+ config = context.config
13
+ config.set_main_option(name="sqlalchemy.url", value=settings.DATABASE_URL)
14
+ # Interpret the config file for Python logging.
15
+ # This line sets up loggers basically.
16
+ if config.config_file_name is not None:
17
+ fileConfig(config.config_file_name)
18
+
19
+ # add your model's MetaData object here
20
+ # for 'autogenerate' support
21
+ # from myapp import mymodel
22
+ # target_metadata = mymodel.Base.metadata
23
+ target_metadata = SQLModel.metadata
24
+
25
+ # other values from the config, defined by the needs of env.py,
26
+ # can be acquired:
27
+ # my_important_option = config.get_main_option("my_important_option")
28
+ # ... etc.
29
+
30
+
31
+ def run_migrations_offline() -> None:
32
+ """Run migrations in 'offline' mode.
33
+
34
+ This configures the context with just a URL
35
+ and not an Engine, though an Engine is acceptable
36
+ here as well. By skipping the Engine creation
37
+ we don't even need a DBAPI to be available.
38
+
39
+ Calls to context.execute() here emit the given string to the
40
+ script output.
41
+
42
+ """
43
+ url = config.get_main_option("sqlalchemy.url")
44
+ context.configure(
45
+ url=url,
46
+ target_metadata=target_metadata,
47
+ literal_binds=True,
48
+ dialect_opts={"paramstyle": "named"},
49
+ )
50
+
51
+ with context.begin_transaction():
52
+ context.run_migrations()
53
+
54
+
55
+ def run_migrations_online() -> None:
56
+ """Run migrations in 'online' mode.
57
+
58
+ In this scenario we need to create an Engine
59
+ and associate a connection with the context.
60
+
61
+ """
62
+ connectable = engine_from_config(
63
+ config.get_section(config.config_ini_section, {}),
64
+ prefix="sqlalchemy.",
65
+ poolclass=pool.NullPool,
66
+ )
67
+
68
+ with connectable.connect() as connection:
69
+ context.configure(connection=connection, target_metadata=target_metadata)
70
+
71
+ with context.begin_transaction():
72
+ context.run_migrations()
73
+
74
+
75
+ if context.is_offline_mode():
76
+ run_migrations_offline()
77
+ else:
78
+ run_migrations_online()
alembic/script.py.mako ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """${message}
2
+
3
+ Revision ID: ${up_revision}
4
+ Revises: ${down_revision | comma,n}
5
+ Create Date: ${create_date}
6
+
7
+ """
8
+ from typing import Sequence, Union
9
+
10
+ from alembic import op
11
+ import sqlalchemy as sa
12
+ import sqlmodel.sql.sqltypes
13
+ ${imports if imports else ""}
14
+
15
+ # revision identifiers, used by Alembic.
16
+ revision: str = ${repr(up_revision)}
17
+ down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
18
+ branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
19
+ depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
20
+
21
+
22
+ def upgrade() -> None:
23
+ """Upgrade schema."""
24
+ ${upgrades if upgrades else "pass"}
25
+
26
+
27
+ def downgrade() -> None:
28
+ """Downgrade schema."""
29
+ ${downgrades if downgrades else "pass"}
alembic/versions/1de57edd5ce6_added_emotion_date_in_emotion_log.py ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """added emotion date in emotion_log
2
+
3
+ Revision ID: 1de57edd5ce6
4
+ Revises: fe1c25d91325
5
+ Create Date: 2025-11-10 14:18:28.647865
6
+
7
+ """
8
+ from typing import Sequence, Union
9
+
10
+ from alembic import op
11
+ import sqlalchemy as sa
12
+ import sqlmodel.sql.sqltypes
13
+
14
+
15
+ # revision identifiers, used by Alembic.
16
+ revision: str = '1de57edd5ce6'
17
+ down_revision: Union[str, Sequence[str], None] = 'fe1c25d91325'
18
+ branch_labels: Union[str, Sequence[str], None] = None
19
+ depends_on: Union[str, Sequence[str], None] = None
20
+
21
+
22
+ def upgrade() -> None:
23
+ """Upgrade schema."""
24
+ # ### commands auto generated by Alembic - please adjust! ###
25
+ op.add_column('emotion_logs', sa.Column('emotion_date', sa.Date(), nullable=False))
26
+ # ### end Alembic commands ###
27
+
28
+
29
+ def downgrade() -> None:
30
+ """Downgrade schema."""
31
+ # ### commands auto generated by Alembic - please adjust! ###
32
+ op.drop_column('emotion_logs', 'emotion_date')
33
+ # ### end Alembic commands ###
alembic/versions/6d44744180a1_initial_migration.py ADDED
@@ -0,0 +1,44 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Initial Migration
2
+
3
+ Revision ID: 6d44744180a1
4
+ Revises:
5
+ Create Date: 2025-11-10 13:59:02.212280
6
+
7
+ """
8
+ from typing import Sequence, Union
9
+
10
+ from alembic import op
11
+ import sqlalchemy as sa
12
+ import sqlmodel.sql.sqltypes
13
+ from sqlalchemy.dialects import postgresql
14
+
15
+ # revision identifiers, used by Alembic.
16
+ revision: str = '6d44744180a1'
17
+ down_revision: Union[str, Sequence[str], None] = None
18
+ branch_labels: Union[str, Sequence[str], None] = None
19
+ depends_on: Union[str, Sequence[str], None] = None
20
+
21
+
22
+ def upgrade() -> None:
23
+ """Upgrade schema."""
24
+ # ### commands auto generated by Alembic - please adjust! ###
25
+ op.drop_table('otp_verification')
26
+ # ### end Alembic commands ###
27
+
28
+
29
+ def downgrade() -> None:
30
+ """Downgrade schema."""
31
+ # ### commands auto generated by Alembic - please adjust! ###
32
+ op.create_table('otp_verification',
33
+ sa.Column('id', sa.UUID(), autoincrement=False, nullable=False),
34
+ sa.Column('email', sa.VARCHAR(), autoincrement=False, nullable=False),
35
+ sa.Column('otp', sa.VARCHAR(), autoincrement=False, nullable=False),
36
+ sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=False),
37
+ sa.Column('expires_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=False),
38
+ sa.Column('is_verified', sa.BOOLEAN(), autoincrement=False, nullable=False),
39
+ sa.Column('temp_name', sa.VARCHAR(), autoincrement=False, nullable=True),
40
+ sa.Column('temp_password', sa.VARCHAR(), autoincrement=False, nullable=True),
41
+ sa.PrimaryKeyConstraint('id', name=op.f('otp_verification_pkey')),
42
+ sa.UniqueConstraint('email', name=op.f('otp_verification_email_key'), postgresql_include=[], postgresql_nulls_not_distinct=False)
43
+ )
44
+ # ### end Alembic commands ###
alembic/versions/8c5fd0627d03_alter_users_verifications_columns.py ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """alter:users/verifications columns..
2
+
3
+ Revision ID: 8c5fd0627d03
4
+ Revises: c5d0592f9dbd
5
+ Create Date: 2025-11-10 14:55:48.357288
6
+
7
+ """
8
+ from typing import Sequence, Union
9
+
10
+ from alembic import op
11
+ import sqlalchemy as sa
12
+ import sqlmodel.sql.sqltypes
13
+
14
+
15
+ # revision identifiers, used by Alembic.
16
+ revision: str = '8c5fd0627d03'
17
+ down_revision: Union[str, Sequence[str], None] = 'c5d0592f9dbd'
18
+ branch_labels: Union[str, Sequence[str], None] = None
19
+ depends_on: Union[str, Sequence[str], None] = None
20
+
21
+
22
+ def upgrade() -> None:
23
+ """Upgrade schema."""
24
+ # ### commands auto generated by Alembic - please adjust! ###
25
+ op.drop_column('emotion_logs', 'emotion_date')
26
+ op.add_column('users', sa.Column('is_verified', sa.Boolean(), server_default='false', nullable=False))
27
+ op.add_column('users', sa.Column('verification_token', sqlmodel.sql.sqltypes.AutoString(), nullable=True))
28
+ op.add_column('users', sa.Column('verification_expires_at', sa.DateTime(), nullable=True))
29
+ # ### end Alembic commands ###
30
+
31
+
32
+ def downgrade() -> None:
33
+ """Downgrade schema."""
34
+ # ### commands auto generated by Alembic - please adjust! ###
35
+ op.drop_column('users', 'verification_expires_at')
36
+ op.drop_column('users', 'verification_token')
37
+ op.drop_column('users', 'is_verified')
38
+ op.add_column('emotion_logs', sa.Column('emotion_date', sa.DATE(), autoincrement=False, nullable=True))
39
+ # ### end Alembic commands ###
alembic/versions/c3992c66f8e6_updated_emotion_log_date.py ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """updated emotion log date
2
+
3
+ Revision ID: c3992c66f8e6
4
+ Revises: 1de57edd5ce6
5
+ Create Date: 2025-11-10 14:38:01.581906
6
+
7
+ """
8
+ from typing import Sequence, Union
9
+
10
+ from alembic import op
11
+ import sqlalchemy as sa
12
+ import sqlmodel.sql.sqltypes
13
+
14
+
15
+ # revision identifiers, used by Alembic.
16
+ revision: str = 'c3992c66f8e6'
17
+ down_revision: Union[str, Sequence[str], None] = '1de57edd5ce6'
18
+ branch_labels: Union[str, Sequence[str], None] = None
19
+ depends_on: Union[str, Sequence[str], None] = None
20
+
21
+
22
+ def upgrade() -> None:
23
+ """Upgrade schema."""
24
+ # ### commands auto generated by Alembic - please adjust! ###
25
+ op.add_column('emotion_logs', sa.Column('emotion_date', sa.Date(), nullable=True))
26
+ # ### end Alembic commands ###
27
+
28
+
29
+ def downgrade() -> None:
30
+ """Downgrade schema."""
31
+ # ### commands auto generated by Alembic - please adjust! ###
32
+ op.drop_column('emotion_logs', 'emotion_date')
33
+ # ### end Alembic commands ###
alembic/versions/c5d0592f9dbd_alter_users_verification_cols.py ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """alter:users/verification cols..
2
+
3
+ Revision ID: c5d0592f9dbd
4
+ Revises: c3992c66f8e6
5
+ Create Date: 2025-11-10 14:46:54.649132
6
+
7
+ """
8
+ from typing import Sequence, Union
9
+
10
+ from alembic import op
11
+ import sqlalchemy as sa
12
+ import sqlmodel.sql.sqltypes
13
+
14
+
15
+ # revision identifiers, used by Alembic.
16
+ revision: str = 'c5d0592f9dbd'
17
+ down_revision: Union[str, Sequence[str], None] = 'c3992c66f8e6'
18
+ branch_labels: Union[str, Sequence[str], None] = None
19
+ depends_on: Union[str, Sequence[str], None] = None
20
+
21
+
22
+ def upgrade() -> None:
23
+ """Upgrade schema."""
24
+ # ### commands auto generated by Alembic - please adjust! ###
25
+ op.drop_column('emotion_logs', 'emotion_date')
26
+ op.add_column('users', sa.Column('is_verified', sa.Boolean(), nullable=False))
27
+ op.add_column('users', sa.Column('verification_token', sqlmodel.sql.sqltypes.AutoString(), nullable=True))
28
+ op.add_column('users', sa.Column('verification_expires_at', sa.DateTime(), nullable=True))
29
+ # ### end Alembic commands ###
30
+
31
+
32
+ def downgrade() -> None:
33
+ """Downgrade schema."""
34
+ # ### commands auto generated by Alembic - please adjust! ###
35
+ op.drop_column('users', 'verification_expires_at')
36
+ op.drop_column('users', 'verification_token')
37
+ op.drop_column('users', 'is_verified')
38
+ op.add_column('emotion_logs', sa.Column('emotion_date', sa.DATE(), autoincrement=False, nullable=True))
39
+ # ### end Alembic commands ###
alembic/versions/fe1c25d91325_updated_emotion_s_ge_value.py ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """updated emotion's ge value
2
+
3
+ Revision ID: fe1c25d91325
4
+ Revises: 6d44744180a1
5
+ Create Date: 2025-11-10 14:04:59.908027
6
+
7
+ """
8
+ from typing import Sequence, Union
9
+
10
+ from alembic import op
11
+ import sqlalchemy as sa
12
+ import sqlmodel.sql.sqltypes
13
+
14
+
15
+ # revision identifiers, used by Alembic.
16
+ revision: str = 'fe1c25d91325'
17
+ down_revision: Union[str, Sequence[str], None] = '6d44744180a1'
18
+ branch_labels: Union[str, Sequence[str], None] = None
19
+ depends_on: Union[str, Sequence[str], None] = None
20
+
21
+
22
+ def upgrade() -> None:
23
+ """Upgrade schema."""
24
+ # ### commands auto generated by Alembic - please adjust! ###
25
+ pass
26
+ # ### end Alembic commands ###
27
+
28
+
29
+ def downgrade() -> None:
30
+ """Downgrade schema."""
31
+ # ### commands auto generated by Alembic - please adjust! ###
32
+ pass
33
+ # ### end Alembic commands ###
requirements/dev.txt CHANGED
@@ -1,11 +1,15 @@
 
1
  annotated-doc==0.0.3
2
  annotated-types==0.7.0
3
  anyio==4.11.0
 
4
  click==8.3.0
5
  fastapi==0.121.0
6
  greenlet==3.2.4
7
  h11==0.16.0
8
  idna==3.11
 
 
9
  psycopg2-binary==2.9.11
10
  pydantic==1.10.24
11
  pydantic_core==2.41.4
 
1
+ alembic==1.17.1
2
  annotated-doc==0.0.3
3
  annotated-types==0.7.0
4
  anyio==4.11.0
5
+ asyncpg==0.30.0
6
  click==8.3.0
7
  fastapi==0.121.0
8
  greenlet==3.2.4
9
  h11==0.16.0
10
  idna==3.11
11
+ Mako==1.3.10
12
+ MarkupSafe==3.0.3
13
  psycopg2-binary==2.9.11
14
  pydantic==1.10.24
15
  pydantic_core==2.41.4
src/core/__init__.py CHANGED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ from src.auth import models as auth_models
2
+ from src.chatbot import models as chatbot_models
3
+ from src.core import models as core_models
4
+ from src.feed import models as feed_models
5
+ from src.home import models as home_models
6
+ from src.profile import models as profile_models
src/core/config.py CHANGED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+
3
+ from dotenv import load_dotenv
4
+
5
+ load_dotenv()
6
+
7
+
8
class Settings:
    """Application settings read from environment variables.

    Values are captured once at class-definition time; load_dotenv() is
    called at the top of this module so a local .env file is honoured.
    """

    # Sync database URL (may be None if the variable is unset; engine
    # creation downstream will then fail fast).
    DATABASE_URL: str = os.getenv("DATABASE_URL")
    # Deployment environment name; defaults to local development.
    ENV: str = os.getenv("ENV", "development")
    # Fixed annotation: the expression evaluates to a bool, but this was
    # previously annotated as `str`.
    DEBUG: bool = os.getenv("DEBUG", "false").lower() == "true"
    # Async database URL for the asyncpg engine (may be None if unset).
    ASYNC_DATABASE_URL: str = os.getenv("ASYNC_DATABASE_URL")


settings = Settings()
src/core/database.py CHANGED
@@ -1,20 +1,37 @@
1
- import os
2
 
3
  from dotenv import load_dotenv
 
4
  from sqlmodel import SQLModel, create_engine
 
5
 
6
- from src.core import models as core_models
7
- from src.feed import models as feed_models
8
 
9
  load_dotenv()
10
 
11
- engine = create_engine(os.getenv("DATABASE_URL"), echo=True)
 
 
 
 
 
 
 
 
 
 
12
 
13
 
14
  def init_db():
15
  SQLModel.metadata.create_all(engine)
16
 
17
 
 
 
 
 
 
18
  if __name__ == "__main__":
19
  print("Table creating")
20
  init_db()
 
1
from typing import AsyncGenerator

from dotenv import load_dotenv
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine
from sqlmodel import SQLModel, create_engine
from sqlmodel.ext.asyncio.session import AsyncSession

# Imported for side effects only: executing src.core imports every model
# module, so SQLModel.metadata knows about all tables before create_all.
# (Replaces the previous `from src.core import *`, which had the same side
# effect but also polluted this module's namespace.)
import src.core  # noqa: F401
from src.core.config import settings

load_dotenv()

# Sync engine (psycopg2). echo=True is temporary for debugging; set it to
# False in production.
engine = create_engine(settings.DATABASE_URL, echo=True)

# Async engine (asyncpg) used by the request-scoped sessions below.
# NOTE(review): ssl is hard-coded on — presumably required by the hosted
# Postgres instance; confirm it doesn't break local development.
async_engine = create_async_engine(
    url=settings.ASYNC_DATABASE_URL, future=True, connect_args={"ssl": True}
)

# Factory for AsyncSession objects; expire_on_commit=False keeps returned
# objects usable after commit without a refresh round-trip.
async_session = async_sessionmaker(
    class_=AsyncSession, bind=async_engine, expire_on_commit=False
)


def init_db():
    """Create all tables known to SQLModel.metadata using the sync engine."""
    SQLModel.metadata.create_all(engine)


async def get_async_session() -> AsyncGenerator[AsyncSession, None]:
    """FastAPI dependency yielding a request-scoped async session."""
    async with async_session() as session:
        yield session


if __name__ == "__main__":
    print("Table creating")
    init_db()
src/core/models.py CHANGED
@@ -20,6 +20,11 @@ class Users(SQLModel, table=True):
20
  email_id: str = Field(unique=True, nullable=False)
21
  password: str = Field(nullable=False)
22
  user_name: str = Field(nullable=False)
 
 
 
 
 
23
  dob: Optional[date] = None
24
  address: Optional[str] = None
25
  profile_picture: Optional[str] = None
@@ -61,11 +66,11 @@ class EmotionLogs(SQLModel, table=True):
61
  __tablename__ = "emotion_logs"
62
  __table_args__ = (
63
  UniqueConstraint("user_id", "log_date"),
64
- CheckConstraint("morning_emotion BETWEEN 1 AND 10 or morning_emotion IS NULL"),
65
- CheckConstraint("evening_emotion BETWEEN 1 AND 10 or evening_emotion IS NULL"),
66
  )
67
  id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True)
68
  user_id: uuid.UUID = Field(foreign_key="users.id", nullable=False)
69
- morning_emotion: Optional[int] = Field(default=None, ge=1, le=10)
70
- evening_emotion: Optional[int] = Field(default=None, ge=1, le=10)
71
  log_date: date = Field(default_factory=date.today)
 
20
  email_id: str = Field(unique=True, nullable=False)
21
  password: str = Field(nullable=False)
22
  user_name: str = Field(nullable=False)
23
+ is_verified: bool = Field(
24
+ default=False, sa_column_kwargs={"server_default": "false"}
25
+ )
26
+ verification_token: Optional[str] = None
27
+ verification_expires_at: Optional[datetime] = None
28
  dob: Optional[date] = None
29
  address: Optional[str] = None
30
  profile_picture: Optional[str] = None
 
66
  __tablename__ = "emotion_logs"
67
  __table_args__ = (
68
  UniqueConstraint("user_id", "log_date"),
69
+ CheckConstraint("morning_emotion BETWEEN 1 AND 7 or morning_emotion IS NULL"),
70
+ CheckConstraint("evening_emotion BETWEEN 1 AND 7 or evening_emotion IS NULL"),
71
  )
72
  id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True)
73
  user_id: uuid.UUID = Field(foreign_key="users.id", nullable=False)
74
+ morning_emotion: Optional[int] = Field(default=None, ge=1, le=7)
75
+ evening_emotion: Optional[int] = Field(default=None, ge=1, le=7)
76
  log_date: date = Field(default_factory=date.today)
src/home/router.py CHANGED
@@ -1,7 +1,7 @@
1
  from fastapi import APIRouter, Depends, HTTPException
2
- from sqlmodel import Session
3
 
4
- from src.core.database import engine
5
 
6
  from .schemas import BaseResponse, EmotionLogCreate
7
  from .service import add_or_update_emotion, get_emotions, get_home_data
@@ -9,25 +9,22 @@ from .service import add_or_update_emotion, get_emotions, get_home_data
9
  router = APIRouter(prefix="/home", tags=["Home"])
10
 
11
 
12
- def get_session():
13
- with Session(engine) as session:
14
- yield session
15
-
16
-
17
  @router.get("/{user_id}", response_model=BaseResponse)
18
- def fetch_home_data(user_id: str, session: Session = Depends(get_session)):
 
 
19
  try:
20
- data = get_home_data(user_id, session)
21
  return {"code": 200, "data": data}
22
  except ValueError as e:
23
  raise HTTPException(status_code=404, detail=str(e))
24
 
25
 
26
  @router.post("/emotion", response_model=BaseResponse)
27
- def create_or_update_emotion(
28
- data: EmotionLogCreate, session: Session = Depends(get_session)
29
  ):
30
- record = add_or_update_emotion(data, session)
31
  return {
32
  "code": 200,
33
  "data": {
@@ -39,6 +36,8 @@ def create_or_update_emotion(
39
 
40
 
41
  @router.get("/emotion/{user_id}", response_model=BaseResponse)
42
- def get_user_emotions(user_id: str, session: Session = Depends(get_session)):
43
- data = get_emotions(user_id, session)
 
 
44
  return {"code": 200, "data": data}
 
1
  from fastapi import APIRouter, Depends, HTTPException
2
+ from sqlmodel.ext.asyncio.session import AsyncSession
3
 
4
+ from src.core.database import get_async_session
5
 
6
  from .schemas import BaseResponse, EmotionLogCreate
7
  from .service import add_or_update_emotion, get_emotions, get_home_data
 
9
  router = APIRouter(prefix="/home", tags=["Home"])
10
 
11
 
 
 
 
 
 
12
  @router.get("/{user_id}", response_model=BaseResponse)
13
+ async def fetch_home_data(
14
+ user_id: str, session: AsyncSession = Depends(get_async_session)
15
+ ):
16
  try:
17
+ data = await get_home_data(user_id, session)
18
  return {"code": 200, "data": data}
19
  except ValueError as e:
20
  raise HTTPException(status_code=404, detail=str(e))
21
 
22
 
23
  @router.post("/emotion", response_model=BaseResponse)
24
+ async def create_or_update_emotion(
25
+ data: EmotionLogCreate, session: AsyncSession = Depends(get_async_session)
26
  ):
27
+ record = await add_or_update_emotion(data, session)
28
  return {
29
  "code": 200,
30
  "data": {
 
36
 
37
 
38
  @router.get("/emotion/{user_id}", response_model=BaseResponse)
39
+ async def get_user_emotions(
40
+ user_id: str, session: AsyncSession = Depends(get_async_session)
41
+ ):
42
+ data = await get_emotions(user_id, session)
43
  return {"code": 200, "data": data}
src/home/service.py CHANGED
@@ -1,6 +1,7 @@
1
  from datetime import date, timedelta
2
 
3
- from sqlmodel import Session, select
 
4
 
5
  from src.core.models import EmotionLogs, Users
6
 
@@ -9,18 +10,20 @@ from .schemas import EmotionLogCreate, EmotionLogResponse, HomeResponseData
9
  PHILOSOPHY_TEXT = "Your mind is your greatest asset — train it daily."
10
 
11
 
12
- def get_home_data(user_id: str, session: Session) -> HomeResponseData:
13
- user = session.exec(select(Users).where(Users.id == user_id)).first()
 
14
  if not user:
15
  raise ValueError("User not found")
16
 
17
  seven_days_ago = date.today() - timedelta(days=7)
18
- emotion_logs = session.exec(
19
  select(EmotionLogs)
20
  .where(EmotionLogs.user_id == user_id)
21
  .where(EmotionLogs.log_date >= seven_days_ago)
22
  .order_by(EmotionLogs.log_date)
23
- ).all()
 
24
 
25
  emotion_responses = [
26
  EmotionLogResponse(
@@ -39,12 +42,13 @@ def get_home_data(user_id: str, session: Session) -> HomeResponseData:
39
  )
40
 
41
 
42
- def add_or_update_emotion(data: EmotionLogCreate, session: Session):
43
- existing_log = session.exec(
44
  select(EmotionLogs)
45
  .where(EmotionLogs.user_id == data.user_id)
46
  .where(EmotionLogs.log_date == data.log_date)
47
- ).first()
 
48
 
49
  if existing_log:
50
  if data.morning_emotion is not None:
@@ -60,17 +64,18 @@ def add_or_update_emotion(data: EmotionLogCreate, session: Session):
60
  )
61
  session.add(new_log)
62
 
63
- session.commit()
64
- session.refresh(existing_log or new_log)
65
  return existing_log or new_log
66
 
67
 
68
- def get_emotions(user_id: str, session: Session):
69
- logs = session.exec(
70
  select(EmotionLogs)
71
  .where(EmotionLogs.user_id == user_id)
72
  .order_by(EmotionLogs.log_date.desc())
73
- ).all()
 
74
 
75
  return [
76
  EmotionLogResponse(
 
1
  from datetime import date, timedelta
2
 
3
+ from sqlmodel import select
4
+ from sqlmodel.ext.asyncio.session import AsyncSession
5
 
6
  from src.core.models import EmotionLogs, Users
7
 
 
10
  PHILOSOPHY_TEXT = "Your mind is your greatest asset — train it daily."
11
 
12
 
13
+ async def get_home_data(user_id: str, session: AsyncSession) -> HomeResponseData:
14
+ result = await session.exec(select(Users).where(Users.id == user_id))
15
+ user = result.first()
16
  if not user:
17
  raise ValueError("User not found")
18
 
19
  seven_days_ago = date.today() - timedelta(days=7)
20
+ result = await session.exec(
21
  select(EmotionLogs)
22
  .where(EmotionLogs.user_id == user_id)
23
  .where(EmotionLogs.log_date >= seven_days_ago)
24
  .order_by(EmotionLogs.log_date)
25
+ )
26
+ emotion_logs = result.all()
27
 
28
  emotion_responses = [
29
  EmotionLogResponse(
 
42
  )
43
 
44
 
45
+ async def add_or_update_emotion(data: EmotionLogCreate, session: AsyncSession):
46
+ result = await session.exec(
47
  select(EmotionLogs)
48
  .where(EmotionLogs.user_id == data.user_id)
49
  .where(EmotionLogs.log_date == data.log_date)
50
+ )
51
+ existing_log = result.first()
52
 
53
  if existing_log:
54
  if data.morning_emotion is not None:
 
64
  )
65
  session.add(new_log)
66
 
67
+ await session.commit()
68
+ await session.refresh(existing_log or new_log)
69
  return existing_log or new_log
70
 
71
 
72
+ async def get_emotions(user_id: str, session: AsyncSession):
73
+ result = await session.exec(
74
  select(EmotionLogs)
75
  .where(EmotionLogs.user_id == user_id)
76
  .order_by(EmotionLogs.log_date.desc())
77
+ )
78
+ logs = result.all()
79
 
80
  return [
81
  EmotionLogResponse(
src/main.py CHANGED
@@ -1,11 +1,14 @@
1
  from fastapi import FastAPI
2
 
 
3
  from src.home.router import router as home_router
4
 
5
  app = FastAPI(title="Yuvabe App API")
6
 
7
  app.include_router(home_router, prefix="/home", tags=["Home"])
8
 
 
 
9
 
10
  @app.get("/")
11
  def root():
 
1
from fastapi import FastAPI

from src.core.database import init_db
from src.home.router import router as home_router

app = FastAPI(title="Yuvabe App API")

# NOTE(review): home_router already declares prefix="/home" in
# src/home/router.py, and include_router adds prefix="/home" again, so the
# endpoints are served under /home/home/... — confirm the double prefix is
# intentional.
app.include_router(home_router, prefix="/home", tags=["Home"])

# Create any missing tables at import time via the sync engine.
# NOTE(review): the schema is also managed by Alembic migrations; running
# create_all here as well can mask un-applied migrations — confirm.
init_db()
11
+
12
 
13
  @app.get("/")
14
  def root():