diff --git a/ada-project-docs/wave_04.md b/ada-project-docs/wave_04.md
index 8fbf9e2ae..ae922d0cb 100644
--- a/ada-project-docs/wave_04.md
+++ b/ada-project-docs/wave_04.md
@@ -152,7 +152,7 @@ Press "Send" and see the Slack message come through!

### Modify `/tasks/<task_id>/mark_complete` to Call the Slack API

-Now that we've verified that we can successfully make a request to the Slack API, let's write some code to automatically call the Slack API from the API we've created!
+Now that we've verified that we can successfully make a request to the Slack API, let's write some code to automatically call the Slack API from the API we've created!
+
+# Given a task that has:
diff --git a/app/__init__.py b/app/__init__.py
index 3c581ceeb..4337c0f51 100644
--- a/app/__init__.py
+++ b/app/__init__.py
@@ -1,13 +1,18 @@
from flask import Flask
from .db import db, migrate
from .models import task, goal
+from .routes.task_routes import bp as tasks_bp
+from .routes.goal_routes import bp as goal_bp
+from flask_cors import CORS
import os

def create_app(config=None):
    app = Flask(__name__)
+    CORS(app)
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('SQLALCHEMY_DATABASE_URI')
+
    if config:
        # Merge `config` into the app's configuration
@@ -18,5 +23,7 @@ def create_app(config=None):
    migrate.init_app(app, db)

    # Register Blueprints here
-
+    app.register_blueprint(tasks_bp)
+    app.register_blueprint(goal_bp)
    return app
+
diff --git a/app/models/goal.py b/app/models/goal.py
index 44282656b..2e9b8acb4 100644
--- a/app/models/goal.py
+++ b/app/models/goal.py
@@ -1,5 +1,18 @@
-from sqlalchemy.orm import Mapped, mapped_column
+from sqlalchemy.orm import Mapped, mapped_column, relationship
from ..db import db

class Goal(db.Model):
+    __tablename__ = 'goal'
    id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
+    title: Mapped[str]
+    tasks: Mapped[list["Task"]] = relationship(back_populates="goal")
+
+    def to_dict(self):
+        return {
+            "id": self.id,
+            "title": self.title
+        }
+
+    @classmethod
+    def from_dict(cls, goal_data):
+        return cls(title=goal_data["title"])
\ No newline at end of file
diff --git a/app/models/task.py b/app/models/task.py
index 5d99666a4..03571a863 100644
--- a/app/models/task.py
+++ b/app/models/task.py
@@ -1,5 +1,32 @@
-from sqlalchemy.orm import Mapped, mapped_column
+from sqlalchemy.orm import Mapped, mapped_column, relationship
+from sqlalchemy import ForeignKey, DateTime
from ..db import db
+from datetime import datetime
+from typing import Optional
+
class Task(db.Model):
+    __tablename__ = 'task'
    id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
+    title: Mapped[str]
+    description: Mapped[str]
+    completed_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
+    goal_id: Mapped[Optional[int]] = mapped_column(ForeignKey("goal.id"))
+    goal: Mapped[Optional["Goal"]] = relationship(back_populates="tasks")
+
+    def to_dict(self):
+        return {
+            "id": self.id,
+            "title": self.title,
+            "description": self.description,
+            "is_complete": self.completed_at is not None
+        }
+
+    @classmethod
+    def from_dict(cls, task_data):
+        return cls(
+            title=task_data["title"],
+            description=task_data["description"],
+            completed_at=datetime.now() if task_data.get("is_complete") else None,
+            goal_id=task_data.get("goal_id", None)
+        )
\ No newline at end of file
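For reference while reading the route changes below, here is a minimal standalone sketch of the Slack call that the wave_04.md instructions above describe and that the `mark_complete` route in `app/routes/task_routes.py` automates further down in this diff. It assumes a bot token exported as `SLACK_BOT_TOKEN` and a `task-notifications` channel the bot has been invited to, matching the values used in that route; the task title is placeholder data.

```python
# Minimal sketch of the Slack call described above; mirrors the request made in
# the mark_complete route later in this diff. Assumes SLACK_BOT_TOKEN is set and
# the bot has been invited to the #task-notifications channel.
import os
import requests

response = requests.post(
    "https://slack.com/api/chat.postMessage",
    headers={"Authorization": f"Bearer {os.environ.get('SLACK_BOT_TOKEN')}"},
    json={
        "channel": "task-notifications",
        "text": "Someone just completed the task 'Go on my daily walk'",  # placeholder title
    },
)
print(response.json())  # Slack replies with {"ok": true, ...} on success
```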
+1,94 @@ -from flask import Blueprint \ No newline at end of file +from flask import Blueprint, request, abort, make_response +from app.models.goal import Goal +from app.models.task import Task +from .route_utilities import validate_model, create_model, get_models_with_filters +from ..db import db + +bp = Blueprint("goal_bp", __name__, url_prefix="/goals") + +@bp.post("") +def create_goal(): + request_body = request.get_json() + if "title" not in request_body or not request_body["title"]: + return {"details": "Invalid data"}, 400 + + goal_data = {"title": request_body.get("title")} + goal_dict, status_code = create_model(Goal, goal_data) + + return {"goal": goal_dict}, status_code + +@bp.get("") +def get_goal(): + + goals_response = get_models_with_filters(Goal) + return goals_response, 200 + + +@bp.get("/") +def get_one_goal(goal_id): + goal = validate_model(Goal, goal_id) + + return {"goal":goal.to_dict()} + + +@bp.put("") +def update_one_goal(goal_id): + goal = validate_model(Goal, goal_id) + request_body = request.get_json() + + goal.title = request_body["title"] + db.session.commit() + + return {"goal": goal.to_dict()}, 200 + + +@bp.delete("") +def delete_goal(goal_id): + goal = validate_model(Goal, goal_id) + + db.session.delete(goal) + db.session.commit() + + response = { + "details": f'Goal {goal.id} "{goal.title}" successfully deleted' + } + return response, 200 + +@bp.post("//tasks") +def associate_tasks_with_goal(goal_id): + goal = validate_model(Goal, goal_id) + request_body = request.get_json() + task_ids = request_body.get("task_ids", []) + + for task_id in task_ids: + task = validate_model(Task,task_id) + task.goal_id = goal.id + + db.session.commit() + + return { + "id": goal.id, + "task_ids": task_ids + }, 200 + +@bp.get("//tasks") +def get_tasks_of_goal(goal_id): + goal = validate_model(Goal, goal_id) + + tasks_data = [ + { + "id": task.id, + "goal_id": task.goal_id, + "title": task.title, + "description": task.description, + "is_complete": bool(task.completed_at) + } + for task in goal.tasks + ] + + return { + "id": goal.id, + "title": goal.title, + "tasks": tasks_data + }, 200 + diff --git a/app/routes/route_utilities.py b/app/routes/route_utilities.py new file mode 100644 index 000000000..c79fab03e --- /dev/null +++ b/app/routes/route_utilities.py @@ -0,0 +1,50 @@ +from flask import abort, make_response +from ..db import db + +def validate_model(cls, model_id): + try: + model_id = int(model_id) + except: + abort(make_response({"message":f"{cls.__name__} id {(model_id)} invalid"}, 400)) + + query = db.select(cls).where(cls.id == model_id) + model = db.session.scalar(query) + + if not model: + abort(make_response({ "message": f"{cls.__name__} {model_id} not found"}, 404)) + + return model + +def create_model(cls, model_data): + try: + new_model = cls.from_dict(model_data) + except (KeyError, ValueError): + response = {"details": "Invalid data"} + abort(make_response(response, 400)) + + db.session.add(new_model) + db.session.commit() + + return new_model.to_dict(), 201 + +def get_models_with_filters(cls, filters=None, sort_by=None, sort_order='asc'): + query = db.select(cls) + + if filters: + for attribute, value in filters.items(): + if hasattr(cls, attribute): + query = query.where(getattr(cls, attribute).ilike(f"%{value}%")) + if sort_by and hasattr(cls, sort_by): + sort_attr = getattr(cls, sort_by) + if sort_order == 'asc': + query = query.order_by(sort_attr.asc()) + elif sort_order == 'desc': + query = query.order_by(sort_attr.desc()) + else: + query = 
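The blueprint above, together with the shared helpers in `app/routes/route_utilities.py` (next file), can be exercised end to end with Flask's test client. The sketch below is illustrative only and not part of the PR; it assumes `SQLALCHEMY_DATABASE_URI` points at a database that has already been migrated with `flask db upgrade`, and all task/goal titles are placeholder data.

```python
# Illustrative only: drive the new /tasks and /goals endpoints with Flask's test
# client. Assumes the configured database already has the task and goal tables.
from app import create_app

app = create_app()
client = app.test_client()

# Create two tasks and one goal through the API.
task_ids = [
    client.post("/tasks", json={"title": title, "description": "demo"}).get_json()["task"]["id"]
    for title in ("Water the plants", "Walk the dog")
]
goal_id = client.post("/goals", json={"title": "Daily chores"}).get_json()["goal"]["id"]

# Attach the tasks to the goal, then read them back through the nested route.
client.post(f"/goals/{goal_id}/tasks", json={"task_ids": task_ids})
print(client.get(f"/goals/{goal_id}/tasks").get_json())
# -> {"id": ..., "title": "Daily chores", "tasks": [{"id": ..., "goal_id": ..., ...}, ...]}
```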
diff --git a/app/routes/route_utilities.py b/app/routes/route_utilities.py
new file mode 100644
index 000000000..c79fab03e
--- /dev/null
+++ b/app/routes/route_utilities.py
@@ -0,0 +1,50 @@
+from flask import abort, make_response
+from ..db import db
+
+def validate_model(cls, model_id):
+    try:
+        model_id = int(model_id)
+    except:
+        abort(make_response({"message": f"{cls.__name__} id {model_id} invalid"}, 400))
+
+    query = db.select(cls).where(cls.id == model_id)
+    model = db.session.scalar(query)
+
+    if not model:
+        abort(make_response({"message": f"{cls.__name__} {model_id} not found"}, 404))
+
+    return model
+
+def create_model(cls, model_data):
+    try:
+        new_model = cls.from_dict(model_data)
+    except (KeyError, ValueError):
+        response = {"details": "Invalid data"}
+        abort(make_response(response, 400))
+
+    db.session.add(new_model)
+    db.session.commit()
+
+    return new_model.to_dict(), 201
+
+def get_models_with_filters(cls, filters=None, sort_by=None, sort_order='asc'):
+    query = db.select(cls)
+
+    if filters:
+        for attribute, value in filters.items():
+            if hasattr(cls, attribute):
+                query = query.where(getattr(cls, attribute).ilike(f"%{value}%"))
+    if sort_by and hasattr(cls, sort_by):
+        sort_attr = getattr(cls, sort_by)
+        if sort_order == 'asc':
+            query = query.order_by(sort_attr.asc())
+        elif sort_order == 'desc':
+            query = query.order_by(sort_attr.desc())
+        else:
+            query = query.order_by(cls.id)
+
+
+    models = db.session.scalars(query.order_by(cls.id))
+    models_response = [model.to_dict() for model in models]
+
+    return models_response
\ No newline at end of file
diff --git a/app/routes/task_routes.py b/app/routes/task_routes.py
index 3aae38d49..8e7f49035 100644
@@ -1 +1,98 @@
-from flask import Blueprint
\ No newline at end of file
+from flask import Blueprint, abort, make_response, Response, request
+import requests
+from app.models.task import Task
+from datetime import datetime
+from sqlalchemy import asc, desc
+from .route_utilities import validate_model, create_model, get_models_with_filters
+from ..db import db
+import os
+
+
+bp = Blueprint("tasks_bp", __name__, url_prefix="/tasks")
+
+@bp.post("")
+def create_task():
+    request_body = request.get_json()
+
+    if "title" not in request_body or "description" not in request_body:
+        return {"details": "Invalid data"}, 400
+
+    task_data = {"title": request_body.get("title"),
+                 "description": request_body.get("description"),
+                 "completed_at": request_body.get("completed_at")}
+
+    task_dict, status_code = create_model(Task, task_data)
+    return {"task": task_dict}, status_code
+
+@bp.get("")
+def get_task():
+    query_params = request.args.to_dict()
+    sort_order = query_params.pop('sort', 'asc')
+    sort_by = query_params.pop('sort_by', 'title')
+    tasks_response = get_models_with_filters(Task, sort_by=sort_by, sort_order=sort_order)
+    return tasks_response, 200
+
+
+@bp.get("/<task_id>")
+def get_one_task(task_id):
+    task = validate_model(Task, task_id)
+    response = {"task": task.to_dict()}
+
+    if task.goal_id is not None:
+        response["task"]["goal_id"] = task.goal_id
+    return response, 200
+
+@bp.put("/<task_id>")
+def update_task(task_id):
+    task = validate_model(Task, task_id)
+    request_body = request.get_json()
+    task.title = request_body["title"]
+    task.description = request_body["description"]
+
+    db.session.commit()
+
+    return {"task": task.to_dict()}, 200
+
+@bp.delete("/<task_id>")
+def delete_task(task_id):
+    task = validate_model(Task, task_id)
+    db.session.delete(task)
+    db.session.commit()
+
+    response = {
+        "details": f'Task {task.id} "{task.title}" successfully deleted'
+    }
+
+    return response, 200
+
+
+@bp.patch("/<task_id>/mark_complete")
+def mark_task_complete(task_id):
+    task = validate_model(Task, task_id)
+
+    task.completed_at = datetime.now()
+    db.session.commit()
+
+    slack_token = os.environ.get("SLACK_BOT_TOKEN")
+    url = "https://slack.com/api/chat.postMessage"
+    headers = {"Authorization": f"Bearer {slack_token}"}
+    request_body = {
+        "channel": "task-notifications",
+        "text": f"Someone just completed the task '{task.title}'"
+    }
+    requests.post(url, json=request_body, headers=headers)
+
+    return {"task": task.to_dict()}, 200
+
+
+@bp.patch("/<task_id>/mark_incomplete")
+def mark_task_incomplete(task_id):
+
+    task = validate_model(Task, task_id)
+
+    task.completed_at = None
+    db.session.commit()
+
+    return {"task": task.to_dict()}
+
\ No newline at end of file
diff --git a/migrations/README b/migrations/README
new file mode 100644
index 000000000..0e0484415
--- /dev/null
+++ b/migrations/README
@@ -0,0 +1 @@
+Single-database configuration for Flask.
diff --git a/migrations/alembic.ini b/migrations/alembic.ini
new file mode 100644
index 000000000..ec9d45c26
--- /dev/null
+++ b/migrations/alembic.ini
@@ -0,0 +1,50 @@
+# A generic, single database configuration.
+ +[alembic] +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic,flask_migrate + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[logger_flask_migrate] +level = INFO +handlers = +qualname = flask_migrate + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/migrations/env.py b/migrations/env.py new file mode 100644 index 000000000..4c9709271 --- /dev/null +++ b/migrations/env.py @@ -0,0 +1,113 @@ +import logging +from logging.config import fileConfig + +from flask import current_app + +from alembic import context + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +fileConfig(config.config_file_name) +logger = logging.getLogger('alembic.env') + + +def get_engine(): + try: + # this works with Flask-SQLAlchemy<3 and Alchemical + return current_app.extensions['migrate'].db.get_engine() + except (TypeError, AttributeError): + # this works with Flask-SQLAlchemy>=3 + return current_app.extensions['migrate'].db.engine + + +def get_engine_url(): + try: + return get_engine().url.render_as_string(hide_password=False).replace( + '%', '%%') + except AttributeError: + return str(get_engine().url).replace('%', '%%') + + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +config.set_main_option('sqlalchemy.url', get_engine_url()) +target_db = current_app.extensions['migrate'].db + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def get_metadata(): + if hasattr(target_db, 'metadatas'): + return target_db.metadatas[None] + return target_db.metadata + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, target_metadata=get_metadata(), literal_binds=True + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + + # this callback is used to prevent an auto-migration from being generated + # when there are no changes to the schema + # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html + def process_revision_directives(context, revision, directives): + if getattr(config.cmd_opts, 'autogenerate', False): + script = directives[0] + if script.upgrade_ops.is_empty(): + directives[:] = [] + logger.info('No changes in schema detected.') + + conf_args = current_app.extensions['migrate'].configure_args + if conf_args.get("process_revision_directives") is None: + conf_args["process_revision_directives"] = process_revision_directives + + connectable = get_engine() + + with connectable.connect() as connection: + context.configure( + connection=connection, + target_metadata=get_metadata(), + **conf_args + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/migrations/script.py.mako b/migrations/script.py.mako new file mode 100644 index 000000000..2c0156303 --- /dev/null +++ b/migrations/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/migrations/versions/149dabc5070a_.py b/migrations/versions/149dabc5070a_.py new file mode 100644 index 000000000..f67847718 --- /dev/null +++ b/migrations/versions/149dabc5070a_.py @@ -0,0 +1,34 @@ +"""empty message + +Revision ID: 149dabc5070a +Revises: 981aa984ae6a +Create Date: 2024-11-07 18:23:08.044333 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '149dabc5070a' +down_revision = '981aa984ae6a' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('task', schema=None) as batch_op: + batch_op.add_column(sa.Column('goal_id', sa.Integer(), nullable=True)) + batch_op.create_foreign_key(None, 'goal', ['goal_id'], ['id']) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('task', schema=None) as batch_op: + batch_op.drop_constraint(None, type_='foreignkey') + batch_op.drop_column('goal_id') + + # ### end Alembic commands ### diff --git a/migrations/versions/981aa984ae6a_.py b/migrations/versions/981aa984ae6a_.py new file mode 100644 index 000000000..bf7954783 --- /dev/null +++ b/migrations/versions/981aa984ae6a_.py @@ -0,0 +1,32 @@ +"""empty message + +Revision ID: 981aa984ae6a +Revises: ee241bad151e +Create Date: 2024-11-06 14:12:32.904332 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '981aa984ae6a' +down_revision = 'ee241bad151e' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('goal', schema=None) as batch_op: + batch_op.add_column(sa.Column('title', sa.String(), nullable=False)) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('goal', schema=None) as batch_op: + batch_op.drop_column('title') + + # ### end Alembic commands ### diff --git a/migrations/versions/ee241bad151e_.py b/migrations/versions/ee241bad151e_.py new file mode 100644 index 000000000..45c2d2283 --- /dev/null +++ b/migrations/versions/ee241bad151e_.py @@ -0,0 +1,39 @@ +"""empty message + +Revision ID: ee241bad151e +Revises: +Create Date: 2024-10-31 14:24:36.143577 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'ee241bad151e' +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('goal', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('task', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('title', sa.String(), nullable=False), + sa.Column('description', sa.String(), nullable=False), + sa.Column('completed_at', sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('task') + op.drop_table('goal') + # ### end Alembic commands ### diff --git a/tests/test_wave_01.py b/tests/test_wave_01.py index dca626d78..c53eefa93 100644 --- a/tests/test_wave_01.py +++ b/tests/test_wave_01.py @@ -2,7 +2,7 @@ import pytest -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_get_tasks_no_saved_tasks(client): # Act response = client.get("/tasks") @@ -13,7 +13,7 @@ def test_get_tasks_no_saved_tasks(client): assert response_body == [] -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_get_tasks_one_saved_tasks(client, one_task): # Act response = client.get("/tasks") @@ -32,7 +32,7 @@ def test_get_tasks_one_saved_tasks(client, one_task): ] -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_get_task(client, one_task): # Act response = client.get("/tasks/1") @@ -51,7 +51,7 @@ def test_get_task(client, one_task): } -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_get_task_not_found(client): # Act response = client.get("/tasks/1") @@ -59,14 +59,11 @@ def test_get_task_not_found(client): # Assert assert response.status_code == 404 + assert response_body == {"message": "Task 1 not found"} + + - raise Exception("Complete test with assertion about response body") - # ***************************************************************** - # **Complete test with assertion about response body*************** - # ***************************************************************** - - -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_create_task(client): # Act response = client.post("/tasks", json={ @@ -93,7 +90,7 @@ def test_create_task(client): assert new_task.completed_at == None 
-@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_update_task(client, one_task): # Act response = client.put("/tasks/1", json={ @@ -119,7 +116,7 @@ def test_update_task(client, one_task): assert task.completed_at == None -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_update_task_not_found(client): # Act response = client.put("/tasks/1", json={ @@ -130,14 +127,10 @@ def test_update_task_not_found(client): # Assert assert response.status_code == 404 - - raise Exception("Complete test with assertion about response body") - # ***************************************************************** - # **Complete test with assertion about response body*************** - # ***************************************************************** + assert response_body == {"message": "Task 1 not found"} -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_delete_task(client, one_task): # Act response = client.delete("/tasks/1") @@ -152,7 +145,7 @@ def test_delete_task(client, one_task): assert Task.query.get(1) == None -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_delete_task_not_found(client): # Act response = client.delete("/tasks/1") @@ -160,16 +153,10 @@ def test_delete_task_not_found(client): # Assert assert response.status_code == 404 - - raise Exception("Complete test with assertion about response body") - # ***************************************************************** - # **Complete test with assertion about response body*************** - # ***************************************************************** - assert Task.query.all() == [] -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_create_task_must_contain_title(client): # Act response = client.post("/tasks", json={ @@ -186,7 +173,7 @@ def test_create_task_must_contain_title(client): assert Task.query.all() == [] -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_create_task_must_contain_description(client): # Act response = client.post("/tasks", json={ diff --git a/tests/test_wave_02.py b/tests/test_wave_02.py index a087e0909..651e3aebd 100644 --- a/tests/test_wave_02.py +++ b/tests/test_wave_02.py @@ -1,7 +1,7 @@ import pytest -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_get_tasks_sorted_asc(client, three_tasks): # Act response = client.get("/tasks?sort=asc") @@ -29,7 +29,7 @@ def test_get_tasks_sorted_asc(client, three_tasks): ] -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_get_tasks_sorted_desc(client, three_tasks): # Act response = client.get("/tasks?sort=desc") diff --git a/tests/test_wave_03.py b/tests/test_wave_03.py index 32d379822..07f4ce9ff 100644 --- a/tests/test_wave_03.py +++ b/tests/test_wave_03.py @@ -5,7 +5,7 @@ import pytest -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_mark_complete_on_incomplete_task(client, one_task): # Arrange """ @@ -42,7 +42,7 @@ def 
test_mark_complete_on_incomplete_task(client, one_task): assert Task.query.get(1).completed_at -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_mark_incomplete_on_complete_task(client, completed_task): # Act response = client.patch("/tasks/1/mark_incomplete") @@ -62,7 +62,7 @@ def test_mark_incomplete_on_complete_task(client, completed_task): assert Task.query.get(1).completed_at == None -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_mark_complete_on_completed_task(client, completed_task): # Arrange """ @@ -99,7 +99,7 @@ def test_mark_complete_on_completed_task(client, completed_task): assert Task.query.get(1).completed_at -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_mark_incomplete_on_incomplete_task(client, one_task): # Act response = client.patch("/tasks/1/mark_incomplete") @@ -119,7 +119,7 @@ def test_mark_incomplete_on_incomplete_task(client, one_task): assert Task.query.get(1).completed_at == None -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_mark_complete_missing_task(client): # Act response = client.patch("/tasks/1/mark_complete") @@ -127,14 +127,10 @@ def test_mark_complete_missing_task(client): # Assert assert response.status_code == 404 + assert response_body == {"message": "Task 1 not found"} - raise Exception("Complete test with assertion about response body") - # ***************************************************************** - # **Complete test with assertion about response body*************** - # ***************************************************************** - -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_mark_incomplete_missing_task(client): # Act response = client.patch("/tasks/1/mark_incomplete") @@ -142,8 +138,4 @@ def test_mark_incomplete_missing_task(client): # Assert assert response.status_code == 404 - - raise Exception("Complete test with assertion about response body") - # ***************************************************************** - # **Complete test with assertion about response body*************** - # ***************************************************************** + assert response_body == {"message": "Task 1 not found"} diff --git a/tests/test_wave_05.py b/tests/test_wave_05.py index aee7c52a1..a4b9b81a0 100644 --- a/tests/test_wave_05.py +++ b/tests/test_wave_05.py @@ -1,7 +1,7 @@ import pytest -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_get_goals_no_saved_goals(client): # Act response = client.get("/goals") @@ -12,7 +12,7 @@ def test_get_goals_no_saved_goals(client): assert response_body == [] -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_get_goals_one_saved_goal(client, one_goal): # Act response = client.get("/goals") @@ -29,7 +29,7 @@ def test_get_goals_one_saved_goal(client, one_goal): ] -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_get_goal(client, one_goal): # Act response = client.get("/goals/1") @@ -46,22 +46,16 @@ def test_get_goal(client, one_goal): } 
-@pytest.mark.skip(reason="test to be completed by student") +# @pytest.mark.skip(reason="test to be completed by student") def test_get_goal_not_found(client): pass # Act response = client.get("/goals/1") response_body = response.get_json() - - raise Exception("Complete test") - # Assert - # ---- Complete Test ---- - # assertion 1 goes here - # assertion 2 goes here - # ---- Complete Test ---- + assert response.status_code == 404 -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_create_goal(client): # Act response = client.post("/goals", json={ @@ -80,34 +74,38 @@ def test_create_goal(client): } -@pytest.mark.skip(reason="test to be completed by student") +# @pytest.mark.skip(reason="test to be completed by student") def test_update_goal(client, one_goal): - raise Exception("Complete test") - # Act - # ---- Complete Act Here ---- - # Assert - # ---- Complete Assertions Here ---- - # assertion 1 goes here - # assertion 2 goes here - # assertion 3 goes here - # ---- Complete Assertions Here ---- + response = client.put("/goals/1", json={ + "title": "Build a habit of going outside daily" + }) + response_body = response.get_json() + assert response.status_code == 200 + assert "goal" in response_body + assert response_body == { + "goal": { + "id": 1, + "title": "Build a habit of going outside daily", + + } + } -@pytest.mark.skip(reason="test to be completed by student") +# @pytest.mark.skip(reason="test to be completed by student") def test_update_goal_not_found(client): - raise Exception("Complete test") + # Act - # ---- Complete Act Here ---- + response = client.get("/goals/1") + response_body = response.get_json() # Assert - # ---- Complete Assertions Here ---- - # assertion 1 goes here - # assertion 2 goes here - # ---- Complete Assertions Here ---- + assert response.status_code == 404 + assert response_body == {"message": "Goal 1 not found"} + -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_delete_goal(client, one_goal): # Act response = client.delete("/goals/1") @@ -123,28 +121,19 @@ def test_delete_goal(client, one_goal): # Check that the goal was deleted response = client.get("/goals/1") assert response.status_code == 404 - - raise Exception("Complete test with assertion about response body") - # ***************************************************************** - # **Complete test with assertion about response body*************** - # ***************************************************************** + -@pytest.mark.skip(reason="test to be completed by student") +# @pytest.mark.skip(reason="test to be completed by student") def test_delete_goal_not_found(client): - raise Exception("Complete test") - - # Act - # ---- Complete Act Here ---- - - # Assert - # ---- Complete Assertions Here ---- - # assertion 1 goes here - # assertion 2 goes here - # ---- Complete Assertions Here ---- + + response = client.delete("/goals/1") + response_body = response.get_json() + assert response.status_code == 404 + assert response_body == {"message": "Goal 1 not found"} -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_create_goal_missing_title(client): # Act response = client.post("/goals", json={}) diff --git a/tests/test_wave_06.py b/tests/test_wave_06.py index 8afa4325e..4027a96e7 100644 --- a/tests/test_wave_06.py +++ b/tests/test_wave_06.py @@ -2,7 +2,7 @@ 
import pytest -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_post_task_ids_to_goal(client, one_goal, three_tasks): # Act response = client.post("/goals/1/tasks", json={ @@ -23,7 +23,7 @@ def test_post_task_ids_to_goal(client, one_goal, three_tasks): assert len(Goal.query.get(1).tasks) == 3 -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_post_task_ids_to_goal_already_with_goals(client, one_task_belongs_to_one_goal, three_tasks): # Act response = client.post("/goals/1/tasks", json={ @@ -42,7 +42,7 @@ def test_post_task_ids_to_goal_already_with_goals(client, one_task_belongs_to_on assert len(Goal.query.get(1).tasks) == 2 -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_get_tasks_for_specific_goal_no_goal(client): # Act response = client.get("/goals/1/tasks") @@ -50,14 +50,12 @@ def test_get_tasks_for_specific_goal_no_goal(client): # Assert assert response.status_code == 404 + assert response_body is not None + assert response_body["message"] == "Goal 1 not found" + - raise Exception("Complete test with assertion about response body") - # ***************************************************************** - # **Complete test with assertion about response body*************** - # ***************************************************************** - -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_get_tasks_for_specific_goal_no_tasks(client, one_goal): # Act response = client.get("/goals/1/tasks") @@ -74,7 +72,7 @@ def test_get_tasks_for_specific_goal_no_tasks(client, one_goal): } -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_get_tasks_for_specific_goal(client, one_task_belongs_to_one_goal): # Act response = client.get("/goals/1/tasks") @@ -99,7 +97,7 @@ def test_get_tasks_for_specific_goal(client, one_task_belongs_to_one_goal): } -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_get_task_includes_goal_id(client, one_task_belongs_to_one_goal): response = client.get("/tasks/1") response_body = response.get_json()