diff --git a/README.md b/README.md
index 85e1c0f69..481217c98 100644
--- a/README.md
+++ b/README.md
@@ -55,3 +55,14 @@ This project is designed to fulfill the features described in detail in each wav
 1. [Wave 6: Establishing a one-to-many relationship between two models](ada-project-docs/wave_06.md)
 1. [Wave 7: Deployment](ada-project-docs/wave_07.md)
 1. [Optional Enhancements](ada-project-docs/optional-enhancements.md)
+
+psql -U postgres postgres
+drop database task_list_api_development;
+create database task_list_api_development;
+drop database task_list_api_test;
+create database task_list_api_test;
+\q
+rm -rf migrations
+flask db init
+flask db migrate -m "Recreate model migrations"
+flask db upgrade
diff --git a/ada-project-docs/setup.md b/ada-project-docs/setup.md
index b3ee1840d..dc64381c5 100644
--- a/ada-project-docs/setup.md
+++ b/ada-project-docs/setup.md
@@ -61,7 +61,7 @@ SQLALCHEMY_TEST_DATABASE_URI=postgresql+psycopg2://postgres:postgres@localhost:5

 Run `$ flask db init`.

-**_After you make your first model in Wave 1_**, run the other commands `migrate` and `upgrade`.
+**_After you make your first model in Wave 1_**, run the other commands `$ flask db migrate` and `$ flask db upgrade`.

 ## Run `$ flask run` or `$ flask run --debug`

diff --git a/ada-project-docs/wave_01.md b/ada-project-docs/wave_01.md
index 30be86ad8..98c4e532e 100644
--- a/ada-project-docs/wave_01.md
+++ b/ada-project-docs/wave_01.md
@@ -40,8 +40,8 @@ Tasks should contain these attributes. **The tests require the following columns

 ### Tips

-- Pay attention to the exact shape of the expected JSON. Double-check nested data structures and the names of the keys for any mispellings.
-  - That said, remember that dictionaries do not have an implied order. This is still true in JSON with objects. When you make Postman requests, the order of the key/value pairings within the response JSON object does not need to match the order specified in this document. (The term "object" in JSON is analagous to "dictionary" in Python.)
+- Pay attention to the exact shape of the expected JSON. Double-check nested data structures and the names of the keys for any misspellings.
+  - That said, remember that dictionaries do not have an implied order. This is still true in JSON with objects. When you make Postman requests, the order of the key/value pairings within the response JSON object does not need to match the order specified in this document. (The term "object" in JSON is analogous to "dictionary" in Python.)
 - Use the tests in `tests/test_wave_01.py` to guide your implementation.
 - You may feel that there are missing tests and missing edge cases considered in this wave. This is intentional.
 - You have fulfilled wave 1 requirements if all of the wave 1 tests pass.
diff --git a/ada-project-docs/wave_02.md b/ada-project-docs/wave_02.md
index acc1dc0a4..1f3560e3c 100644
--- a/ada-project-docs/wave_02.md
+++ b/ada-project-docs/wave_02.md
@@ -10,7 +10,7 @@ The following are required routes for wave 2. Feel free to implement the routes

 ### Tips

-- Pay attention to the exact shape of the expected JSON. Double-check nested data structures and the names of the keys for any mispellings.
+- Pay attention to the exact shape of the expected JSON. Double-check nested data structures and the names of the keys for any misspellings.
 - Use the tests in `tests/test_wave_02.py` to guide your implementation.
 - You may feel that there are missing tests and missing edge cases considered in this wave. This is intentional.
 - You have fulfilled wave 2 requirements if all of the wave 2 tests pass.
diff --git a/ada-project-docs/wave_05.md b/ada-project-docs/wave_05.md
index bb601b0b5..70272f7ff 100644
--- a/ada-project-docs/wave_05.md
+++ b/ada-project-docs/wave_05.md
@@ -17,7 +17,7 @@ This wave requires more test writing.
 - The tests you need to write are scaffolded in the `test_wave_05.py` file.
 - These tests are currently skipped with `@pytest.mark.skip(reason="test to be completed by student")` and the function body has `pass` in it. Once you implement these tests you should remove the `skip` decorator and the `pass`.
 - For the tests you write, use the requirements in this document to guide your test writing.
-  - Pay attention to the exact shape of the expected JSON. Double-check nested data structures and the names of the keys for any mispellings.
+  - Pay attention to the exact shape of the expected JSON. Double-check nested data structures and the names of the keys for any misspellings.
 - You can model your tests off of the Wave 1 tests for Tasks.
 - Some tests use a [fixture](https://docs.pytest.org/en/6.2.x/fixture.html) named `one_goal` that is defined in `tests/conftest.py`. This fixture saves a specific goal to the test database.

diff --git a/ada-project-docs/wave_06.md b/ada-project-docs/wave_06.md
index 699738ba8..349805a69 100644
--- a/ada-project-docs/wave_06.md
+++ b/ada-project-docs/wave_06.md
@@ -18,7 +18,7 @@ Secondly, we should create our new route, `/goals/<goal_id>/tasks`, so that our

 - Use lesson materials and independent research to review how to set up a one-to-many relationship in Flask.
 - Remember to run `flask db migrate` and `flask db upgrade` whenever there is a change to the model.
-- Pay attention to the exact shape of the expected JSON. Double-check nested data structures and the names of the keys for any mispellings.
+- Pay attention to the exact shape of the expected JSON. Double-check nested data structures and the names of the keys for any misspellings.
 - Use the tests in `tests/test_wave_06.py` to guide your implementation.
 - Some tests use a fixture named `one_task_belongs_to_one_goal` that is defined in `tests/conftest.py`. This fixture saves a task and a goal to the test database, and uses SQLAlchemy to associate the goal and task together.
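The wave 6 tips above center on a goal-to-tasks one-to-many relationship, which the model changes below implement with SQLAlchemy 2.0 typed mappings. For reference, a minimal self-contained sketch of that pattern follows; it uses a plain `DeclarativeBase` only so the snippet runs on its own, whereas the project's real models in `app/models/` subclass Flask-SQLAlchemy's `db.Model`.

```python
# Minimal one-to-many sketch: one Goal has many Tasks, and each Task may
# optionally belong to one Goal. Standalone illustration only; the app's
# actual models are defined in app/models/goal.py and app/models/task.py.
from typing import Optional

from sqlalchemy import ForeignKey
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship


class Base(DeclarativeBase):
    pass


class Goal(Base):
    __tablename__ = "goal"

    id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
    title: Mapped[str]
    # Collection side: back_populates keeps goal.tasks and task.goal in sync.
    tasks: Mapped[list["Task"]] = relationship(back_populates="goal")


class Task(Base):
    __tablename__ = "task"

    id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
    title: Mapped[str]
    description: Mapped[str]
    completed_at: Mapped[Optional[str]]
    # Nullable foreign key: a task can exist without being assigned to a goal.
    goal_id: Mapped[Optional[int]] = mapped_column(ForeignKey("goal.id"))
    goal: Mapped[Optional["Goal"]] = relationship(back_populates="tasks")
```

The nullable `goal_id` foreign key is what allows a task to exist before it is assigned to a goal, which is the behavior the wave 6 routes and tests in the changes below rely on.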
diff --git a/app/__init__.py b/app/__init__.py index 3c581ceeb..f020f1309 100644 --- a/app/__init__.py +++ b/app/__init__.py @@ -1,11 +1,17 @@ from flask import Flask from .db import db, migrate -from .models import task, goal +from .models import goal, task +from .routes.task_routes import bp as tasks_bp +from .routes.goal_routes import bp as goals_bp import os +from flask_cors import CORS def create_app(config=None): app = Flask(__name__) + CORS(app) + app.config['CORS_HEADERS'] = 'Content-Type' + app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('SQLALCHEMY_DATABASE_URI') @@ -18,5 +24,8 @@ def create_app(config=None): migrate.init_app(app, db) # Register Blueprints here + app.register_blueprint(tasks_bp) + app.register_blueprint(goals_bp) + return app diff --git a/app/models/goal.py b/app/models/goal.py index 44282656b..49d37e535 100644 --- a/app/models/goal.py +++ b/app/models/goal.py @@ -1,5 +1,23 @@ -from sqlalchemy.orm import Mapped, mapped_column +from sqlalchemy.orm import Mapped, mapped_column, relationship from ..db import db class Goal(db.Model): id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True) + title: Mapped[str] + tasks: Mapped[list["Task"]] = relationship(back_populates="goal") + + + def to_dict(self): + return dict( + goal=dict( + id=self.id, + title=self.title + ) + ) + + # from JSON to model + @classmethod + def from_dict(cls, goal_data): + return cls( + title=goal_data["title"] + ) \ No newline at end of file diff --git a/app/models/task.py b/app/models/task.py index 5d99666a4..3281889a3 100644 --- a/app/models/task.py +++ b/app/models/task.py @@ -1,5 +1,47 @@ -from sqlalchemy.orm import Mapped, mapped_column +from sqlalchemy.orm import Mapped, mapped_column, relationship +from sqlalchemy import ForeignKey from ..db import db +from datetime import datetime +from typing import Optional class Task(db.Model): id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True) + title: Mapped[str] + description: Mapped[str] + completed_at: Mapped[Optional[str]] + goal_id: Mapped[Optional[int]] = mapped_column(ForeignKey("goal.id")) + goal: Mapped[Optional["Goal"]] = relationship(back_populates="tasks") + + + # from model to JSON + def to_dict(self): + if self.goal_id is None: + return dict( + id=self.id, + title=self.title, + description=self.description, + is_complete=self.completed_at!=None + ) + else: + return dict( + id=self.id, + title=self.title, + description=self.description, + is_complete=self.completed_at!=None, + goal_id=self.goal_id + ) + + # from JSON to model + @classmethod + def from_dict(cls, task_data): + return cls( + title=task_data["title"], + description=task_data["description"] + ) + + + + + + + diff --git a/app/routes/goal_routes.py b/app/routes/goal_routes.py index 3aae38d49..4a78d5aaa 100644 --- a/app/routes/goal_routes.py +++ b/app/routes/goal_routes.py @@ -1 +1,146 @@ -from flask import Blueprint \ No newline at end of file +from flask import Blueprint, request, Response, make_response, abort +from app.models.goal import Goal +from ..db import db +from ..models.task import Task +from ..routes.task_routes import validate_task + +bp = Blueprint("goals_bp", __name__, url_prefix="/goals" ) + +@bp.post("") +def create_goal(): + request_body = request.get_json() + + try: + new_goal = Goal.from_dict(request_body) + + except KeyError as e: + response = { + "details": "Invalid data" + } + abort(make_response(response, 400)) + + db.session.add(new_goal) + db.session.commit() + 
+ return new_goal.to_dict(), 201 + +@bp.get("") +def get_all_goals(): + query = db.select(Goal) + + title_param = request.args.get("title") + if title_param: + query = query.where(Goal.title.ilike(f"%{title_param}%")) + + sort_param = request.args.get("sort") + if sort_param: + if sort_param == "asc": + query = query.order_by(Goal.title.asc()) + elif sort_param == "desc": + query = query.order_by(Goal.title.desc()) + + query = query.order_by(Goal.id) + goals = db.session.scalars(query) + + goals_response = [] + for goal in goals: + goals_response.append( + { + "id": goal.id, + "title": goal.title + } + ) + + return goals_response + +@bp.get("/") +def get_one_goal(goal_id): + goal = validate_goal(goal_id) + + return goal.to_dict() + + +@bp.put("/") +def update_goal(goal_id): + goal = validate_goal(goal_id) + request_body = request.get_json() + + goal.title = request_body["title"] + + db.session.commit() + + response = { + "goal": { + "id": goal.id, + "title": goal.title + } + } + + return response, 200 + +@bp.delete("/") +def delete_goal(goal_id): + goal = validate_goal(goal_id) + db.session.delete(goal) + db.session.commit() + + response = { + "details": f"Goal {goal_id} \"{goal.title}\" successfully deleted" + } + + return response, 200 + +def validate_goal(goal_id): + try: + goal_id = int(goal_id) + except: + response = {"details": "Invalid data"} + + abort(make_response(response , 400)) + + query = db.select(Goal).where(Goal.id == goal_id) + goal = db.session.scalar(query) + + if not goal: + response = {"message": f"Goal {goal_id} not found"} + abort(make_response(response, 404)) + + return goal + +@bp.post("//tasks") +def assign_tasks_to_goal(goal_id): + goal = validate_goal(goal_id) + + request_body = request.get_json() + task_ids = request_body["task_ids"] + + for task_id in task_ids: + task = validate_task(task_id) + task.goal_id = goal_id + db.session.add(task) + + db.session.commit() + + response = { + "id": int(goal_id), + "task_ids": task_ids + } + + return response, 200 + +@bp.get("//tasks") +def get_goal_with_assigned_tasks(goal_id): + goal = validate_goal(goal_id) + + query = db.select(Task).where(Goal.id == goal_id) + + tasks = db.session.scalars(query) + + response = {} + response["id"] = goal.id + response["title"] = goal.title + response["tasks"] = [] + + response["tasks"] = [task.to_dict() for task in tasks] + + return response, 200 diff --git a/app/routes/task_routes.py b/app/routes/task_routes.py index 3aae38d49..d6ace6b50 100644 --- a/app/routes/task_routes.py +++ b/app/routes/task_routes.py @@ -1 +1,157 @@ -from flask import Blueprint \ No newline at end of file +from flask import Blueprint, request, Response, make_response, abort +from app.models.task import Task +from ..db import db +from datetime import datetime +import os +import requests +from os import environ + + +bp = Blueprint("tasks_bp", __name__, url_prefix="/tasks" ) + +@bp.post("") +def create_task(): + request_body = request.get_json() + + try: + new_task = Task.from_dict(request_body) + + except KeyError as e: + print(e) + response = { + "details": "Invalid data" + } + abort(make_response(response, 400)) + + db.session.add(new_task) + db.session.commit() + + return {"task": new_task.to_dict()}, 201 + +@bp.get("") +def get_all_tasks(): + query = db.select(Task) + + title_param = request.args.get("title") + if title_param: + query = query.where(Task.title.ilike(f"%{title_param}%")) + + description_param = request.args.get("description") + if description_param: + query = 
query.where(Task.description.ilike(f"%{description_param}%")) + + completed_at_param = request.args.get("completed_at") + if completed_at_param: + query = query.where(Task.completed_at.ilike(f"%{completed_at_param}%")) + + sort_param = request.args.get("sort") + if sort_param: + if sort_param == "asc": + query = query.order_by(Task.title.asc()) + elif sort_param == "desc": + query = query.order_by(Task.title.desc()) + + query = query.order_by(Task.id) + tasks = db.session.scalars(query) + + response = [task.to_dict() for task in tasks] + return response, 200 + +@bp.get("/") +def get_one_task(task_id): + task = validate_task(task_id) + + return {"task": task.to_dict()} + + +@bp.put("/") +def update_task(task_id): + task = validate_task(task_id) + request_body = request.get_json() + + task.title = request_body["title"] + task.description = request_body["description"] + db.session.commit() + + response = { + "task": task.to_dict() + } + + return response, 200 + +@bp.patch("//mark_complete") +def mark_complete(task_id): + task = validate_task(task_id) + + task.completed_at = datetime.utcnow() + db.session.commit() + + if os.environ.get('SEND_SLACK_NOTIFICATIONS') == "True": + send_slack_notification(task.title) + + response = { + "task": task.to_dict() + } + + return response, 200 + +def send_slack_notification(task_title): + request_data = { + "channel": "#api-test-channel", + "username": "LD bot", + "text": f"Someone just completed the task \"{task_title}\"" + } + + response = requests.post( + url="https://slack.com/api/chat.postMessage", + json=request_data, + headers={"Authorization": f"Bearer {environ.get('SLACK_WEB_CLIENT_TOKEN')}"}, + timeout=5 + ) + response.raise_for_status() + return response.json().get("ok", False) + +@bp.patch("//mark_incomplete") +def mark_incomplete(task_id): + task = validate_task(task_id) + + if task.completed_at != None: + task.completed_at = None + db.session.commit() + else: + pass + + response = { + "task": task.to_dict() + } + + return response, 200 + +@bp.delete("/") +def delete_task(task_id): + task = validate_task(task_id) + db.session.delete(task) + db.session.commit() + + response = { + "details": f"Task {task_id} \"{task.title}\" successfully deleted" + } + + return response, 200 + +def validate_task(task_id): + try: + task_id = int(task_id) + except: + response = {"details": "Invalid data"} + + abort(make_response(response , 400)) + + query = db.select(Task).where(Task.id == task_id) + task = db.session.scalar(query) + + if not task: + response = {"message": f"Task {task_id} not found"} + abort(make_response(response, 404)) + + return task diff --git a/migrations/README b/migrations/README new file mode 100644 index 000000000..0e0484415 --- /dev/null +++ b/migrations/README @@ -0,0 +1 @@ +Single-database configuration for Flask. diff --git a/migrations/alembic.ini b/migrations/alembic.ini new file mode 100644 index 000000000..ec9d45c26 --- /dev/null +++ b/migrations/alembic.ini @@ -0,0 +1,50 @@ +# A generic, single database configuration. 
+ +[alembic] +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic,flask_migrate + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[logger_flask_migrate] +level = INFO +handlers = +qualname = flask_migrate + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/migrations/env.py b/migrations/env.py new file mode 100644 index 000000000..4c9709271 --- /dev/null +++ b/migrations/env.py @@ -0,0 +1,113 @@ +import logging +from logging.config import fileConfig + +from flask import current_app + +from alembic import context + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +fileConfig(config.config_file_name) +logger = logging.getLogger('alembic.env') + + +def get_engine(): + try: + # this works with Flask-SQLAlchemy<3 and Alchemical + return current_app.extensions['migrate'].db.get_engine() + except (TypeError, AttributeError): + # this works with Flask-SQLAlchemy>=3 + return current_app.extensions['migrate'].db.engine + + +def get_engine_url(): + try: + return get_engine().url.render_as_string(hide_password=False).replace( + '%', '%%') + except AttributeError: + return str(get_engine().url).replace('%', '%%') + + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +config.set_main_option('sqlalchemy.url', get_engine_url()) +target_db = current_app.extensions['migrate'].db + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def get_metadata(): + if hasattr(target_db, 'metadatas'): + return target_db.metadatas[None] + return target_db.metadata + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, target_metadata=get_metadata(), literal_binds=True + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + + # this callback is used to prevent an auto-migration from being generated + # when there are no changes to the schema + # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html + def process_revision_directives(context, revision, directives): + if getattr(config.cmd_opts, 'autogenerate', False): + script = directives[0] + if script.upgrade_ops.is_empty(): + directives[:] = [] + logger.info('No changes in schema detected.') + + conf_args = current_app.extensions['migrate'].configure_args + if conf_args.get("process_revision_directives") is None: + conf_args["process_revision_directives"] = process_revision_directives + + connectable = get_engine() + + with connectable.connect() as connection: + context.configure( + connection=connection, + target_metadata=get_metadata(), + **conf_args + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/migrations/script.py.mako b/migrations/script.py.mako new file mode 100644 index 000000000..2c0156303 --- /dev/null +++ b/migrations/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/migrations/versions/91d41aaf0216_recreate_model_migrations.py b/migrations/versions/91d41aaf0216_recreate_model_migrations.py new file mode 100644 index 000000000..23bb27cbd --- /dev/null +++ b/migrations/versions/91d41aaf0216_recreate_model_migrations.py @@ -0,0 +1,42 @@ +"""Recreate model migrations + +Revision ID: 91d41aaf0216 +Revises: +Create Date: 2024-12-27 20:23:28.094007 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '91d41aaf0216' +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('goal', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('title', sa.String(), nullable=False), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('task', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('title', sa.String(), nullable=False), + sa.Column('description', sa.String(), nullable=False), + sa.Column('completed_at', sa.String(), nullable=True), + sa.Column('goal_id', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['goal_id'], ['goal.id'], ), + sa.PrimaryKeyConstraint('id') + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_table('task') + op.drop_table('goal') + # ### end Alembic commands ### diff --git a/requirements.txt b/requirements.txt index af8fc4cf4..c33605f22 100644 --- a/requirements.txt +++ b/requirements.txt @@ -20,6 +20,7 @@ psycopg2-binary==2.9.9 pytest==8.0.0 python-dotenv==1.0.1 requests==2.32.3 +slack_sdk==3.33.3 SQLAlchemy==2.0.25 typing_extensions==4.9.0 urllib3==2.2.3 diff --git a/tests/conftest.py b/tests/conftest.py index e370e597b..5027fcc77 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,4 +1,5 @@ import pytest +from flask import Flask from app import create_app from app.db import db from flask.signals import request_finished @@ -8,6 +9,7 @@ from app.models.goal import Goal from datetime import datetime + load_dotenv() @pytest.fixture diff --git a/tests/test_wave_01.py b/tests/test_wave_01.py index dca626d78..190e0244a 100644 --- a/tests/test_wave_01.py +++ b/tests/test_wave_01.py @@ -2,7 +2,7 @@ import pytest -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_get_tasks_no_saved_tasks(client): # Act response = client.get("/tasks") @@ -13,7 +13,7 @@ def test_get_tasks_no_saved_tasks(client): assert response_body == [] -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_get_tasks_one_saved_tasks(client, one_task): # Act response = client.get("/tasks") @@ -32,7 +32,7 @@ def test_get_tasks_one_saved_tasks(client, one_task): ] -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_get_task(client, one_task): # Act response = client.get("/tasks/1") @@ -51,7 +51,7 @@ def test_get_task(client, one_task): } -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_get_task_not_found(client): # Act response = client.get("/tasks/1") @@ -60,13 +60,17 @@ def test_get_task_not_found(client): # Assert assert response.status_code == 404 - raise Exception("Complete test with assertion about response body") + assert response_body == { + "message": "Task 1 not found" + } + + # raise Exception("Complete test with assertion about response body") # ***************************************************************** # **Complete test with assertion about response body*************** # ***************************************************************** -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_create_task(client): # Act response = client.post("/tasks", json={ @@ -93,7 +97,7 @@ def test_create_task(client): assert new_task.completed_at == None -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_update_task(client, one_task): # Act response = client.put("/tasks/1", json={ @@ -119,7 +123,7 @@ def test_update_task(client, one_task): assert task.completed_at == None -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_update_task_not_found(client): # Act response = client.put("/tasks/1", json={ @@ -131,13 +135,17 @@ def test_update_task_not_found(client): # Assert assert response.status_code == 404 - raise Exception("Complete test with assertion about response body") + assert response_body == { + "message": "Task 1 not found" + } + + 
# raise Exception("Complete test with assertion about response body") # ***************************************************************** # **Complete test with assertion about response body*************** # ***************************************************************** -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_delete_task(client, one_task): # Act response = client.delete("/tasks/1") @@ -152,7 +160,7 @@ def test_delete_task(client, one_task): assert Task.query.get(1) == None -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_delete_task_not_found(client): # Act response = client.delete("/tasks/1") @@ -161,7 +169,11 @@ def test_delete_task_not_found(client): # Assert assert response.status_code == 404 - raise Exception("Complete test with assertion about response body") + assert response_body == { + "message": "Task 1 not found" + } + + # raise Exception("Complete test with assertion about response body") # ***************************************************************** # **Complete test with assertion about response body*************** # ***************************************************************** @@ -169,7 +181,7 @@ def test_delete_task_not_found(client): assert Task.query.all() == [] -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_create_task_must_contain_title(client): # Act response = client.post("/tasks", json={ @@ -186,7 +198,7 @@ def test_create_task_must_contain_title(client): assert Task.query.all() == [] -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_create_task_must_contain_description(client): # Act response = client.post("/tasks", json={ diff --git a/tests/test_wave_02.py b/tests/test_wave_02.py index a087e0909..651e3aebd 100644 --- a/tests/test_wave_02.py +++ b/tests/test_wave_02.py @@ -1,7 +1,7 @@ import pytest -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_get_tasks_sorted_asc(client, three_tasks): # Act response = client.get("/tasks?sort=asc") @@ -29,7 +29,7 @@ def test_get_tasks_sorted_asc(client, three_tasks): ] -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_get_tasks_sorted_desc(client, three_tasks): # Act response = client.get("/tasks?sort=desc") diff --git a/tests/test_wave_03.py b/tests/test_wave_03.py index 32d379822..e48afcc4b 100644 --- a/tests/test_wave_03.py +++ b/tests/test_wave_03.py @@ -5,7 +5,7 @@ import pytest -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_mark_complete_on_incomplete_task(client, one_task): # Arrange """ @@ -42,7 +42,7 @@ def test_mark_complete_on_incomplete_task(client, one_task): assert Task.query.get(1).completed_at -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_mark_incomplete_on_complete_task(client, completed_task): # Act response = client.patch("/tasks/1/mark_incomplete") @@ -62,7 +62,7 @@ def test_mark_incomplete_on_complete_task(client, completed_task): assert Task.query.get(1).completed_at == None -@pytest.mark.skip(reason="No way to test this 
feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_mark_complete_on_completed_task(client, completed_task): # Arrange """ @@ -99,7 +99,7 @@ def test_mark_complete_on_completed_task(client, completed_task): assert Task.query.get(1).completed_at -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_mark_incomplete_on_incomplete_task(client, one_task): # Act response = client.patch("/tasks/1/mark_incomplete") @@ -119,7 +119,7 @@ def test_mark_incomplete_on_incomplete_task(client, one_task): assert Task.query.get(1).completed_at == None -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_mark_complete_missing_task(client): # Act response = client.patch("/tasks/1/mark_complete") @@ -127,14 +127,15 @@ def test_mark_complete_missing_task(client): # Assert assert response.status_code == 404 + assert response_body == {"message": "Task 1 not found"} - raise Exception("Complete test with assertion about response body") + # raise Exception("Complete test with assertion about response body") # ***************************************************************** # **Complete test with assertion about response body*************** # ***************************************************************** -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_mark_incomplete_missing_task(client): # Act response = client.patch("/tasks/1/mark_incomplete") @@ -142,8 +143,9 @@ def test_mark_incomplete_missing_task(client): # Assert assert response.status_code == 404 + assert response_body == {"message": "Task 1 not found"} - raise Exception("Complete test with assertion about response body") + # raise Exception("Complete test with assertion about response body") # ***************************************************************** # **Complete test with assertion about response body*************** # ***************************************************************** diff --git a/tests/test_wave_04.py b/tests/test_wave_04.py index d0b26b2d1..47e3d6785 100644 --- a/tests/test_wave_04.py +++ b/tests/test_wave_04.py @@ -1 +1,29 @@ +from app.models.task import Task +import os + # There are no tests for wave 4. + +# def test_massage_in_slack_on_completed_task(client, completed_task): +# os.environ["SEND_SLACK_NOTIFICATIONS"] = "True" + +# # Act +# response = client.patch(f"/tasks/1/mark_complete") +# response_body = response.get_json() + +# # Assert +# assert response.status_code == 200 +# assert response_body["task"]["is_complete"] == True +# assert response_body == { +# "task": { +# "id": 1, +# "title": "Go on my daily walk 🏞", +# "description": "Notice something new every day", +# "is_complete": True +# } +# } +# assert Task.query.get(1).completed_at + +# # TODO somehow check that the message was posted in Slack channel. 
+# # I haven't found Slack API method for that + +# os.environ["SEND_SLACK_NOTIFICATIONS"] = "False" diff --git a/tests/test_wave_05.py b/tests/test_wave_05.py index aee7c52a1..b17f561d7 100644 --- a/tests/test_wave_05.py +++ b/tests/test_wave_05.py @@ -1,7 +1,7 @@ import pytest -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_get_goals_no_saved_goals(client): # Act response = client.get("/goals") @@ -12,7 +12,7 @@ def test_get_goals_no_saved_goals(client): assert response_body == [] -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_get_goals_one_saved_goal(client, one_goal): # Act response = client.get("/goals") @@ -29,7 +29,7 @@ def test_get_goals_one_saved_goal(client, one_goal): ] -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_get_goal(client, one_goal): # Act response = client.get("/goals/1") @@ -46,22 +46,23 @@ def test_get_goal(client, one_goal): } -@pytest.mark.skip(reason="test to be completed by student") +# @pytest.mark.skip(reason="test to be completed by student") def test_get_goal_not_found(client): pass # Act response = client.get("/goals/1") response_body = response.get_json() - raise Exception("Complete test") + # raise Exception("Complete test") # Assert # ---- Complete Test ---- - # assertion 1 goes here + assert response.status_code == 404 + assert response_body == {"message": "Goal 1 not found"} # assertion 2 goes here # ---- Complete Test ---- -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_create_goal(client): # Act response = client.post("/goals", json={ @@ -80,34 +81,40 @@ def test_create_goal(client): } -@pytest.mark.skip(reason="test to be completed by student") +# @pytest.mark.skip(reason="test to be completed by student") def test_update_goal(client, one_goal): - raise Exception("Complete test") + # raise Exception("Complete test") # Act - # ---- Complete Act Here ---- + response = client.put("/goals/1", json={ + "title": "Update Goal" + }) + response_body = response.get_json() # Assert - # ---- Complete Assertions Here ---- - # assertion 1 goes here - # assertion 2 goes here - # assertion 3 goes here - # ---- Complete Assertions Here ---- - + assert response.status_code == 200 + assert "goal" in response_body + assert response_body == { + "goal": { + "id": 1, + "title": "Update Goal" + } + } -@pytest.mark.skip(reason="test to be completed by student") +# @pytest.mark.skip(reason="test to be completed by student") def test_update_goal_not_found(client): - raise Exception("Complete test") + # raise Exception("Complete test") # Act - # ---- Complete Act Here ---- + response = client.put("/goals/1", json={ + "title": "Update Goal" + }) + response_body = response.get_json() # Assert - # ---- Complete Assertions Here ---- - # assertion 1 goes here - # assertion 2 goes here - # ---- Complete Assertions Here ---- + assert response.status_code == 404 + assert response_body == {"message": "Goal 1 not found"} -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_delete_goal(client, one_goal): # Act response = client.delete("/goals/1") @@ -123,28 +130,31 @@ def test_delete_goal(client, one_goal): # Check that the goal was deleted response = client.get("/goals/1") 
assert response.status_code == 404 + response_body = response.get_json() + assert response_body == {"message": "Goal 1 not found"} - raise Exception("Complete test with assertion about response body") + # raise Exception("Complete test with assertion about response body") # ***************************************************************** # **Complete test with assertion about response body*************** # ***************************************************************** -@pytest.mark.skip(reason="test to be completed by student") +# @pytest.mark.skip(reason="test to be completed by student") def test_delete_goal_not_found(client): - raise Exception("Complete test") + # raise Exception("Complete test") # Act - # ---- Complete Act Here ---- + response = client.delete("/goals/1") + response_body = response.get_json() + # Assert - # ---- Complete Assertions Here ---- - # assertion 1 goes here - # assertion 2 goes here - # ---- Complete Assertions Here ---- + assert response.status_code == 404 + assert response_body == {"message": "Goal 1 not found"} + -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_create_goal_missing_title(client): # Act response = client.post("/goals", json={}) diff --git a/tests/test_wave_06.py b/tests/test_wave_06.py index 8afa4325e..9ecce3954 100644 --- a/tests/test_wave_06.py +++ b/tests/test_wave_06.py @@ -1,8 +1,7 @@ from app.models.goal import Goal import pytest - -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_post_task_ids_to_goal(client, one_goal, three_tasks): # Act response = client.post("/goals/1/tasks", json={ @@ -23,7 +22,7 @@ def test_post_task_ids_to_goal(client, one_goal, three_tasks): assert len(Goal.query.get(1).tasks) == 3 -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_post_task_ids_to_goal_already_with_goals(client, one_task_belongs_to_one_goal, three_tasks): # Act response = client.post("/goals/1/tasks", json={ @@ -42,7 +41,7 @@ def test_post_task_ids_to_goal_already_with_goals(client, one_task_belongs_to_on assert len(Goal.query.get(1).tasks) == 2 -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_get_tasks_for_specific_goal_no_goal(client): # Act response = client.get("/goals/1/tasks") @@ -51,13 +50,15 @@ def test_get_tasks_for_specific_goal_no_goal(client): # Assert assert response.status_code == 404 - raise Exception("Complete test with assertion about response body") + assert response_body == {"message": "Goal 1 not found"} + + # raise Exception("Complete test with assertion about response body") # ***************************************************************** # **Complete test with assertion about response body*************** # ***************************************************************** -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_get_tasks_for_specific_goal_no_tasks(client, one_goal): # Act response = client.get("/goals/1/tasks") @@ -74,7 +75,7 @@ def test_get_tasks_for_specific_goal_no_tasks(client, one_goal): } -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_get_tasks_for_specific_goal(client, one_task_belongs_to_one_goal): # Act 
response = client.get("/goals/1/tasks") @@ -99,7 +100,7 @@ def test_get_tasks_for_specific_goal(client, one_task_belongs_to_one_goal): } -@pytest.mark.skip(reason="No way to test this feature yet") +# @pytest.mark.skip(reason="No way to test this feature yet") def test_get_task_includes_goal_id(client, one_task_belongs_to_one_goal): response = client.get("/tasks/1") response_body = response.get_json()
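A note on the wave 4 Slack notification: `requirements.txt` adds `slack_sdk==3.33.3`, while `send_slack_notification` in `app/routes/task_routes.py` posts to the Slack Web API directly through `requests`. If the SDK dependency is meant to be used instead, the helper could look roughly like the sketch below; it assumes the same `SLACK_WEB_CLIENT_TOKEN` environment variable and `#api-test-channel` channel as the existing route code, and is an illustration rather than part of this change.

```python
import os

from slack_sdk import WebClient
from slack_sdk.errors import SlackApiError


def send_slack_notification(task_title):
    """Post the wave 4 completion message via slack_sdk instead of raw requests."""
    client = WebClient(token=os.environ.get("SLACK_WEB_CLIENT_TOKEN"))
    try:
        # chat_postMessage wraps the same chat.postMessage Web API method that the
        # requests-based helper in task_routes.py calls directly.
        response = client.chat_postMessage(
            channel="#api-test-channel",
            text=f'Someone just completed the task "{task_title}"',
        )
        return response["ok"]
    except SlackApiError:
        # A failed notification should not break the /mark_complete endpoint.
        return False
```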