Make fetch job deletion a Celery task. #201

Merged 1 commit on Sep 6, 2023.

AIPscan/Aggregator/tasks.py (11 additions, 1 deletion)
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 import json
 import os
+import shutil

 import requests
 from celery.utils.log import get_task_logger
@@ -20,7 +21,7 @@
 )
 from AIPscan.extensions import celery
 from AIPscan.helpers import file_sha256_hash
-from AIPscan.models import AIP, get_mets_tasks  # Custom celery Models.
+from AIPscan.models import AIP, FetchJob, get_mets_tasks  # Custom celery Models.

 logger = get_task_logger(__name__)

@@ -373,3 +374,12 @@
         os.remove(download_file)
     except OSError as err:
         tasklogger.warning("Unable to delete METS file: {}".format(err))
+
+
+@celery.task()
+def delete_fetch_job(fetch_job_id):
+    fetch_job = FetchJob.query.get(fetch_job_id)
+    if os.path.exists(fetch_job.download_directory):
+        shutil.rmtree(fetch_job.download_directory)
+    db.session.delete(fetch_job)
+    db.session.commit()

Codecov / codecov/patch warning: added line AIPscan/Aggregator/tasks.py#L383 (the shutil.rmtree call) was not covered by tests.
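
For reference, a function registered with @celery.task() can be invoked in two ways, and both appear in this PR: asynchronously via .delay() (as the updated view does) or as a plain function call (as the new test does). A minimal sketch, assuming an example fetch job ID:

# Minimal sketch of the two invocation styles used elsewhere in this PR.
from AIPscan.Aggregator.tasks import delete_fetch_job

fetch_job_id = 1  # example ID; any existing FetchJob primary key

# Asynchronous: enqueue the task for a Celery worker and return immediately.
delete_fetch_job.delay(fetch_job_id)

# Synchronous: execute the task body in-process, as the new unit test does.
delete_fetch_job(fetch_job_id)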

AIPscan/Aggregator/tests/test_tasks.py (19 additions, 1 deletion)
@@ -11,6 +11,7 @@
 from AIPscan.Aggregator.tasks import (
     TaskError,
     delete_aip,
+    delete_fetch_job,
     get_mets,
     make_request,
     parse_packages_and_load_mets,
@@ -23,7 +24,7 @@
     VALID_JSON,
     MockResponse,
 )
-from AIPscan.models import AIP
+from AIPscan.models import AIP, FetchJob

 SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
 FIXTURES_DIR = os.path.join(SCRIPT_DIR, "fixtures")
@@ -176,6 +177,23 @@ def mock_download_mets(
     )


+def test_delete_fetch_job_task(app_instance, tmpdir, mocker):
+    """Test that fetch job gets deleted by delete fetch job task logic."""
+    storage_service = test_helpers.create_test_storage_service()
+
+    # Create fetch job and confirm expected ID
+    fetch_job1 = test_helpers.create_test_fetch_job(
+        storage_service_id=storage_service.id
+    )
+
+    assert fetch_job1.id == 1
+
+    # Delete fetch job and confirm it no longer exists
+    delete_fetch_job(fetch_job1.id)
+
+    assert FetchJob.query.filter_by(id=fetch_job1.id).first() is None
+
+
 @pytest.mark.parametrize(
     "response, raises_task_error",
     [
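
Codecov flags the shutil.rmtree line (tasks.py#L383) as uncovered, presumably because the fixture fetch job does not point at an existing directory on disk. A hypothetical extension of the test above (not part of this PR) that would also exercise that branch, assuming FetchJob.download_directory is a writable attribute and that the test and the direct task call share one SQLAlchemy session:

# Hypothetical extension (not part of this PR): exercise the directory-cleanup
# branch of delete_fetch_job using the tmpdir fixture already in the signature.
download_dir = tmpdir.mkdir("fetch-job-downloads")
fetch_job1.download_directory = str(download_dir)  # assumes a writable column

delete_fetch_job(fetch_job1.id)

# Both the database row and the on-disk directory should now be gone.
assert FetchJob.query.filter_by(id=fetch_job1.id).first() is None
assert not os.path.exists(str(download_dir))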
7 changes: 2 additions & 5 deletions AIPscan/Aggregator/views.py
@@ -232,13 +232,10 @@

 @aggregator.route("/delete_fetch_job/<id>", methods=["GET"])
 def delete_fetch_job(id):
+    tasks.delete_fetch_job.delay(id)
     fetch_job = FetchJob.query.get(id)
     storage_service = StorageService.query.get(fetch_job.storage_service_id)
-    if os.path.exists(fetch_job.download_directory):
-        shutil.rmtree(fetch_job.download_directory)
-    db.session.delete(fetch_job)
-    db.session.commit()
-    flash("Fetch job {} is deleted".format(fetch_job.download_start))
+    flash("Fetch job {} is being deleted".format(fetch_job.download_start))
     return redirect(url_for("aggregator.storage_service", id=storage_service.id))

Codecov / codecov/patch warnings: added lines AIPscan/Aggregator/views.py#L235 and #L238 were not covered by tests.
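
The two Codecov warnings above indicate that the updated view body is not exercised by tests; the new unit test calls the task directly rather than going through the route. One common way to exercise a .delay() call without running a worker is Celery's eager mode; a minimal sketch, assuming the celery object imported in tasks.py from AIPscan.extensions is the application's Celery instance:

# Minimal sketch (not part of this PR): run Celery tasks eagerly, e.g. in a
# test configuration, so that .delay() executes the task body in-process.
from AIPscan.extensions import celery

celery.conf.task_always_eager = True      # .delay() runs synchronously
celery.conf.task_eager_propagates = True  # task exceptions surface immediately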

