remove limit complete types, keep: stopped_by_user, stopped_quota_reached
ikreymer committed Nov 12, 2023
1 parent 94169dd commit 49ddab3
Showing 10 changed files with 12 additions and 27 deletions.
9 changes: 1 addition & 8 deletions backend/btrixcloud/models.py
@@ -158,14 +158,7 @@ class UserOut(BaseModel):

FAILED_STATES = ["canceled", "failed", "skipped_quota_reached"]

-SUCCESSFUL_STATES = [
-    "complete",
-    "complete:time-limit",
-    "complete:size-limit",
-    "complete:page-limit",
-    "complete:user-stop",
-    "complete:time-quota",
-]
+SUCCESSFUL_STATES = ["complete", "stopped_by_user", "stopped_quota_reached"]

RUNNING_AND_STARTING_STATES = [*STARTING_STATES, *RUNNING_STATES]

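The commit collapses the granular `complete:*` variants into two explicit states, so a crawl stopped by the user or by a quota still counts as successful. A minimal sketch of how the lists compose after this change; the STARTING_STATES and RUNNING_STATES values fall outside the hunk and are assumed here for illustration:

STARTING_STATES = ["starting", "waiting_capacity"]  # assumed, not shown in this diff
RUNNING_STATES = ["running", "generate-wacz", "uploading-wacz"]  # assumed

FAILED_STATES = ["canceled", "failed", "skipped_quota_reached"]
SUCCESSFUL_STATES = ["complete", "stopped_by_user", "stopped_quota_reached"]

RUNNING_AND_STARTING_STATES = [*STARTING_STATES, *RUNNING_STATES]
FINISHED_STATES = [*FAILED_STATES, *SUCCESSFUL_STATES]  # as in conftest.py below

def is_successful(state: str) -> bool:
    # Stopped crawls keep their partial data, so they count as successful.
    return state in SUCCESSFUL_STATES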
9 changes: 1 addition & 8 deletions backend/test/conftest.py
@@ -29,14 +29,7 @@

FAILED_STATES = ["canceled", "failed", "skipped_quota_reached"]

-SUCCESSFUL_STATES = [
-    "complete",
-    "complete:time-limit",
-    "complete:size-limit",
-    "complete:page-limit",
-    "complete:user-stop",
-    "complete:time-quota",
-]
+SUCCESSFUL_STATES = ["complete", "stopped_by_user", "stopped_quota_reached"]

FINISHED_STATES = [*FAILED_STATES, *SUCCESSFUL_STATES]

2 changes: 1 addition & 1 deletion backend/test/test_crawlconfigs.py
@@ -361,7 +361,7 @@ def test_incremental_workflow_total_size_and_last_crawl_stats(
            headers=crawler_auth_headers,
        )
        data = r.json()
-        if data["state"] == "complete:page-limit":
+        if data["state"] == "complete":
            break
        time.sleep(5)

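This loop, like the test_webhooks.py change below, polls until the crawl reaches its terminal state, which is now plain "complete" rather than "complete:page-limit". A hedged sketch of that polling pattern; wait_for_state is an illustrative helper, not one defined in the repo:

import time

import requests

def wait_for_state(url: str, headers: dict, target: str = "complete", max_attempts: int = 24):
    # Poll every 5 seconds, mirroring the tests' cadence; give up eventually.
    for _ in range(max_attempts):
        data = requests.get(url, headers=headers).json()
        if data["state"] == target:
            return data
        time.sleep(5)
    raise TimeoutError(f"crawl never reached state {target!r}")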
2 changes: 1 addition & 1 deletion backend/test/test_run_crawl.py
@@ -55,7 +55,7 @@ def test_wait_for_complete(admin_auth_headers, default_org_id, admin_crawl_id):
        headers=admin_auth_headers,
    )
    data = r.json()
-    assert data["state"] == "complete:page-limit"
+    assert data["state"] == "complete"

    assert len(data["resources"]) == 1
    assert data["resources"][0]["path"]
4 changes: 2 additions & 2 deletions backend/test/test_stop_cancel_crawl.py
@@ -106,7 +106,7 @@ def test_start_crawl_and_stop_immediately(
        time.sleep(5)
        data = get_crawl(default_org_id, crawler_auth_headers, crawl_id)

-    assert data["state"] in ("canceled", "complete:user-stop")
+    assert data["state"] in ("canceled", "stopped_by_user")
    assert data["stopping"] == True


@@ -172,7 +172,7 @@ def test_stop_crawl_partial(
        time.sleep(5)
        data = get_crawl(default_org_id, crawler_auth_headers, crawl_id)

-    assert data["state"] == "complete:user-stop"
+    assert data["state"] == "stopped_by_user"
    assert data["stopping"] == True

    assert len(data["resources"]) == 1
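These two assertions capture the stop semantics: a stop requested before any data is written may end as "canceled", while a stop after partial capture finishes as "stopped_by_user" and still exposes downloadable resources. For orientation, requesting a graceful stop looks roughly like this; the /stop endpoint path is an assumption inferred from the test module, not shown in this diff:

r = requests.post(
    f"{API_PREFIX}/orgs/{default_org_id}/crawls/{crawl_id}/stop",  # assumed path
    headers=crawler_auth_headers,
)
assert r.status_code == 200  # the response body shape is not relied on here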
5 changes: 2 additions & 3 deletions backend/test/test_uploads.py
@@ -504,7 +504,7 @@ def test_get_all_crawls_by_cid(
def test_get_all_crawls_by_state(admin_auth_headers, default_org_id, admin_crawl_id):
    """Test filtering /all-crawls by cid"""
    r = requests.get(
-        f"{API_PREFIX}/orgs/{default_org_id}/all-crawls?state=complete,complete:user-stop,complete:page-limit",
+        f"{API_PREFIX}/orgs/{default_org_id}/all-crawls?state=complete,stopped_by_user",
        headers=admin_auth_headers,
    )
    assert r.status_code == 200
@@ -514,8 +514,7 @@ def test_get_all_crawls_by_state(admin_auth_headers, default_org_id, admin_crawl
    for item in items:
        assert item["state"] in (
            "complete",
-            "complete:user-stop",
-            "complete:page-limit",
+            "stopped_by_user",
        )


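The `state` query parameter on /all-crawls accepts a comma-separated list, so the filter shrinks along with the state set. A sketch of the updated query and the check it supports; names come from the test above, and the "items" key is assumed from the loop over items:

r = requests.get(
    f"{API_PREFIX}/orgs/{default_org_id}/all-crawls?state=complete,stopped_by_user",
    headers=admin_auth_headers,
)
items = r.json()["items"]  # "items" key assumed from the loop above
assert all(item["state"] in ("complete", "stopped_by_user") for item in items)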
2 changes: 1 addition & 1 deletion backend/test/test_webhooks.py
@@ -191,7 +191,7 @@ def test_webhooks_sent(
            headers=admin_auth_headers,
        )
        data = r.json()
-        if data["state"] == "complete:page-limit":
+        if data["state"] == "complete":
            break
        time.sleep(5)

2 changes: 1 addition & 1 deletion backend/test_nightly/test_crawl_timeout.py
@@ -25,7 +25,7 @@ def test_crawl_timeout(admin_auth_headers, default_org_id, timeout_crawl):
    )
    assert r.status_code == 200
    data = r.json()
-    assert data["state"] == "complete:time-limit"
+    assert data["state"] == "complete"


def test_crawl_files_replicated(admin_auth_headers, default_org_id, timeout_crawl):
2 changes: 1 addition & 1 deletion backend/test_nightly/test_execution_minutes_quota.py
@@ -47,7 +47,7 @@ def test_crawl_stopped_when_quota_reached(org_with_quotas, admin_auth_headers):
    # Ensure that crawl was stopped by quota
    assert (
        get_crawl_status(org_with_quotas, crawl_id, admin_auth_headers)
-        == "complete:exec-time-quota"
+        == "stopped_quota_reached"
    )

    time.sleep(5)
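A crawl halted by the execution-minutes quota now ends as "stopped_quota_reached", which sits in SUCCESSFUL_STATES, so its partial data is preserved rather than discarded. The helper used above might look like this; a plausible shape shown as an assumption, since the real get_crawl_status lives in the nightly tests' shared utilities:

def get_crawl_status(org_id: str, crawl_id: str, headers: dict) -> str:
    # Assumed endpoint: other tests in this commit read state and
    # resources from the crawl's replay.json.
    r = requests.get(
        f"{API_PREFIX}/orgs/{org_id}/crawls/{crawl_id}/replay.json",
        headers=headers,
    )
    return r.json()["state"]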
2 changes: 1 addition & 1 deletion backend/test_nightly/test_max_crawl_size_limit.py
@@ -30,4 +30,4 @@ def test_max_crawl_size(admin_auth_headers, default_org_id, max_crawl_size_crawl
    )
    assert r.status_code == 200
    data = r.json()
-    assert data["state"] == "complete:size-limit"
+    assert data["state"] == "complete"
