Skip to content

Commit

Permalink
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Fix test and add ts filter
Browse files · Browse the repository at this point in the history
tw4l committed Jan 30, 2025
1 parent 57f64d8 commit aa04c2a
Showing 2 changed files with 23 additions and 4 deletions.
6 changes: 6 additions & 0 deletions backend/btrixcloud/pages.py
Original file line number Diff line number Diff line change
@@ -575,6 +575,7 @@ async def list_collection_pages(
org: Optional[Organization] = None,
url: Optional[str] = None,
url_prefix: Optional[str] = None,
ts: Optional[datetime] = None,
page_size: int = DEFAULT_PAGE_SIZE,
page: int = 1,
sort_by: Optional[str] = None,
@@ -602,6 +603,9 @@ async def list_collection_pages(
elif url:
query["url"] = urllib.parse.unquote(url)

if ts:
query["ts"] = ts

aggregate = [{"$match": query}]

if sort_by:
@@ -983,6 +987,7 @@ async def get_collection_pages_list(
org: Organization = Depends(org_viewer_dep),
url: Optional[str] = None,
urlPrefix: Optional[str] = None,
ts: Optional[datetime] = None,
pageSize: int = DEFAULT_PAGE_SIZE,
page: int = 1,
sortBy: Optional[str] = None,
@@ -994,6 +999,7 @@ async def get_collection_pages_list(
org=org,
url=url,
url_prefix=urlPrefix,
ts=ts,
page_size=pageSize,
page=page,
sort_by=sortBy,
21 changes: 17 additions & 4 deletions backend/test/test_collections.py
Original file line number Diff line number Diff line change
@@ -613,6 +613,7 @@ def test_list_pages_in_collection(crawler_auth_headers, default_org_id):
coll_page = pages[0]
coll_page_id = coll_page["id"]
coll_page_url = coll_page["url"]
coll_page_ts = coll_page["ts"]

# Test exact url filter
r = requests.get(
@@ -622,10 +623,22 @@ def test_list_pages_in_collection(crawler_auth_headers, default_org_id):
assert r.status_code == 200
data = r.json()

assert data["total"] == 1
matching_page = data["items"][0]
assert matching_page["id"] == coll_page_id
assert matching_page["url"] == coll_page_url
assert data["total"] >= 1
for matching_page in data["items"]:
assert matching_page["url"] == coll_page_url

# Test exact url and ts filters together
r = requests.get(
f"{API_PREFIX}/orgs/{default_org_id}/collections/{_coll_id}/pages?url={coll_page_url}&ts={coll_page_ts}",
headers=crawler_auth_headers,
)
assert r.status_code == 200
data = r.json()

assert data["total"] >= 1
for matching_page in data["items"]:
assert matching_page["url"] == coll_page_url
assert matching_page["ts"] == coll_page_ts

# Test urlPrefix filter
url_prefix = coll_page_url[:8]

0 comments on commit aa04c2a

Please sign in to comment.