Unverified Commit 45827043 authored by Martin Cech's avatar Martin Cech Committed by GitHub
Browse files

Merge pull request #18044 from mvdbeek/ensure_offset_and_limit_not_negative

[24.0] Ensure that offset and limit are never negative
parents b97825b7 5545dcd7
Loading
Loading
Loading
Loading
+2 −1
Original line number Diff line number Diff line
@@ -36,10 +36,11 @@ FolderIdPathParam = Annotated[
    Path(..., title="Folder ID", description="The encoded identifier of the library folder."),
]

# Maximum number of folder contents returned per request.
# ge=1 rejects zero and negative limits at FastAPI request validation time
# (a non-positive limit is never meaningful for pagination).
LimitQueryParam: int = Query(default=10, ge=1, title="Limit", description="Maximum number of contents to return.")

# Starting position for pagination; ge=0 guarantees the offset can never be
# negative, so downstream slicing/SQL OFFSET receives a sane value.
OffsetQueryParam: int = Query(
    default=0,
    ge=0,
    title="Offset",
    description="Return contents from this specified position. For example, if ``limit`` is set to 100 and ``offset`` to 200, contents between position 200-299 will be returned.",
)
+2 −1
Original line number Diff line number Diff line
@@ -161,10 +161,11 @@ SortByQueryParam: JobIndexSortByEnum = Query(
    description="Sort results by specified field.",
)

# Maximum number of jobs returned per request.
# ge=1 rejects zero and negative limits at FastAPI request validation time.
LimitQueryParam: int = Query(default=500, ge=1, title="Limit", description="Maximum number of jobs to return.")

# Starting position for job pagination; ge=0 guarantees the offset is
# never negative.
OffsetQueryParam: int = Query(
    default=0,
    ge=0,
    title="Offset",
    description="Return jobs starting from this specified position. For example, if ``limit`` is set to 100 and ``offset`` to 200, jobs 200-299 will be returned.",
)
+2 −1
Original line number Diff line number Diff line
@@ -72,10 +72,11 @@ SortDescQueryParam: bool = Query(
    description="Sort in descending order?",
)

# Maximum number of results per page: must be positive (ge=1) and is also
# capped below 1000 (lt=1000) to bound the response size.
LimitQueryParam: int = Query(default=100, ge=1, lt=1000, title="Limit number of queries.")

# Pagination offset; ge=0 guarantees it is never negative.
OffsetQueryParam: int = Query(
    default=0,
    ge=0,
    title="Number of pages to skip in sorted query (to enable pagination).",
)

+4 −1
Original line number Diff line number Diff line
@@ -815,10 +815,11 @@ SortDescQueryParam: Optional[bool] = Query(
    description="Sort in descending order?",
)

# Optional result cap: None means "no limit"; when supplied, ge=1 rejects
# zero and negative values at request validation time.
LimitQueryParam: Optional[int] = Query(default=None, ge=1, title="Limit number of queries.")

# Pagination offset for workflow listings; ge=0 guarantees it is never
# negative.
OffsetQueryParam: Optional[int] = Query(
    default=0,
    ge=0,
    title="Number of workflows to skip in sorted query (to enable pagination).",
)

@@ -1229,6 +1230,7 @@ InvocationsIncludeTerminalQueryParam = Annotated[
InvocationsLimitQueryParam = Annotated[
    Optional[int],
    Query(
        ge=1,
        title="Limit",
        description="Limit the number of invocations to return.",
    ),
@@ -1237,6 +1239,7 @@ InvocationsLimitQueryParam = Annotated[
InvocationsOffsetQueryParam = Annotated[
    Optional[int],
    Query(
        ge=0,
        title="Offset",
        description="Number of invocations to skip.",
    ),
+7 −2
Original line number Diff line number Diff line
@@ -182,14 +182,19 @@ steps:

    @pytest.mark.require_new_history
    def test_index_limit_and_offset_filter(self, history_id):
        """Check jobs-index pagination: ``offset`` skips results, ``limit``
        caps them, and a negative ``limit`` fails FastAPI validation (400)."""
        # create 2 datasets so there is more than one job to paginate over
        self.__history_with_new_dataset(history_id)
        self.__history_with_new_dataset(history_id)
        jobs = self.__jobs_index(data={"history_id": history_id})
        assert len(jobs) > 0
        length = len(jobs)
        # offset=1 drops exactly the first job from the listing
        jobs = self.__jobs_index(data={"history_id": history_id, "offset": 1})
        assert len(jobs) == length - 1
        # limit=1 caps the page at a single job
        jobs = self.__jobs_index(data={"history_id": history_id, "limit": 1})
        assert len(jobs) == 1
        # limit=-1 violates the ge=1 constraint on the query param and must
        # be rejected with a 400 validation error, not silently accepted
        response = self._get("jobs", data={"history_id": history_id, "limit": -1})
        assert response.status_code == 400
        assert response.json()["err_msg"] == "Input should be greater than or equal to 1 in ('query', 'limit')"

    @pytest.mark.require_new_history
    def test_index_search_filter_tool_id(self, history_id):