From f9706f4b88e6278a5be1057c30a8939fed415767 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
<142633134+stainless-app[bot]@users.noreply.github.com>
Date: Mon, 9 Mar 2026 16:40:34 +0000
Subject: [PATCH 01/17] feat(client): add support for binary request streaming
---
src/gradient/_base_client.py | 22 +++++++++++-----------
1 file changed, 11 insertions(+), 11 deletions(-)
diff --git a/src/gradient/_base_client.py b/src/gradient/_base_client.py
index ca3db359..4ddc8392 100644
--- a/src/gradient/_base_client.py
+++ b/src/gradient/_base_client.py
@@ -486,17 +486,17 @@ def _build_request(
) -> httpx.Request:
if log.isEnabledFor(logging.DEBUG):
log.debug(
- "Request options",
- # model_dump(
- # options,
- # exclude_unset=True,
- # # Pydantic v1 can't dump every type we support in content, so we exclude it for now.
- # exclude={
- # "content",
- # }
- # if PYDANTIC_V1
- # else {},
- # ),
+ "Request options: %s",
+ model_dump(
+ options,
+ exclude_unset=True,
+ # Pydantic v1 can't dump every type we support in content, so we exclude it for now.
+ exclude={
+ "content",
+ }
+ if PYDANTIC_V1
+ else {},
+ ),
)
kwargs: dict[str, Any] = {}
From 32778769c86140d283a9f456568f68803208ae62 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
<142633134+stainless-app[bot]@users.noreply.github.com>
Date: Mon, 9 Mar 2026 16:44:56 +0000
Subject: [PATCH 02/17] feat(api): responses
---
.stats.yml | 2 +-
api.md | 12 +
src/gradient/types/__init__.py | 1 +
.../types/response_create_response.py | 332 ++++++++++++++++++
4 files changed, 346 insertions(+), 1 deletion(-)
create mode 100644 src/gradient/types/response_create_response.py
diff --git a/.stats.yml b/.stats.yml
index 0d591538..105ad90e 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
configured_endpoints: 193
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/digitalocean%2Fgradient-2344b44246a44d39ad5b74d3077bd2958745aad67feb15970756532fa0b3f9d6.yml
openapi_spec_hash: a1913979235ce152a8dc380fabe5362e
-config_hash: 6c9a04f3cc5dd88e1e4f0ae42d98ba9a
+config_hash: 3302f40607e596148c9ac7706346a858
diff --git a/api.md b/api.md
index 45e4eaeb..606ed40c 100644
--- a/api.md
+++ b/api.md
@@ -1075,3 +1075,15 @@ from gradient.types import BillingListInsightsResponse
Methods:
- client.billing.list_insights(end_date, \*, account_urn, start_date, \*\*params) -> BillingListInsightsResponse
+
+# Responses
+
+Types:
+
+```python
+from gradient.types import ResponseCreateResponse
+```
+
+Methods:
+
+- client.responses.create(\*\*params) -> ResponseCreateResponse
diff --git a/src/gradient/types/__init__.py b/src/gradient/types/__init__.py
index fc2907b2..9e897482 100644
--- a/src/gradient/types/__init__.py
+++ b/src/gradient/types/__init__.py
@@ -84,6 +84,7 @@
from .api_openai_api_key_info import APIOpenAIAPIKeyInfo as APIOpenAIAPIKeyInfo
from .gpu_droplet_list_params import GPUDropletListParams as GPUDropletListParams
from .image_generate_response import ImageGenerateResponse as ImageGenerateResponse
+from .response_create_response import ResponseCreateResponse as ResponseCreateResponse
from .api_deployment_visibility import APIDeploymentVisibility as APIDeploymentVisibility
from .gpu_droplet_create_params import GPUDropletCreateParams as GPUDropletCreateParams
from .gpu_droplet_list_response import GPUDropletListResponse as GPUDropletListResponse
diff --git a/src/gradient/types/response_create_response.py b/src/gradient/types/response_create_response.py
new file mode 100644
index 00000000..0dfe2a4a
--- /dev/null
+++ b/src/gradient/types/response_create_response.py
@@ -0,0 +1,332 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+import builtins
+from typing import Dict, List, Union, Optional
+from typing_extensions import Literal, TypeAlias
+
+from .._models import BaseModel
+from .shared.chat_completion_token_logprob import ChatCompletionTokenLogprob
+
+__all__ = [
+ "ResponseCreateResponse",
+ "Usage",
+ "UsageInputTokensDetails",
+ "UsageOutputTokensDetails",
+ "Choice",
+ "ChoiceMessage",
+ "ChoiceMessageToolCall",
+ "ChoiceMessageToolCallFunction",
+ "ChoiceLogprobs",
+ "Output",
+ "OutputUnionMember0",
+ "OutputUnionMember1",
+ "OutputUnionMember2",
+ "OutputUnionMember2Content",
+ "Tool",
+]
+
+
+class UsageInputTokensDetails(BaseModel):
+ """A detailed breakdown of the input tokens."""
+
+ cached_tokens: int
+ """The number of tokens that were retrieved from the cache.
+
+ [More on prompt caching](https://platform.openai.com/docs/guides/prompt-caching).
+ """
+
+
+class UsageOutputTokensDetails(BaseModel):
+ """A detailed breakdown of the output tokens."""
+
+ reasoning_tokens: int
+ """The number of reasoning tokens."""
+
+ tool_output_tokens: int
+ """The number of tool output tokens."""
+
+
+class Usage(BaseModel):
+ """
+ Detailed token usage statistics for the request, including input/output token counts and detailed breakdowns.
+ """
+
+ input_tokens: int
+ """The number of input tokens."""
+
+ input_tokens_details: UsageInputTokensDetails
+ """A detailed breakdown of the input tokens."""
+
+ output_tokens: int
+ """The number of output tokens."""
+
+ output_tokens_details: UsageOutputTokensDetails
+ """A detailed breakdown of the output tokens."""
+
+ total_tokens: int
+ """The total number of tokens used."""
+
+
+class ChoiceMessageToolCallFunction(BaseModel):
+ """The function that the model called."""
+
+ arguments: str
+ """
+ The arguments to call the function with, as generated by the model in JSON
+ format. Note that the model does not always generate valid JSON, and may
+ hallucinate parameters not defined by your function schema. Validate the
+ arguments in your code before calling your function.
+ """
+
+ name: str
+ """The name of the function to call."""
+
+
+class ChoiceMessageToolCall(BaseModel):
+ id: str
+ """The ID of the tool call."""
+
+ function: ChoiceMessageToolCallFunction
+ """The function that the model called."""
+
+ type: Literal["function"]
+ """The type of the tool. Currently, only `function` is supported."""
+
+
+class ChoiceMessage(BaseModel):
+ """The generated message response."""
+
+ content: Optional[str] = None
+ """The generated text content."""
+
+ role: Optional[Literal["assistant"]] = None
+ """The role of the message author, which is always `assistant`."""
+
+ tool_calls: Optional[List[ChoiceMessageToolCall]] = None
+ """The tool calls generated by the model, such as function calls."""
+
+
+class ChoiceLogprobs(BaseModel):
+ """Log probability information for the choice.
+
+ Only present if logprobs was requested in the request.
+ """
+
+ content: Optional[List[ChatCompletionTokenLogprob]] = None
+ """A list of message content tokens with log probability information."""
+
+
+class Choice(BaseModel):
+ finish_reason: Literal["stop", "length", "tool_calls", "content_filter"]
+ """The reason the model stopped generating tokens.
+
+ This will be `stop` if the model hit a natural stop point or a provided stop
+ sequence, `length` if the maximum number of tokens specified in the request was
+ reached, or `tool_calls` if the model called a tool.
+ """
+
+ index: int
+ """The index of the choice in the list of choices."""
+
+ message: ChoiceMessage
+ """The generated message response."""
+
+ logprobs: Optional[ChoiceLogprobs] = None
+ """Log probability information for the choice.
+
+ Only present if logprobs was requested in the request.
+ """
+
+
+class OutputUnionMember0(BaseModel):
+ arguments: str
+ """JSON string of function arguments"""
+
+ call_id: str
+ """The unique ID of the function tool call"""
+
+ name: str
+ """The name of the function to call"""
+
+ type: Literal["function_call"]
+ """The type of output item"""
+
+ id: Optional[str] = None
+ """The unique ID of the function tool call (same as call_id)"""
+
+ encrypted_content: Optional[str] = None
+ """Encrypted content (optional)"""
+
+ status: Optional[str] = None
+ """Status of the item (optional, can be null)"""
+
+
+class OutputUnionMember1(BaseModel):
+ text: str
+ """The text content"""
+
+ type: Literal["text"]
+ """The type of output item"""
+
+
+class OutputUnionMember2Content(BaseModel):
+ text: str
+ """The reasoning text content"""
+
+ type: Literal["reasoning_text"]
+ """The type of content"""
+
+
+class OutputUnionMember2(BaseModel):
+ id: str
+ """The unique ID of the reasoning item"""
+
+ content: List[OutputUnionMember2Content]
+ """Array of reasoning content parts"""
+
+ summary: List[object]
+ """Summary of the reasoning (usually empty)"""
+
+ type: Literal["reasoning"]
+ """The type of output item"""
+
+ encrypted_content: Optional[str] = None
+ """Encrypted content (optional)"""
+
+ status: Optional[str] = None
+ """Status of the item (optional, can be null)"""
+
+
+Output: TypeAlias = Union[OutputUnionMember0, OutputUnionMember1, OutputUnionMember2]
+
+
+class Tool(BaseModel):
+ """Tool definition for Responses API (flat format).
+
+ This format is used by VLLM's Responses API where name, description, and parameters are at the top level of the tool object.
+ """
+
+ type: Literal["function", "web_search", "web_search_2025_08_26"]
+ """The type of the tool.
+
+ Supported values are `function` (custom tools), `web_search`, and
+ `web_search_2025_08_26` (built-in web search).
+ """
+
+ description: Optional[str] = None
+ """
+ A description of what the function does, used by the model to choose when and
+ how to call the function.
+ """
+
+ name: Optional[str] = None
+ """The name of the function to be called.
+
+ Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length
+ of 64.
+ """
+
+ parameters: Optional[Dict[str, object]] = None
+ """The parameters the functions accepts, described as a JSON Schema object.
+
+ See the [guide](/docs/guides/function-calling) for examples, and the
+ [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for
+ documentation about the format.
+
+ Omitting `parameters` defines a function with an empty parameter list.
+ """
+
+
+class ResponseCreateResponse(BaseModel):
+ """
+ Represents a text-to-text response returned by the model, based on the provided input. VLLM models only.
+ """
+
+ id: str
+ """A unique identifier for the response."""
+
+ created: int
+ """The Unix timestamp (in seconds) of when the response was created."""
+
+ model: str
+ """The model used to generate the response."""
+
+ object: Literal["response"]
+ """The object type, which is always `response`."""
+
+ usage: Usage
+ """
+ Detailed token usage statistics for the request, including input/output token
+ counts and detailed breakdowns.
+ """
+
+ background: Optional[bool] = None
+ """Whether the request was processed in the background"""
+
+ choices: Optional[List[Choice]] = None
+ """A list of response choices.
+
+ Can be more than one if `n` is greater than 1. Optional - Responses API
+ primarily uses the output array.
+ """
+
+ input_messages: Optional[List[builtins.object]] = None
+ """Input messages (if applicable)"""
+
+ max_output_tokens: Optional[int] = None
+ """Maximum output tokens setting"""
+
+ max_tool_calls: Optional[int] = None
+ """Maximum tool calls setting"""
+
+ output: Optional[List[Output]] = None
+ """An array of content items generated by the model.
+
+ This includes text content, function calls, reasoning items, and other output
+ types. Use this field for Responses API compatibility.
+ """
+
+ output_messages: Optional[List[builtins.object]] = None
+ """Output messages (if applicable)"""
+
+ parallel_tool_calls: Optional[bool] = None
+ """Whether parallel tool calls are enabled"""
+
+ previous_response_id: Optional[str] = None
+ """Previous response ID (for multi-turn conversations)"""
+
+ prompt: Optional[str] = None
+ """Prompt used for the response"""
+
+ reasoning: Optional[str] = None
+ """Reasoning content"""
+
+ service_tier: Optional[str] = None
+ """Service tier used"""
+
+ status: Optional[str] = None
+ """Status of the response"""
+
+ temperature: Optional[float] = None
+ """Temperature setting used for the response"""
+
+ text: Optional[str] = None
+ """Text content"""
+
+ tool_choice: Optional[str] = None
+ """Tool choice setting used for the response"""
+
+ tools: Optional[List[Tool]] = None
+ """Tools available for the response"""
+
+ top_logprobs: Optional[int] = None
+ """Top logprobs setting"""
+
+ top_p: Optional[float] = None
+ """Top-p setting used for the response"""
+
+ truncation: Optional[str] = None
+ """Truncation setting"""
+
+ user: Optional[str] = None
+ """User identifier"""
From 3c5e28f7e5804102b650d5659a738aeca3c810a7 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
<142633134+stainless-app[bot]@users.noreply.github.com>
Date: Tue, 10 Feb 2026 22:54:31 +0000
Subject: [PATCH 03/17] codegen metadata
---
.stats.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.stats.yml b/.stats.yml
index 105ad90e..579c9fc7 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
configured_endpoints: 193
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/digitalocean%2Fgradient-2344b44246a44d39ad5b74d3077bd2958745aad67feb15970756532fa0b3f9d6.yml
openapi_spec_hash: a1913979235ce152a8dc380fabe5362e
-config_hash: 3302f40607e596148c9ac7706346a858
+config_hash: 3c7741f27a23621a5a8e3cae5610088d
From e103720daa6eaae0454cbb857d97b1b2e43b405f Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
<142633134+stainless-app[bot]@users.noreply.github.com>
Date: Wed, 11 Mar 2026 19:41:30 +0000
Subject: [PATCH 04/17] chore: format all `api.md` files
---
src/gradient/_base_client.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/gradient/_base_client.py b/src/gradient/_base_client.py
index 4ddc8392..e2135952 100644
--- a/src/gradient/_base_client.py
+++ b/src/gradient/_base_client.py
@@ -62,7 +62,7 @@
not_given,
)
from ._utils import is_dict, is_list, asyncify, is_given, lru_cache, is_mapping
-from ._compat import PYDANTIC_V1, model_copy
+from ._compat import PYDANTIC_V1, model_copy, model_dump
from ._models import GenericModel, FinalRequestOptions, validate_type, construct_type
from ._response import (
APIResponse,
From e13929984bbfa991d51a56946a40380a909f1d75 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
<142633134+stainless-app[bot]@users.noreply.github.com>
Date: Tue, 17 Feb 2026 21:13:15 +0000
Subject: [PATCH 05/17] feat(api): manual updates
---
.stats.yml | 2 +-
api.md | 12 -
src/gradient/_base_client.py | 24 +-
src/gradient/types/__init__.py | 1 -
.../types/response_create_response.py | 332 ------------------
5 files changed, 13 insertions(+), 358 deletions(-)
delete mode 100644 src/gradient/types/response_create_response.py
diff --git a/.stats.yml b/.stats.yml
index 579c9fc7..0d591538 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
configured_endpoints: 193
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/digitalocean%2Fgradient-2344b44246a44d39ad5b74d3077bd2958745aad67feb15970756532fa0b3f9d6.yml
openapi_spec_hash: a1913979235ce152a8dc380fabe5362e
-config_hash: 3c7741f27a23621a5a8e3cae5610088d
+config_hash: 6c9a04f3cc5dd88e1e4f0ae42d98ba9a
diff --git a/api.md b/api.md
index 606ed40c..45e4eaeb 100644
--- a/api.md
+++ b/api.md
@@ -1075,15 +1075,3 @@ from gradient.types import BillingListInsightsResponse
Methods:
- client.billing.list_insights(end_date, \*, account_urn, start_date, \*\*params) -> BillingListInsightsResponse
-
-# Responses
-
-Types:
-
-```python
-from gradient.types import ResponseCreateResponse
-```
-
-Methods:
-
-- client.responses.create(\*\*params) -> ResponseCreateResponse
diff --git a/src/gradient/_base_client.py b/src/gradient/_base_client.py
index e2135952..ca3db359 100644
--- a/src/gradient/_base_client.py
+++ b/src/gradient/_base_client.py
@@ -62,7 +62,7 @@
not_given,
)
from ._utils import is_dict, is_list, asyncify, is_given, lru_cache, is_mapping
-from ._compat import PYDANTIC_V1, model_copy, model_dump
+from ._compat import PYDANTIC_V1, model_copy
from ._models import GenericModel, FinalRequestOptions, validate_type, construct_type
from ._response import (
APIResponse,
@@ -486,17 +486,17 @@ def _build_request(
) -> httpx.Request:
if log.isEnabledFor(logging.DEBUG):
log.debug(
- "Request options: %s",
- model_dump(
- options,
- exclude_unset=True,
- # Pydantic v1 can't dump every type we support in content, so we exclude it for now.
- exclude={
- "content",
- }
- if PYDANTIC_V1
- else {},
- ),
+ "Request options",
+ # model_dump(
+ # options,
+ # exclude_unset=True,
+ # # Pydantic v1 can't dump every type we support in content, so we exclude it for now.
+ # exclude={
+ # "content",
+ # }
+ # if PYDANTIC_V1
+ # else {},
+ # ),
)
kwargs: dict[str, Any] = {}
diff --git a/src/gradient/types/__init__.py b/src/gradient/types/__init__.py
index 9e897482..fc2907b2 100644
--- a/src/gradient/types/__init__.py
+++ b/src/gradient/types/__init__.py
@@ -84,7 +84,6 @@
from .api_openai_api_key_info import APIOpenAIAPIKeyInfo as APIOpenAIAPIKeyInfo
from .gpu_droplet_list_params import GPUDropletListParams as GPUDropletListParams
from .image_generate_response import ImageGenerateResponse as ImageGenerateResponse
-from .response_create_response import ResponseCreateResponse as ResponseCreateResponse
from .api_deployment_visibility import APIDeploymentVisibility as APIDeploymentVisibility
from .gpu_droplet_create_params import GPUDropletCreateParams as GPUDropletCreateParams
from .gpu_droplet_list_response import GPUDropletListResponse as GPUDropletListResponse
diff --git a/src/gradient/types/response_create_response.py b/src/gradient/types/response_create_response.py
deleted file mode 100644
index 0dfe2a4a..00000000
--- a/src/gradient/types/response_create_response.py
+++ /dev/null
@@ -1,332 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-import builtins
-from typing import Dict, List, Union, Optional
-from typing_extensions import Literal, TypeAlias
-
-from .._models import BaseModel
-from .shared.chat_completion_token_logprob import ChatCompletionTokenLogprob
-
-__all__ = [
- "ResponseCreateResponse",
- "Usage",
- "UsageInputTokensDetails",
- "UsageOutputTokensDetails",
- "Choice",
- "ChoiceMessage",
- "ChoiceMessageToolCall",
- "ChoiceMessageToolCallFunction",
- "ChoiceLogprobs",
- "Output",
- "OutputUnionMember0",
- "OutputUnionMember1",
- "OutputUnionMember2",
- "OutputUnionMember2Content",
- "Tool",
-]
-
-
-class UsageInputTokensDetails(BaseModel):
- """A detailed breakdown of the input tokens."""
-
- cached_tokens: int
- """The number of tokens that were retrieved from the cache.
-
- [More on prompt caching](https://platform.openai.com/docs/guides/prompt-caching).
- """
-
-
-class UsageOutputTokensDetails(BaseModel):
- """A detailed breakdown of the output tokens."""
-
- reasoning_tokens: int
- """The number of reasoning tokens."""
-
- tool_output_tokens: int
- """The number of tool output tokens."""
-
-
-class Usage(BaseModel):
- """
- Detailed token usage statistics for the request, including input/output token counts and detailed breakdowns.
- """
-
- input_tokens: int
- """The number of input tokens."""
-
- input_tokens_details: UsageInputTokensDetails
- """A detailed breakdown of the input tokens."""
-
- output_tokens: int
- """The number of output tokens."""
-
- output_tokens_details: UsageOutputTokensDetails
- """A detailed breakdown of the output tokens."""
-
- total_tokens: int
- """The total number of tokens used."""
-
-
-class ChoiceMessageToolCallFunction(BaseModel):
- """The function that the model called."""
-
- arguments: str
- """
- The arguments to call the function with, as generated by the model in JSON
- format. Note that the model does not always generate valid JSON, and may
- hallucinate parameters not defined by your function schema. Validate the
- arguments in your code before calling your function.
- """
-
- name: str
- """The name of the function to call."""
-
-
-class ChoiceMessageToolCall(BaseModel):
- id: str
- """The ID of the tool call."""
-
- function: ChoiceMessageToolCallFunction
- """The function that the model called."""
-
- type: Literal["function"]
- """The type of the tool. Currently, only `function` is supported."""
-
-
-class ChoiceMessage(BaseModel):
- """The generated message response."""
-
- content: Optional[str] = None
- """The generated text content."""
-
- role: Optional[Literal["assistant"]] = None
- """The role of the message author, which is always `assistant`."""
-
- tool_calls: Optional[List[ChoiceMessageToolCall]] = None
- """The tool calls generated by the model, such as function calls."""
-
-
-class ChoiceLogprobs(BaseModel):
- """Log probability information for the choice.
-
- Only present if logprobs was requested in the request.
- """
-
- content: Optional[List[ChatCompletionTokenLogprob]] = None
- """A list of message content tokens with log probability information."""
-
-
-class Choice(BaseModel):
- finish_reason: Literal["stop", "length", "tool_calls", "content_filter"]
- """The reason the model stopped generating tokens.
-
- This will be `stop` if the model hit a natural stop point or a provided stop
- sequence, `length` if the maximum number of tokens specified in the request was
- reached, or `tool_calls` if the model called a tool.
- """
-
- index: int
- """The index of the choice in the list of choices."""
-
- message: ChoiceMessage
- """The generated message response."""
-
- logprobs: Optional[ChoiceLogprobs] = None
- """Log probability information for the choice.
-
- Only present if logprobs was requested in the request.
- """
-
-
-class OutputUnionMember0(BaseModel):
- arguments: str
- """JSON string of function arguments"""
-
- call_id: str
- """The unique ID of the function tool call"""
-
- name: str
- """The name of the function to call"""
-
- type: Literal["function_call"]
- """The type of output item"""
-
- id: Optional[str] = None
- """The unique ID of the function tool call (same as call_id)"""
-
- encrypted_content: Optional[str] = None
- """Encrypted content (optional)"""
-
- status: Optional[str] = None
- """Status of the item (optional, can be null)"""
-
-
-class OutputUnionMember1(BaseModel):
- text: str
- """The text content"""
-
- type: Literal["text"]
- """The type of output item"""
-
-
-class OutputUnionMember2Content(BaseModel):
- text: str
- """The reasoning text content"""
-
- type: Literal["reasoning_text"]
- """The type of content"""
-
-
-class OutputUnionMember2(BaseModel):
- id: str
- """The unique ID of the reasoning item"""
-
- content: List[OutputUnionMember2Content]
- """Array of reasoning content parts"""
-
- summary: List[object]
- """Summary of the reasoning (usually empty)"""
-
- type: Literal["reasoning"]
- """The type of output item"""
-
- encrypted_content: Optional[str] = None
- """Encrypted content (optional)"""
-
- status: Optional[str] = None
- """Status of the item (optional, can be null)"""
-
-
-Output: TypeAlias = Union[OutputUnionMember0, OutputUnionMember1, OutputUnionMember2]
-
-
-class Tool(BaseModel):
- """Tool definition for Responses API (flat format).
-
- This format is used by VLLM's Responses API where name, description, and parameters are at the top level of the tool object.
- """
-
- type: Literal["function", "web_search", "web_search_2025_08_26"]
- """The type of the tool.
-
- Supported values are `function` (custom tools), `web_search`, and
- `web_search_2025_08_26` (built-in web search).
- """
-
- description: Optional[str] = None
- """
- A description of what the function does, used by the model to choose when and
- how to call the function.
- """
-
- name: Optional[str] = None
- """The name of the function to be called.
-
- Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length
- of 64.
- """
-
- parameters: Optional[Dict[str, object]] = None
- """The parameters the functions accepts, described as a JSON Schema object.
-
- See the [guide](/docs/guides/function-calling) for examples, and the
- [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for
- documentation about the format.
-
- Omitting `parameters` defines a function with an empty parameter list.
- """
-
-
-class ResponseCreateResponse(BaseModel):
- """
- Represents a text-to-text response returned by the model, based on the provided input. VLLM models only.
- """
-
- id: str
- """A unique identifier for the response."""
-
- created: int
- """The Unix timestamp (in seconds) of when the response was created."""
-
- model: str
- """The model used to generate the response."""
-
- object: Literal["response"]
- """The object type, which is always `response`."""
-
- usage: Usage
- """
- Detailed token usage statistics for the request, including input/output token
- counts and detailed breakdowns.
- """
-
- background: Optional[bool] = None
- """Whether the request was processed in the background"""
-
- choices: Optional[List[Choice]] = None
- """A list of response choices.
-
- Can be more than one if `n` is greater than 1. Optional - Responses API
- primarily uses the output array.
- """
-
- input_messages: Optional[List[builtins.object]] = None
- """Input messages (if applicable)"""
-
- max_output_tokens: Optional[int] = None
- """Maximum output tokens setting"""
-
- max_tool_calls: Optional[int] = None
- """Maximum tool calls setting"""
-
- output: Optional[List[Output]] = None
- """An array of content items generated by the model.
-
- This includes text content, function calls, reasoning items, and other output
- types. Use this field for Responses API compatibility.
- """
-
- output_messages: Optional[List[builtins.object]] = None
- """Output messages (if applicable)"""
-
- parallel_tool_calls: Optional[bool] = None
- """Whether parallel tool calls are enabled"""
-
- previous_response_id: Optional[str] = None
- """Previous response ID (for multi-turn conversations)"""
-
- prompt: Optional[str] = None
- """Prompt used for the response"""
-
- reasoning: Optional[str] = None
- """Reasoning content"""
-
- service_tier: Optional[str] = None
- """Service tier used"""
-
- status: Optional[str] = None
- """Status of the response"""
-
- temperature: Optional[float] = None
- """Temperature setting used for the response"""
-
- text: Optional[str] = None
- """Text content"""
-
- tool_choice: Optional[str] = None
- """Tool choice setting used for the response"""
-
- tools: Optional[List[Tool]] = None
- """Tools available for the response"""
-
- top_logprobs: Optional[int] = None
- """Top logprobs setting"""
-
- top_p: Optional[float] = None
- """Top-p setting used for the response"""
-
- truncation: Optional[str] = None
- """Truncation setting"""
-
- user: Optional[str] = None
- """User identifier"""
From f774a64d51891a10e3d30ddaafe556afdf4502e0 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
<142633134+stainless-app[bot]@users.noreply.github.com>
Date: Thu, 19 Feb 2026 16:03:13 +0000
Subject: [PATCH 06/17] chore(internal): remove mock server code
---
scripts/mock | 41 -----------------------------------------
scripts/test | 46 ----------------------------------------------
2 files changed, 87 deletions(-)
delete mode 100755 scripts/mock
diff --git a/scripts/mock b/scripts/mock
deleted file mode 100755
index 0b28f6ea..00000000
--- a/scripts/mock
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env bash
-
-set -e
-
-cd "$(dirname "$0")/.."
-
-if [[ -n "$1" && "$1" != '--'* ]]; then
- URL="$1"
- shift
-else
- URL="$(grep 'openapi_spec_url' .stats.yml | cut -d' ' -f2)"
-fi
-
-# Check if the URL is empty
-if [ -z "$URL" ]; then
- echo "Error: No OpenAPI spec path/url provided or found in .stats.yml"
- exit 1
-fi
-
-echo "==> Starting mock server with URL ${URL}"
-
-# Run prism mock on the given spec
-if [ "$1" == "--daemon" ]; then
- npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock "$URL" &> .prism.log &
-
- # Wait for server to come online
- echo -n "Waiting for server"
- while ! grep -q "✖ fatal\|Prism is listening" ".prism.log" ; do
- echo -n "."
- sleep 0.1
- done
-
- if grep -q "✖ fatal" ".prism.log"; then
- cat .prism.log
- exit 1
- fi
-
- echo
-else
- npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock "$URL"
-fi
diff --git a/scripts/test b/scripts/test
index 4aa8fd64..1012c870 100755
--- a/scripts/test
+++ b/scripts/test
@@ -4,53 +4,7 @@ set -e
cd "$(dirname "$0")/.."
-RED='\033[0;31m'
-GREEN='\033[0;32m'
-YELLOW='\033[0;33m'
-NC='\033[0m' # No Color
-function prism_is_running() {
- curl --silent "http://localhost:4010" >/dev/null 2>&1
-}
-
-kill_server_on_port() {
- pids=$(lsof -t -i tcp:"$1" || echo "")
- if [ "$pids" != "" ]; then
- kill "$pids"
- echo "Stopped $pids."
- fi
-}
-
-function is_overriding_api_base_url() {
- [ -n "$TEST_API_BASE_URL" ]
-}
-
-if ! is_overriding_api_base_url && ! prism_is_running ; then
- # When we exit this script, make sure to kill the background mock server process
- trap 'kill_server_on_port 4010' EXIT
-
- # Start the dev server
- ./scripts/mock --daemon
-fi
-
-if is_overriding_api_base_url ; then
- echo -e "${GREEN}✔ Running tests against ${TEST_API_BASE_URL}${NC}"
- echo
-elif ! prism_is_running ; then
- echo -e "${RED}ERROR:${NC} The test suite will not run without a mock Prism server"
- echo -e "running against your OpenAPI spec."
- echo
- echo -e "To run the server, pass in the path or url of your OpenAPI"
- echo -e "spec to the prism command:"
- echo
- echo -e " \$ ${YELLOW}npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock path/to/your.openapi.yml${NC}"
- echo
-
- exit 1
-else
- echo -e "${GREEN}✔ Mock prism server is running with your OpenAPI spec${NC}"
- echo
-fi
export DEFER_PYDANTIC_BUILD=false
From fa4da887118bdf9462d74328a81a61dc4c4c30e9 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
<142633134+stainless-app[bot]@users.noreply.github.com>
Date: Thu, 19 Feb 2026 16:33:08 +0000
Subject: [PATCH 07/17] chore: update mock server docs
---
CONTRIBUTING.md | 7 -
.../agents/chat/test_completions.py | 32 +--
.../evaluation_metrics/anthropic/test_keys.py | 104 ++++-----
.../evaluation_metrics/oauth2/test_dropbox.py | 16 +-
.../evaluation_metrics/openai/test_keys.py | 104 ++++-----
.../agents/evaluation_metrics/test_oauth2.py | 16 +-
.../test_scheduled_indexing.py | 48 ++---
.../evaluation_metrics/test_workspaces.py | 96 ++++-----
.../workspaces/test_agents.py | 40 ++--
tests/api_resources/agents/test_api_keys.py | 92 ++++----
.../agents/test_evaluation_datasets.py | 32 +--
.../agents/test_evaluation_metrics.py | 28 +--
.../agents/test_evaluation_runs.py | 68 +++---
.../agents/test_evaluation_test_cases.py | 88 ++++----
tests/api_resources/agents/test_functions.py | 56 ++---
.../agents/test_knowledge_bases.py | 48 ++---
tests/api_resources/agents/test_routes.py | 72 +++----
tests/api_resources/agents/test_versions.py | 40 ++--
.../apps/test_job_invocations.py | 20 +-
tests/api_resources/chat/test_completions.py | 32 +--
.../databases/schema_registry/test_config.py | 64 +++---
.../gpu_droplets/account/test_keys.py | 68 +++---
.../gpu_droplets/firewalls/test_droplets.py | 32 +--
.../gpu_droplets/firewalls/test_rules.py | 40 ++--
.../gpu_droplets/firewalls/test_tags.py | 32 +--
.../gpu_droplets/floating_ips/test_actions.py | 64 +++---
.../gpu_droplets/images/test_actions.py | 48 ++---
.../load_balancers/test_droplets.py | 32 +--
.../load_balancers/test_forwarding_rules.py | 32 +--
.../gpu_droplets/test_actions.py | 200 +++++++++---------
.../gpu_droplets/test_autoscale.py | 140 ++++++------
.../gpu_droplets/test_backups.py | 56 ++---
.../test_destroy_with_associated_resources.py | 64 +++---
.../gpu_droplets/test_firewalls.py | 84 ++++----
.../gpu_droplets/test_floating_ips.py | 76 +++----
.../api_resources/gpu_droplets/test_images.py | 72 +++----
.../gpu_droplets/test_load_balancers.py | 136 ++++++------
.../api_resources/gpu_droplets/test_sizes.py | 16 +-
.../gpu_droplets/test_snapshots.py | 40 ++--
.../gpu_droplets/test_volumes.py | 96 ++++-----
.../gpu_droplets/volumes/test_actions.py | 132 ++++++------
.../gpu_droplets/volumes/test_snapshots.py | 72 +++----
.../api_resources/inference/test_api_keys.py | 84 ++++----
.../knowledge_bases/test_data_sources.py | 92 ++++----
.../knowledge_bases/test_indexing_jobs.py | 100 ++++-----
.../models/providers/test_anthropic.py | 104 ++++-----
.../models/providers/test_openai.py | 104 ++++-----
tests/api_resources/nfs/test_snapshots.py | 48 ++---
tests/api_resources/test_agents.py | 124 +++++------
tests/api_resources/test_billing.py | 20 +-
tests/api_resources/test_gpu_droplets.py | 144 ++++++-------
tests/api_resources/test_images.py | 32 +--
tests/api_resources/test_knowledge_bases.py | 100 ++++-----
tests/api_resources/test_models.py | 16 +-
tests/api_resources/test_nfs.py | 136 ++++++------
tests/api_resources/test_regions.py | 16 +-
tests/api_resources/test_responses.py | 32 +--
tests/api_resources/test_retrieve.py | 20 +-
58 files changed, 1900 insertions(+), 1907 deletions(-)
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 140a090b..670e32c7 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -85,13 +85,6 @@ $ pip install ./path-to-wheel-file.whl
## Running tests
-Most tests require you to [set up a mock server](https://github.com/stoplightio/prism) against the OpenAPI spec to run the tests.
-
-```sh
-# you will need npm installed
-$ npx prism mock path/to/your/openapi.yml
-```
-
```sh
$ ./scripts/test
```
diff --git a/tests/api_resources/agents/chat/test_completions.py b/tests/api_resources/agents/chat/test_completions.py
index a0df0e6f..30d797ff 100644
--- a/tests/api_resources/agents/chat/test_completions.py
+++ b/tests/api_resources/agents/chat/test_completions.py
@@ -17,7 +17,7 @@
class TestCompletions:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_overload_1(self, client: Gradient) -> None:
completion = client.agents.chat.completions.create(
@@ -31,7 +31,7 @@ def test_method_create_overload_1(self, client: Gradient) -> None:
)
assert_matches_type(CompletionCreateResponse, completion, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params_overload_1(self, client: Gradient) -> None:
completion = client.agents.chat.completions.create(
@@ -71,7 +71,7 @@ def test_method_create_with_all_params_overload_1(self, client: Gradient) -> Non
)
assert_matches_type(CompletionCreateResponse, completion, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create_overload_1(self, client: Gradient) -> None:
response = client.agents.chat.completions.with_raw_response.create(
@@ -89,7 +89,7 @@ def test_raw_response_create_overload_1(self, client: Gradient) -> None:
completion = response.parse()
assert_matches_type(CompletionCreateResponse, completion, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create_overload_1(self, client: Gradient) -> None:
with client.agents.chat.completions.with_streaming_response.create(
@@ -109,7 +109,7 @@ def test_streaming_response_create_overload_1(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_overload_2(self, client: Gradient) -> None:
completion_stream = client.agents.chat.completions.create(
@@ -124,7 +124,7 @@ def test_method_create_overload_2(self, client: Gradient) -> None:
)
completion_stream.response.close()
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params_overload_2(self, client: Gradient) -> None:
completion_stream = client.agents.chat.completions.create(
@@ -164,7 +164,7 @@ def test_method_create_with_all_params_overload_2(self, client: Gradient) -> Non
)
completion_stream.response.close()
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create_overload_2(self, client: Gradient) -> None:
response = client.agents.chat.completions.with_raw_response.create(
@@ -182,7 +182,7 @@ def test_raw_response_create_overload_2(self, client: Gradient) -> None:
stream = response.parse()
stream.close()
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create_overload_2(self, client: Gradient) -> None:
with client.agents.chat.completions.with_streaming_response.create(
@@ -212,7 +212,7 @@ class TestAsyncCompletions:
ids=["loose", "strict", "aiohttp"],
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_overload_1(self, async_client: AsyncGradient) -> None:
completion = await async_client.agents.chat.completions.create(
@@ -226,7 +226,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradient) -> No
)
assert_matches_type(CompletionCreateResponse, completion, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params_overload_1(self, async_client: AsyncGradient) -> None:
completion = await async_client.agents.chat.completions.create(
@@ -266,7 +266,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn
)
assert_matches_type(CompletionCreateResponse, completion, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.chat.completions.with_raw_response.create(
@@ -284,7 +284,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradient)
completion = await response.parse()
assert_matches_type(CompletionCreateResponse, completion, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create_overload_1(self, async_client: AsyncGradient) -> None:
async with async_client.agents.chat.completions.with_streaming_response.create(
@@ -304,7 +304,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_overload_2(self, async_client: AsyncGradient) -> None:
completion_stream = await async_client.agents.chat.completions.create(
@@ -319,7 +319,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradient) -> No
)
await completion_stream.response.aclose()
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradient) -> None:
completion_stream = await async_client.agents.chat.completions.create(
@@ -359,7 +359,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
)
await completion_stream.response.aclose()
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.chat.completions.with_raw_response.create(
@@ -377,7 +377,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradient)
stream = await response.parse()
await stream.close()
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create_overload_2(self, async_client: AsyncGradient) -> None:
async with async_client.agents.chat.completions.with_streaming_response.create(
diff --git a/tests/api_resources/agents/evaluation_metrics/anthropic/test_keys.py b/tests/api_resources/agents/evaluation_metrics/anthropic/test_keys.py
index 5028698c..3cb43489 100644
--- a/tests/api_resources/agents/evaluation_metrics/anthropic/test_keys.py
+++ b/tests/api_resources/agents/evaluation_metrics/anthropic/test_keys.py
@@ -24,13 +24,13 @@
class TestKeys:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create(self, client: Gradient) -> None:
key = client.agents.evaluation_metrics.anthropic.keys.create()
assert_matches_type(KeyCreateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params(self, client: Gradient) -> None:
key = client.agents.evaluation_metrics.anthropic.keys.create(
@@ -39,7 +39,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(KeyCreateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create(self, client: Gradient) -> None:
response = client.agents.evaluation_metrics.anthropic.keys.with_raw_response.create()
@@ -49,7 +49,7 @@ def test_raw_response_create(self, client: Gradient) -> None:
key = response.parse()
assert_matches_type(KeyCreateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create(self, client: Gradient) -> None:
with client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.create() as response:
@@ -61,7 +61,7 @@ def test_streaming_response_create(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
key = client.agents.evaluation_metrics.anthropic.keys.retrieve(
@@ -69,7 +69,7 @@ def test_method_retrieve(self, client: Gradient) -> None:
)
assert_matches_type(KeyRetrieveResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.agents.evaluation_metrics.anthropic.keys.with_raw_response.retrieve(
@@ -81,7 +81,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
key = response.parse()
assert_matches_type(KeyRetrieveResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.retrieve(
@@ -95,7 +95,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_retrieve(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
@@ -103,7 +103,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update(self, client: Gradient) -> None:
key = client.agents.evaluation_metrics.anthropic.keys.update(
@@ -111,7 +111,7 @@ def test_method_update(self, client: Gradient) -> None:
)
assert_matches_type(KeyUpdateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update_with_all_params(self, client: Gradient) -> None:
key = client.agents.evaluation_metrics.anthropic.keys.update(
@@ -122,7 +122,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(KeyUpdateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_update(self, client: Gradient) -> None:
response = client.agents.evaluation_metrics.anthropic.keys.with_raw_response.update(
@@ -134,7 +134,7 @@ def test_raw_response_update(self, client: Gradient) -> None:
key = response.parse()
assert_matches_type(KeyUpdateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_update(self, client: Gradient) -> None:
with client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.update(
@@ -148,7 +148,7 @@ def test_streaming_response_update(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_update(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"):
@@ -156,13 +156,13 @@ def test_path_params_update(self, client: Gradient) -> None:
path_api_key_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
key = client.agents.evaluation_metrics.anthropic.keys.list()
assert_matches_type(KeyListResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
key = client.agents.evaluation_metrics.anthropic.keys.list(
@@ -171,7 +171,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(KeyListResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.agents.evaluation_metrics.anthropic.keys.with_raw_response.list()
@@ -181,7 +181,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
key = response.parse()
assert_matches_type(KeyListResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.list() as response:
@@ -193,7 +193,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete(self, client: Gradient) -> None:
key = client.agents.evaluation_metrics.anthropic.keys.delete(
@@ -201,7 +201,7 @@ def test_method_delete(self, client: Gradient) -> None:
)
assert_matches_type(KeyDeleteResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete(self, client: Gradient) -> None:
response = client.agents.evaluation_metrics.anthropic.keys.with_raw_response.delete(
@@ -213,7 +213,7 @@ def test_raw_response_delete(self, client: Gradient) -> None:
key = response.parse()
assert_matches_type(KeyDeleteResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete(self, client: Gradient) -> None:
with client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.delete(
@@ -227,7 +227,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_delete(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
@@ -235,7 +235,7 @@ def test_path_params_delete(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_agents(self, client: Gradient) -> None:
key = client.agents.evaluation_metrics.anthropic.keys.list_agents(
@@ -243,7 +243,7 @@ def test_method_list_agents(self, client: Gradient) -> None:
)
assert_matches_type(KeyListAgentsResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_agents_with_all_params(self, client: Gradient) -> None:
key = client.agents.evaluation_metrics.anthropic.keys.list_agents(
@@ -253,7 +253,7 @@ def test_method_list_agents_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(KeyListAgentsResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list_agents(self, client: Gradient) -> None:
response = client.agents.evaluation_metrics.anthropic.keys.with_raw_response.list_agents(
@@ -265,7 +265,7 @@ def test_raw_response_list_agents(self, client: Gradient) -> None:
key = response.parse()
assert_matches_type(KeyListAgentsResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list_agents(self, client: Gradient) -> None:
with client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.list_agents(
@@ -279,7 +279,7 @@ def test_streaming_response_list_agents(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_list_agents(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
@@ -293,13 +293,13 @@ class TestAsyncKeys:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create(self, async_client: AsyncGradient) -> None:
key = await async_client.agents.evaluation_metrics.anthropic.keys.create()
assert_matches_type(KeyCreateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None:
key = await async_client.agents.evaluation_metrics.anthropic.keys.create(
@@ -308,7 +308,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(KeyCreateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_metrics.anthropic.keys.with_raw_response.create()
@@ -318,7 +318,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
key = await response.parse()
assert_matches_type(KeyCreateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.create() as response:
@@ -330,7 +330,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
key = await async_client.agents.evaluation_metrics.anthropic.keys.retrieve(
@@ -338,7 +338,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(KeyRetrieveResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_metrics.anthropic.keys.with_raw_response.retrieve(
@@ -350,7 +350,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
key = await response.parse()
assert_matches_type(KeyRetrieveResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.retrieve(
@@ -364,7 +364,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
@@ -372,7 +372,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update(self, async_client: AsyncGradient) -> None:
key = await async_client.agents.evaluation_metrics.anthropic.keys.update(
@@ -380,7 +380,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(KeyUpdateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None:
key = await async_client.agents.evaluation_metrics.anthropic.keys.update(
@@ -391,7 +391,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(KeyUpdateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_metrics.anthropic.keys.with_raw_response.update(
@@ -403,7 +403,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
key = await response.parse()
assert_matches_type(KeyUpdateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_update(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.update(
@@ -417,7 +417,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_update(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"):
@@ -425,13 +425,13 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None:
path_api_key_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
key = await async_client.agents.evaluation_metrics.anthropic.keys.list()
assert_matches_type(KeyListResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
key = await async_client.agents.evaluation_metrics.anthropic.keys.list(
@@ -440,7 +440,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(KeyListResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_metrics.anthropic.keys.with_raw_response.list()
@@ -450,7 +450,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
key = await response.parse()
assert_matches_type(KeyListResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.list() as response:
@@ -462,7 +462,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete(self, async_client: AsyncGradient) -> None:
key = await async_client.agents.evaluation_metrics.anthropic.keys.delete(
@@ -470,7 +470,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(KeyDeleteResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_metrics.anthropic.keys.with_raw_response.delete(
@@ -482,7 +482,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
key = await response.parse()
assert_matches_type(KeyDeleteResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.delete(
@@ -496,7 +496,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
@@ -504,7 +504,7 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_agents(self, async_client: AsyncGradient) -> None:
key = await async_client.agents.evaluation_metrics.anthropic.keys.list_agents(
@@ -512,7 +512,7 @@ async def test_method_list_agents(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(KeyListAgentsResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_agents_with_all_params(self, async_client: AsyncGradient) -> None:
key = await async_client.agents.evaluation_metrics.anthropic.keys.list_agents(
@@ -522,7 +522,7 @@ async def test_method_list_agents_with_all_params(self, async_client: AsyncGradi
)
assert_matches_type(KeyListAgentsResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list_agents(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_metrics.anthropic.keys.with_raw_response.list_agents(
@@ -534,7 +534,7 @@ async def test_raw_response_list_agents(self, async_client: AsyncGradient) -> No
key = await response.parse()
assert_matches_type(KeyListAgentsResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list_agents(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.list_agents(
@@ -548,7 +548,7 @@ async def test_streaming_response_list_agents(self, async_client: AsyncGradient)
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_list_agents(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
diff --git a/tests/api_resources/agents/evaluation_metrics/oauth2/test_dropbox.py b/tests/api_resources/agents/evaluation_metrics/oauth2/test_dropbox.py
index 417bb3b1..64aea805 100644
--- a/tests/api_resources/agents/evaluation_metrics/oauth2/test_dropbox.py
+++ b/tests/api_resources/agents/evaluation_metrics/oauth2/test_dropbox.py
@@ -17,13 +17,13 @@
class TestDropbox:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_tokens(self, client: Gradient) -> None:
dropbox = client.agents.evaluation_metrics.oauth2.dropbox.create_tokens()
assert_matches_type(DropboxCreateTokensResponse, dropbox, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_tokens_with_all_params(self, client: Gradient) -> None:
dropbox = client.agents.evaluation_metrics.oauth2.dropbox.create_tokens(
@@ -32,7 +32,7 @@ def test_method_create_tokens_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(DropboxCreateTokensResponse, dropbox, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create_tokens(self, client: Gradient) -> None:
response = client.agents.evaluation_metrics.oauth2.dropbox.with_raw_response.create_tokens()
@@ -42,7 +42,7 @@ def test_raw_response_create_tokens(self, client: Gradient) -> None:
dropbox = response.parse()
assert_matches_type(DropboxCreateTokensResponse, dropbox, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create_tokens(self, client: Gradient) -> None:
with client.agents.evaluation_metrics.oauth2.dropbox.with_streaming_response.create_tokens() as response:
@@ -60,13 +60,13 @@ class TestAsyncDropbox:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_tokens(self, async_client: AsyncGradient) -> None:
dropbox = await async_client.agents.evaluation_metrics.oauth2.dropbox.create_tokens()
assert_matches_type(DropboxCreateTokensResponse, dropbox, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_tokens_with_all_params(self, async_client: AsyncGradient) -> None:
dropbox = await async_client.agents.evaluation_metrics.oauth2.dropbox.create_tokens(
@@ -75,7 +75,7 @@ async def test_method_create_tokens_with_all_params(self, async_client: AsyncGra
)
assert_matches_type(DropboxCreateTokensResponse, dropbox, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create_tokens(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_metrics.oauth2.dropbox.with_raw_response.create_tokens()
@@ -85,7 +85,7 @@ async def test_raw_response_create_tokens(self, async_client: AsyncGradient) ->
dropbox = await response.parse()
assert_matches_type(DropboxCreateTokensResponse, dropbox, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create_tokens(self, async_client: AsyncGradient) -> None:
async with (
diff --git a/tests/api_resources/agents/evaluation_metrics/openai/test_keys.py b/tests/api_resources/agents/evaluation_metrics/openai/test_keys.py
index 7da165c2..475c52f8 100644
--- a/tests/api_resources/agents/evaluation_metrics/openai/test_keys.py
+++ b/tests/api_resources/agents/evaluation_metrics/openai/test_keys.py
@@ -24,13 +24,13 @@
class TestKeys:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create(self, client: Gradient) -> None:
key = client.agents.evaluation_metrics.openai.keys.create()
assert_matches_type(KeyCreateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params(self, client: Gradient) -> None:
key = client.agents.evaluation_metrics.openai.keys.create(
@@ -39,7 +39,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(KeyCreateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create(self, client: Gradient) -> None:
response = client.agents.evaluation_metrics.openai.keys.with_raw_response.create()
@@ -49,7 +49,7 @@ def test_raw_response_create(self, client: Gradient) -> None:
key = response.parse()
assert_matches_type(KeyCreateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create(self, client: Gradient) -> None:
with client.agents.evaluation_metrics.openai.keys.with_streaming_response.create() as response:
@@ -61,7 +61,7 @@ def test_streaming_response_create(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
key = client.agents.evaluation_metrics.openai.keys.retrieve(
@@ -69,7 +69,7 @@ def test_method_retrieve(self, client: Gradient) -> None:
)
assert_matches_type(KeyRetrieveResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.agents.evaluation_metrics.openai.keys.with_raw_response.retrieve(
@@ -81,7 +81,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
key = response.parse()
assert_matches_type(KeyRetrieveResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.agents.evaluation_metrics.openai.keys.with_streaming_response.retrieve(
@@ -95,7 +95,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_retrieve(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
@@ -103,7 +103,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update(self, client: Gradient) -> None:
key = client.agents.evaluation_metrics.openai.keys.update(
@@ -111,7 +111,7 @@ def test_method_update(self, client: Gradient) -> None:
)
assert_matches_type(KeyUpdateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update_with_all_params(self, client: Gradient) -> None:
key = client.agents.evaluation_metrics.openai.keys.update(
@@ -122,7 +122,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(KeyUpdateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_update(self, client: Gradient) -> None:
response = client.agents.evaluation_metrics.openai.keys.with_raw_response.update(
@@ -134,7 +134,7 @@ def test_raw_response_update(self, client: Gradient) -> None:
key = response.parse()
assert_matches_type(KeyUpdateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_update(self, client: Gradient) -> None:
with client.agents.evaluation_metrics.openai.keys.with_streaming_response.update(
@@ -148,7 +148,7 @@ def test_streaming_response_update(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_update(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"):
@@ -156,13 +156,13 @@ def test_path_params_update(self, client: Gradient) -> None:
path_api_key_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
key = client.agents.evaluation_metrics.openai.keys.list()
assert_matches_type(KeyListResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
key = client.agents.evaluation_metrics.openai.keys.list(
@@ -171,7 +171,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(KeyListResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.agents.evaluation_metrics.openai.keys.with_raw_response.list()
@@ -181,7 +181,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
key = response.parse()
assert_matches_type(KeyListResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.agents.evaluation_metrics.openai.keys.with_streaming_response.list() as response:
@@ -193,7 +193,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete(self, client: Gradient) -> None:
key = client.agents.evaluation_metrics.openai.keys.delete(
@@ -201,7 +201,7 @@ def test_method_delete(self, client: Gradient) -> None:
)
assert_matches_type(KeyDeleteResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete(self, client: Gradient) -> None:
response = client.agents.evaluation_metrics.openai.keys.with_raw_response.delete(
@@ -213,7 +213,7 @@ def test_raw_response_delete(self, client: Gradient) -> None:
key = response.parse()
assert_matches_type(KeyDeleteResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete(self, client: Gradient) -> None:
with client.agents.evaluation_metrics.openai.keys.with_streaming_response.delete(
@@ -227,7 +227,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_delete(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
@@ -235,7 +235,7 @@ def test_path_params_delete(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_agents(self, client: Gradient) -> None:
key = client.agents.evaluation_metrics.openai.keys.list_agents(
@@ -243,7 +243,7 @@ def test_method_list_agents(self, client: Gradient) -> None:
)
assert_matches_type(KeyListAgentsResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_agents_with_all_params(self, client: Gradient) -> None:
key = client.agents.evaluation_metrics.openai.keys.list_agents(
@@ -253,7 +253,7 @@ def test_method_list_agents_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(KeyListAgentsResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list_agents(self, client: Gradient) -> None:
response = client.agents.evaluation_metrics.openai.keys.with_raw_response.list_agents(
@@ -265,7 +265,7 @@ def test_raw_response_list_agents(self, client: Gradient) -> None:
key = response.parse()
assert_matches_type(KeyListAgentsResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list_agents(self, client: Gradient) -> None:
with client.agents.evaluation_metrics.openai.keys.with_streaming_response.list_agents(
@@ -279,7 +279,7 @@ def test_streaming_response_list_agents(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_list_agents(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
@@ -293,13 +293,13 @@ class TestAsyncKeys:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create(self, async_client: AsyncGradient) -> None:
key = await async_client.agents.evaluation_metrics.openai.keys.create()
assert_matches_type(KeyCreateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None:
key = await async_client.agents.evaluation_metrics.openai.keys.create(
@@ -308,7 +308,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(KeyCreateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_metrics.openai.keys.with_raw_response.create()
@@ -318,7 +318,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
key = await response.parse()
assert_matches_type(KeyCreateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_metrics.openai.keys.with_streaming_response.create() as response:
@@ -330,7 +330,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
key = await async_client.agents.evaluation_metrics.openai.keys.retrieve(
@@ -338,7 +338,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(KeyRetrieveResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_metrics.openai.keys.with_raw_response.retrieve(
@@ -350,7 +350,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
key = await response.parse()
assert_matches_type(KeyRetrieveResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_metrics.openai.keys.with_streaming_response.retrieve(
@@ -364,7 +364,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
@@ -372,7 +372,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update(self, async_client: AsyncGradient) -> None:
key = await async_client.agents.evaluation_metrics.openai.keys.update(
@@ -380,7 +380,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(KeyUpdateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None:
key = await async_client.agents.evaluation_metrics.openai.keys.update(
@@ -391,7 +391,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(KeyUpdateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_metrics.openai.keys.with_raw_response.update(
@@ -403,7 +403,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
key = await response.parse()
assert_matches_type(KeyUpdateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_update(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_metrics.openai.keys.with_streaming_response.update(
@@ -417,7 +417,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_update(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"):
@@ -425,13 +425,13 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None:
path_api_key_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
key = await async_client.agents.evaluation_metrics.openai.keys.list()
assert_matches_type(KeyListResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
key = await async_client.agents.evaluation_metrics.openai.keys.list(
@@ -440,7 +440,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(KeyListResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_metrics.openai.keys.with_raw_response.list()
@@ -450,7 +450,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
key = await response.parse()
assert_matches_type(KeyListResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_metrics.openai.keys.with_streaming_response.list() as response:
@@ -462,7 +462,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete(self, async_client: AsyncGradient) -> None:
key = await async_client.agents.evaluation_metrics.openai.keys.delete(
@@ -470,7 +470,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(KeyDeleteResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_metrics.openai.keys.with_raw_response.delete(
@@ -482,7 +482,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
key = await response.parse()
assert_matches_type(KeyDeleteResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_metrics.openai.keys.with_streaming_response.delete(
@@ -496,7 +496,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
@@ -504,7 +504,7 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_agents(self, async_client: AsyncGradient) -> None:
key = await async_client.agents.evaluation_metrics.openai.keys.list_agents(
@@ -512,7 +512,7 @@ async def test_method_list_agents(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(KeyListAgentsResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_agents_with_all_params(self, async_client: AsyncGradient) -> None:
key = await async_client.agents.evaluation_metrics.openai.keys.list_agents(
@@ -522,7 +522,7 @@ async def test_method_list_agents_with_all_params(self, async_client: AsyncGradi
)
assert_matches_type(KeyListAgentsResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list_agents(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_metrics.openai.keys.with_raw_response.list_agents(
@@ -534,7 +534,7 @@ async def test_raw_response_list_agents(self, async_client: AsyncGradient) -> No
key = await response.parse()
assert_matches_type(KeyListAgentsResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list_agents(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_metrics.openai.keys.with_streaming_response.list_agents(
@@ -548,7 +548,7 @@ async def test_streaming_response_list_agents(self, async_client: AsyncGradient)
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_list_agents(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
diff --git a/tests/api_resources/agents/evaluation_metrics/test_oauth2.py b/tests/api_resources/agents/evaluation_metrics/test_oauth2.py
index f247d94f..10137439 100644
--- a/tests/api_resources/agents/evaluation_metrics/test_oauth2.py
+++ b/tests/api_resources/agents/evaluation_metrics/test_oauth2.py
@@ -17,13 +17,13 @@
class TestOauth2:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_generate_url(self, client: Gradient) -> None:
oauth2 = client.agents.evaluation_metrics.oauth2.generate_url()
assert_matches_type(Oauth2GenerateURLResponse, oauth2, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_generate_url_with_all_params(self, client: Gradient) -> None:
oauth2 = client.agents.evaluation_metrics.oauth2.generate_url(
@@ -32,7 +32,7 @@ def test_method_generate_url_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(Oauth2GenerateURLResponse, oauth2, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_generate_url(self, client: Gradient) -> None:
response = client.agents.evaluation_metrics.oauth2.with_raw_response.generate_url()
@@ -42,7 +42,7 @@ def test_raw_response_generate_url(self, client: Gradient) -> None:
oauth2 = response.parse()
assert_matches_type(Oauth2GenerateURLResponse, oauth2, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_generate_url(self, client: Gradient) -> None:
with client.agents.evaluation_metrics.oauth2.with_streaming_response.generate_url() as response:
@@ -60,13 +60,13 @@ class TestAsyncOauth2:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_generate_url(self, async_client: AsyncGradient) -> None:
oauth2 = await async_client.agents.evaluation_metrics.oauth2.generate_url()
assert_matches_type(Oauth2GenerateURLResponse, oauth2, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_generate_url_with_all_params(self, async_client: AsyncGradient) -> None:
oauth2 = await async_client.agents.evaluation_metrics.oauth2.generate_url(
@@ -75,7 +75,7 @@ async def test_method_generate_url_with_all_params(self, async_client: AsyncGrad
)
assert_matches_type(Oauth2GenerateURLResponse, oauth2, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_generate_url(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_metrics.oauth2.with_raw_response.generate_url()
@@ -85,7 +85,7 @@ async def test_raw_response_generate_url(self, async_client: AsyncGradient) -> N
oauth2 = await response.parse()
assert_matches_type(Oauth2GenerateURLResponse, oauth2, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_generate_url(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_metrics.oauth2.with_streaming_response.generate_url() as response:
diff --git a/tests/api_resources/agents/evaluation_metrics/test_scheduled_indexing.py b/tests/api_resources/agents/evaluation_metrics/test_scheduled_indexing.py
index 388e06c9..788b758a 100644
--- a/tests/api_resources/agents/evaluation_metrics/test_scheduled_indexing.py
+++ b/tests/api_resources/agents/evaluation_metrics/test_scheduled_indexing.py
@@ -21,13 +21,13 @@
class TestScheduledIndexing:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create(self, client: Gradient) -> None:
scheduled_indexing = client.agents.evaluation_metrics.scheduled_indexing.create()
assert_matches_type(ScheduledIndexingCreateResponse, scheduled_indexing, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params(self, client: Gradient) -> None:
scheduled_indexing = client.agents.evaluation_metrics.scheduled_indexing.create(
@@ -37,7 +37,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(ScheduledIndexingCreateResponse, scheduled_indexing, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create(self, client: Gradient) -> None:
response = client.agents.evaluation_metrics.scheduled_indexing.with_raw_response.create()
@@ -47,7 +47,7 @@ def test_raw_response_create(self, client: Gradient) -> None:
scheduled_indexing = response.parse()
assert_matches_type(ScheduledIndexingCreateResponse, scheduled_indexing, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create(self, client: Gradient) -> None:
with client.agents.evaluation_metrics.scheduled_indexing.with_streaming_response.create() as response:
@@ -59,7 +59,7 @@ def test_streaming_response_create(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
scheduled_indexing = client.agents.evaluation_metrics.scheduled_indexing.retrieve(
@@ -67,7 +67,7 @@ def test_method_retrieve(self, client: Gradient) -> None:
)
assert_matches_type(ScheduledIndexingRetrieveResponse, scheduled_indexing, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.agents.evaluation_metrics.scheduled_indexing.with_raw_response.retrieve(
@@ -79,7 +79,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
scheduled_indexing = response.parse()
assert_matches_type(ScheduledIndexingRetrieveResponse, scheduled_indexing, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.agents.evaluation_metrics.scheduled_indexing.with_streaming_response.retrieve(
@@ -93,7 +93,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_retrieve(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `knowledge_base_uuid` but received ''"):
@@ -101,7 +101,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete(self, client: Gradient) -> None:
scheduled_indexing = client.agents.evaluation_metrics.scheduled_indexing.delete(
@@ -109,7 +109,7 @@ def test_method_delete(self, client: Gradient) -> None:
)
assert_matches_type(ScheduledIndexingDeleteResponse, scheduled_indexing, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete(self, client: Gradient) -> None:
response = client.agents.evaluation_metrics.scheduled_indexing.with_raw_response.delete(
@@ -121,7 +121,7 @@ def test_raw_response_delete(self, client: Gradient) -> None:
scheduled_indexing = response.parse()
assert_matches_type(ScheduledIndexingDeleteResponse, scheduled_indexing, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete(self, client: Gradient) -> None:
with client.agents.evaluation_metrics.scheduled_indexing.with_streaming_response.delete(
@@ -135,7 +135,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_delete(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
@@ -149,13 +149,13 @@ class TestAsyncScheduledIndexing:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create(self, async_client: AsyncGradient) -> None:
scheduled_indexing = await async_client.agents.evaluation_metrics.scheduled_indexing.create()
assert_matches_type(ScheduledIndexingCreateResponse, scheduled_indexing, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None:
scheduled_indexing = await async_client.agents.evaluation_metrics.scheduled_indexing.create(
@@ -165,7 +165,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(ScheduledIndexingCreateResponse, scheduled_indexing, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_metrics.scheduled_indexing.with_raw_response.create()
@@ -175,7 +175,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
scheduled_indexing = await response.parse()
assert_matches_type(ScheduledIndexingCreateResponse, scheduled_indexing, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradient) -> None:
async with (
@@ -189,7 +189,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
scheduled_indexing = await async_client.agents.evaluation_metrics.scheduled_indexing.retrieve(
@@ -197,7 +197,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(ScheduledIndexingRetrieveResponse, scheduled_indexing, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_metrics.scheduled_indexing.with_raw_response.retrieve(
@@ -209,7 +209,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
scheduled_indexing = await response.parse()
assert_matches_type(ScheduledIndexingRetrieveResponse, scheduled_indexing, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_metrics.scheduled_indexing.with_streaming_response.retrieve(
@@ -223,7 +223,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `knowledge_base_uuid` but received ''"):
@@ -231,7 +231,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete(self, async_client: AsyncGradient) -> None:
scheduled_indexing = await async_client.agents.evaluation_metrics.scheduled_indexing.delete(
@@ -239,7 +239,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(ScheduledIndexingDeleteResponse, scheduled_indexing, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_metrics.scheduled_indexing.with_raw_response.delete(
@@ -251,7 +251,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
scheduled_indexing = await response.parse()
assert_matches_type(ScheduledIndexingDeleteResponse, scheduled_indexing, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_metrics.scheduled_indexing.with_streaming_response.delete(
@@ -265,7 +265,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
diff --git a/tests/api_resources/agents/evaluation_metrics/test_workspaces.py b/tests/api_resources/agents/evaluation_metrics/test_workspaces.py
index 4f85212d..3493f322 100644
--- a/tests/api_resources/agents/evaluation_metrics/test_workspaces.py
+++ b/tests/api_resources/agents/evaluation_metrics/test_workspaces.py
@@ -24,13 +24,13 @@
class TestWorkspaces:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create(self, client: Gradient) -> None:
workspace = client.agents.evaluation_metrics.workspaces.create()
assert_matches_type(WorkspaceCreateResponse, workspace, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params(self, client: Gradient) -> None:
workspace = client.agents.evaluation_metrics.workspaces.create(
@@ -40,7 +40,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(WorkspaceCreateResponse, workspace, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create(self, client: Gradient) -> None:
response = client.agents.evaluation_metrics.workspaces.with_raw_response.create()
@@ -50,7 +50,7 @@ def test_raw_response_create(self, client: Gradient) -> None:
workspace = response.parse()
assert_matches_type(WorkspaceCreateResponse, workspace, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create(self, client: Gradient) -> None:
with client.agents.evaluation_metrics.workspaces.with_streaming_response.create() as response:
@@ -62,7 +62,7 @@ def test_streaming_response_create(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
workspace = client.agents.evaluation_metrics.workspaces.retrieve(
@@ -70,7 +70,7 @@ def test_method_retrieve(self, client: Gradient) -> None:
)
assert_matches_type(WorkspaceRetrieveResponse, workspace, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.agents.evaluation_metrics.workspaces.with_raw_response.retrieve(
@@ -82,7 +82,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
workspace = response.parse()
assert_matches_type(WorkspaceRetrieveResponse, workspace, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.agents.evaluation_metrics.workspaces.with_streaming_response.retrieve(
@@ -96,7 +96,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_retrieve(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `workspace_uuid` but received ''"):
@@ -104,7 +104,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update(self, client: Gradient) -> None:
workspace = client.agents.evaluation_metrics.workspaces.update(
@@ -112,7 +112,7 @@ def test_method_update(self, client: Gradient) -> None:
)
assert_matches_type(WorkspaceUpdateResponse, workspace, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update_with_all_params(self, client: Gradient) -> None:
workspace = client.agents.evaluation_metrics.workspaces.update(
@@ -123,7 +123,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(WorkspaceUpdateResponse, workspace, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_update(self, client: Gradient) -> None:
response = client.agents.evaluation_metrics.workspaces.with_raw_response.update(
@@ -135,7 +135,7 @@ def test_raw_response_update(self, client: Gradient) -> None:
workspace = response.parse()
assert_matches_type(WorkspaceUpdateResponse, workspace, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_update(self, client: Gradient) -> None:
with client.agents.evaluation_metrics.workspaces.with_streaming_response.update(
@@ -149,7 +149,7 @@ def test_streaming_response_update(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_update(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_workspace_uuid` but received ''"):
@@ -157,13 +157,13 @@ def test_path_params_update(self, client: Gradient) -> None:
path_workspace_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
workspace = client.agents.evaluation_metrics.workspaces.list()
assert_matches_type(WorkspaceListResponse, workspace, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.agents.evaluation_metrics.workspaces.with_raw_response.list()
@@ -173,7 +173,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
workspace = response.parse()
assert_matches_type(WorkspaceListResponse, workspace, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.agents.evaluation_metrics.workspaces.with_streaming_response.list() as response:
@@ -185,7 +185,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete(self, client: Gradient) -> None:
workspace = client.agents.evaluation_metrics.workspaces.delete(
@@ -193,7 +193,7 @@ def test_method_delete(self, client: Gradient) -> None:
)
assert_matches_type(WorkspaceDeleteResponse, workspace, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete(self, client: Gradient) -> None:
response = client.agents.evaluation_metrics.workspaces.with_raw_response.delete(
@@ -205,7 +205,7 @@ def test_raw_response_delete(self, client: Gradient) -> None:
workspace = response.parse()
assert_matches_type(WorkspaceDeleteResponse, workspace, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete(self, client: Gradient) -> None:
with client.agents.evaluation_metrics.workspaces.with_streaming_response.delete(
@@ -219,7 +219,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_delete(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `workspace_uuid` but received ''"):
@@ -227,7 +227,7 @@ def test_path_params_delete(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_evaluation_test_cases(self, client: Gradient) -> None:
workspace = client.agents.evaluation_metrics.workspaces.list_evaluation_test_cases(
@@ -235,7 +235,7 @@ def test_method_list_evaluation_test_cases(self, client: Gradient) -> None:
)
assert_matches_type(WorkspaceListEvaluationTestCasesResponse, workspace, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list_evaluation_test_cases(self, client: Gradient) -> None:
response = client.agents.evaluation_metrics.workspaces.with_raw_response.list_evaluation_test_cases(
@@ -247,7 +247,7 @@ def test_raw_response_list_evaluation_test_cases(self, client: Gradient) -> None
workspace = response.parse()
assert_matches_type(WorkspaceListEvaluationTestCasesResponse, workspace, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list_evaluation_test_cases(self, client: Gradient) -> None:
with client.agents.evaluation_metrics.workspaces.with_streaming_response.list_evaluation_test_cases(
@@ -261,7 +261,7 @@ def test_streaming_response_list_evaluation_test_cases(self, client: Gradient) -
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_list_evaluation_test_cases(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `workspace_uuid` but received ''"):
@@ -275,13 +275,13 @@ class TestAsyncWorkspaces:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create(self, async_client: AsyncGradient) -> None:
workspace = await async_client.agents.evaluation_metrics.workspaces.create()
assert_matches_type(WorkspaceCreateResponse, workspace, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None:
workspace = await async_client.agents.evaluation_metrics.workspaces.create(
@@ -291,7 +291,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(WorkspaceCreateResponse, workspace, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_metrics.workspaces.with_raw_response.create()
@@ -301,7 +301,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
workspace = await response.parse()
assert_matches_type(WorkspaceCreateResponse, workspace, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_metrics.workspaces.with_streaming_response.create() as response:
@@ -313,7 +313,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
workspace = await async_client.agents.evaluation_metrics.workspaces.retrieve(
@@ -321,7 +321,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(WorkspaceRetrieveResponse, workspace, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_metrics.workspaces.with_raw_response.retrieve(
@@ -333,7 +333,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
workspace = await response.parse()
assert_matches_type(WorkspaceRetrieveResponse, workspace, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_metrics.workspaces.with_streaming_response.retrieve(
@@ -347,7 +347,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `workspace_uuid` but received ''"):
@@ -355,7 +355,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update(self, async_client: AsyncGradient) -> None:
workspace = await async_client.agents.evaluation_metrics.workspaces.update(
@@ -363,7 +363,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(WorkspaceUpdateResponse, workspace, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None:
workspace = await async_client.agents.evaluation_metrics.workspaces.update(
@@ -374,7 +374,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(WorkspaceUpdateResponse, workspace, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_metrics.workspaces.with_raw_response.update(
@@ -386,7 +386,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
workspace = await response.parse()
assert_matches_type(WorkspaceUpdateResponse, workspace, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_update(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_metrics.workspaces.with_streaming_response.update(
@@ -400,7 +400,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_update(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_workspace_uuid` but received ''"):
@@ -408,13 +408,13 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None:
path_workspace_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
workspace = await async_client.agents.evaluation_metrics.workspaces.list()
assert_matches_type(WorkspaceListResponse, workspace, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_metrics.workspaces.with_raw_response.list()
@@ -424,7 +424,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
workspace = await response.parse()
assert_matches_type(WorkspaceListResponse, workspace, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_metrics.workspaces.with_streaming_response.list() as response:
@@ -436,7 +436,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete(self, async_client: AsyncGradient) -> None:
workspace = await async_client.agents.evaluation_metrics.workspaces.delete(
@@ -444,7 +444,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(WorkspaceDeleteResponse, workspace, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_metrics.workspaces.with_raw_response.delete(
@@ -456,7 +456,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
workspace = await response.parse()
assert_matches_type(WorkspaceDeleteResponse, workspace, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_metrics.workspaces.with_streaming_response.delete(
@@ -470,7 +470,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `workspace_uuid` but received ''"):
@@ -478,7 +478,7 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_evaluation_test_cases(self, async_client: AsyncGradient) -> None:
workspace = await async_client.agents.evaluation_metrics.workspaces.list_evaluation_test_cases(
@@ -486,7 +486,7 @@ async def test_method_list_evaluation_test_cases(self, async_client: AsyncGradie
)
assert_matches_type(WorkspaceListEvaluationTestCasesResponse, workspace, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list_evaluation_test_cases(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_metrics.workspaces.with_raw_response.list_evaluation_test_cases(
@@ -498,7 +498,7 @@ async def test_raw_response_list_evaluation_test_cases(self, async_client: Async
workspace = await response.parse()
assert_matches_type(WorkspaceListEvaluationTestCasesResponse, workspace, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list_evaluation_test_cases(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_metrics.workspaces.with_streaming_response.list_evaluation_test_cases(
@@ -512,7 +512,7 @@ async def test_streaming_response_list_evaluation_test_cases(self, async_client:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_list_evaluation_test_cases(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `workspace_uuid` but received ''"):
diff --git a/tests/api_resources/agents/evaluation_metrics/workspaces/test_agents.py b/tests/api_resources/agents/evaluation_metrics/workspaces/test_agents.py
index 4154843c..33166f69 100644
--- a/tests/api_resources/agents/evaluation_metrics/workspaces/test_agents.py
+++ b/tests/api_resources/agents/evaluation_metrics/workspaces/test_agents.py
@@ -20,7 +20,7 @@
class TestAgents:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
agent = client.agents.evaluation_metrics.workspaces.agents.list(
@@ -28,7 +28,7 @@ def test_method_list(self, client: Gradient) -> None:
)
assert_matches_type(AgentListResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
agent = client.agents.evaluation_metrics.workspaces.agents.list(
@@ -39,7 +39,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(AgentListResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.agents.evaluation_metrics.workspaces.agents.with_raw_response.list(
@@ -51,7 +51,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
agent = response.parse()
assert_matches_type(AgentListResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.agents.evaluation_metrics.workspaces.agents.with_streaming_response.list(
@@ -65,7 +65,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_list(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `workspace_uuid` but received ''"):
@@ -73,7 +73,7 @@ def test_path_params_list(self, client: Gradient) -> None:
workspace_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_move(self, client: Gradient) -> None:
agent = client.agents.evaluation_metrics.workspaces.agents.move(
@@ -81,7 +81,7 @@ def test_method_move(self, client: Gradient) -> None:
)
assert_matches_type(AgentMoveResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_move_with_all_params(self, client: Gradient) -> None:
agent = client.agents.evaluation_metrics.workspaces.agents.move(
@@ -91,7 +91,7 @@ def test_method_move_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(AgentMoveResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_move(self, client: Gradient) -> None:
response = client.agents.evaluation_metrics.workspaces.agents.with_raw_response.move(
@@ -103,7 +103,7 @@ def test_raw_response_move(self, client: Gradient) -> None:
agent = response.parse()
assert_matches_type(AgentMoveResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_move(self, client: Gradient) -> None:
with client.agents.evaluation_metrics.workspaces.agents.with_streaming_response.move(
@@ -117,7 +117,7 @@ def test_streaming_response_move(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_move(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_workspace_uuid` but received ''"):
@@ -131,7 +131,7 @@ class TestAsyncAgents:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
agent = await async_client.agents.evaluation_metrics.workspaces.agents.list(
@@ -139,7 +139,7 @@ async def test_method_list(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(AgentListResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
agent = await async_client.agents.evaluation_metrics.workspaces.agents.list(
@@ -150,7 +150,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(AgentListResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_metrics.workspaces.agents.with_raw_response.list(
@@ -162,7 +162,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
agent = await response.parse()
assert_matches_type(AgentListResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_metrics.workspaces.agents.with_streaming_response.list(
@@ -176,7 +176,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_list(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `workspace_uuid` but received ''"):
@@ -184,7 +184,7 @@ async def test_path_params_list(self, async_client: AsyncGradient) -> None:
workspace_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_move(self, async_client: AsyncGradient) -> None:
agent = await async_client.agents.evaluation_metrics.workspaces.agents.move(
@@ -192,7 +192,7 @@ async def test_method_move(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(AgentMoveResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_move_with_all_params(self, async_client: AsyncGradient) -> None:
agent = await async_client.agents.evaluation_metrics.workspaces.agents.move(
@@ -202,7 +202,7 @@ async def test_method_move_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(AgentMoveResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_move(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_metrics.workspaces.agents.with_raw_response.move(
@@ -214,7 +214,7 @@ async def test_raw_response_move(self, async_client: AsyncGradient) -> None:
agent = await response.parse()
assert_matches_type(AgentMoveResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_move(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_metrics.workspaces.agents.with_streaming_response.move(
@@ -228,7 +228,7 @@ async def test_streaming_response_move(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_move(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_workspace_uuid` but received ''"):
diff --git a/tests/api_resources/agents/test_api_keys.py b/tests/api_resources/agents/test_api_keys.py
index dbb19890..438f97b9 100644
--- a/tests/api_resources/agents/test_api_keys.py
+++ b/tests/api_resources/agents/test_api_keys.py
@@ -23,7 +23,7 @@
class TestAPIKeys:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create(self, client: Gradient) -> None:
api_key = client.agents.api_keys.create(
@@ -31,7 +31,7 @@ def test_method_create(self, client: Gradient) -> None:
)
assert_matches_type(APIKeyCreateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params(self, client: Gradient) -> None:
api_key = client.agents.api_keys.create(
@@ -41,7 +41,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(APIKeyCreateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create(self, client: Gradient) -> None:
response = client.agents.api_keys.with_raw_response.create(
@@ -53,7 +53,7 @@ def test_raw_response_create(self, client: Gradient) -> None:
api_key = response.parse()
assert_matches_type(APIKeyCreateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create(self, client: Gradient) -> None:
with client.agents.api_keys.with_streaming_response.create(
@@ -67,7 +67,7 @@ def test_streaming_response_create(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_create(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"):
@@ -75,7 +75,7 @@ def test_path_params_create(self, client: Gradient) -> None:
path_agent_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update(self, client: Gradient) -> None:
api_key = client.agents.api_keys.update(
@@ -84,7 +84,7 @@ def test_method_update(self, client: Gradient) -> None:
)
assert_matches_type(APIKeyUpdateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update_with_all_params(self, client: Gradient) -> None:
api_key = client.agents.api_keys.update(
@@ -96,7 +96,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(APIKeyUpdateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_update(self, client: Gradient) -> None:
response = client.agents.api_keys.with_raw_response.update(
@@ -109,7 +109,7 @@ def test_raw_response_update(self, client: Gradient) -> None:
api_key = response.parse()
assert_matches_type(APIKeyUpdateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_update(self, client: Gradient) -> None:
with client.agents.api_keys.with_streaming_response.update(
@@ -124,7 +124,7 @@ def test_streaming_response_update(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_update(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"):
@@ -139,7 +139,7 @@ def test_path_params_update(self, client: Gradient) -> None:
path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"',
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
api_key = client.agents.api_keys.list(
@@ -147,7 +147,7 @@ def test_method_list(self, client: Gradient) -> None:
)
assert_matches_type(APIKeyListResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
api_key = client.agents.api_keys.list(
@@ -157,7 +157,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(APIKeyListResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.agents.api_keys.with_raw_response.list(
@@ -169,7 +169,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
api_key = response.parse()
assert_matches_type(APIKeyListResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.agents.api_keys.with_streaming_response.list(
@@ -183,7 +183,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_list(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"):
@@ -191,7 +191,7 @@ def test_path_params_list(self, client: Gradient) -> None:
agent_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete(self, client: Gradient) -> None:
api_key = client.agents.api_keys.delete(
@@ -200,7 +200,7 @@ def test_method_delete(self, client: Gradient) -> None:
)
assert_matches_type(APIKeyDeleteResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete(self, client: Gradient) -> None:
response = client.agents.api_keys.with_raw_response.delete(
@@ -213,7 +213,7 @@ def test_raw_response_delete(self, client: Gradient) -> None:
api_key = response.parse()
assert_matches_type(APIKeyDeleteResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete(self, client: Gradient) -> None:
with client.agents.api_keys.with_streaming_response.delete(
@@ -228,7 +228,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_delete(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"):
@@ -243,7 +243,7 @@ def test_path_params_delete(self, client: Gradient) -> None:
agent_uuid='"123e4567-e89b-12d3-a456-426614174000"',
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_regenerate(self, client: Gradient) -> None:
api_key = client.agents.api_keys.regenerate(
@@ -252,7 +252,7 @@ def test_method_regenerate(self, client: Gradient) -> None:
)
assert_matches_type(APIKeyRegenerateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_regenerate(self, client: Gradient) -> None:
response = client.agents.api_keys.with_raw_response.regenerate(
@@ -265,7 +265,7 @@ def test_raw_response_regenerate(self, client: Gradient) -> None:
api_key = response.parse()
assert_matches_type(APIKeyRegenerateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_regenerate(self, client: Gradient) -> None:
with client.agents.api_keys.with_streaming_response.regenerate(
@@ -280,7 +280,7 @@ def test_streaming_response_regenerate(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_regenerate(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"):
@@ -301,7 +301,7 @@ class TestAsyncAPIKeys:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create(self, async_client: AsyncGradient) -> None:
api_key = await async_client.agents.api_keys.create(
@@ -309,7 +309,7 @@ async def test_method_create(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(APIKeyCreateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None:
api_key = await async_client.agents.api_keys.create(
@@ -319,7 +319,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(APIKeyCreateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.api_keys.with_raw_response.create(
@@ -331,7 +331,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
api_key = await response.parse()
assert_matches_type(APIKeyCreateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradient) -> None:
async with async_client.agents.api_keys.with_streaming_response.create(
@@ -345,7 +345,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_create(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"):
@@ -353,7 +353,7 @@ async def test_path_params_create(self, async_client: AsyncGradient) -> None:
path_agent_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update(self, async_client: AsyncGradient) -> None:
api_key = await async_client.agents.api_keys.update(
@@ -362,7 +362,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(APIKeyUpdateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None:
api_key = await async_client.agents.api_keys.update(
@@ -374,7 +374,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(APIKeyUpdateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.api_keys.with_raw_response.update(
@@ -387,7 +387,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
api_key = await response.parse()
assert_matches_type(APIKeyUpdateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_update(self, async_client: AsyncGradient) -> None:
async with async_client.agents.api_keys.with_streaming_response.update(
@@ -402,7 +402,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_update(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"):
@@ -417,7 +417,7 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None:
path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"',
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
api_key = await async_client.agents.api_keys.list(
@@ -425,7 +425,7 @@ async def test_method_list(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(APIKeyListResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
api_key = await async_client.agents.api_keys.list(
@@ -435,7 +435,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(APIKeyListResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.api_keys.with_raw_response.list(
@@ -447,7 +447,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
api_key = await response.parse()
assert_matches_type(APIKeyListResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.agents.api_keys.with_streaming_response.list(
@@ -461,7 +461,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_list(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"):
@@ -469,7 +469,7 @@ async def test_path_params_list(self, async_client: AsyncGradient) -> None:
agent_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete(self, async_client: AsyncGradient) -> None:
api_key = await async_client.agents.api_keys.delete(
@@ -478,7 +478,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(APIKeyDeleteResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.api_keys.with_raw_response.delete(
@@ -491,7 +491,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
api_key = await response.parse()
assert_matches_type(APIKeyDeleteResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None:
async with async_client.agents.api_keys.with_streaming_response.delete(
@@ -506,7 +506,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"):
@@ -521,7 +521,7 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
agent_uuid='"123e4567-e89b-12d3-a456-426614174000"',
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_regenerate(self, async_client: AsyncGradient) -> None:
api_key = await async_client.agents.api_keys.regenerate(
@@ -530,7 +530,7 @@ async def test_method_regenerate(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(APIKeyRegenerateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_regenerate(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.api_keys.with_raw_response.regenerate(
@@ -543,7 +543,7 @@ async def test_raw_response_regenerate(self, async_client: AsyncGradient) -> Non
api_key = await response.parse()
assert_matches_type(APIKeyRegenerateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_regenerate(self, async_client: AsyncGradient) -> None:
async with async_client.agents.api_keys.with_streaming_response.regenerate(
@@ -558,7 +558,7 @@ async def test_streaming_response_regenerate(self, async_client: AsyncGradient)
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_regenerate(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"):
diff --git a/tests/api_resources/agents/test_evaluation_datasets.py b/tests/api_resources/agents/test_evaluation_datasets.py
index 5093660e..3ab8adb8 100644
--- a/tests/api_resources/agents/test_evaluation_datasets.py
+++ b/tests/api_resources/agents/test_evaluation_datasets.py
@@ -20,13 +20,13 @@
class TestEvaluationDatasets:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create(self, client: Gradient) -> None:
evaluation_dataset = client.agents.evaluation_datasets.create()
assert_matches_type(EvaluationDatasetCreateResponse, evaluation_dataset, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params(self, client: Gradient) -> None:
evaluation_dataset = client.agents.evaluation_datasets.create(
@@ -40,7 +40,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(EvaluationDatasetCreateResponse, evaluation_dataset, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create(self, client: Gradient) -> None:
response = client.agents.evaluation_datasets.with_raw_response.create()
@@ -50,7 +50,7 @@ def test_raw_response_create(self, client: Gradient) -> None:
evaluation_dataset = response.parse()
assert_matches_type(EvaluationDatasetCreateResponse, evaluation_dataset, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create(self, client: Gradient) -> None:
with client.agents.evaluation_datasets.with_streaming_response.create() as response:
@@ -62,7 +62,7 @@ def test_streaming_response_create(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_file_upload_presigned_urls(self, client: Gradient) -> None:
evaluation_dataset = client.agents.evaluation_datasets.create_file_upload_presigned_urls()
@@ -70,7 +70,7 @@ def test_method_create_file_upload_presigned_urls(self, client: Gradient) -> Non
EvaluationDatasetCreateFileUploadPresignedURLsResponse, evaluation_dataset, path=["response"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_file_upload_presigned_urls_with_all_params(self, client: Gradient) -> None:
evaluation_dataset = client.agents.evaluation_datasets.create_file_upload_presigned_urls(
@@ -85,7 +85,7 @@ def test_method_create_file_upload_presigned_urls_with_all_params(self, client:
EvaluationDatasetCreateFileUploadPresignedURLsResponse, evaluation_dataset, path=["response"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create_file_upload_presigned_urls(self, client: Gradient) -> None:
response = client.agents.evaluation_datasets.with_raw_response.create_file_upload_presigned_urls()
@@ -97,7 +97,7 @@ def test_raw_response_create_file_upload_presigned_urls(self, client: Gradient)
EvaluationDatasetCreateFileUploadPresignedURLsResponse, evaluation_dataset, path=["response"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create_file_upload_presigned_urls(self, client: Gradient) -> None:
with client.agents.evaluation_datasets.with_streaming_response.create_file_upload_presigned_urls() as response:
@@ -117,13 +117,13 @@ class TestAsyncEvaluationDatasets:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create(self, async_client: AsyncGradient) -> None:
evaluation_dataset = await async_client.agents.evaluation_datasets.create()
assert_matches_type(EvaluationDatasetCreateResponse, evaluation_dataset, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None:
evaluation_dataset = await async_client.agents.evaluation_datasets.create(
@@ -137,7 +137,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(EvaluationDatasetCreateResponse, evaluation_dataset, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_datasets.with_raw_response.create()
@@ -147,7 +147,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
evaluation_dataset = await response.parse()
assert_matches_type(EvaluationDatasetCreateResponse, evaluation_dataset, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_datasets.with_streaming_response.create() as response:
@@ -159,7 +159,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_file_upload_presigned_urls(self, async_client: AsyncGradient) -> None:
evaluation_dataset = await async_client.agents.evaluation_datasets.create_file_upload_presigned_urls()
@@ -167,7 +167,7 @@ async def test_method_create_file_upload_presigned_urls(self, async_client: Asyn
EvaluationDatasetCreateFileUploadPresignedURLsResponse, evaluation_dataset, path=["response"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_file_upload_presigned_urls_with_all_params(self, async_client: AsyncGradient) -> None:
evaluation_dataset = await async_client.agents.evaluation_datasets.create_file_upload_presigned_urls(
@@ -182,7 +182,7 @@ async def test_method_create_file_upload_presigned_urls_with_all_params(self, as
EvaluationDatasetCreateFileUploadPresignedURLsResponse, evaluation_dataset, path=["response"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create_file_upload_presigned_urls(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_datasets.with_raw_response.create_file_upload_presigned_urls()
@@ -194,7 +194,7 @@ async def test_raw_response_create_file_upload_presigned_urls(self, async_client
EvaluationDatasetCreateFileUploadPresignedURLsResponse, evaluation_dataset, path=["response"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create_file_upload_presigned_urls(self, async_client: AsyncGradient) -> None:
async with (
diff --git a/tests/api_resources/agents/test_evaluation_metrics.py b/tests/api_resources/agents/test_evaluation_metrics.py
index 088353bb..e27b820d 100644
--- a/tests/api_resources/agents/test_evaluation_metrics.py
+++ b/tests/api_resources/agents/test_evaluation_metrics.py
@@ -20,13 +20,13 @@
class TestEvaluationMetrics:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
evaluation_metric = client.agents.evaluation_metrics.list()
assert_matches_type(EvaluationMetricListResponse, evaluation_metric, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.agents.evaluation_metrics.with_raw_response.list()
@@ -36,7 +36,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
evaluation_metric = response.parse()
assert_matches_type(EvaluationMetricListResponse, evaluation_metric, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.agents.evaluation_metrics.with_streaming_response.list() as response:
@@ -48,13 +48,13 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_regions(self, client: Gradient) -> None:
evaluation_metric = client.agents.evaluation_metrics.list_regions()
assert_matches_type(EvaluationMetricListRegionsResponse, evaluation_metric, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_regions_with_all_params(self, client: Gradient) -> None:
evaluation_metric = client.agents.evaluation_metrics.list_regions(
@@ -63,7 +63,7 @@ def test_method_list_regions_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(EvaluationMetricListRegionsResponse, evaluation_metric, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list_regions(self, client: Gradient) -> None:
response = client.agents.evaluation_metrics.with_raw_response.list_regions()
@@ -73,7 +73,7 @@ def test_raw_response_list_regions(self, client: Gradient) -> None:
evaluation_metric = response.parse()
assert_matches_type(EvaluationMetricListRegionsResponse, evaluation_metric, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list_regions(self, client: Gradient) -> None:
with client.agents.evaluation_metrics.with_streaming_response.list_regions() as response:
@@ -91,13 +91,13 @@ class TestAsyncEvaluationMetrics:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
evaluation_metric = await async_client.agents.evaluation_metrics.list()
assert_matches_type(EvaluationMetricListResponse, evaluation_metric, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_metrics.with_raw_response.list()
@@ -107,7 +107,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
evaluation_metric = await response.parse()
assert_matches_type(EvaluationMetricListResponse, evaluation_metric, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_metrics.with_streaming_response.list() as response:
@@ -119,13 +119,13 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_regions(self, async_client: AsyncGradient) -> None:
evaluation_metric = await async_client.agents.evaluation_metrics.list_regions()
assert_matches_type(EvaluationMetricListRegionsResponse, evaluation_metric, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_regions_with_all_params(self, async_client: AsyncGradient) -> None:
evaluation_metric = await async_client.agents.evaluation_metrics.list_regions(
@@ -134,7 +134,7 @@ async def test_method_list_regions_with_all_params(self, async_client: AsyncGrad
)
assert_matches_type(EvaluationMetricListRegionsResponse, evaluation_metric, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list_regions(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_metrics.with_raw_response.list_regions()
@@ -144,7 +144,7 @@ async def test_raw_response_list_regions(self, async_client: AsyncGradient) -> N
evaluation_metric = await response.parse()
assert_matches_type(EvaluationMetricListRegionsResponse, evaluation_metric, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list_regions(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_metrics.with_streaming_response.list_regions() as response:
diff --git a/tests/api_resources/agents/test_evaluation_runs.py b/tests/api_resources/agents/test_evaluation_runs.py
index 8fdfe2cd..faefa31b 100644
--- a/tests/api_resources/agents/test_evaluation_runs.py
+++ b/tests/api_resources/agents/test_evaluation_runs.py
@@ -22,13 +22,13 @@
class TestEvaluationRuns:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create(self, client: Gradient) -> None:
evaluation_run = client.agents.evaluation_runs.create()
assert_matches_type(EvaluationRunCreateResponse, evaluation_run, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params(self, client: Gradient) -> None:
evaluation_run = client.agents.evaluation_runs.create(
@@ -39,7 +39,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(EvaluationRunCreateResponse, evaluation_run, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create(self, client: Gradient) -> None:
response = client.agents.evaluation_runs.with_raw_response.create()
@@ -49,7 +49,7 @@ def test_raw_response_create(self, client: Gradient) -> None:
evaluation_run = response.parse()
assert_matches_type(EvaluationRunCreateResponse, evaluation_run, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create(self, client: Gradient) -> None:
with client.agents.evaluation_runs.with_streaming_response.create() as response:
@@ -61,7 +61,7 @@ def test_streaming_response_create(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
evaluation_run = client.agents.evaluation_runs.retrieve(
@@ -69,7 +69,7 @@ def test_method_retrieve(self, client: Gradient) -> None:
)
assert_matches_type(EvaluationRunRetrieveResponse, evaluation_run, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.agents.evaluation_runs.with_raw_response.retrieve(
@@ -81,7 +81,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
evaluation_run = response.parse()
assert_matches_type(EvaluationRunRetrieveResponse, evaluation_run, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.agents.evaluation_runs.with_streaming_response.retrieve(
@@ -95,7 +95,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_retrieve(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `evaluation_run_uuid` but received ''"):
@@ -103,7 +103,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_results(self, client: Gradient) -> None:
evaluation_run = client.agents.evaluation_runs.list_results(
@@ -111,7 +111,7 @@ def test_method_list_results(self, client: Gradient) -> None:
)
assert_matches_type(EvaluationRunListResultsResponse, evaluation_run, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_results_with_all_params(self, client: Gradient) -> None:
evaluation_run = client.agents.evaluation_runs.list_results(
@@ -121,7 +121,7 @@ def test_method_list_results_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(EvaluationRunListResultsResponse, evaluation_run, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list_results(self, client: Gradient) -> None:
response = client.agents.evaluation_runs.with_raw_response.list_results(
@@ -133,7 +133,7 @@ def test_raw_response_list_results(self, client: Gradient) -> None:
evaluation_run = response.parse()
assert_matches_type(EvaluationRunListResultsResponse, evaluation_run, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list_results(self, client: Gradient) -> None:
with client.agents.evaluation_runs.with_streaming_response.list_results(
@@ -147,7 +147,7 @@ def test_streaming_response_list_results(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_list_results(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `evaluation_run_uuid` but received ''"):
@@ -155,7 +155,7 @@ def test_path_params_list_results(self, client: Gradient) -> None:
evaluation_run_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve_results(self, client: Gradient) -> None:
evaluation_run = client.agents.evaluation_runs.retrieve_results(
@@ -164,7 +164,7 @@ def test_method_retrieve_results(self, client: Gradient) -> None:
)
assert_matches_type(EvaluationRunRetrieveResultsResponse, evaluation_run, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve_results(self, client: Gradient) -> None:
response = client.agents.evaluation_runs.with_raw_response.retrieve_results(
@@ -177,7 +177,7 @@ def test_raw_response_retrieve_results(self, client: Gradient) -> None:
evaluation_run = response.parse()
assert_matches_type(EvaluationRunRetrieveResultsResponse, evaluation_run, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve_results(self, client: Gradient) -> None:
with client.agents.evaluation_runs.with_streaming_response.retrieve_results(
@@ -192,7 +192,7 @@ def test_streaming_response_retrieve_results(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_retrieve_results(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `evaluation_run_uuid` but received ''"):
@@ -207,13 +207,13 @@ class TestAsyncEvaluationRuns:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create(self, async_client: AsyncGradient) -> None:
evaluation_run = await async_client.agents.evaluation_runs.create()
assert_matches_type(EvaluationRunCreateResponse, evaluation_run, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None:
evaluation_run = await async_client.agents.evaluation_runs.create(
@@ -224,7 +224,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(EvaluationRunCreateResponse, evaluation_run, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_runs.with_raw_response.create()
@@ -234,7 +234,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
evaluation_run = await response.parse()
assert_matches_type(EvaluationRunCreateResponse, evaluation_run, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_runs.with_streaming_response.create() as response:
@@ -246,7 +246,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
evaluation_run = await async_client.agents.evaluation_runs.retrieve(
@@ -254,7 +254,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(EvaluationRunRetrieveResponse, evaluation_run, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_runs.with_raw_response.retrieve(
@@ -266,7 +266,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
evaluation_run = await response.parse()
assert_matches_type(EvaluationRunRetrieveResponse, evaluation_run, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_runs.with_streaming_response.retrieve(
@@ -280,7 +280,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `evaluation_run_uuid` but received ''"):
@@ -288,7 +288,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_results(self, async_client: AsyncGradient) -> None:
evaluation_run = await async_client.agents.evaluation_runs.list_results(
@@ -296,7 +296,7 @@ async def test_method_list_results(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(EvaluationRunListResultsResponse, evaluation_run, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_results_with_all_params(self, async_client: AsyncGradient) -> None:
evaluation_run = await async_client.agents.evaluation_runs.list_results(
@@ -306,7 +306,7 @@ async def test_method_list_results_with_all_params(self, async_client: AsyncGrad
)
assert_matches_type(EvaluationRunListResultsResponse, evaluation_run, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list_results(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_runs.with_raw_response.list_results(
@@ -318,7 +318,7 @@ async def test_raw_response_list_results(self, async_client: AsyncGradient) -> N
evaluation_run = await response.parse()
assert_matches_type(EvaluationRunListResultsResponse, evaluation_run, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list_results(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_runs.with_streaming_response.list_results(
@@ -332,7 +332,7 @@ async def test_streaming_response_list_results(self, async_client: AsyncGradient
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_list_results(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `evaluation_run_uuid` but received ''"):
@@ -340,7 +340,7 @@ async def test_path_params_list_results(self, async_client: AsyncGradient) -> No
evaluation_run_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve_results(self, async_client: AsyncGradient) -> None:
evaluation_run = await async_client.agents.evaluation_runs.retrieve_results(
@@ -349,7 +349,7 @@ async def test_method_retrieve_results(self, async_client: AsyncGradient) -> Non
)
assert_matches_type(EvaluationRunRetrieveResultsResponse, evaluation_run, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve_results(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_runs.with_raw_response.retrieve_results(
@@ -362,7 +362,7 @@ async def test_raw_response_retrieve_results(self, async_client: AsyncGradient)
evaluation_run = await response.parse()
assert_matches_type(EvaluationRunRetrieveResultsResponse, evaluation_run, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve_results(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_runs.with_streaming_response.retrieve_results(
@@ -377,7 +377,7 @@ async def test_streaming_response_retrieve_results(self, async_client: AsyncGrad
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_retrieve_results(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `evaluation_run_uuid` but received ''"):
diff --git a/tests/api_resources/agents/test_evaluation_test_cases.py b/tests/api_resources/agents/test_evaluation_test_cases.py
index a8942239..03c9af54 100644
--- a/tests/api_resources/agents/test_evaluation_test_cases.py
+++ b/tests/api_resources/agents/test_evaluation_test_cases.py
@@ -23,13 +23,13 @@
class TestEvaluationTestCases:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create(self, client: Gradient) -> None:
evaluation_test_case = client.agents.evaluation_test_cases.create()
assert_matches_type(EvaluationTestCaseCreateResponse, evaluation_test_case, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params(self, client: Gradient) -> None:
evaluation_test_case = client.agents.evaluation_test_cases.create(
@@ -48,7 +48,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(EvaluationTestCaseCreateResponse, evaluation_test_case, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create(self, client: Gradient) -> None:
response = client.agents.evaluation_test_cases.with_raw_response.create()
@@ -58,7 +58,7 @@ def test_raw_response_create(self, client: Gradient) -> None:
evaluation_test_case = response.parse()
assert_matches_type(EvaluationTestCaseCreateResponse, evaluation_test_case, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create(self, client: Gradient) -> None:
with client.agents.evaluation_test_cases.with_streaming_response.create() as response:
@@ -70,7 +70,7 @@ def test_streaming_response_create(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
evaluation_test_case = client.agents.evaluation_test_cases.retrieve(
@@ -78,7 +78,7 @@ def test_method_retrieve(self, client: Gradient) -> None:
)
assert_matches_type(EvaluationTestCaseRetrieveResponse, evaluation_test_case, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve_with_all_params(self, client: Gradient) -> None:
evaluation_test_case = client.agents.evaluation_test_cases.retrieve(
@@ -87,7 +87,7 @@ def test_method_retrieve_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(EvaluationTestCaseRetrieveResponse, evaluation_test_case, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.agents.evaluation_test_cases.with_raw_response.retrieve(
@@ -99,7 +99,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
evaluation_test_case = response.parse()
assert_matches_type(EvaluationTestCaseRetrieveResponse, evaluation_test_case, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.agents.evaluation_test_cases.with_streaming_response.retrieve(
@@ -113,7 +113,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_retrieve(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `test_case_uuid` but received ''"):
@@ -121,7 +121,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None:
test_case_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update(self, client: Gradient) -> None:
evaluation_test_case = client.agents.evaluation_test_cases.update(
@@ -129,7 +129,7 @@ def test_method_update(self, client: Gradient) -> None:
)
assert_matches_type(EvaluationTestCaseUpdateResponse, evaluation_test_case, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update_with_all_params(self, client: Gradient) -> None:
evaluation_test_case = client.agents.evaluation_test_cases.update(
@@ -148,7 +148,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(EvaluationTestCaseUpdateResponse, evaluation_test_case, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_update(self, client: Gradient) -> None:
response = client.agents.evaluation_test_cases.with_raw_response.update(
@@ -160,7 +160,7 @@ def test_raw_response_update(self, client: Gradient) -> None:
evaluation_test_case = response.parse()
assert_matches_type(EvaluationTestCaseUpdateResponse, evaluation_test_case, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_update(self, client: Gradient) -> None:
with client.agents.evaluation_test_cases.with_streaming_response.update(
@@ -174,7 +174,7 @@ def test_streaming_response_update(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_update(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_test_case_uuid` but received ''"):
@@ -182,13 +182,13 @@ def test_path_params_update(self, client: Gradient) -> None:
path_test_case_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
evaluation_test_case = client.agents.evaluation_test_cases.list()
assert_matches_type(EvaluationTestCaseListResponse, evaluation_test_case, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.agents.evaluation_test_cases.with_raw_response.list()
@@ -198,7 +198,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
evaluation_test_case = response.parse()
assert_matches_type(EvaluationTestCaseListResponse, evaluation_test_case, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.agents.evaluation_test_cases.with_streaming_response.list() as response:
@@ -210,7 +210,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_evaluation_runs(self, client: Gradient) -> None:
evaluation_test_case = client.agents.evaluation_test_cases.list_evaluation_runs(
@@ -218,7 +218,7 @@ def test_method_list_evaluation_runs(self, client: Gradient) -> None:
)
assert_matches_type(EvaluationTestCaseListEvaluationRunsResponse, evaluation_test_case, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_evaluation_runs_with_all_params(self, client: Gradient) -> None:
evaluation_test_case = client.agents.evaluation_test_cases.list_evaluation_runs(
@@ -227,7 +227,7 @@ def test_method_list_evaluation_runs_with_all_params(self, client: Gradient) ->
)
assert_matches_type(EvaluationTestCaseListEvaluationRunsResponse, evaluation_test_case, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list_evaluation_runs(self, client: Gradient) -> None:
response = client.agents.evaluation_test_cases.with_raw_response.list_evaluation_runs(
@@ -239,7 +239,7 @@ def test_raw_response_list_evaluation_runs(self, client: Gradient) -> None:
evaluation_test_case = response.parse()
assert_matches_type(EvaluationTestCaseListEvaluationRunsResponse, evaluation_test_case, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list_evaluation_runs(self, client: Gradient) -> None:
with client.agents.evaluation_test_cases.with_streaming_response.list_evaluation_runs(
@@ -253,7 +253,7 @@ def test_streaming_response_list_evaluation_runs(self, client: Gradient) -> None
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_list_evaluation_runs(self, client: Gradient) -> None:
with pytest.raises(
@@ -269,13 +269,13 @@ class TestAsyncEvaluationTestCases:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create(self, async_client: AsyncGradient) -> None:
evaluation_test_case = await async_client.agents.evaluation_test_cases.create()
assert_matches_type(EvaluationTestCaseCreateResponse, evaluation_test_case, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None:
evaluation_test_case = await async_client.agents.evaluation_test_cases.create(
@@ -294,7 +294,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(EvaluationTestCaseCreateResponse, evaluation_test_case, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_test_cases.with_raw_response.create()
@@ -304,7 +304,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
evaluation_test_case = await response.parse()
assert_matches_type(EvaluationTestCaseCreateResponse, evaluation_test_case, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_test_cases.with_streaming_response.create() as response:
@@ -316,7 +316,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
evaluation_test_case = await async_client.agents.evaluation_test_cases.retrieve(
@@ -324,7 +324,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(EvaluationTestCaseRetrieveResponse, evaluation_test_case, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve_with_all_params(self, async_client: AsyncGradient) -> None:
evaluation_test_case = await async_client.agents.evaluation_test_cases.retrieve(
@@ -333,7 +333,7 @@ async def test_method_retrieve_with_all_params(self, async_client: AsyncGradient
)
assert_matches_type(EvaluationTestCaseRetrieveResponse, evaluation_test_case, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_test_cases.with_raw_response.retrieve(
@@ -345,7 +345,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
evaluation_test_case = await response.parse()
assert_matches_type(EvaluationTestCaseRetrieveResponse, evaluation_test_case, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_test_cases.with_streaming_response.retrieve(
@@ -359,7 +359,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `test_case_uuid` but received ''"):
@@ -367,7 +367,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
test_case_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update(self, async_client: AsyncGradient) -> None:
evaluation_test_case = await async_client.agents.evaluation_test_cases.update(
@@ -375,7 +375,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(EvaluationTestCaseUpdateResponse, evaluation_test_case, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None:
evaluation_test_case = await async_client.agents.evaluation_test_cases.update(
@@ -394,7 +394,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(EvaluationTestCaseUpdateResponse, evaluation_test_case, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_test_cases.with_raw_response.update(
@@ -406,7 +406,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
evaluation_test_case = await response.parse()
assert_matches_type(EvaluationTestCaseUpdateResponse, evaluation_test_case, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_update(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_test_cases.with_streaming_response.update(
@@ -420,7 +420,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_update(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_test_case_uuid` but received ''"):
@@ -428,13 +428,13 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None:
path_test_case_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
evaluation_test_case = await async_client.agents.evaluation_test_cases.list()
assert_matches_type(EvaluationTestCaseListResponse, evaluation_test_case, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_test_cases.with_raw_response.list()
@@ -444,7 +444,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
evaluation_test_case = await response.parse()
assert_matches_type(EvaluationTestCaseListResponse, evaluation_test_case, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_test_cases.with_streaming_response.list() as response:
@@ -456,7 +456,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_evaluation_runs(self, async_client: AsyncGradient) -> None:
evaluation_test_case = await async_client.agents.evaluation_test_cases.list_evaluation_runs(
@@ -464,7 +464,7 @@ async def test_method_list_evaluation_runs(self, async_client: AsyncGradient) ->
)
assert_matches_type(EvaluationTestCaseListEvaluationRunsResponse, evaluation_test_case, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_evaluation_runs_with_all_params(self, async_client: AsyncGradient) -> None:
evaluation_test_case = await async_client.agents.evaluation_test_cases.list_evaluation_runs(
@@ -473,7 +473,7 @@ async def test_method_list_evaluation_runs_with_all_params(self, async_client: A
)
assert_matches_type(EvaluationTestCaseListEvaluationRunsResponse, evaluation_test_case, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list_evaluation_runs(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.evaluation_test_cases.with_raw_response.list_evaluation_runs(
@@ -485,7 +485,7 @@ async def test_raw_response_list_evaluation_runs(self, async_client: AsyncGradie
evaluation_test_case = await response.parse()
assert_matches_type(EvaluationTestCaseListEvaluationRunsResponse, evaluation_test_case, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list_evaluation_runs(self, async_client: AsyncGradient) -> None:
async with async_client.agents.evaluation_test_cases.with_streaming_response.list_evaluation_runs(
@@ -499,7 +499,7 @@ async def test_streaming_response_list_evaluation_runs(self, async_client: Async
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_list_evaluation_runs(self, async_client: AsyncGradient) -> None:
with pytest.raises(
diff --git a/tests/api_resources/agents/test_functions.py b/tests/api_resources/agents/test_functions.py
index 64d55331..6b1fb5a0 100644
--- a/tests/api_resources/agents/test_functions.py
+++ b/tests/api_resources/agents/test_functions.py
@@ -21,7 +21,7 @@
class TestFunctions:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create(self, client: Gradient) -> None:
function = client.agents.functions.create(
@@ -29,7 +29,7 @@ def test_method_create(self, client: Gradient) -> None:
)
assert_matches_type(FunctionCreateResponse, function, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params(self, client: Gradient) -> None:
function = client.agents.functions.create(
@@ -44,7 +44,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(FunctionCreateResponse, function, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create(self, client: Gradient) -> None:
response = client.agents.functions.with_raw_response.create(
@@ -56,7 +56,7 @@ def test_raw_response_create(self, client: Gradient) -> None:
function = response.parse()
assert_matches_type(FunctionCreateResponse, function, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create(self, client: Gradient) -> None:
with client.agents.functions.with_streaming_response.create(
@@ -70,7 +70,7 @@ def test_streaming_response_create(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_create(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"):
@@ -78,7 +78,7 @@ def test_path_params_create(self, client: Gradient) -> None:
path_agent_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update(self, client: Gradient) -> None:
function = client.agents.functions.update(
@@ -87,7 +87,7 @@ def test_method_update(self, client: Gradient) -> None:
)
assert_matches_type(FunctionUpdateResponse, function, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update_with_all_params(self, client: Gradient) -> None:
function = client.agents.functions.update(
@@ -104,7 +104,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(FunctionUpdateResponse, function, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_update(self, client: Gradient) -> None:
response = client.agents.functions.with_raw_response.update(
@@ -117,7 +117,7 @@ def test_raw_response_update(self, client: Gradient) -> None:
function = response.parse()
assert_matches_type(FunctionUpdateResponse, function, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_update(self, client: Gradient) -> None:
with client.agents.functions.with_streaming_response.update(
@@ -132,7 +132,7 @@ def test_streaming_response_update(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_update(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"):
@@ -147,7 +147,7 @@ def test_path_params_update(self, client: Gradient) -> None:
path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"',
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete(self, client: Gradient) -> None:
function = client.agents.functions.delete(
@@ -156,7 +156,7 @@ def test_method_delete(self, client: Gradient) -> None:
)
assert_matches_type(FunctionDeleteResponse, function, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete(self, client: Gradient) -> None:
response = client.agents.functions.with_raw_response.delete(
@@ -169,7 +169,7 @@ def test_raw_response_delete(self, client: Gradient) -> None:
function = response.parse()
assert_matches_type(FunctionDeleteResponse, function, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete(self, client: Gradient) -> None:
with client.agents.functions.with_streaming_response.delete(
@@ -184,7 +184,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_delete(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"):
@@ -205,7 +205,7 @@ class TestAsyncFunctions:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create(self, async_client: AsyncGradient) -> None:
function = await async_client.agents.functions.create(
@@ -213,7 +213,7 @@ async def test_method_create(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(FunctionCreateResponse, function, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None:
function = await async_client.agents.functions.create(
@@ -228,7 +228,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(FunctionCreateResponse, function, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.functions.with_raw_response.create(
@@ -240,7 +240,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
function = await response.parse()
assert_matches_type(FunctionCreateResponse, function, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradient) -> None:
async with async_client.agents.functions.with_streaming_response.create(
@@ -254,7 +254,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_create(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"):
@@ -262,7 +262,7 @@ async def test_path_params_create(self, async_client: AsyncGradient) -> None:
path_agent_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update(self, async_client: AsyncGradient) -> None:
function = await async_client.agents.functions.update(
@@ -271,7 +271,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(FunctionUpdateResponse, function, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None:
function = await async_client.agents.functions.update(
@@ -288,7 +288,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(FunctionUpdateResponse, function, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.functions.with_raw_response.update(
@@ -301,7 +301,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
function = await response.parse()
assert_matches_type(FunctionUpdateResponse, function, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_update(self, async_client: AsyncGradient) -> None:
async with async_client.agents.functions.with_streaming_response.update(
@@ -316,7 +316,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_update(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"):
@@ -331,7 +331,7 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None:
path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"',
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete(self, async_client: AsyncGradient) -> None:
function = await async_client.agents.functions.delete(
@@ -340,7 +340,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(FunctionDeleteResponse, function, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.functions.with_raw_response.delete(
@@ -353,7 +353,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
function = await response.parse()
assert_matches_type(FunctionDeleteResponse, function, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None:
async with async_client.agents.functions.with_streaming_response.delete(
@@ -368,7 +368,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"):
diff --git a/tests/api_resources/agents/test_knowledge_bases.py b/tests/api_resources/agents/test_knowledge_bases.py
index 2cf09753..c773fd94 100644
--- a/tests/api_resources/agents/test_knowledge_bases.py
+++ b/tests/api_resources/agents/test_knowledge_bases.py
@@ -17,7 +17,7 @@
class TestKnowledgeBases:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_attach(self, client: Gradient) -> None:
knowledge_base = client.agents.knowledge_bases.attach(
@@ -25,7 +25,7 @@ def test_method_attach(self, client: Gradient) -> None:
)
assert_matches_type(APILinkKnowledgeBaseOutput, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_attach(self, client: Gradient) -> None:
response = client.agents.knowledge_bases.with_raw_response.attach(
@@ -37,7 +37,7 @@ def test_raw_response_attach(self, client: Gradient) -> None:
knowledge_base = response.parse()
assert_matches_type(APILinkKnowledgeBaseOutput, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_attach(self, client: Gradient) -> None:
with client.agents.knowledge_bases.with_streaming_response.attach(
@@ -51,7 +51,7 @@ def test_streaming_response_attach(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_attach(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"):
@@ -59,7 +59,7 @@ def test_path_params_attach(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_attach_single(self, client: Gradient) -> None:
knowledge_base = client.agents.knowledge_bases.attach_single(
@@ -68,7 +68,7 @@ def test_method_attach_single(self, client: Gradient) -> None:
)
assert_matches_type(APILinkKnowledgeBaseOutput, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_attach_single(self, client: Gradient) -> None:
response = client.agents.knowledge_bases.with_raw_response.attach_single(
@@ -81,7 +81,7 @@ def test_raw_response_attach_single(self, client: Gradient) -> None:
knowledge_base = response.parse()
assert_matches_type(APILinkKnowledgeBaseOutput, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_attach_single(self, client: Gradient) -> None:
with client.agents.knowledge_bases.with_streaming_response.attach_single(
@@ -96,7 +96,7 @@ def test_streaming_response_attach_single(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_attach_single(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"):
@@ -111,7 +111,7 @@ def test_path_params_attach_single(self, client: Gradient) -> None:
agent_uuid='"123e4567-e89b-12d3-a456-426614174000"',
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_detach(self, client: Gradient) -> None:
knowledge_base = client.agents.knowledge_bases.detach(
@@ -120,7 +120,7 @@ def test_method_detach(self, client: Gradient) -> None:
)
assert_matches_type(KnowledgeBaseDetachResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_detach(self, client: Gradient) -> None:
response = client.agents.knowledge_bases.with_raw_response.detach(
@@ -133,7 +133,7 @@ def test_raw_response_detach(self, client: Gradient) -> None:
knowledge_base = response.parse()
assert_matches_type(KnowledgeBaseDetachResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_detach(self, client: Gradient) -> None:
with client.agents.knowledge_bases.with_streaming_response.detach(
@@ -148,7 +148,7 @@ def test_streaming_response_detach(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_detach(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"):
@@ -169,7 +169,7 @@ class TestAsyncKnowledgeBases:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_attach(self, async_client: AsyncGradient) -> None:
knowledge_base = await async_client.agents.knowledge_bases.attach(
@@ -177,7 +177,7 @@ async def test_method_attach(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(APILinkKnowledgeBaseOutput, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_attach(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.knowledge_bases.with_raw_response.attach(
@@ -189,7 +189,7 @@ async def test_raw_response_attach(self, async_client: AsyncGradient) -> None:
knowledge_base = await response.parse()
assert_matches_type(APILinkKnowledgeBaseOutput, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_attach(self, async_client: AsyncGradient) -> None:
async with async_client.agents.knowledge_bases.with_streaming_response.attach(
@@ -203,7 +203,7 @@ async def test_streaming_response_attach(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_attach(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"):
@@ -211,7 +211,7 @@ async def test_path_params_attach(self, async_client: AsyncGradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_attach_single(self, async_client: AsyncGradient) -> None:
knowledge_base = await async_client.agents.knowledge_bases.attach_single(
@@ -220,7 +220,7 @@ async def test_method_attach_single(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(APILinkKnowledgeBaseOutput, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_attach_single(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.knowledge_bases.with_raw_response.attach_single(
@@ -233,7 +233,7 @@ async def test_raw_response_attach_single(self, async_client: AsyncGradient) ->
knowledge_base = await response.parse()
assert_matches_type(APILinkKnowledgeBaseOutput, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_attach_single(self, async_client: AsyncGradient) -> None:
async with async_client.agents.knowledge_bases.with_streaming_response.attach_single(
@@ -248,7 +248,7 @@ async def test_streaming_response_attach_single(self, async_client: AsyncGradien
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_attach_single(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"):
@@ -263,7 +263,7 @@ async def test_path_params_attach_single(self, async_client: AsyncGradient) -> N
agent_uuid='"123e4567-e89b-12d3-a456-426614174000"',
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_detach(self, async_client: AsyncGradient) -> None:
knowledge_base = await async_client.agents.knowledge_bases.detach(
@@ -272,7 +272,7 @@ async def test_method_detach(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(KnowledgeBaseDetachResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_detach(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.knowledge_bases.with_raw_response.detach(
@@ -285,7 +285,7 @@ async def test_raw_response_detach(self, async_client: AsyncGradient) -> None:
knowledge_base = await response.parse()
assert_matches_type(KnowledgeBaseDetachResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_detach(self, async_client: AsyncGradient) -> None:
async with async_client.agents.knowledge_bases.with_streaming_response.detach(
@@ -300,7 +300,7 @@ async def test_streaming_response_detach(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_detach(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"):
diff --git a/tests/api_resources/agents/test_routes.py b/tests/api_resources/agents/test_routes.py
index 3444dcc7..4edc5f73 100644
--- a/tests/api_resources/agents/test_routes.py
+++ b/tests/api_resources/agents/test_routes.py
@@ -22,7 +22,7 @@
class TestRoutes:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update(self, client: Gradient) -> None:
route = client.agents.routes.update(
@@ -31,7 +31,7 @@ def test_method_update(self, client: Gradient) -> None:
)
assert_matches_type(RouteUpdateResponse, route, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update_with_all_params(self, client: Gradient) -> None:
route = client.agents.routes.update(
@@ -45,7 +45,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(RouteUpdateResponse, route, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_update(self, client: Gradient) -> None:
response = client.agents.routes.with_raw_response.update(
@@ -58,7 +58,7 @@ def test_raw_response_update(self, client: Gradient) -> None:
route = response.parse()
assert_matches_type(RouteUpdateResponse, route, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_update(self, client: Gradient) -> None:
with client.agents.routes.with_streaming_response.update(
@@ -73,7 +73,7 @@ def test_streaming_response_update(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_update(self, client: Gradient) -> None:
with pytest.raises(
@@ -90,7 +90,7 @@ def test_path_params_update(self, client: Gradient) -> None:
path_parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"',
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete(self, client: Gradient) -> None:
route = client.agents.routes.delete(
@@ -99,7 +99,7 @@ def test_method_delete(self, client: Gradient) -> None:
)
assert_matches_type(RouteDeleteResponse, route, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete(self, client: Gradient) -> None:
response = client.agents.routes.with_raw_response.delete(
@@ -112,7 +112,7 @@ def test_raw_response_delete(self, client: Gradient) -> None:
route = response.parse()
assert_matches_type(RouteDeleteResponse, route, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete(self, client: Gradient) -> None:
with client.agents.routes.with_streaming_response.delete(
@@ -127,7 +127,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_delete(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `parent_agent_uuid` but received ''"):
@@ -142,7 +142,7 @@ def test_path_params_delete(self, client: Gradient) -> None:
parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"',
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_add(self, client: Gradient) -> None:
route = client.agents.routes.add(
@@ -151,7 +151,7 @@ def test_method_add(self, client: Gradient) -> None:
)
assert_matches_type(RouteAddResponse, route, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_add_with_all_params(self, client: Gradient) -> None:
route = client.agents.routes.add(
@@ -164,7 +164,7 @@ def test_method_add_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(RouteAddResponse, route, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_add(self, client: Gradient) -> None:
response = client.agents.routes.with_raw_response.add(
@@ -177,7 +177,7 @@ def test_raw_response_add(self, client: Gradient) -> None:
route = response.parse()
assert_matches_type(RouteAddResponse, route, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_add(self, client: Gradient) -> None:
with client.agents.routes.with_streaming_response.add(
@@ -192,7 +192,7 @@ def test_streaming_response_add(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_add(self, client: Gradient) -> None:
with pytest.raises(
@@ -209,7 +209,7 @@ def test_path_params_add(self, client: Gradient) -> None:
path_parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"',
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_view(self, client: Gradient) -> None:
route = client.agents.routes.view(
@@ -217,7 +217,7 @@ def test_method_view(self, client: Gradient) -> None:
)
assert_matches_type(RouteViewResponse, route, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_view(self, client: Gradient) -> None:
response = client.agents.routes.with_raw_response.view(
@@ -229,7 +229,7 @@ def test_raw_response_view(self, client: Gradient) -> None:
route = response.parse()
assert_matches_type(RouteViewResponse, route, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_view(self, client: Gradient) -> None:
with client.agents.routes.with_streaming_response.view(
@@ -243,7 +243,7 @@ def test_streaming_response_view(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_view(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
@@ -257,7 +257,7 @@ class TestAsyncRoutes:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update(self, async_client: AsyncGradient) -> None:
route = await async_client.agents.routes.update(
@@ -266,7 +266,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(RouteUpdateResponse, route, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None:
route = await async_client.agents.routes.update(
@@ -280,7 +280,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(RouteUpdateResponse, route, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.routes.with_raw_response.update(
@@ -293,7 +293,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
route = await response.parse()
assert_matches_type(RouteUpdateResponse, route, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_update(self, async_client: AsyncGradient) -> None:
async with async_client.agents.routes.with_streaming_response.update(
@@ -308,7 +308,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_update(self, async_client: AsyncGradient) -> None:
with pytest.raises(
@@ -325,7 +325,7 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None:
path_parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"',
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete(self, async_client: AsyncGradient) -> None:
route = await async_client.agents.routes.delete(
@@ -334,7 +334,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(RouteDeleteResponse, route, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.routes.with_raw_response.delete(
@@ -347,7 +347,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
route = await response.parse()
assert_matches_type(RouteDeleteResponse, route, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None:
async with async_client.agents.routes.with_streaming_response.delete(
@@ -362,7 +362,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `parent_agent_uuid` but received ''"):
@@ -377,7 +377,7 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"',
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_add(self, async_client: AsyncGradient) -> None:
route = await async_client.agents.routes.add(
@@ -386,7 +386,7 @@ async def test_method_add(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(RouteAddResponse, route, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_add_with_all_params(self, async_client: AsyncGradient) -> None:
route = await async_client.agents.routes.add(
@@ -399,7 +399,7 @@ async def test_method_add_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(RouteAddResponse, route, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_add(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.routes.with_raw_response.add(
@@ -412,7 +412,7 @@ async def test_raw_response_add(self, async_client: AsyncGradient) -> None:
route = await response.parse()
assert_matches_type(RouteAddResponse, route, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_add(self, async_client: AsyncGradient) -> None:
async with async_client.agents.routes.with_streaming_response.add(
@@ -427,7 +427,7 @@ async def test_streaming_response_add(self, async_client: AsyncGradient) -> None
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_add(self, async_client: AsyncGradient) -> None:
with pytest.raises(
@@ -444,7 +444,7 @@ async def test_path_params_add(self, async_client: AsyncGradient) -> None:
path_parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"',
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_view(self, async_client: AsyncGradient) -> None:
route = await async_client.agents.routes.view(
@@ -452,7 +452,7 @@ async def test_method_view(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(RouteViewResponse, route, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_view(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.routes.with_raw_response.view(
@@ -464,7 +464,7 @@ async def test_raw_response_view(self, async_client: AsyncGradient) -> None:
route = await response.parse()
assert_matches_type(RouteViewResponse, route, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_view(self, async_client: AsyncGradient) -> None:
async with async_client.agents.routes.with_streaming_response.view(
@@ -478,7 +478,7 @@ async def test_streaming_response_view(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_view(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
diff --git a/tests/api_resources/agents/test_versions.py b/tests/api_resources/agents/test_versions.py
index d12e362e..ab10c5e4 100644
--- a/tests/api_resources/agents/test_versions.py
+++ b/tests/api_resources/agents/test_versions.py
@@ -17,7 +17,7 @@
class TestVersions:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update(self, client: Gradient) -> None:
version = client.agents.versions.update(
@@ -25,7 +25,7 @@ def test_method_update(self, client: Gradient) -> None:
)
assert_matches_type(VersionUpdateResponse, version, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update_with_all_params(self, client: Gradient) -> None:
version = client.agents.versions.update(
@@ -35,7 +35,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(VersionUpdateResponse, version, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_update(self, client: Gradient) -> None:
response = client.agents.versions.with_raw_response.update(
@@ -47,7 +47,7 @@ def test_raw_response_update(self, client: Gradient) -> None:
version = response.parse()
assert_matches_type(VersionUpdateResponse, version, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_update(self, client: Gradient) -> None:
with client.agents.versions.with_streaming_response.update(
@@ -61,7 +61,7 @@ def test_streaming_response_update(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_update(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"):
@@ -69,7 +69,7 @@ def test_path_params_update(self, client: Gradient) -> None:
path_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
version = client.agents.versions.list(
@@ -77,7 +77,7 @@ def test_method_list(self, client: Gradient) -> None:
)
assert_matches_type(VersionListResponse, version, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
version = client.agents.versions.list(
@@ -87,7 +87,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(VersionListResponse, version, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.agents.versions.with_raw_response.list(
@@ -99,7 +99,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
version = response.parse()
assert_matches_type(VersionListResponse, version, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.agents.versions.with_streaming_response.list(
@@ -113,7 +113,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_list(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
@@ -127,7 +127,7 @@ class TestAsyncVersions:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update(self, async_client: AsyncGradient) -> None:
version = await async_client.agents.versions.update(
@@ -135,7 +135,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(VersionUpdateResponse, version, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None:
version = await async_client.agents.versions.update(
@@ -145,7 +145,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(VersionUpdateResponse, version, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.versions.with_raw_response.update(
@@ -157,7 +157,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
version = await response.parse()
assert_matches_type(VersionUpdateResponse, version, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_update(self, async_client: AsyncGradient) -> None:
async with async_client.agents.versions.with_streaming_response.update(
@@ -171,7 +171,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_update(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"):
@@ -179,7 +179,7 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None:
path_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
version = await async_client.agents.versions.list(
@@ -187,7 +187,7 @@ async def test_method_list(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(VersionListResponse, version, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
version = await async_client.agents.versions.list(
@@ -197,7 +197,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(VersionListResponse, version, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.versions.with_raw_response.list(
@@ -209,7 +209,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
version = await response.parse()
assert_matches_type(VersionListResponse, version, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.agents.versions.with_streaming_response.list(
@@ -223,7 +223,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_list(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
diff --git a/tests/api_resources/apps/test_job_invocations.py b/tests/api_resources/apps/test_job_invocations.py
index a7cb68df..388be266 100644
--- a/tests/api_resources/apps/test_job_invocations.py
+++ b/tests/api_resources/apps/test_job_invocations.py
@@ -17,7 +17,7 @@
class TestJobInvocations:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_cancel(self, client: Gradient) -> None:
job_invocation = client.apps.job_invocations.cancel(
@@ -26,7 +26,7 @@ def test_method_cancel(self, client: Gradient) -> None:
)
assert_matches_type(JobInvocationCancelResponse, job_invocation, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_cancel_with_all_params(self, client: Gradient) -> None:
job_invocation = client.apps.job_invocations.cancel(
@@ -36,7 +36,7 @@ def test_method_cancel_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(JobInvocationCancelResponse, job_invocation, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_cancel(self, client: Gradient) -> None:
response = client.apps.job_invocations.with_raw_response.cancel(
@@ -49,7 +49,7 @@ def test_raw_response_cancel(self, client: Gradient) -> None:
job_invocation = response.parse()
assert_matches_type(JobInvocationCancelResponse, job_invocation, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_cancel(self, client: Gradient) -> None:
with client.apps.job_invocations.with_streaming_response.cancel(
@@ -64,7 +64,7 @@ def test_streaming_response_cancel(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_cancel(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `app_id` but received ''"):
@@ -85,7 +85,7 @@ class TestAsyncJobInvocations:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_cancel(self, async_client: AsyncGradient) -> None:
job_invocation = await async_client.apps.job_invocations.cancel(
@@ -94,7 +94,7 @@ async def test_method_cancel(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(JobInvocationCancelResponse, job_invocation, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_cancel_with_all_params(self, async_client: AsyncGradient) -> None:
job_invocation = await async_client.apps.job_invocations.cancel(
@@ -104,7 +104,7 @@ async def test_method_cancel_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(JobInvocationCancelResponse, job_invocation, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_cancel(self, async_client: AsyncGradient) -> None:
response = await async_client.apps.job_invocations.with_raw_response.cancel(
@@ -117,7 +117,7 @@ async def test_raw_response_cancel(self, async_client: AsyncGradient) -> None:
job_invocation = await response.parse()
assert_matches_type(JobInvocationCancelResponse, job_invocation, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_cancel(self, async_client: AsyncGradient) -> None:
async with async_client.apps.job_invocations.with_streaming_response.cancel(
@@ -132,7 +132,7 @@ async def test_streaming_response_cancel(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_cancel(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `app_id` but received ''"):
diff --git a/tests/api_resources/chat/test_completions.py b/tests/api_resources/chat/test_completions.py
index fce393fd..81125c20 100644
--- a/tests/api_resources/chat/test_completions.py
+++ b/tests/api_resources/chat/test_completions.py
@@ -17,7 +17,7 @@
class TestCompletions:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_overload_1(self, client: Gradient) -> None:
completion = client.chat.completions.create(
@@ -31,7 +31,7 @@ def test_method_create_overload_1(self, client: Gradient) -> None:
)
assert_matches_type(CompletionCreateResponse, completion, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params_overload_1(self, client: Gradient) -> None:
completion = client.chat.completions.create(
@@ -71,7 +71,7 @@ def test_method_create_with_all_params_overload_1(self, client: Gradient) -> Non
)
assert_matches_type(CompletionCreateResponse, completion, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create_overload_1(self, client: Gradient) -> None:
response = client.chat.completions.with_raw_response.create(
@@ -89,7 +89,7 @@ def test_raw_response_create_overload_1(self, client: Gradient) -> None:
completion = response.parse()
assert_matches_type(CompletionCreateResponse, completion, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create_overload_1(self, client: Gradient) -> None:
with client.chat.completions.with_streaming_response.create(
@@ -109,7 +109,7 @@ def test_streaming_response_create_overload_1(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_overload_2(self, client: Gradient) -> None:
completion_stream = client.chat.completions.create(
@@ -124,7 +124,7 @@ def test_method_create_overload_2(self, client: Gradient) -> None:
)
completion_stream.response.close()
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params_overload_2(self, client: Gradient) -> None:
completion_stream = client.chat.completions.create(
@@ -164,7 +164,7 @@ def test_method_create_with_all_params_overload_2(self, client: Gradient) -> Non
)
completion_stream.response.close()
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create_overload_2(self, client: Gradient) -> None:
response = client.chat.completions.with_raw_response.create(
@@ -182,7 +182,7 @@ def test_raw_response_create_overload_2(self, client: Gradient) -> None:
stream = response.parse()
stream.close()
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create_overload_2(self, client: Gradient) -> None:
with client.chat.completions.with_streaming_response.create(
@@ -209,7 +209,7 @@ class TestAsyncCompletions:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_overload_1(self, async_client: AsyncGradient) -> None:
completion = await async_client.chat.completions.create(
@@ -223,7 +223,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradient) -> No
)
assert_matches_type(CompletionCreateResponse, completion, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params_overload_1(self, async_client: AsyncGradient) -> None:
completion = await async_client.chat.completions.create(
@@ -263,7 +263,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn
)
assert_matches_type(CompletionCreateResponse, completion, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) -> None:
response = await async_client.chat.completions.with_raw_response.create(
@@ -281,7 +281,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradient)
completion = await response.parse()
assert_matches_type(CompletionCreateResponse, completion, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create_overload_1(self, async_client: AsyncGradient) -> None:
async with async_client.chat.completions.with_streaming_response.create(
@@ -301,7 +301,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_overload_2(self, async_client: AsyncGradient) -> None:
completion_stream = await async_client.chat.completions.create(
@@ -316,7 +316,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradient) -> No
)
await completion_stream.response.aclose()
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradient) -> None:
completion_stream = await async_client.chat.completions.create(
@@ -356,7 +356,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
)
await completion_stream.response.aclose()
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) -> None:
response = await async_client.chat.completions.with_raw_response.create(
@@ -374,7 +374,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradient)
stream = await response.parse()
await stream.close()
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create_overload_2(self, async_client: AsyncGradient) -> None:
async with async_client.chat.completions.with_streaming_response.create(
diff --git a/tests/api_resources/databases/schema_registry/test_config.py b/tests/api_resources/databases/schema_registry/test_config.py
index ebd60c4c..c58a6ad9 100644
--- a/tests/api_resources/databases/schema_registry/test_config.py
+++ b/tests/api_resources/databases/schema_registry/test_config.py
@@ -22,7 +22,7 @@
class TestConfig:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
config = client.databases.schema_registry.config.retrieve(
@@ -30,7 +30,7 @@ def test_method_retrieve(self, client: Gradient) -> None:
)
assert_matches_type(ConfigRetrieveResponse, config, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.databases.schema_registry.config.with_raw_response.retrieve(
@@ -42,7 +42,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
config = response.parse()
assert_matches_type(ConfigRetrieveResponse, config, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.databases.schema_registry.config.with_streaming_response.retrieve(
@@ -56,7 +56,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_retrieve(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `database_cluster_uuid` but received ''"):
@@ -64,7 +64,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update(self, client: Gradient) -> None:
config = client.databases.schema_registry.config.update(
@@ -73,7 +73,7 @@ def test_method_update(self, client: Gradient) -> None:
)
assert_matches_type(ConfigUpdateResponse, config, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_update(self, client: Gradient) -> None:
response = client.databases.schema_registry.config.with_raw_response.update(
@@ -86,7 +86,7 @@ def test_raw_response_update(self, client: Gradient) -> None:
config = response.parse()
assert_matches_type(ConfigUpdateResponse, config, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_update(self, client: Gradient) -> None:
with client.databases.schema_registry.config.with_streaming_response.update(
@@ -101,7 +101,7 @@ def test_streaming_response_update(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_update(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `database_cluster_uuid` but received ''"):
@@ -110,7 +110,7 @@ def test_path_params_update(self, client: Gradient) -> None:
compatibility_level="BACKWARD",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve_subject(self, client: Gradient) -> None:
config = client.databases.schema_registry.config.retrieve_subject(
@@ -119,7 +119,7 @@ def test_method_retrieve_subject(self, client: Gradient) -> None:
)
assert_matches_type(ConfigRetrieveSubjectResponse, config, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve_subject(self, client: Gradient) -> None:
response = client.databases.schema_registry.config.with_raw_response.retrieve_subject(
@@ -132,7 +132,7 @@ def test_raw_response_retrieve_subject(self, client: Gradient) -> None:
config = response.parse()
assert_matches_type(ConfigRetrieveSubjectResponse, config, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve_subject(self, client: Gradient) -> None:
with client.databases.schema_registry.config.with_streaming_response.retrieve_subject(
@@ -147,7 +147,7 @@ def test_streaming_response_retrieve_subject(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_retrieve_subject(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `database_cluster_uuid` but received ''"):
@@ -162,7 +162,7 @@ def test_path_params_retrieve_subject(self, client: Gradient) -> None:
database_cluster_uuid="9cc10173-e9ea-4176-9dbc-a4cee4c4ff30",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update_subject(self, client: Gradient) -> None:
config = client.databases.schema_registry.config.update_subject(
@@ -172,7 +172,7 @@ def test_method_update_subject(self, client: Gradient) -> None:
)
assert_matches_type(ConfigUpdateSubjectResponse, config, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_update_subject(self, client: Gradient) -> None:
response = client.databases.schema_registry.config.with_raw_response.update_subject(
@@ -186,7 +186,7 @@ def test_raw_response_update_subject(self, client: Gradient) -> None:
config = response.parse()
assert_matches_type(ConfigUpdateSubjectResponse, config, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_update_subject(self, client: Gradient) -> None:
with client.databases.schema_registry.config.with_streaming_response.update_subject(
@@ -202,7 +202,7 @@ def test_streaming_response_update_subject(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_update_subject(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `database_cluster_uuid` but received ''"):
@@ -225,7 +225,7 @@ class TestAsyncConfig:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
config = await async_client.databases.schema_registry.config.retrieve(
@@ -233,7 +233,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(ConfigRetrieveResponse, config, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.databases.schema_registry.config.with_raw_response.retrieve(
@@ -245,7 +245,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
config = await response.parse()
assert_matches_type(ConfigRetrieveResponse, config, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.databases.schema_registry.config.with_streaming_response.retrieve(
@@ -259,7 +259,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `database_cluster_uuid` but received ''"):
@@ -267,7 +267,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update(self, async_client: AsyncGradient) -> None:
config = await async_client.databases.schema_registry.config.update(
@@ -276,7 +276,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(ConfigUpdateResponse, config, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
response = await async_client.databases.schema_registry.config.with_raw_response.update(
@@ -289,7 +289,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
config = await response.parse()
assert_matches_type(ConfigUpdateResponse, config, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_update(self, async_client: AsyncGradient) -> None:
async with async_client.databases.schema_registry.config.with_streaming_response.update(
@@ -304,7 +304,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_update(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `database_cluster_uuid` but received ''"):
@@ -313,7 +313,7 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None:
compatibility_level="BACKWARD",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve_subject(self, async_client: AsyncGradient) -> None:
config = await async_client.databases.schema_registry.config.retrieve_subject(
@@ -322,7 +322,7 @@ async def test_method_retrieve_subject(self, async_client: AsyncGradient) -> Non
)
assert_matches_type(ConfigRetrieveSubjectResponse, config, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve_subject(self, async_client: AsyncGradient) -> None:
response = await async_client.databases.schema_registry.config.with_raw_response.retrieve_subject(
@@ -335,7 +335,7 @@ async def test_raw_response_retrieve_subject(self, async_client: AsyncGradient)
config = await response.parse()
assert_matches_type(ConfigRetrieveSubjectResponse, config, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve_subject(self, async_client: AsyncGradient) -> None:
async with async_client.databases.schema_registry.config.with_streaming_response.retrieve_subject(
@@ -350,7 +350,7 @@ async def test_streaming_response_retrieve_subject(self, async_client: AsyncGrad
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_retrieve_subject(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `database_cluster_uuid` but received ''"):
@@ -365,7 +365,7 @@ async def test_path_params_retrieve_subject(self, async_client: AsyncGradient) -
database_cluster_uuid="9cc10173-e9ea-4176-9dbc-a4cee4c4ff30",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update_subject(self, async_client: AsyncGradient) -> None:
config = await async_client.databases.schema_registry.config.update_subject(
@@ -375,7 +375,7 @@ async def test_method_update_subject(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(ConfigUpdateSubjectResponse, config, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_update_subject(self, async_client: AsyncGradient) -> None:
response = await async_client.databases.schema_registry.config.with_raw_response.update_subject(
@@ -389,7 +389,7 @@ async def test_raw_response_update_subject(self, async_client: AsyncGradient) ->
config = await response.parse()
assert_matches_type(ConfigUpdateSubjectResponse, config, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_update_subject(self, async_client: AsyncGradient) -> None:
async with async_client.databases.schema_registry.config.with_streaming_response.update_subject(
@@ -405,7 +405,7 @@ async def test_streaming_response_update_subject(self, async_client: AsyncGradie
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_update_subject(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `database_cluster_uuid` but received ''"):
diff --git a/tests/api_resources/gpu_droplets/account/test_keys.py b/tests/api_resources/gpu_droplets/account/test_keys.py
index 93817d1e..42702d3a 100644
--- a/tests/api_resources/gpu_droplets/account/test_keys.py
+++ b/tests/api_resources/gpu_droplets/account/test_keys.py
@@ -22,7 +22,7 @@
class TestKeys:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create(self, client: Gradient) -> None:
key = client.gpu_droplets.account.keys.create(
@@ -31,7 +31,7 @@ def test_method_create(self, client: Gradient) -> None:
)
assert_matches_type(KeyCreateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create(self, client: Gradient) -> None:
response = client.gpu_droplets.account.keys.with_raw_response.create(
@@ -44,7 +44,7 @@ def test_raw_response_create(self, client: Gradient) -> None:
key = response.parse()
assert_matches_type(KeyCreateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create(self, client: Gradient) -> None:
with client.gpu_droplets.account.keys.with_streaming_response.create(
@@ -59,7 +59,7 @@ def test_streaming_response_create(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
key = client.gpu_droplets.account.keys.retrieve(
@@ -67,7 +67,7 @@ def test_method_retrieve(self, client: Gradient) -> None:
)
assert_matches_type(KeyRetrieveResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.gpu_droplets.account.keys.with_raw_response.retrieve(
@@ -79,7 +79,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
key = response.parse()
assert_matches_type(KeyRetrieveResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.gpu_droplets.account.keys.with_streaming_response.retrieve(
@@ -93,7 +93,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update(self, client: Gradient) -> None:
key = client.gpu_droplets.account.keys.update(
@@ -101,7 +101,7 @@ def test_method_update(self, client: Gradient) -> None:
)
assert_matches_type(KeyUpdateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update_with_all_params(self, client: Gradient) -> None:
key = client.gpu_droplets.account.keys.update(
@@ -110,7 +110,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(KeyUpdateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_update(self, client: Gradient) -> None:
response = client.gpu_droplets.account.keys.with_raw_response.update(
@@ -122,7 +122,7 @@ def test_raw_response_update(self, client: Gradient) -> None:
key = response.parse()
assert_matches_type(KeyUpdateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_update(self, client: Gradient) -> None:
with client.gpu_droplets.account.keys.with_streaming_response.update(
@@ -136,13 +136,13 @@ def test_streaming_response_update(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
key = client.gpu_droplets.account.keys.list()
assert_matches_type(KeyListResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
key = client.gpu_droplets.account.keys.list(
@@ -151,7 +151,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(KeyListResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.gpu_droplets.account.keys.with_raw_response.list()
@@ -161,7 +161,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
key = response.parse()
assert_matches_type(KeyListResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.gpu_droplets.account.keys.with_streaming_response.list() as response:
@@ -173,7 +173,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete(self, client: Gradient) -> None:
key = client.gpu_droplets.account.keys.delete(
@@ -181,7 +181,7 @@ def test_method_delete(self, client: Gradient) -> None:
)
assert key is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete(self, client: Gradient) -> None:
response = client.gpu_droplets.account.keys.with_raw_response.delete(
@@ -193,7 +193,7 @@ def test_raw_response_delete(self, client: Gradient) -> None:
key = response.parse()
assert key is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete(self, client: Gradient) -> None:
with client.gpu_droplets.account.keys.with_streaming_response.delete(
@@ -213,7 +213,7 @@ class TestAsyncKeys:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create(self, async_client: AsyncGradient) -> None:
key = await async_client.gpu_droplets.account.keys.create(
@@ -222,7 +222,7 @@ async def test_method_create(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(KeyCreateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.account.keys.with_raw_response.create(
@@ -235,7 +235,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
key = await response.parse()
assert_matches_type(KeyCreateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.account.keys.with_streaming_response.create(
@@ -250,7 +250,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
key = await async_client.gpu_droplets.account.keys.retrieve(
@@ -258,7 +258,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(KeyRetrieveResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.account.keys.with_raw_response.retrieve(
@@ -270,7 +270,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
key = await response.parse()
assert_matches_type(KeyRetrieveResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.account.keys.with_streaming_response.retrieve(
@@ -284,7 +284,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update(self, async_client: AsyncGradient) -> None:
key = await async_client.gpu_droplets.account.keys.update(
@@ -292,7 +292,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(KeyUpdateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None:
key = await async_client.gpu_droplets.account.keys.update(
@@ -301,7 +301,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(KeyUpdateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.account.keys.with_raw_response.update(
@@ -313,7 +313,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
key = await response.parse()
assert_matches_type(KeyUpdateResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_update(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.account.keys.with_streaming_response.update(
@@ -327,13 +327,13 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
key = await async_client.gpu_droplets.account.keys.list()
assert_matches_type(KeyListResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
key = await async_client.gpu_droplets.account.keys.list(
@@ -342,7 +342,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(KeyListResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.account.keys.with_raw_response.list()
@@ -352,7 +352,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
key = await response.parse()
assert_matches_type(KeyListResponse, key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.account.keys.with_streaming_response.list() as response:
@@ -364,7 +364,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete(self, async_client: AsyncGradient) -> None:
key = await async_client.gpu_droplets.account.keys.delete(
@@ -372,7 +372,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None:
)
assert key is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.account.keys.with_raw_response.delete(
@@ -384,7 +384,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
key = await response.parse()
assert key is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.account.keys.with_streaming_response.delete(
diff --git a/tests/api_resources/gpu_droplets/firewalls/test_droplets.py b/tests/api_resources/gpu_droplets/firewalls/test_droplets.py
index 693e315d..f75aeaf6 100644
--- a/tests/api_resources/gpu_droplets/firewalls/test_droplets.py
+++ b/tests/api_resources/gpu_droplets/firewalls/test_droplets.py
@@ -15,7 +15,7 @@
class TestDroplets:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_add(self, client: Gradient) -> None:
droplet = client.gpu_droplets.firewalls.droplets.add(
@@ -24,7 +24,7 @@ def test_method_add(self, client: Gradient) -> None:
)
assert droplet is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_add(self, client: Gradient) -> None:
response = client.gpu_droplets.firewalls.droplets.with_raw_response.add(
@@ -37,7 +37,7 @@ def test_raw_response_add(self, client: Gradient) -> None:
droplet = response.parse()
assert droplet is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_add(self, client: Gradient) -> None:
with client.gpu_droplets.firewalls.droplets.with_streaming_response.add(
@@ -52,7 +52,7 @@ def test_streaming_response_add(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_add(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
@@ -61,7 +61,7 @@ def test_path_params_add(self, client: Gradient) -> None:
droplet_ids=[49696269],
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_remove(self, client: Gradient) -> None:
droplet = client.gpu_droplets.firewalls.droplets.remove(
@@ -70,7 +70,7 @@ def test_method_remove(self, client: Gradient) -> None:
)
assert droplet is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_remove(self, client: Gradient) -> None:
response = client.gpu_droplets.firewalls.droplets.with_raw_response.remove(
@@ -83,7 +83,7 @@ def test_raw_response_remove(self, client: Gradient) -> None:
droplet = response.parse()
assert droplet is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_remove(self, client: Gradient) -> None:
with client.gpu_droplets.firewalls.droplets.with_streaming_response.remove(
@@ -98,7 +98,7 @@ def test_streaming_response_remove(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_remove(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
@@ -113,7 +113,7 @@ class TestAsyncDroplets:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_add(self, async_client: AsyncGradient) -> None:
droplet = await async_client.gpu_droplets.firewalls.droplets.add(
@@ -122,7 +122,7 @@ async def test_method_add(self, async_client: AsyncGradient) -> None:
)
assert droplet is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_add(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.firewalls.droplets.with_raw_response.add(
@@ -135,7 +135,7 @@ async def test_raw_response_add(self, async_client: AsyncGradient) -> None:
droplet = await response.parse()
assert droplet is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_add(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.firewalls.droplets.with_streaming_response.add(
@@ -150,7 +150,7 @@ async def test_streaming_response_add(self, async_client: AsyncGradient) -> None
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_add(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
@@ -159,7 +159,7 @@ async def test_path_params_add(self, async_client: AsyncGradient) -> None:
droplet_ids=[49696269],
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_remove(self, async_client: AsyncGradient) -> None:
droplet = await async_client.gpu_droplets.firewalls.droplets.remove(
@@ -168,7 +168,7 @@ async def test_method_remove(self, async_client: AsyncGradient) -> None:
)
assert droplet is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_remove(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.firewalls.droplets.with_raw_response.remove(
@@ -181,7 +181,7 @@ async def test_raw_response_remove(self, async_client: AsyncGradient) -> None:
droplet = await response.parse()
assert droplet is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_remove(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.firewalls.droplets.with_streaming_response.remove(
@@ -196,7 +196,7 @@ async def test_streaming_response_remove(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_remove(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
diff --git a/tests/api_resources/gpu_droplets/firewalls/test_rules.py b/tests/api_resources/gpu_droplets/firewalls/test_rules.py
index 27694390..2c04b390 100644
--- a/tests/api_resources/gpu_droplets/firewalls/test_rules.py
+++ b/tests/api_resources/gpu_droplets/firewalls/test_rules.py
@@ -15,7 +15,7 @@
class TestRules:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_add(self, client: Gradient) -> None:
rule = client.gpu_droplets.firewalls.rules.add(
@@ -23,7 +23,7 @@ def test_method_add(self, client: Gradient) -> None:
)
assert rule is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_add_with_all_params(self, client: Gradient) -> None:
rule = client.gpu_droplets.firewalls.rules.add(
@@ -57,7 +57,7 @@ def test_method_add_with_all_params(self, client: Gradient) -> None:
)
assert rule is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_add(self, client: Gradient) -> None:
response = client.gpu_droplets.firewalls.rules.with_raw_response.add(
@@ -69,7 +69,7 @@ def test_raw_response_add(self, client: Gradient) -> None:
rule = response.parse()
assert rule is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_add(self, client: Gradient) -> None:
with client.gpu_droplets.firewalls.rules.with_streaming_response.add(
@@ -83,7 +83,7 @@ def test_streaming_response_add(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_add(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
@@ -91,7 +91,7 @@ def test_path_params_add(self, client: Gradient) -> None:
firewall_id="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_remove(self, client: Gradient) -> None:
rule = client.gpu_droplets.firewalls.rules.remove(
@@ -99,7 +99,7 @@ def test_method_remove(self, client: Gradient) -> None:
)
assert rule is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_remove_with_all_params(self, client: Gradient) -> None:
rule = client.gpu_droplets.firewalls.rules.remove(
@@ -133,7 +133,7 @@ def test_method_remove_with_all_params(self, client: Gradient) -> None:
)
assert rule is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_remove(self, client: Gradient) -> None:
response = client.gpu_droplets.firewalls.rules.with_raw_response.remove(
@@ -145,7 +145,7 @@ def test_raw_response_remove(self, client: Gradient) -> None:
rule = response.parse()
assert rule is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_remove(self, client: Gradient) -> None:
with client.gpu_droplets.firewalls.rules.with_streaming_response.remove(
@@ -159,7 +159,7 @@ def test_streaming_response_remove(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_remove(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
@@ -173,7 +173,7 @@ class TestAsyncRules:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_add(self, async_client: AsyncGradient) -> None:
rule = await async_client.gpu_droplets.firewalls.rules.add(
@@ -181,7 +181,7 @@ async def test_method_add(self, async_client: AsyncGradient) -> None:
)
assert rule is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_add_with_all_params(self, async_client: AsyncGradient) -> None:
rule = await async_client.gpu_droplets.firewalls.rules.add(
@@ -215,7 +215,7 @@ async def test_method_add_with_all_params(self, async_client: AsyncGradient) ->
)
assert rule is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_add(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.firewalls.rules.with_raw_response.add(
@@ -227,7 +227,7 @@ async def test_raw_response_add(self, async_client: AsyncGradient) -> None:
rule = await response.parse()
assert rule is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_add(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.firewalls.rules.with_streaming_response.add(
@@ -241,7 +241,7 @@ async def test_streaming_response_add(self, async_client: AsyncGradient) -> None
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_add(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
@@ -249,7 +249,7 @@ async def test_path_params_add(self, async_client: AsyncGradient) -> None:
firewall_id="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_remove(self, async_client: AsyncGradient) -> None:
rule = await async_client.gpu_droplets.firewalls.rules.remove(
@@ -257,7 +257,7 @@ async def test_method_remove(self, async_client: AsyncGradient) -> None:
)
assert rule is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_remove_with_all_params(self, async_client: AsyncGradient) -> None:
rule = await async_client.gpu_droplets.firewalls.rules.remove(
@@ -291,7 +291,7 @@ async def test_method_remove_with_all_params(self, async_client: AsyncGradient)
)
assert rule is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_remove(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.firewalls.rules.with_raw_response.remove(
@@ -303,7 +303,7 @@ async def test_raw_response_remove(self, async_client: AsyncGradient) -> None:
rule = await response.parse()
assert rule is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_remove(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.firewalls.rules.with_streaming_response.remove(
@@ -317,7 +317,7 @@ async def test_streaming_response_remove(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_remove(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
diff --git a/tests/api_resources/gpu_droplets/firewalls/test_tags.py b/tests/api_resources/gpu_droplets/firewalls/test_tags.py
index 50c7563b..170c5317 100644
--- a/tests/api_resources/gpu_droplets/firewalls/test_tags.py
+++ b/tests/api_resources/gpu_droplets/firewalls/test_tags.py
@@ -15,7 +15,7 @@
class TestTags:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_add(self, client: Gradient) -> None:
tag = client.gpu_droplets.firewalls.tags.add(
@@ -24,7 +24,7 @@ def test_method_add(self, client: Gradient) -> None:
)
assert tag is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_add(self, client: Gradient) -> None:
response = client.gpu_droplets.firewalls.tags.with_raw_response.add(
@@ -37,7 +37,7 @@ def test_raw_response_add(self, client: Gradient) -> None:
tag = response.parse()
assert tag is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_add(self, client: Gradient) -> None:
with client.gpu_droplets.firewalls.tags.with_streaming_response.add(
@@ -52,7 +52,7 @@ def test_streaming_response_add(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_add(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
@@ -61,7 +61,7 @@ def test_path_params_add(self, client: Gradient) -> None:
tags=["frontend"],
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_remove(self, client: Gradient) -> None:
tag = client.gpu_droplets.firewalls.tags.remove(
@@ -70,7 +70,7 @@ def test_method_remove(self, client: Gradient) -> None:
)
assert tag is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_remove(self, client: Gradient) -> None:
response = client.gpu_droplets.firewalls.tags.with_raw_response.remove(
@@ -83,7 +83,7 @@ def test_raw_response_remove(self, client: Gradient) -> None:
tag = response.parse()
assert tag is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_remove(self, client: Gradient) -> None:
with client.gpu_droplets.firewalls.tags.with_streaming_response.remove(
@@ -98,7 +98,7 @@ def test_streaming_response_remove(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_remove(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
@@ -113,7 +113,7 @@ class TestAsyncTags:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_add(self, async_client: AsyncGradient) -> None:
tag = await async_client.gpu_droplets.firewalls.tags.add(
@@ -122,7 +122,7 @@ async def test_method_add(self, async_client: AsyncGradient) -> None:
)
assert tag is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_add(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.firewalls.tags.with_raw_response.add(
@@ -135,7 +135,7 @@ async def test_raw_response_add(self, async_client: AsyncGradient) -> None:
tag = await response.parse()
assert tag is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_add(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.firewalls.tags.with_streaming_response.add(
@@ -150,7 +150,7 @@ async def test_streaming_response_add(self, async_client: AsyncGradient) -> None
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_add(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
@@ -159,7 +159,7 @@ async def test_path_params_add(self, async_client: AsyncGradient) -> None:
tags=["frontend"],
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_remove(self, async_client: AsyncGradient) -> None:
tag = await async_client.gpu_droplets.firewalls.tags.remove(
@@ -168,7 +168,7 @@ async def test_method_remove(self, async_client: AsyncGradient) -> None:
)
assert tag is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_remove(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.firewalls.tags.with_raw_response.remove(
@@ -181,7 +181,7 @@ async def test_raw_response_remove(self, async_client: AsyncGradient) -> None:
tag = await response.parse()
assert tag is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_remove(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.firewalls.tags.with_streaming_response.remove(
@@ -196,7 +196,7 @@ async def test_streaming_response_remove(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_remove(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
diff --git a/tests/api_resources/gpu_droplets/floating_ips/test_actions.py b/tests/api_resources/gpu_droplets/floating_ips/test_actions.py
index 7f7ab06a..31376bca 100644
--- a/tests/api_resources/gpu_droplets/floating_ips/test_actions.py
+++ b/tests/api_resources/gpu_droplets/floating_ips/test_actions.py
@@ -21,7 +21,7 @@
class TestActions:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_overload_1(self, client: Gradient) -> None:
action = client.gpu_droplets.floating_ips.actions.create(
@@ -30,7 +30,7 @@ def test_method_create_overload_1(self, client: Gradient) -> None:
)
assert_matches_type(ActionCreateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create_overload_1(self, client: Gradient) -> None:
response = client.gpu_droplets.floating_ips.actions.with_raw_response.create(
@@ -43,7 +43,7 @@ def test_raw_response_create_overload_1(self, client: Gradient) -> None:
action = response.parse()
assert_matches_type(ActionCreateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create_overload_1(self, client: Gradient) -> None:
with client.gpu_droplets.floating_ips.actions.with_streaming_response.create(
@@ -58,7 +58,7 @@ def test_streaming_response_create_overload_1(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_create_overload_1(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"):
@@ -67,7 +67,7 @@ def test_path_params_create_overload_1(self, client: Gradient) -> None:
type="assign",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_overload_2(self, client: Gradient) -> None:
action = client.gpu_droplets.floating_ips.actions.create(
@@ -77,7 +77,7 @@ def test_method_create_overload_2(self, client: Gradient) -> None:
)
assert_matches_type(ActionCreateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create_overload_2(self, client: Gradient) -> None:
response = client.gpu_droplets.floating_ips.actions.with_raw_response.create(
@@ -91,7 +91,7 @@ def test_raw_response_create_overload_2(self, client: Gradient) -> None:
action = response.parse()
assert_matches_type(ActionCreateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create_overload_2(self, client: Gradient) -> None:
with client.gpu_droplets.floating_ips.actions.with_streaming_response.create(
@@ -107,7 +107,7 @@ def test_streaming_response_create_overload_2(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_create_overload_2(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"):
@@ -117,7 +117,7 @@ def test_path_params_create_overload_2(self, client: Gradient) -> None:
type="assign",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
action = client.gpu_droplets.floating_ips.actions.retrieve(
@@ -126,7 +126,7 @@ def test_method_retrieve(self, client: Gradient) -> None:
)
assert_matches_type(ActionRetrieveResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.gpu_droplets.floating_ips.actions.with_raw_response.retrieve(
@@ -139,7 +139,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
action = response.parse()
assert_matches_type(ActionRetrieveResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.gpu_droplets.floating_ips.actions.with_streaming_response.retrieve(
@@ -154,7 +154,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_retrieve(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"):
@@ -163,7 +163,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None:
floating_ip="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
action = client.gpu_droplets.floating_ips.actions.list(
@@ -171,7 +171,7 @@ def test_method_list(self, client: Gradient) -> None:
)
assert_matches_type(ActionListResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.gpu_droplets.floating_ips.actions.with_raw_response.list(
@@ -183,7 +183,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
action = response.parse()
assert_matches_type(ActionListResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.gpu_droplets.floating_ips.actions.with_streaming_response.list(
@@ -197,7 +197,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_list(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"):
@@ -211,7 +211,7 @@ class TestAsyncActions:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_overload_1(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.floating_ips.actions.create(
@@ -220,7 +220,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradient) -> No
)
assert_matches_type(ActionCreateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.floating_ips.actions.with_raw_response.create(
@@ -233,7 +233,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradient)
action = await response.parse()
assert_matches_type(ActionCreateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create_overload_1(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.floating_ips.actions.with_streaming_response.create(
@@ -248,7 +248,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_create_overload_1(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"):
@@ -257,7 +257,7 @@ async def test_path_params_create_overload_1(self, async_client: AsyncGradient)
type="assign",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_overload_2(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.floating_ips.actions.create(
@@ -267,7 +267,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradient) -> No
)
assert_matches_type(ActionCreateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.floating_ips.actions.with_raw_response.create(
@@ -281,7 +281,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradient)
action = await response.parse()
assert_matches_type(ActionCreateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create_overload_2(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.floating_ips.actions.with_streaming_response.create(
@@ -297,7 +297,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncGra
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_create_overload_2(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"):
@@ -307,7 +307,7 @@ async def test_path_params_create_overload_2(self, async_client: AsyncGradient)
type="assign",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.floating_ips.actions.retrieve(
@@ -316,7 +316,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(ActionRetrieveResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.floating_ips.actions.with_raw_response.retrieve(
@@ -329,7 +329,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
action = await response.parse()
assert_matches_type(ActionRetrieveResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.floating_ips.actions.with_streaming_response.retrieve(
@@ -344,7 +344,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"):
@@ -353,7 +353,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
floating_ip="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.floating_ips.actions.list(
@@ -361,7 +361,7 @@ async def test_method_list(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(ActionListResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.floating_ips.actions.with_raw_response.list(
@@ -373,7 +373,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
action = await response.parse()
assert_matches_type(ActionListResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.floating_ips.actions.with_streaming_response.list(
@@ -387,7 +387,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_list(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"):
diff --git a/tests/api_resources/gpu_droplets/images/test_actions.py b/tests/api_resources/gpu_droplets/images/test_actions.py
index ad5d4892..7cc7b4d4 100644
--- a/tests/api_resources/gpu_droplets/images/test_actions.py
+++ b/tests/api_resources/gpu_droplets/images/test_actions.py
@@ -18,7 +18,7 @@
class TestActions:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_overload_1(self, client: Gradient) -> None:
action = client.gpu_droplets.images.actions.create(
@@ -27,7 +27,7 @@ def test_method_create_overload_1(self, client: Gradient) -> None:
)
assert_matches_type(Action, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create_overload_1(self, client: Gradient) -> None:
response = client.gpu_droplets.images.actions.with_raw_response.create(
@@ -40,7 +40,7 @@ def test_raw_response_create_overload_1(self, client: Gradient) -> None:
action = response.parse()
assert_matches_type(Action, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create_overload_1(self, client: Gradient) -> None:
with client.gpu_droplets.images.actions.with_streaming_response.create(
@@ -55,7 +55,7 @@ def test_streaming_response_create_overload_1(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_overload_2(self, client: Gradient) -> None:
action = client.gpu_droplets.images.actions.create(
@@ -65,7 +65,7 @@ def test_method_create_overload_2(self, client: Gradient) -> None:
)
assert_matches_type(Action, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create_overload_2(self, client: Gradient) -> None:
response = client.gpu_droplets.images.actions.with_raw_response.create(
@@ -79,7 +79,7 @@ def test_raw_response_create_overload_2(self, client: Gradient) -> None:
action = response.parse()
assert_matches_type(Action, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create_overload_2(self, client: Gradient) -> None:
with client.gpu_droplets.images.actions.with_streaming_response.create(
@@ -95,7 +95,7 @@ def test_streaming_response_create_overload_2(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
action = client.gpu_droplets.images.actions.retrieve(
@@ -104,7 +104,7 @@ def test_method_retrieve(self, client: Gradient) -> None:
)
assert_matches_type(Action, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.gpu_droplets.images.actions.with_raw_response.retrieve(
@@ -117,7 +117,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
action = response.parse()
assert_matches_type(Action, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.gpu_droplets.images.actions.with_streaming_response.retrieve(
@@ -132,7 +132,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
action = client.gpu_droplets.images.actions.list(
@@ -140,7 +140,7 @@ def test_method_list(self, client: Gradient) -> None:
)
assert_matches_type(ActionListResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.gpu_droplets.images.actions.with_raw_response.list(
@@ -152,7 +152,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
action = response.parse()
assert_matches_type(ActionListResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.gpu_droplets.images.actions.with_streaming_response.list(
@@ -172,7 +172,7 @@ class TestAsyncActions:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_overload_1(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.images.actions.create(
@@ -181,7 +181,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradient) -> No
)
assert_matches_type(Action, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.images.actions.with_raw_response.create(
@@ -194,7 +194,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradient)
action = await response.parse()
assert_matches_type(Action, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create_overload_1(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.images.actions.with_streaming_response.create(
@@ -209,7 +209,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_overload_2(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.images.actions.create(
@@ -219,7 +219,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradient) -> No
)
assert_matches_type(Action, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.images.actions.with_raw_response.create(
@@ -233,7 +233,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradient)
action = await response.parse()
assert_matches_type(Action, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create_overload_2(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.images.actions.with_streaming_response.create(
@@ -249,7 +249,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncGra
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.images.actions.retrieve(
@@ -258,7 +258,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(Action, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.images.actions.with_raw_response.retrieve(
@@ -271,7 +271,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
action = await response.parse()
assert_matches_type(Action, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.images.actions.with_streaming_response.retrieve(
@@ -286,7 +286,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.images.actions.list(
@@ -294,7 +294,7 @@ async def test_method_list(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(ActionListResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.images.actions.with_raw_response.list(
@@ -306,7 +306,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
action = await response.parse()
assert_matches_type(ActionListResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.images.actions.with_streaming_response.list(
diff --git a/tests/api_resources/gpu_droplets/load_balancers/test_droplets.py b/tests/api_resources/gpu_droplets/load_balancers/test_droplets.py
index e6eefd23..884032dd 100644
--- a/tests/api_resources/gpu_droplets/load_balancers/test_droplets.py
+++ b/tests/api_resources/gpu_droplets/load_balancers/test_droplets.py
@@ -15,7 +15,7 @@
class TestDroplets:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_add(self, client: Gradient) -> None:
droplet = client.gpu_droplets.load_balancers.droplets.add(
@@ -24,7 +24,7 @@ def test_method_add(self, client: Gradient) -> None:
)
assert droplet is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_add(self, client: Gradient) -> None:
response = client.gpu_droplets.load_balancers.droplets.with_raw_response.add(
@@ -37,7 +37,7 @@ def test_raw_response_add(self, client: Gradient) -> None:
droplet = response.parse()
assert droplet is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_add(self, client: Gradient) -> None:
with client.gpu_droplets.load_balancers.droplets.with_streaming_response.add(
@@ -52,7 +52,7 @@ def test_streaming_response_add(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_add(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
@@ -61,7 +61,7 @@ def test_path_params_add(self, client: Gradient) -> None:
droplet_ids=[3164444, 3164445],
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_remove(self, client: Gradient) -> None:
droplet = client.gpu_droplets.load_balancers.droplets.remove(
@@ -70,7 +70,7 @@ def test_method_remove(self, client: Gradient) -> None:
)
assert droplet is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_remove(self, client: Gradient) -> None:
response = client.gpu_droplets.load_balancers.droplets.with_raw_response.remove(
@@ -83,7 +83,7 @@ def test_raw_response_remove(self, client: Gradient) -> None:
droplet = response.parse()
assert droplet is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_remove(self, client: Gradient) -> None:
with client.gpu_droplets.load_balancers.droplets.with_streaming_response.remove(
@@ -98,7 +98,7 @@ def test_streaming_response_remove(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_remove(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
@@ -113,7 +113,7 @@ class TestAsyncDroplets:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_add(self, async_client: AsyncGradient) -> None:
droplet = await async_client.gpu_droplets.load_balancers.droplets.add(
@@ -122,7 +122,7 @@ async def test_method_add(self, async_client: AsyncGradient) -> None:
)
assert droplet is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_add(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.load_balancers.droplets.with_raw_response.add(
@@ -135,7 +135,7 @@ async def test_raw_response_add(self, async_client: AsyncGradient) -> None:
droplet = await response.parse()
assert droplet is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_add(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.load_balancers.droplets.with_streaming_response.add(
@@ -150,7 +150,7 @@ async def test_streaming_response_add(self, async_client: AsyncGradient) -> None
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_add(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
@@ -159,7 +159,7 @@ async def test_path_params_add(self, async_client: AsyncGradient) -> None:
droplet_ids=[3164444, 3164445],
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_remove(self, async_client: AsyncGradient) -> None:
droplet = await async_client.gpu_droplets.load_balancers.droplets.remove(
@@ -168,7 +168,7 @@ async def test_method_remove(self, async_client: AsyncGradient) -> None:
)
assert droplet is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_remove(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.load_balancers.droplets.with_raw_response.remove(
@@ -181,7 +181,7 @@ async def test_raw_response_remove(self, async_client: AsyncGradient) -> None:
droplet = await response.parse()
assert droplet is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_remove(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.load_balancers.droplets.with_streaming_response.remove(
@@ -196,7 +196,7 @@ async def test_streaming_response_remove(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_remove(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
diff --git a/tests/api_resources/gpu_droplets/load_balancers/test_forwarding_rules.py b/tests/api_resources/gpu_droplets/load_balancers/test_forwarding_rules.py
index a3cc0bd1..43498e13 100644
--- a/tests/api_resources/gpu_droplets/load_balancers/test_forwarding_rules.py
+++ b/tests/api_resources/gpu_droplets/load_balancers/test_forwarding_rules.py
@@ -15,7 +15,7 @@
class TestForwardingRules:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_add(self, client: Gradient) -> None:
forwarding_rule = client.gpu_droplets.load_balancers.forwarding_rules.add(
@@ -31,7 +31,7 @@ def test_method_add(self, client: Gradient) -> None:
)
assert forwarding_rule is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_add(self, client: Gradient) -> None:
response = client.gpu_droplets.load_balancers.forwarding_rules.with_raw_response.add(
@@ -51,7 +51,7 @@ def test_raw_response_add(self, client: Gradient) -> None:
forwarding_rule = response.parse()
assert forwarding_rule is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_add(self, client: Gradient) -> None:
with client.gpu_droplets.load_balancers.forwarding_rules.with_streaming_response.add(
@@ -73,7 +73,7 @@ def test_streaming_response_add(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_add(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
@@ -89,7 +89,7 @@ def test_path_params_add(self, client: Gradient) -> None:
],
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_remove(self, client: Gradient) -> None:
forwarding_rule = client.gpu_droplets.load_balancers.forwarding_rules.remove(
@@ -105,7 +105,7 @@ def test_method_remove(self, client: Gradient) -> None:
)
assert forwarding_rule is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_remove(self, client: Gradient) -> None:
response = client.gpu_droplets.load_balancers.forwarding_rules.with_raw_response.remove(
@@ -125,7 +125,7 @@ def test_raw_response_remove(self, client: Gradient) -> None:
forwarding_rule = response.parse()
assert forwarding_rule is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_remove(self, client: Gradient) -> None:
with client.gpu_droplets.load_balancers.forwarding_rules.with_streaming_response.remove(
@@ -147,7 +147,7 @@ def test_streaming_response_remove(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_remove(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
@@ -169,7 +169,7 @@ class TestAsyncForwardingRules:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_add(self, async_client: AsyncGradient) -> None:
forwarding_rule = await async_client.gpu_droplets.load_balancers.forwarding_rules.add(
@@ -185,7 +185,7 @@ async def test_method_add(self, async_client: AsyncGradient) -> None:
)
assert forwarding_rule is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_add(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.load_balancers.forwarding_rules.with_raw_response.add(
@@ -205,7 +205,7 @@ async def test_raw_response_add(self, async_client: AsyncGradient) -> None:
forwarding_rule = await response.parse()
assert forwarding_rule is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_add(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.load_balancers.forwarding_rules.with_streaming_response.add(
@@ -227,7 +227,7 @@ async def test_streaming_response_add(self, async_client: AsyncGradient) -> None
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_add(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
@@ -243,7 +243,7 @@ async def test_path_params_add(self, async_client: AsyncGradient) -> None:
],
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_remove(self, async_client: AsyncGradient) -> None:
forwarding_rule = await async_client.gpu_droplets.load_balancers.forwarding_rules.remove(
@@ -259,7 +259,7 @@ async def test_method_remove(self, async_client: AsyncGradient) -> None:
)
assert forwarding_rule is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_remove(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.load_balancers.forwarding_rules.with_raw_response.remove(
@@ -279,7 +279,7 @@ async def test_raw_response_remove(self, async_client: AsyncGradient) -> None:
forwarding_rule = await response.parse()
assert forwarding_rule is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_remove(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.load_balancers.forwarding_rules.with_streaming_response.remove(
@@ -301,7 +301,7 @@ async def test_streaming_response_remove(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_remove(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
diff --git a/tests/api_resources/gpu_droplets/test_actions.py b/tests/api_resources/gpu_droplets/test_actions.py
index e514196b..897414b7 100644
--- a/tests/api_resources/gpu_droplets/test_actions.py
+++ b/tests/api_resources/gpu_droplets/test_actions.py
@@ -22,7 +22,7 @@
class TestActions:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
action = client.gpu_droplets.actions.retrieve(
@@ -31,7 +31,7 @@ def test_method_retrieve(self, client: Gradient) -> None:
)
assert_matches_type(ActionRetrieveResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.gpu_droplets.actions.with_raw_response.retrieve(
@@ -44,7 +44,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
action = response.parse()
assert_matches_type(ActionRetrieveResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.gpu_droplets.actions.with_streaming_response.retrieve(
@@ -59,7 +59,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
action = client.gpu_droplets.actions.list(
@@ -67,7 +67,7 @@ def test_method_list(self, client: Gradient) -> None:
)
assert_matches_type(ActionListResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
action = client.gpu_droplets.actions.list(
@@ -77,7 +77,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(ActionListResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.gpu_droplets.actions.with_raw_response.list(
@@ -89,7 +89,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
action = response.parse()
assert_matches_type(ActionListResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.gpu_droplets.actions.with_streaming_response.list(
@@ -103,7 +103,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_bulk_initiate_overload_1(self, client: Gradient) -> None:
action = client.gpu_droplets.actions.bulk_initiate(
@@ -111,7 +111,7 @@ def test_method_bulk_initiate_overload_1(self, client: Gradient) -> None:
)
assert_matches_type(ActionBulkInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_bulk_initiate_with_all_params_overload_1(self, client: Gradient) -> None:
action = client.gpu_droplets.actions.bulk_initiate(
@@ -120,7 +120,7 @@ def test_method_bulk_initiate_with_all_params_overload_1(self, client: Gradient)
)
assert_matches_type(ActionBulkInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_bulk_initiate_overload_1(self, client: Gradient) -> None:
response = client.gpu_droplets.actions.with_raw_response.bulk_initiate(
@@ -132,7 +132,7 @@ def test_raw_response_bulk_initiate_overload_1(self, client: Gradient) -> None:
action = response.parse()
assert_matches_type(ActionBulkInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_bulk_initiate_overload_1(self, client: Gradient) -> None:
with client.gpu_droplets.actions.with_streaming_response.bulk_initiate(
@@ -146,7 +146,7 @@ def test_streaming_response_bulk_initiate_overload_1(self, client: Gradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_bulk_initiate_overload_2(self, client: Gradient) -> None:
action = client.gpu_droplets.actions.bulk_initiate(
@@ -154,7 +154,7 @@ def test_method_bulk_initiate_overload_2(self, client: Gradient) -> None:
)
assert_matches_type(ActionBulkInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_bulk_initiate_with_all_params_overload_2(self, client: Gradient) -> None:
action = client.gpu_droplets.actions.bulk_initiate(
@@ -164,7 +164,7 @@ def test_method_bulk_initiate_with_all_params_overload_2(self, client: Gradient)
)
assert_matches_type(ActionBulkInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_bulk_initiate_overload_2(self, client: Gradient) -> None:
response = client.gpu_droplets.actions.with_raw_response.bulk_initiate(
@@ -176,7 +176,7 @@ def test_raw_response_bulk_initiate_overload_2(self, client: Gradient) -> None:
action = response.parse()
assert_matches_type(ActionBulkInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_bulk_initiate_overload_2(self, client: Gradient) -> None:
with client.gpu_droplets.actions.with_streaming_response.bulk_initiate(
@@ -190,7 +190,7 @@ def test_streaming_response_bulk_initiate_overload_2(self, client: Gradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_overload_1(self, client: Gradient) -> None:
action = client.gpu_droplets.actions.initiate(
@@ -199,7 +199,7 @@ def test_method_initiate_overload_1(self, client: Gradient) -> None:
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_initiate_overload_1(self, client: Gradient) -> None:
response = client.gpu_droplets.actions.with_raw_response.initiate(
@@ -212,7 +212,7 @@ def test_raw_response_initiate_overload_1(self, client: Gradient) -> None:
action = response.parse()
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_initiate_overload_1(self, client: Gradient) -> None:
with client.gpu_droplets.actions.with_streaming_response.initiate(
@@ -227,7 +227,7 @@ def test_streaming_response_initiate_overload_1(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_overload_2(self, client: Gradient) -> None:
action = client.gpu_droplets.actions.initiate(
@@ -236,7 +236,7 @@ def test_method_initiate_overload_2(self, client: Gradient) -> None:
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_with_all_params_overload_2(self, client: Gradient) -> None:
action = client.gpu_droplets.actions.initiate(
@@ -250,7 +250,7 @@ def test_method_initiate_with_all_params_overload_2(self, client: Gradient) -> N
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_initiate_overload_2(self, client: Gradient) -> None:
response = client.gpu_droplets.actions.with_raw_response.initiate(
@@ -263,7 +263,7 @@ def test_raw_response_initiate_overload_2(self, client: Gradient) -> None:
action = response.parse()
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_initiate_overload_2(self, client: Gradient) -> None:
with client.gpu_droplets.actions.with_streaming_response.initiate(
@@ -278,7 +278,7 @@ def test_streaming_response_initiate_overload_2(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_overload_3(self, client: Gradient) -> None:
action = client.gpu_droplets.actions.initiate(
@@ -287,7 +287,7 @@ def test_method_initiate_overload_3(self, client: Gradient) -> None:
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_with_all_params_overload_3(self, client: Gradient) -> None:
action = client.gpu_droplets.actions.initiate(
@@ -301,7 +301,7 @@ def test_method_initiate_with_all_params_overload_3(self, client: Gradient) -> N
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_initiate_overload_3(self, client: Gradient) -> None:
response = client.gpu_droplets.actions.with_raw_response.initiate(
@@ -314,7 +314,7 @@ def test_raw_response_initiate_overload_3(self, client: Gradient) -> None:
action = response.parse()
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_initiate_overload_3(self, client: Gradient) -> None:
with client.gpu_droplets.actions.with_streaming_response.initiate(
@@ -329,7 +329,7 @@ def test_streaming_response_initiate_overload_3(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_overload_4(self, client: Gradient) -> None:
action = client.gpu_droplets.actions.initiate(
@@ -338,7 +338,7 @@ def test_method_initiate_overload_4(self, client: Gradient) -> None:
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_with_all_params_overload_4(self, client: Gradient) -> None:
action = client.gpu_droplets.actions.initiate(
@@ -348,7 +348,7 @@ def test_method_initiate_with_all_params_overload_4(self, client: Gradient) -> N
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_initiate_overload_4(self, client: Gradient) -> None:
response = client.gpu_droplets.actions.with_raw_response.initiate(
@@ -361,7 +361,7 @@ def test_raw_response_initiate_overload_4(self, client: Gradient) -> None:
action = response.parse()
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_initiate_overload_4(self, client: Gradient) -> None:
with client.gpu_droplets.actions.with_streaming_response.initiate(
@@ -376,7 +376,7 @@ def test_streaming_response_initiate_overload_4(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_overload_5(self, client: Gradient) -> None:
action = client.gpu_droplets.actions.initiate(
@@ -385,7 +385,7 @@ def test_method_initiate_overload_5(self, client: Gradient) -> None:
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_with_all_params_overload_5(self, client: Gradient) -> None:
action = client.gpu_droplets.actions.initiate(
@@ -396,7 +396,7 @@ def test_method_initiate_with_all_params_overload_5(self, client: Gradient) -> N
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_initiate_overload_5(self, client: Gradient) -> None:
response = client.gpu_droplets.actions.with_raw_response.initiate(
@@ -409,7 +409,7 @@ def test_raw_response_initiate_overload_5(self, client: Gradient) -> None:
action = response.parse()
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_initiate_overload_5(self, client: Gradient) -> None:
with client.gpu_droplets.actions.with_streaming_response.initiate(
@@ -424,7 +424,7 @@ def test_streaming_response_initiate_overload_5(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_overload_6(self, client: Gradient) -> None:
action = client.gpu_droplets.actions.initiate(
@@ -433,7 +433,7 @@ def test_method_initiate_overload_6(self, client: Gradient) -> None:
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_with_all_params_overload_6(self, client: Gradient) -> None:
action = client.gpu_droplets.actions.initiate(
@@ -443,7 +443,7 @@ def test_method_initiate_with_all_params_overload_6(self, client: Gradient) -> N
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_initiate_overload_6(self, client: Gradient) -> None:
response = client.gpu_droplets.actions.with_raw_response.initiate(
@@ -456,7 +456,7 @@ def test_raw_response_initiate_overload_6(self, client: Gradient) -> None:
action = response.parse()
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_initiate_overload_6(self, client: Gradient) -> None:
with client.gpu_droplets.actions.with_streaming_response.initiate(
@@ -471,7 +471,7 @@ def test_streaming_response_initiate_overload_6(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_overload_7(self, client: Gradient) -> None:
action = client.gpu_droplets.actions.initiate(
@@ -480,7 +480,7 @@ def test_method_initiate_overload_7(self, client: Gradient) -> None:
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_with_all_params_overload_7(self, client: Gradient) -> None:
action = client.gpu_droplets.actions.initiate(
@@ -490,7 +490,7 @@ def test_method_initiate_with_all_params_overload_7(self, client: Gradient) -> N
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_initiate_overload_7(self, client: Gradient) -> None:
response = client.gpu_droplets.actions.with_raw_response.initiate(
@@ -503,7 +503,7 @@ def test_raw_response_initiate_overload_7(self, client: Gradient) -> None:
action = response.parse()
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_initiate_overload_7(self, client: Gradient) -> None:
with client.gpu_droplets.actions.with_streaming_response.initiate(
@@ -518,7 +518,7 @@ def test_streaming_response_initiate_overload_7(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_overload_8(self, client: Gradient) -> None:
action = client.gpu_droplets.actions.initiate(
@@ -527,7 +527,7 @@ def test_method_initiate_overload_8(self, client: Gradient) -> None:
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_with_all_params_overload_8(self, client: Gradient) -> None:
action = client.gpu_droplets.actions.initiate(
@@ -537,7 +537,7 @@ def test_method_initiate_with_all_params_overload_8(self, client: Gradient) -> N
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_initiate_overload_8(self, client: Gradient) -> None:
response = client.gpu_droplets.actions.with_raw_response.initiate(
@@ -550,7 +550,7 @@ def test_raw_response_initiate_overload_8(self, client: Gradient) -> None:
action = response.parse()
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_initiate_overload_8(self, client: Gradient) -> None:
with client.gpu_droplets.actions.with_streaming_response.initiate(
@@ -565,7 +565,7 @@ def test_streaming_response_initiate_overload_8(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_overload_9(self, client: Gradient) -> None:
action = client.gpu_droplets.actions.initiate(
@@ -574,7 +574,7 @@ def test_method_initiate_overload_9(self, client: Gradient) -> None:
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_with_all_params_overload_9(self, client: Gradient) -> None:
action = client.gpu_droplets.actions.initiate(
@@ -584,7 +584,7 @@ def test_method_initiate_with_all_params_overload_9(self, client: Gradient) -> N
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_initiate_overload_9(self, client: Gradient) -> None:
response = client.gpu_droplets.actions.with_raw_response.initiate(
@@ -597,7 +597,7 @@ def test_raw_response_initiate_overload_9(self, client: Gradient) -> None:
action = response.parse()
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_initiate_overload_9(self, client: Gradient) -> None:
with client.gpu_droplets.actions.with_streaming_response.initiate(
@@ -618,7 +618,7 @@ class TestAsyncActions:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.actions.retrieve(
@@ -627,7 +627,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(ActionRetrieveResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.actions.with_raw_response.retrieve(
@@ -640,7 +640,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
action = await response.parse()
assert_matches_type(ActionRetrieveResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.actions.with_streaming_response.retrieve(
@@ -655,7 +655,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.actions.list(
@@ -663,7 +663,7 @@ async def test_method_list(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(ActionListResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.actions.list(
@@ -673,7 +673,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(ActionListResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.actions.with_raw_response.list(
@@ -685,7 +685,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
action = await response.parse()
assert_matches_type(ActionListResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.actions.with_streaming_response.list(
@@ -699,7 +699,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_bulk_initiate_overload_1(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.actions.bulk_initiate(
@@ -707,7 +707,7 @@ async def test_method_bulk_initiate_overload_1(self, async_client: AsyncGradient
)
assert_matches_type(ActionBulkInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_bulk_initiate_with_all_params_overload_1(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.actions.bulk_initiate(
@@ -716,7 +716,7 @@ async def test_method_bulk_initiate_with_all_params_overload_1(self, async_clien
)
assert_matches_type(ActionBulkInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_bulk_initiate_overload_1(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.actions.with_raw_response.bulk_initiate(
@@ -728,7 +728,7 @@ async def test_raw_response_bulk_initiate_overload_1(self, async_client: AsyncGr
action = await response.parse()
assert_matches_type(ActionBulkInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_bulk_initiate_overload_1(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.actions.with_streaming_response.bulk_initiate(
@@ -742,7 +742,7 @@ async def test_streaming_response_bulk_initiate_overload_1(self, async_client: A
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_bulk_initiate_overload_2(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.actions.bulk_initiate(
@@ -750,7 +750,7 @@ async def test_method_bulk_initiate_overload_2(self, async_client: AsyncGradient
)
assert_matches_type(ActionBulkInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_bulk_initiate_with_all_params_overload_2(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.actions.bulk_initiate(
@@ -760,7 +760,7 @@ async def test_method_bulk_initiate_with_all_params_overload_2(self, async_clien
)
assert_matches_type(ActionBulkInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_bulk_initiate_overload_2(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.actions.with_raw_response.bulk_initiate(
@@ -772,7 +772,7 @@ async def test_raw_response_bulk_initiate_overload_2(self, async_client: AsyncGr
action = await response.parse()
assert_matches_type(ActionBulkInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_bulk_initiate_overload_2(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.actions.with_streaming_response.bulk_initiate(
@@ -786,7 +786,7 @@ async def test_streaming_response_bulk_initiate_overload_2(self, async_client: A
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_overload_1(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.actions.initiate(
@@ -795,7 +795,7 @@ async def test_method_initiate_overload_1(self, async_client: AsyncGradient) ->
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_initiate_overload_1(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.actions.with_raw_response.initiate(
@@ -808,7 +808,7 @@ async def test_raw_response_initiate_overload_1(self, async_client: AsyncGradien
action = await response.parse()
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_initiate_overload_1(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.actions.with_streaming_response.initiate(
@@ -823,7 +823,7 @@ async def test_streaming_response_initiate_overload_1(self, async_client: AsyncG
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_overload_2(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.actions.initiate(
@@ -832,7 +832,7 @@ async def test_method_initiate_overload_2(self, async_client: AsyncGradient) ->
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_with_all_params_overload_2(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.actions.initiate(
@@ -846,7 +846,7 @@ async def test_method_initiate_with_all_params_overload_2(self, async_client: As
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_initiate_overload_2(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.actions.with_raw_response.initiate(
@@ -859,7 +859,7 @@ async def test_raw_response_initiate_overload_2(self, async_client: AsyncGradien
action = await response.parse()
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_initiate_overload_2(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.actions.with_streaming_response.initiate(
@@ -874,7 +874,7 @@ async def test_streaming_response_initiate_overload_2(self, async_client: AsyncG
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_overload_3(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.actions.initiate(
@@ -883,7 +883,7 @@ async def test_method_initiate_overload_3(self, async_client: AsyncGradient) ->
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_with_all_params_overload_3(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.actions.initiate(
@@ -897,7 +897,7 @@ async def test_method_initiate_with_all_params_overload_3(self, async_client: As
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_initiate_overload_3(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.actions.with_raw_response.initiate(
@@ -910,7 +910,7 @@ async def test_raw_response_initiate_overload_3(self, async_client: AsyncGradien
action = await response.parse()
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_initiate_overload_3(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.actions.with_streaming_response.initiate(
@@ -925,7 +925,7 @@ async def test_streaming_response_initiate_overload_3(self, async_client: AsyncG
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_overload_4(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.actions.initiate(
@@ -934,7 +934,7 @@ async def test_method_initiate_overload_4(self, async_client: AsyncGradient) ->
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_with_all_params_overload_4(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.actions.initiate(
@@ -944,7 +944,7 @@ async def test_method_initiate_with_all_params_overload_4(self, async_client: As
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_initiate_overload_4(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.actions.with_raw_response.initiate(
@@ -957,7 +957,7 @@ async def test_raw_response_initiate_overload_4(self, async_client: AsyncGradien
action = await response.parse()
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_initiate_overload_4(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.actions.with_streaming_response.initiate(
@@ -972,7 +972,7 @@ async def test_streaming_response_initiate_overload_4(self, async_client: AsyncG
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_overload_5(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.actions.initiate(
@@ -981,7 +981,7 @@ async def test_method_initiate_overload_5(self, async_client: AsyncGradient) ->
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_with_all_params_overload_5(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.actions.initiate(
@@ -992,7 +992,7 @@ async def test_method_initiate_with_all_params_overload_5(self, async_client: As
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_initiate_overload_5(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.actions.with_raw_response.initiate(
@@ -1005,7 +1005,7 @@ async def test_raw_response_initiate_overload_5(self, async_client: AsyncGradien
action = await response.parse()
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_initiate_overload_5(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.actions.with_streaming_response.initiate(
@@ -1020,7 +1020,7 @@ async def test_streaming_response_initiate_overload_5(self, async_client: AsyncG
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_overload_6(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.actions.initiate(
@@ -1029,7 +1029,7 @@ async def test_method_initiate_overload_6(self, async_client: AsyncGradient) ->
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_with_all_params_overload_6(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.actions.initiate(
@@ -1039,7 +1039,7 @@ async def test_method_initiate_with_all_params_overload_6(self, async_client: As
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_initiate_overload_6(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.actions.with_raw_response.initiate(
@@ -1052,7 +1052,7 @@ async def test_raw_response_initiate_overload_6(self, async_client: AsyncGradien
action = await response.parse()
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_initiate_overload_6(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.actions.with_streaming_response.initiate(
@@ -1067,7 +1067,7 @@ async def test_streaming_response_initiate_overload_6(self, async_client: AsyncG
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_overload_7(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.actions.initiate(
@@ -1076,7 +1076,7 @@ async def test_method_initiate_overload_7(self, async_client: AsyncGradient) ->
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_with_all_params_overload_7(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.actions.initiate(
@@ -1086,7 +1086,7 @@ async def test_method_initiate_with_all_params_overload_7(self, async_client: As
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_initiate_overload_7(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.actions.with_raw_response.initiate(
@@ -1099,7 +1099,7 @@ async def test_raw_response_initiate_overload_7(self, async_client: AsyncGradien
action = await response.parse()
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_initiate_overload_7(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.actions.with_streaming_response.initiate(
@@ -1114,7 +1114,7 @@ async def test_streaming_response_initiate_overload_7(self, async_client: AsyncG
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_overload_8(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.actions.initiate(
@@ -1123,7 +1123,7 @@ async def test_method_initiate_overload_8(self, async_client: AsyncGradient) ->
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_with_all_params_overload_8(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.actions.initiate(
@@ -1133,7 +1133,7 @@ async def test_method_initiate_with_all_params_overload_8(self, async_client: As
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_initiate_overload_8(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.actions.with_raw_response.initiate(
@@ -1146,7 +1146,7 @@ async def test_raw_response_initiate_overload_8(self, async_client: AsyncGradien
action = await response.parse()
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_initiate_overload_8(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.actions.with_streaming_response.initiate(
@@ -1161,7 +1161,7 @@ async def test_streaming_response_initiate_overload_8(self, async_client: AsyncG
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_overload_9(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.actions.initiate(
@@ -1170,7 +1170,7 @@ async def test_method_initiate_overload_9(self, async_client: AsyncGradient) ->
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_with_all_params_overload_9(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.actions.initiate(
@@ -1180,7 +1180,7 @@ async def test_method_initiate_with_all_params_overload_9(self, async_client: As
)
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_initiate_overload_9(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.actions.with_raw_response.initiate(
@@ -1193,7 +1193,7 @@ async def test_raw_response_initiate_overload_9(self, async_client: AsyncGradien
action = await response.parse()
assert_matches_type(ActionInitiateResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_initiate_overload_9(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.actions.with_streaming_response.initiate(
diff --git a/tests/api_resources/gpu_droplets/test_autoscale.py b/tests/api_resources/gpu_droplets/test_autoscale.py
index bbb0c2e4..d6322172 100644
--- a/tests/api_resources/gpu_droplets/test_autoscale.py
+++ b/tests/api_resources/gpu_droplets/test_autoscale.py
@@ -24,7 +24,7 @@
class TestAutoscale:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create(self, client: Gradient) -> None:
autoscale = client.gpu_droplets.autoscale.create(
@@ -42,7 +42,7 @@ def test_method_create(self, client: Gradient) -> None:
)
assert_matches_type(AutoscaleCreateResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params(self, client: Gradient) -> None:
autoscale = client.gpu_droplets.autoscale.create(
@@ -70,7 +70,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(AutoscaleCreateResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create(self, client: Gradient) -> None:
response = client.gpu_droplets.autoscale.with_raw_response.create(
@@ -92,7 +92,7 @@ def test_raw_response_create(self, client: Gradient) -> None:
autoscale = response.parse()
assert_matches_type(AutoscaleCreateResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create(self, client: Gradient) -> None:
with client.gpu_droplets.autoscale.with_streaming_response.create(
@@ -116,7 +116,7 @@ def test_streaming_response_create(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
autoscale = client.gpu_droplets.autoscale.retrieve(
@@ -124,7 +124,7 @@ def test_method_retrieve(self, client: Gradient) -> None:
)
assert_matches_type(AutoscaleRetrieveResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.gpu_droplets.autoscale.with_raw_response.retrieve(
@@ -136,7 +136,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
autoscale = response.parse()
assert_matches_type(AutoscaleRetrieveResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.gpu_droplets.autoscale.with_streaming_response.retrieve(
@@ -150,7 +150,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_retrieve(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"):
@@ -158,7 +158,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update(self, client: Gradient) -> None:
autoscale = client.gpu_droplets.autoscale.update(
@@ -174,7 +174,7 @@ def test_method_update(self, client: Gradient) -> None:
)
assert_matches_type(AutoscaleUpdateResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update_with_all_params(self, client: Gradient) -> None:
autoscale = client.gpu_droplets.autoscale.update(
@@ -197,7 +197,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(AutoscaleUpdateResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_update(self, client: Gradient) -> None:
response = client.gpu_droplets.autoscale.with_raw_response.update(
@@ -217,7 +217,7 @@ def test_raw_response_update(self, client: Gradient) -> None:
autoscale = response.parse()
assert_matches_type(AutoscaleUpdateResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_update(self, client: Gradient) -> None:
with client.gpu_droplets.autoscale.with_streaming_response.update(
@@ -239,7 +239,7 @@ def test_streaming_response_update(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_update(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"):
@@ -255,13 +255,13 @@ def test_path_params_update(self, client: Gradient) -> None:
name="my-autoscale-pool",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
autoscale = client.gpu_droplets.autoscale.list()
assert_matches_type(AutoscaleListResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
autoscale = client.gpu_droplets.autoscale.list(
@@ -271,7 +271,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(AutoscaleListResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.gpu_droplets.autoscale.with_raw_response.list()
@@ -281,7 +281,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
autoscale = response.parse()
assert_matches_type(AutoscaleListResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.gpu_droplets.autoscale.with_streaming_response.list() as response:
@@ -293,7 +293,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete(self, client: Gradient) -> None:
autoscale = client.gpu_droplets.autoscale.delete(
@@ -301,7 +301,7 @@ def test_method_delete(self, client: Gradient) -> None:
)
assert autoscale is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete(self, client: Gradient) -> None:
response = client.gpu_droplets.autoscale.with_raw_response.delete(
@@ -313,7 +313,7 @@ def test_raw_response_delete(self, client: Gradient) -> None:
autoscale = response.parse()
assert autoscale is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete(self, client: Gradient) -> None:
with client.gpu_droplets.autoscale.with_streaming_response.delete(
@@ -327,7 +327,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_delete(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"):
@@ -335,7 +335,7 @@ def test_path_params_delete(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete_dangerous(self, client: Gradient) -> None:
autoscale = client.gpu_droplets.autoscale.delete_dangerous(
@@ -344,7 +344,7 @@ def test_method_delete_dangerous(self, client: Gradient) -> None:
)
assert autoscale is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete_dangerous(self, client: Gradient) -> None:
response = client.gpu_droplets.autoscale.with_raw_response.delete_dangerous(
@@ -357,7 +357,7 @@ def test_raw_response_delete_dangerous(self, client: Gradient) -> None:
autoscale = response.parse()
assert autoscale is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete_dangerous(self, client: Gradient) -> None:
with client.gpu_droplets.autoscale.with_streaming_response.delete_dangerous(
@@ -372,7 +372,7 @@ def test_streaming_response_delete_dangerous(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_delete_dangerous(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"):
@@ -381,7 +381,7 @@ def test_path_params_delete_dangerous(self, client: Gradient) -> None:
x_dangerous=True,
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_history(self, client: Gradient) -> None:
autoscale = client.gpu_droplets.autoscale.list_history(
@@ -389,7 +389,7 @@ def test_method_list_history(self, client: Gradient) -> None:
)
assert_matches_type(AutoscaleListHistoryResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_history_with_all_params(self, client: Gradient) -> None:
autoscale = client.gpu_droplets.autoscale.list_history(
@@ -399,7 +399,7 @@ def test_method_list_history_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(AutoscaleListHistoryResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list_history(self, client: Gradient) -> None:
response = client.gpu_droplets.autoscale.with_raw_response.list_history(
@@ -411,7 +411,7 @@ def test_raw_response_list_history(self, client: Gradient) -> None:
autoscale = response.parse()
assert_matches_type(AutoscaleListHistoryResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list_history(self, client: Gradient) -> None:
with client.gpu_droplets.autoscale.with_streaming_response.list_history(
@@ -425,7 +425,7 @@ def test_streaming_response_list_history(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_list_history(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"):
@@ -433,7 +433,7 @@ def test_path_params_list_history(self, client: Gradient) -> None:
autoscale_pool_id="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_members(self, client: Gradient) -> None:
autoscale = client.gpu_droplets.autoscale.list_members(
@@ -441,7 +441,7 @@ def test_method_list_members(self, client: Gradient) -> None:
)
assert_matches_type(AutoscaleListMembersResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_members_with_all_params(self, client: Gradient) -> None:
autoscale = client.gpu_droplets.autoscale.list_members(
@@ -451,7 +451,7 @@ def test_method_list_members_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(AutoscaleListMembersResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list_members(self, client: Gradient) -> None:
response = client.gpu_droplets.autoscale.with_raw_response.list_members(
@@ -463,7 +463,7 @@ def test_raw_response_list_members(self, client: Gradient) -> None:
autoscale = response.parse()
assert_matches_type(AutoscaleListMembersResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list_members(self, client: Gradient) -> None:
with client.gpu_droplets.autoscale.with_streaming_response.list_members(
@@ -477,7 +477,7 @@ def test_streaming_response_list_members(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_list_members(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"):
@@ -491,7 +491,7 @@ class TestAsyncAutoscale:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create(self, async_client: AsyncGradient) -> None:
autoscale = await async_client.gpu_droplets.autoscale.create(
@@ -509,7 +509,7 @@ async def test_method_create(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(AutoscaleCreateResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None:
autoscale = await async_client.gpu_droplets.autoscale.create(
@@ -537,7 +537,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(AutoscaleCreateResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.autoscale.with_raw_response.create(
@@ -559,7 +559,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
autoscale = await response.parse()
assert_matches_type(AutoscaleCreateResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.autoscale.with_streaming_response.create(
@@ -583,7 +583,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
autoscale = await async_client.gpu_droplets.autoscale.retrieve(
@@ -591,7 +591,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(AutoscaleRetrieveResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.autoscale.with_raw_response.retrieve(
@@ -603,7 +603,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
autoscale = await response.parse()
assert_matches_type(AutoscaleRetrieveResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.autoscale.with_streaming_response.retrieve(
@@ -617,7 +617,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"):
@@ -625,7 +625,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update(self, async_client: AsyncGradient) -> None:
autoscale = await async_client.gpu_droplets.autoscale.update(
@@ -641,7 +641,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(AutoscaleUpdateResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None:
autoscale = await async_client.gpu_droplets.autoscale.update(
@@ -664,7 +664,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(AutoscaleUpdateResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.autoscale.with_raw_response.update(
@@ -684,7 +684,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
autoscale = await response.parse()
assert_matches_type(AutoscaleUpdateResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_update(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.autoscale.with_streaming_response.update(
@@ -706,7 +706,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_update(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"):
@@ -722,13 +722,13 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None:
name="my-autoscale-pool",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
autoscale = await async_client.gpu_droplets.autoscale.list()
assert_matches_type(AutoscaleListResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
autoscale = await async_client.gpu_droplets.autoscale.list(
@@ -738,7 +738,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(AutoscaleListResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.autoscale.with_raw_response.list()
@@ -748,7 +748,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
autoscale = await response.parse()
assert_matches_type(AutoscaleListResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.autoscale.with_streaming_response.list() as response:
@@ -760,7 +760,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete(self, async_client: AsyncGradient) -> None:
autoscale = await async_client.gpu_droplets.autoscale.delete(
@@ -768,7 +768,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None:
)
assert autoscale is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.autoscale.with_raw_response.delete(
@@ -780,7 +780,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
autoscale = await response.parse()
assert autoscale is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.autoscale.with_streaming_response.delete(
@@ -794,7 +794,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"):
@@ -802,7 +802,7 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete_dangerous(self, async_client: AsyncGradient) -> None:
autoscale = await async_client.gpu_droplets.autoscale.delete_dangerous(
@@ -811,7 +811,7 @@ async def test_method_delete_dangerous(self, async_client: AsyncGradient) -> Non
)
assert autoscale is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete_dangerous(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.autoscale.with_raw_response.delete_dangerous(
@@ -824,7 +824,7 @@ async def test_raw_response_delete_dangerous(self, async_client: AsyncGradient)
autoscale = await response.parse()
assert autoscale is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete_dangerous(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.autoscale.with_streaming_response.delete_dangerous(
@@ -839,7 +839,7 @@ async def test_streaming_response_delete_dangerous(self, async_client: AsyncGrad
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_delete_dangerous(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"):
@@ -848,7 +848,7 @@ async def test_path_params_delete_dangerous(self, async_client: AsyncGradient) -
x_dangerous=True,
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_history(self, async_client: AsyncGradient) -> None:
autoscale = await async_client.gpu_droplets.autoscale.list_history(
@@ -856,7 +856,7 @@ async def test_method_list_history(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(AutoscaleListHistoryResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_history_with_all_params(self, async_client: AsyncGradient) -> None:
autoscale = await async_client.gpu_droplets.autoscale.list_history(
@@ -866,7 +866,7 @@ async def test_method_list_history_with_all_params(self, async_client: AsyncGrad
)
assert_matches_type(AutoscaleListHistoryResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list_history(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.autoscale.with_raw_response.list_history(
@@ -878,7 +878,7 @@ async def test_raw_response_list_history(self, async_client: AsyncGradient) -> N
autoscale = await response.parse()
assert_matches_type(AutoscaleListHistoryResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list_history(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.autoscale.with_streaming_response.list_history(
@@ -892,7 +892,7 @@ async def test_streaming_response_list_history(self, async_client: AsyncGradient
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_list_history(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"):
@@ -900,7 +900,7 @@ async def test_path_params_list_history(self, async_client: AsyncGradient) -> No
autoscale_pool_id="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_members(self, async_client: AsyncGradient) -> None:
autoscale = await async_client.gpu_droplets.autoscale.list_members(
@@ -908,7 +908,7 @@ async def test_method_list_members(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(AutoscaleListMembersResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_members_with_all_params(self, async_client: AsyncGradient) -> None:
autoscale = await async_client.gpu_droplets.autoscale.list_members(
@@ -918,7 +918,7 @@ async def test_method_list_members_with_all_params(self, async_client: AsyncGrad
)
assert_matches_type(AutoscaleListMembersResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list_members(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.autoscale.with_raw_response.list_members(
@@ -930,7 +930,7 @@ async def test_raw_response_list_members(self, async_client: AsyncGradient) -> N
autoscale = await response.parse()
assert_matches_type(AutoscaleListMembersResponse, autoscale, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list_members(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.autoscale.with_streaming_response.list_members(
@@ -944,7 +944,7 @@ async def test_streaming_response_list_members(self, async_client: AsyncGradient
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_list_members(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"):
diff --git a/tests/api_resources/gpu_droplets/test_backups.py b/tests/api_resources/gpu_droplets/test_backups.py
index c6e854e4..c5e8615f 100644
--- a/tests/api_resources/gpu_droplets/test_backups.py
+++ b/tests/api_resources/gpu_droplets/test_backups.py
@@ -22,7 +22,7 @@
class TestBackups:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
backup = client.gpu_droplets.backups.list(
@@ -30,7 +30,7 @@ def test_method_list(self, client: Gradient) -> None:
)
assert_matches_type(BackupListResponse, backup, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
backup = client.gpu_droplets.backups.list(
@@ -40,7 +40,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(BackupListResponse, backup, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.gpu_droplets.backups.with_raw_response.list(
@@ -52,7 +52,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
backup = response.parse()
assert_matches_type(BackupListResponse, backup, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.gpu_droplets.backups.with_streaming_response.list(
@@ -66,13 +66,13 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_policies(self, client: Gradient) -> None:
backup = client.gpu_droplets.backups.list_policies()
assert_matches_type(BackupListPoliciesResponse, backup, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_policies_with_all_params(self, client: Gradient) -> None:
backup = client.gpu_droplets.backups.list_policies(
@@ -81,7 +81,7 @@ def test_method_list_policies_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(BackupListPoliciesResponse, backup, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list_policies(self, client: Gradient) -> None:
response = client.gpu_droplets.backups.with_raw_response.list_policies()
@@ -91,7 +91,7 @@ def test_raw_response_list_policies(self, client: Gradient) -> None:
backup = response.parse()
assert_matches_type(BackupListPoliciesResponse, backup, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list_policies(self, client: Gradient) -> None:
with client.gpu_droplets.backups.with_streaming_response.list_policies() as response:
@@ -103,13 +103,13 @@ def test_streaming_response_list_policies(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_supported_policies(self, client: Gradient) -> None:
backup = client.gpu_droplets.backups.list_supported_policies()
assert_matches_type(BackupListSupportedPoliciesResponse, backup, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list_supported_policies(self, client: Gradient) -> None:
response = client.gpu_droplets.backups.with_raw_response.list_supported_policies()
@@ -119,7 +119,7 @@ def test_raw_response_list_supported_policies(self, client: Gradient) -> None:
backup = response.parse()
assert_matches_type(BackupListSupportedPoliciesResponse, backup, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list_supported_policies(self, client: Gradient) -> None:
with client.gpu_droplets.backups.with_streaming_response.list_supported_policies() as response:
@@ -131,7 +131,7 @@ def test_streaming_response_list_supported_policies(self, client: Gradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve_policy(self, client: Gradient) -> None:
backup = client.gpu_droplets.backups.retrieve_policy(
@@ -139,7 +139,7 @@ def test_method_retrieve_policy(self, client: Gradient) -> None:
)
assert_matches_type(BackupRetrievePolicyResponse, backup, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve_policy(self, client: Gradient) -> None:
response = client.gpu_droplets.backups.with_raw_response.retrieve_policy(
@@ -151,7 +151,7 @@ def test_raw_response_retrieve_policy(self, client: Gradient) -> None:
backup = response.parse()
assert_matches_type(BackupRetrievePolicyResponse, backup, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve_policy(self, client: Gradient) -> None:
with client.gpu_droplets.backups.with_streaming_response.retrieve_policy(
@@ -171,7 +171,7 @@ class TestAsyncBackups:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
backup = await async_client.gpu_droplets.backups.list(
@@ -179,7 +179,7 @@ async def test_method_list(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(BackupListResponse, backup, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
backup = await async_client.gpu_droplets.backups.list(
@@ -189,7 +189,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(BackupListResponse, backup, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.backups.with_raw_response.list(
@@ -201,7 +201,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
backup = await response.parse()
assert_matches_type(BackupListResponse, backup, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.backups.with_streaming_response.list(
@@ -215,13 +215,13 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_policies(self, async_client: AsyncGradient) -> None:
backup = await async_client.gpu_droplets.backups.list_policies()
assert_matches_type(BackupListPoliciesResponse, backup, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_policies_with_all_params(self, async_client: AsyncGradient) -> None:
backup = await async_client.gpu_droplets.backups.list_policies(
@@ -230,7 +230,7 @@ async def test_method_list_policies_with_all_params(self, async_client: AsyncGra
)
assert_matches_type(BackupListPoliciesResponse, backup, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list_policies(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.backups.with_raw_response.list_policies()
@@ -240,7 +240,7 @@ async def test_raw_response_list_policies(self, async_client: AsyncGradient) ->
backup = await response.parse()
assert_matches_type(BackupListPoliciesResponse, backup, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list_policies(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.backups.with_streaming_response.list_policies() as response:
@@ -252,13 +252,13 @@ async def test_streaming_response_list_policies(self, async_client: AsyncGradien
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_supported_policies(self, async_client: AsyncGradient) -> None:
backup = await async_client.gpu_droplets.backups.list_supported_policies()
assert_matches_type(BackupListSupportedPoliciesResponse, backup, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list_supported_policies(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.backups.with_raw_response.list_supported_policies()
@@ -268,7 +268,7 @@ async def test_raw_response_list_supported_policies(self, async_client: AsyncGra
backup = await response.parse()
assert_matches_type(BackupListSupportedPoliciesResponse, backup, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list_supported_policies(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.backups.with_streaming_response.list_supported_policies() as response:
@@ -280,7 +280,7 @@ async def test_streaming_response_list_supported_policies(self, async_client: As
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve_policy(self, async_client: AsyncGradient) -> None:
backup = await async_client.gpu_droplets.backups.retrieve_policy(
@@ -288,7 +288,7 @@ async def test_method_retrieve_policy(self, async_client: AsyncGradient) -> None
)
assert_matches_type(BackupRetrievePolicyResponse, backup, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve_policy(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.backups.with_raw_response.retrieve_policy(
@@ -300,7 +300,7 @@ async def test_raw_response_retrieve_policy(self, async_client: AsyncGradient) -
backup = await response.parse()
assert_matches_type(BackupRetrievePolicyResponse, backup, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve_policy(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.backups.with_streaming_response.retrieve_policy(
diff --git a/tests/api_resources/gpu_droplets/test_destroy_with_associated_resources.py b/tests/api_resources/gpu_droplets/test_destroy_with_associated_resources.py
index 80f1bd7c..bdaaeab9 100644
--- a/tests/api_resources/gpu_droplets/test_destroy_with_associated_resources.py
+++ b/tests/api_resources/gpu_droplets/test_destroy_with_associated_resources.py
@@ -20,7 +20,7 @@
class TestDestroyWithAssociatedResources:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
destroy_with_associated_resource = client.gpu_droplets.destroy_with_associated_resources.list(
@@ -30,7 +30,7 @@ def test_method_list(self, client: Gradient) -> None:
DestroyWithAssociatedResourceListResponse, destroy_with_associated_resource, path=["response"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.gpu_droplets.destroy_with_associated_resources.with_raw_response.list(
@@ -44,7 +44,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
DestroyWithAssociatedResourceListResponse, destroy_with_associated_resource, path=["response"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.list(
@@ -60,7 +60,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_check_status(self, client: Gradient) -> None:
destroy_with_associated_resource = client.gpu_droplets.destroy_with_associated_resources.check_status(
@@ -70,7 +70,7 @@ def test_method_check_status(self, client: Gradient) -> None:
DestroyWithAssociatedResourceCheckStatusResponse, destroy_with_associated_resource, path=["response"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_check_status(self, client: Gradient) -> None:
response = client.gpu_droplets.destroy_with_associated_resources.with_raw_response.check_status(
@@ -84,7 +84,7 @@ def test_raw_response_check_status(self, client: Gradient) -> None:
DestroyWithAssociatedResourceCheckStatusResponse, destroy_with_associated_resource, path=["response"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_check_status(self, client: Gradient) -> None:
with client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.check_status(
@@ -100,7 +100,7 @@ def test_streaming_response_check_status(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete_dangerous(self, client: Gradient) -> None:
destroy_with_associated_resource = client.gpu_droplets.destroy_with_associated_resources.delete_dangerous(
@@ -109,7 +109,7 @@ def test_method_delete_dangerous(self, client: Gradient) -> None:
)
assert destroy_with_associated_resource is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete_dangerous(self, client: Gradient) -> None:
response = client.gpu_droplets.destroy_with_associated_resources.with_raw_response.delete_dangerous(
@@ -122,7 +122,7 @@ def test_raw_response_delete_dangerous(self, client: Gradient) -> None:
destroy_with_associated_resource = response.parse()
assert destroy_with_associated_resource is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete_dangerous(self, client: Gradient) -> None:
with client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.delete_dangerous(
@@ -137,7 +137,7 @@ def test_streaming_response_delete_dangerous(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete_selective(self, client: Gradient) -> None:
destroy_with_associated_resource = client.gpu_droplets.destroy_with_associated_resources.delete_selective(
@@ -145,7 +145,7 @@ def test_method_delete_selective(self, client: Gradient) -> None:
)
assert destroy_with_associated_resource is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete_selective_with_all_params(self, client: Gradient) -> None:
destroy_with_associated_resource = client.gpu_droplets.destroy_with_associated_resources.delete_selective(
@@ -158,7 +158,7 @@ def test_method_delete_selective_with_all_params(self, client: Gradient) -> None
)
assert destroy_with_associated_resource is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete_selective(self, client: Gradient) -> None:
response = client.gpu_droplets.destroy_with_associated_resources.with_raw_response.delete_selective(
@@ -170,7 +170,7 @@ def test_raw_response_delete_selective(self, client: Gradient) -> None:
destroy_with_associated_resource = response.parse()
assert destroy_with_associated_resource is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete_selective(self, client: Gradient) -> None:
with client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.delete_selective(
@@ -184,7 +184,7 @@ def test_streaming_response_delete_selective(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retry(self, client: Gradient) -> None:
destroy_with_associated_resource = client.gpu_droplets.destroy_with_associated_resources.retry(
@@ -192,7 +192,7 @@ def test_method_retry(self, client: Gradient) -> None:
)
assert destroy_with_associated_resource is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retry(self, client: Gradient) -> None:
response = client.gpu_droplets.destroy_with_associated_resources.with_raw_response.retry(
@@ -204,7 +204,7 @@ def test_raw_response_retry(self, client: Gradient) -> None:
destroy_with_associated_resource = response.parse()
assert destroy_with_associated_resource is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retry(self, client: Gradient) -> None:
with client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.retry(
@@ -224,7 +224,7 @@ class TestAsyncDestroyWithAssociatedResources:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
destroy_with_associated_resource = await async_client.gpu_droplets.destroy_with_associated_resources.list(
@@ -234,7 +234,7 @@ async def test_method_list(self, async_client: AsyncGradient) -> None:
DestroyWithAssociatedResourceListResponse, destroy_with_associated_resource, path=["response"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.destroy_with_associated_resources.with_raw_response.list(
@@ -248,7 +248,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
DestroyWithAssociatedResourceListResponse, destroy_with_associated_resource, path=["response"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.list(
@@ -264,7 +264,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_check_status(self, async_client: AsyncGradient) -> None:
destroy_with_associated_resource = (
@@ -276,7 +276,7 @@ async def test_method_check_status(self, async_client: AsyncGradient) -> None:
DestroyWithAssociatedResourceCheckStatusResponse, destroy_with_associated_resource, path=["response"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_check_status(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.destroy_with_associated_resources.with_raw_response.check_status(
@@ -290,7 +290,7 @@ async def test_raw_response_check_status(self, async_client: AsyncGradient) -> N
DestroyWithAssociatedResourceCheckStatusResponse, destroy_with_associated_resource, path=["response"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_check_status(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.check_status(
@@ -306,7 +306,7 @@ async def test_streaming_response_check_status(self, async_client: AsyncGradient
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete_dangerous(self, async_client: AsyncGradient) -> None:
destroy_with_associated_resource = (
@@ -317,7 +317,7 @@ async def test_method_delete_dangerous(self, async_client: AsyncGradient) -> Non
)
assert destroy_with_associated_resource is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete_dangerous(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.destroy_with_associated_resources.with_raw_response.delete_dangerous(
@@ -330,7 +330,7 @@ async def test_raw_response_delete_dangerous(self, async_client: AsyncGradient)
destroy_with_associated_resource = await response.parse()
assert destroy_with_associated_resource is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete_dangerous(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.delete_dangerous(
@@ -345,7 +345,7 @@ async def test_streaming_response_delete_dangerous(self, async_client: AsyncGrad
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete_selective(self, async_client: AsyncGradient) -> None:
destroy_with_associated_resource = (
@@ -355,7 +355,7 @@ async def test_method_delete_selective(self, async_client: AsyncGradient) -> Non
)
assert destroy_with_associated_resource is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete_selective_with_all_params(self, async_client: AsyncGradient) -> None:
destroy_with_associated_resource = (
@@ -370,7 +370,7 @@ async def test_method_delete_selective_with_all_params(self, async_client: Async
)
assert destroy_with_associated_resource is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete_selective(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.destroy_with_associated_resources.with_raw_response.delete_selective(
@@ -382,7 +382,7 @@ async def test_raw_response_delete_selective(self, async_client: AsyncGradient)
destroy_with_associated_resource = await response.parse()
assert destroy_with_associated_resource is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete_selective(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.delete_selective(
@@ -396,7 +396,7 @@ async def test_streaming_response_delete_selective(self, async_client: AsyncGrad
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retry(self, async_client: AsyncGradient) -> None:
destroy_with_associated_resource = await async_client.gpu_droplets.destroy_with_associated_resources.retry(
@@ -404,7 +404,7 @@ async def test_method_retry(self, async_client: AsyncGradient) -> None:
)
assert destroy_with_associated_resource is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retry(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.destroy_with_associated_resources.with_raw_response.retry(
@@ -416,7 +416,7 @@ async def test_raw_response_retry(self, async_client: AsyncGradient) -> None:
destroy_with_associated_resource = await response.parse()
assert destroy_with_associated_resource is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retry(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.retry(
diff --git a/tests/api_resources/gpu_droplets/test_firewalls.py b/tests/api_resources/gpu_droplets/test_firewalls.py
index 3d8469b3..60c7bbc9 100644
--- a/tests/api_resources/gpu_droplets/test_firewalls.py
+++ b/tests/api_resources/gpu_droplets/test_firewalls.py
@@ -22,13 +22,13 @@
class TestFirewalls:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create(self, client: Gradient) -> None:
firewall = client.gpu_droplets.firewalls.create()
assert_matches_type(FirewallCreateResponse, firewall, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params(self, client: Gradient) -> None:
firewall = client.gpu_droplets.firewalls.create(
@@ -77,7 +77,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(FirewallCreateResponse, firewall, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create(self, client: Gradient) -> None:
response = client.gpu_droplets.firewalls.with_raw_response.create()
@@ -87,7 +87,7 @@ def test_raw_response_create(self, client: Gradient) -> None:
firewall = response.parse()
assert_matches_type(FirewallCreateResponse, firewall, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create(self, client: Gradient) -> None:
with client.gpu_droplets.firewalls.with_streaming_response.create() as response:
@@ -99,7 +99,7 @@ def test_streaming_response_create(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
firewall = client.gpu_droplets.firewalls.retrieve(
@@ -107,7 +107,7 @@ def test_method_retrieve(self, client: Gradient) -> None:
)
assert_matches_type(FirewallRetrieveResponse, firewall, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.gpu_droplets.firewalls.with_raw_response.retrieve(
@@ -119,7 +119,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
firewall = response.parse()
assert_matches_type(FirewallRetrieveResponse, firewall, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.gpu_droplets.firewalls.with_streaming_response.retrieve(
@@ -133,7 +133,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_retrieve(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
@@ -141,7 +141,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update(self, client: Gradient) -> None:
firewall = client.gpu_droplets.firewalls.update(
@@ -150,7 +150,7 @@ def test_method_update(self, client: Gradient) -> None:
)
assert_matches_type(FirewallUpdateResponse, firewall, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update_with_all_params(self, client: Gradient) -> None:
firewall = client.gpu_droplets.firewalls.update(
@@ -200,7 +200,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(FirewallUpdateResponse, firewall, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_update(self, client: Gradient) -> None:
response = client.gpu_droplets.firewalls.with_raw_response.update(
@@ -213,7 +213,7 @@ def test_raw_response_update(self, client: Gradient) -> None:
firewall = response.parse()
assert_matches_type(FirewallUpdateResponse, firewall, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_update(self, client: Gradient) -> None:
with client.gpu_droplets.firewalls.with_streaming_response.update(
@@ -228,7 +228,7 @@ def test_streaming_response_update(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_update(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
@@ -237,13 +237,13 @@ def test_path_params_update(self, client: Gradient) -> None:
firewall={"name": "frontend-firewall"},
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
firewall = client.gpu_droplets.firewalls.list()
assert_matches_type(FirewallListResponse, firewall, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
firewall = client.gpu_droplets.firewalls.list(
@@ -252,7 +252,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(FirewallListResponse, firewall, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.gpu_droplets.firewalls.with_raw_response.list()
@@ -262,7 +262,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
firewall = response.parse()
assert_matches_type(FirewallListResponse, firewall, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.gpu_droplets.firewalls.with_streaming_response.list() as response:
@@ -274,7 +274,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete(self, client: Gradient) -> None:
firewall = client.gpu_droplets.firewalls.delete(
@@ -282,7 +282,7 @@ def test_method_delete(self, client: Gradient) -> None:
)
assert firewall is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete(self, client: Gradient) -> None:
response = client.gpu_droplets.firewalls.with_raw_response.delete(
@@ -294,7 +294,7 @@ def test_raw_response_delete(self, client: Gradient) -> None:
firewall = response.parse()
assert firewall is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete(self, client: Gradient) -> None:
with client.gpu_droplets.firewalls.with_streaming_response.delete(
@@ -308,7 +308,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_delete(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
@@ -322,13 +322,13 @@ class TestAsyncFirewalls:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create(self, async_client: AsyncGradient) -> None:
firewall = await async_client.gpu_droplets.firewalls.create()
assert_matches_type(FirewallCreateResponse, firewall, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None:
firewall = await async_client.gpu_droplets.firewalls.create(
@@ -377,7 +377,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(FirewallCreateResponse, firewall, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.firewalls.with_raw_response.create()
@@ -387,7 +387,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
firewall = await response.parse()
assert_matches_type(FirewallCreateResponse, firewall, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.firewalls.with_streaming_response.create() as response:
@@ -399,7 +399,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
firewall = await async_client.gpu_droplets.firewalls.retrieve(
@@ -407,7 +407,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(FirewallRetrieveResponse, firewall, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.firewalls.with_raw_response.retrieve(
@@ -419,7 +419,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
firewall = await response.parse()
assert_matches_type(FirewallRetrieveResponse, firewall, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.firewalls.with_streaming_response.retrieve(
@@ -433,7 +433,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
@@ -441,7 +441,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update(self, async_client: AsyncGradient) -> None:
firewall = await async_client.gpu_droplets.firewalls.update(
@@ -450,7 +450,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(FirewallUpdateResponse, firewall, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None:
firewall = await async_client.gpu_droplets.firewalls.update(
@@ -500,7 +500,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(FirewallUpdateResponse, firewall, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.firewalls.with_raw_response.update(
@@ -513,7 +513,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
firewall = await response.parse()
assert_matches_type(FirewallUpdateResponse, firewall, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_update(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.firewalls.with_streaming_response.update(
@@ -528,7 +528,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_update(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
@@ -537,13 +537,13 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None:
firewall={"name": "frontend-firewall"},
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
firewall = await async_client.gpu_droplets.firewalls.list()
assert_matches_type(FirewallListResponse, firewall, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
firewall = await async_client.gpu_droplets.firewalls.list(
@@ -552,7 +552,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(FirewallListResponse, firewall, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.firewalls.with_raw_response.list()
@@ -562,7 +562,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
firewall = await response.parse()
assert_matches_type(FirewallListResponse, firewall, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.firewalls.with_streaming_response.list() as response:
@@ -574,7 +574,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete(self, async_client: AsyncGradient) -> None:
firewall = await async_client.gpu_droplets.firewalls.delete(
@@ -582,7 +582,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None:
)
assert firewall is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.firewalls.with_raw_response.delete(
@@ -594,7 +594,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
firewall = await response.parse()
assert firewall is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.firewalls.with_streaming_response.delete(
@@ -608,7 +608,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
diff --git a/tests/api_resources/gpu_droplets/test_floating_ips.py b/tests/api_resources/gpu_droplets/test_floating_ips.py
index 3119bf28..84156532 100644
--- a/tests/api_resources/gpu_droplets/test_floating_ips.py
+++ b/tests/api_resources/gpu_droplets/test_floating_ips.py
@@ -21,7 +21,7 @@
class TestFloatingIPs:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_overload_1(self, client: Gradient) -> None:
floating_ip = client.gpu_droplets.floating_ips.create(
@@ -29,7 +29,7 @@ def test_method_create_overload_1(self, client: Gradient) -> None:
)
assert_matches_type(FloatingIPCreateResponse, floating_ip, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create_overload_1(self, client: Gradient) -> None:
response = client.gpu_droplets.floating_ips.with_raw_response.create(
@@ -41,7 +41,7 @@ def test_raw_response_create_overload_1(self, client: Gradient) -> None:
floating_ip = response.parse()
assert_matches_type(FloatingIPCreateResponse, floating_ip, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create_overload_1(self, client: Gradient) -> None:
with client.gpu_droplets.floating_ips.with_streaming_response.create(
@@ -55,7 +55,7 @@ def test_streaming_response_create_overload_1(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_overload_2(self, client: Gradient) -> None:
floating_ip = client.gpu_droplets.floating_ips.create(
@@ -63,7 +63,7 @@ def test_method_create_overload_2(self, client: Gradient) -> None:
)
assert_matches_type(FloatingIPCreateResponse, floating_ip, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params_overload_2(self, client: Gradient) -> None:
floating_ip = client.gpu_droplets.floating_ips.create(
@@ -72,7 +72,7 @@ def test_method_create_with_all_params_overload_2(self, client: Gradient) -> Non
)
assert_matches_type(FloatingIPCreateResponse, floating_ip, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create_overload_2(self, client: Gradient) -> None:
response = client.gpu_droplets.floating_ips.with_raw_response.create(
@@ -84,7 +84,7 @@ def test_raw_response_create_overload_2(self, client: Gradient) -> None:
floating_ip = response.parse()
assert_matches_type(FloatingIPCreateResponse, floating_ip, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create_overload_2(self, client: Gradient) -> None:
with client.gpu_droplets.floating_ips.with_streaming_response.create(
@@ -98,7 +98,7 @@ def test_streaming_response_create_overload_2(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
floating_ip = client.gpu_droplets.floating_ips.retrieve(
@@ -106,7 +106,7 @@ def test_method_retrieve(self, client: Gradient) -> None:
)
assert_matches_type(FloatingIPRetrieveResponse, floating_ip, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.gpu_droplets.floating_ips.with_raw_response.retrieve(
@@ -118,7 +118,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
floating_ip = response.parse()
assert_matches_type(FloatingIPRetrieveResponse, floating_ip, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.gpu_droplets.floating_ips.with_streaming_response.retrieve(
@@ -132,7 +132,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_retrieve(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"):
@@ -140,13 +140,13 @@ def test_path_params_retrieve(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
floating_ip = client.gpu_droplets.floating_ips.list()
assert_matches_type(FloatingIPListResponse, floating_ip, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
floating_ip = client.gpu_droplets.floating_ips.list(
@@ -155,7 +155,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(FloatingIPListResponse, floating_ip, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.gpu_droplets.floating_ips.with_raw_response.list()
@@ -165,7 +165,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
floating_ip = response.parse()
assert_matches_type(FloatingIPListResponse, floating_ip, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.gpu_droplets.floating_ips.with_streaming_response.list() as response:
@@ -177,7 +177,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete(self, client: Gradient) -> None:
floating_ip = client.gpu_droplets.floating_ips.delete(
@@ -185,7 +185,7 @@ def test_method_delete(self, client: Gradient) -> None:
)
assert floating_ip is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete(self, client: Gradient) -> None:
response = client.gpu_droplets.floating_ips.with_raw_response.delete(
@@ -197,7 +197,7 @@ def test_raw_response_delete(self, client: Gradient) -> None:
floating_ip = response.parse()
assert floating_ip is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete(self, client: Gradient) -> None:
with client.gpu_droplets.floating_ips.with_streaming_response.delete(
@@ -211,7 +211,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_delete(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"):
@@ -225,7 +225,7 @@ class TestAsyncFloatingIPs:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_overload_1(self, async_client: AsyncGradient) -> None:
floating_ip = await async_client.gpu_droplets.floating_ips.create(
@@ -233,7 +233,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradient) -> No
)
assert_matches_type(FloatingIPCreateResponse, floating_ip, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.floating_ips.with_raw_response.create(
@@ -245,7 +245,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradient)
floating_ip = await response.parse()
assert_matches_type(FloatingIPCreateResponse, floating_ip, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create_overload_1(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.floating_ips.with_streaming_response.create(
@@ -259,7 +259,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_overload_2(self, async_client: AsyncGradient) -> None:
floating_ip = await async_client.gpu_droplets.floating_ips.create(
@@ -267,7 +267,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradient) -> No
)
assert_matches_type(FloatingIPCreateResponse, floating_ip, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradient) -> None:
floating_ip = await async_client.gpu_droplets.floating_ips.create(
@@ -276,7 +276,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
)
assert_matches_type(FloatingIPCreateResponse, floating_ip, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.floating_ips.with_raw_response.create(
@@ -288,7 +288,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradient)
floating_ip = await response.parse()
assert_matches_type(FloatingIPCreateResponse, floating_ip, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create_overload_2(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.floating_ips.with_streaming_response.create(
@@ -302,7 +302,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncGra
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
floating_ip = await async_client.gpu_droplets.floating_ips.retrieve(
@@ -310,7 +310,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(FloatingIPRetrieveResponse, floating_ip, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.floating_ips.with_raw_response.retrieve(
@@ -322,7 +322,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
floating_ip = await response.parse()
assert_matches_type(FloatingIPRetrieveResponse, floating_ip, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.floating_ips.with_streaming_response.retrieve(
@@ -336,7 +336,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"):
@@ -344,13 +344,13 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
floating_ip = await async_client.gpu_droplets.floating_ips.list()
assert_matches_type(FloatingIPListResponse, floating_ip, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
floating_ip = await async_client.gpu_droplets.floating_ips.list(
@@ -359,7 +359,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(FloatingIPListResponse, floating_ip, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.floating_ips.with_raw_response.list()
@@ -369,7 +369,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
floating_ip = await response.parse()
assert_matches_type(FloatingIPListResponse, floating_ip, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.floating_ips.with_streaming_response.list() as response:
@@ -381,7 +381,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete(self, async_client: AsyncGradient) -> None:
floating_ip = await async_client.gpu_droplets.floating_ips.delete(
@@ -389,7 +389,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None:
)
assert floating_ip is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.floating_ips.with_raw_response.delete(
@@ -401,7 +401,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
floating_ip = await response.parse()
assert floating_ip is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.floating_ips.with_streaming_response.delete(
@@ -415,7 +415,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"):
diff --git a/tests/api_resources/gpu_droplets/test_images.py b/tests/api_resources/gpu_droplets/test_images.py
index 480f94a5..1bebbe99 100644
--- a/tests/api_resources/gpu_droplets/test_images.py
+++ b/tests/api_resources/gpu_droplets/test_images.py
@@ -22,13 +22,13 @@
class TestImages:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create(self, client: Gradient) -> None:
image = client.gpu_droplets.images.create()
assert_matches_type(ImageCreateResponse, image, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params(self, client: Gradient) -> None:
image = client.gpu_droplets.images.create(
@@ -41,7 +41,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(ImageCreateResponse, image, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create(self, client: Gradient) -> None:
response = client.gpu_droplets.images.with_raw_response.create()
@@ -51,7 +51,7 @@ def test_raw_response_create(self, client: Gradient) -> None:
image = response.parse()
assert_matches_type(ImageCreateResponse, image, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create(self, client: Gradient) -> None:
with client.gpu_droplets.images.with_streaming_response.create() as response:
@@ -63,7 +63,7 @@ def test_streaming_response_create(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
image = client.gpu_droplets.images.retrieve(
@@ -71,7 +71,7 @@ def test_method_retrieve(self, client: Gradient) -> None:
)
assert_matches_type(ImageRetrieveResponse, image, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.gpu_droplets.images.with_raw_response.retrieve(
@@ -83,7 +83,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
image = response.parse()
assert_matches_type(ImageRetrieveResponse, image, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.gpu_droplets.images.with_streaming_response.retrieve(
@@ -97,7 +97,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update(self, client: Gradient) -> None:
image = client.gpu_droplets.images.update(
@@ -105,7 +105,7 @@ def test_method_update(self, client: Gradient) -> None:
)
assert_matches_type(ImageUpdateResponse, image, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update_with_all_params(self, client: Gradient) -> None:
image = client.gpu_droplets.images.update(
@@ -116,7 +116,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(ImageUpdateResponse, image, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_update(self, client: Gradient) -> None:
response = client.gpu_droplets.images.with_raw_response.update(
@@ -128,7 +128,7 @@ def test_raw_response_update(self, client: Gradient) -> None:
image = response.parse()
assert_matches_type(ImageUpdateResponse, image, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_update(self, client: Gradient) -> None:
with client.gpu_droplets.images.with_streaming_response.update(
@@ -142,13 +142,13 @@ def test_streaming_response_update(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
image = client.gpu_droplets.images.list()
assert_matches_type(ImageListResponse, image, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
image = client.gpu_droplets.images.list(
@@ -160,7 +160,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(ImageListResponse, image, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.gpu_droplets.images.with_raw_response.list()
@@ -170,7 +170,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
image = response.parse()
assert_matches_type(ImageListResponse, image, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.gpu_droplets.images.with_streaming_response.list() as response:
@@ -182,7 +182,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete(self, client: Gradient) -> None:
image = client.gpu_droplets.images.delete(
@@ -190,7 +190,7 @@ def test_method_delete(self, client: Gradient) -> None:
)
assert image is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete(self, client: Gradient) -> None:
response = client.gpu_droplets.images.with_raw_response.delete(
@@ -202,7 +202,7 @@ def test_raw_response_delete(self, client: Gradient) -> None:
image = response.parse()
assert image is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete(self, client: Gradient) -> None:
with client.gpu_droplets.images.with_streaming_response.delete(
@@ -222,13 +222,13 @@ class TestAsyncImages:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create(self, async_client: AsyncGradient) -> None:
image = await async_client.gpu_droplets.images.create()
assert_matches_type(ImageCreateResponse, image, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None:
image = await async_client.gpu_droplets.images.create(
@@ -241,7 +241,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(ImageCreateResponse, image, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.images.with_raw_response.create()
@@ -251,7 +251,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
image = await response.parse()
assert_matches_type(ImageCreateResponse, image, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.images.with_streaming_response.create() as response:
@@ -263,7 +263,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
image = await async_client.gpu_droplets.images.retrieve(
@@ -271,7 +271,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(ImageRetrieveResponse, image, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.images.with_raw_response.retrieve(
@@ -283,7 +283,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
image = await response.parse()
assert_matches_type(ImageRetrieveResponse, image, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.images.with_streaming_response.retrieve(
@@ -297,7 +297,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update(self, async_client: AsyncGradient) -> None:
image = await async_client.gpu_droplets.images.update(
@@ -305,7 +305,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(ImageUpdateResponse, image, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None:
image = await async_client.gpu_droplets.images.update(
@@ -316,7 +316,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(ImageUpdateResponse, image, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.images.with_raw_response.update(
@@ -328,7 +328,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
image = await response.parse()
assert_matches_type(ImageUpdateResponse, image, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_update(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.images.with_streaming_response.update(
@@ -342,13 +342,13 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
image = await async_client.gpu_droplets.images.list()
assert_matches_type(ImageListResponse, image, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
image = await async_client.gpu_droplets.images.list(
@@ -360,7 +360,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(ImageListResponse, image, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.images.with_raw_response.list()
@@ -370,7 +370,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
image = await response.parse()
assert_matches_type(ImageListResponse, image, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.images.with_streaming_response.list() as response:
@@ -382,7 +382,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete(self, async_client: AsyncGradient) -> None:
image = await async_client.gpu_droplets.images.delete(
@@ -390,7 +390,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None:
)
assert image is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.images.with_raw_response.delete(
@@ -402,7 +402,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
image = await response.parse()
assert image is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.images.with_streaming_response.delete(
diff --git a/tests/api_resources/gpu_droplets/test_load_balancers.py b/tests/api_resources/gpu_droplets/test_load_balancers.py
index 363520e4..91138402 100644
--- a/tests/api_resources/gpu_droplets/test_load_balancers.py
+++ b/tests/api_resources/gpu_droplets/test_load_balancers.py
@@ -22,7 +22,7 @@
class TestLoadBalancers:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_overload_1(self, client: Gradient) -> None:
load_balancer = client.gpu_droplets.load_balancers.create(
@@ -37,7 +37,7 @@ def test_method_create_overload_1(self, client: Gradient) -> None:
)
assert_matches_type(LoadBalancerCreateResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params_overload_1(self, client: Gradient) -> None:
load_balancer = client.gpu_droplets.load_balancers.create(
@@ -108,7 +108,7 @@ def test_method_create_with_all_params_overload_1(self, client: Gradient) -> Non
)
assert_matches_type(LoadBalancerCreateResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create_overload_1(self, client: Gradient) -> None:
response = client.gpu_droplets.load_balancers.with_raw_response.create(
@@ -127,7 +127,7 @@ def test_raw_response_create_overload_1(self, client: Gradient) -> None:
load_balancer = response.parse()
assert_matches_type(LoadBalancerCreateResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create_overload_1(self, client: Gradient) -> None:
with client.gpu_droplets.load_balancers.with_streaming_response.create(
@@ -148,7 +148,7 @@ def test_streaming_response_create_overload_1(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_overload_2(self, client: Gradient) -> None:
load_balancer = client.gpu_droplets.load_balancers.create(
@@ -163,7 +163,7 @@ def test_method_create_overload_2(self, client: Gradient) -> None:
)
assert_matches_type(LoadBalancerCreateResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params_overload_2(self, client: Gradient) -> None:
load_balancer = client.gpu_droplets.load_balancers.create(
@@ -234,7 +234,7 @@ def test_method_create_with_all_params_overload_2(self, client: Gradient) -> Non
)
assert_matches_type(LoadBalancerCreateResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create_overload_2(self, client: Gradient) -> None:
response = client.gpu_droplets.load_balancers.with_raw_response.create(
@@ -253,7 +253,7 @@ def test_raw_response_create_overload_2(self, client: Gradient) -> None:
load_balancer = response.parse()
assert_matches_type(LoadBalancerCreateResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create_overload_2(self, client: Gradient) -> None:
with client.gpu_droplets.load_balancers.with_streaming_response.create(
@@ -274,7 +274,7 @@ def test_streaming_response_create_overload_2(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
load_balancer = client.gpu_droplets.load_balancers.retrieve(
@@ -282,7 +282,7 @@ def test_method_retrieve(self, client: Gradient) -> None:
)
assert_matches_type(LoadBalancerRetrieveResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.gpu_droplets.load_balancers.with_raw_response.retrieve(
@@ -294,7 +294,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
load_balancer = response.parse()
assert_matches_type(LoadBalancerRetrieveResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.gpu_droplets.load_balancers.with_streaming_response.retrieve(
@@ -308,7 +308,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_retrieve(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
@@ -316,7 +316,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update_overload_1(self, client: Gradient) -> None:
load_balancer = client.gpu_droplets.load_balancers.update(
@@ -332,7 +332,7 @@ def test_method_update_overload_1(self, client: Gradient) -> None:
)
assert_matches_type(LoadBalancerUpdateResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update_with_all_params_overload_1(self, client: Gradient) -> None:
load_balancer = client.gpu_droplets.load_balancers.update(
@@ -404,7 +404,7 @@ def test_method_update_with_all_params_overload_1(self, client: Gradient) -> Non
)
assert_matches_type(LoadBalancerUpdateResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_update_overload_1(self, client: Gradient) -> None:
response = client.gpu_droplets.load_balancers.with_raw_response.update(
@@ -424,7 +424,7 @@ def test_raw_response_update_overload_1(self, client: Gradient) -> None:
load_balancer = response.parse()
assert_matches_type(LoadBalancerUpdateResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_update_overload_1(self, client: Gradient) -> None:
with client.gpu_droplets.load_balancers.with_streaming_response.update(
@@ -446,7 +446,7 @@ def test_streaming_response_update_overload_1(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_update_overload_1(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
@@ -462,7 +462,7 @@ def test_path_params_update_overload_1(self, client: Gradient) -> None:
],
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update_overload_2(self, client: Gradient) -> None:
load_balancer = client.gpu_droplets.load_balancers.update(
@@ -478,7 +478,7 @@ def test_method_update_overload_2(self, client: Gradient) -> None:
)
assert_matches_type(LoadBalancerUpdateResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update_with_all_params_overload_2(self, client: Gradient) -> None:
load_balancer = client.gpu_droplets.load_balancers.update(
@@ -550,7 +550,7 @@ def test_method_update_with_all_params_overload_2(self, client: Gradient) -> Non
)
assert_matches_type(LoadBalancerUpdateResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_update_overload_2(self, client: Gradient) -> None:
response = client.gpu_droplets.load_balancers.with_raw_response.update(
@@ -570,7 +570,7 @@ def test_raw_response_update_overload_2(self, client: Gradient) -> None:
load_balancer = response.parse()
assert_matches_type(LoadBalancerUpdateResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_update_overload_2(self, client: Gradient) -> None:
with client.gpu_droplets.load_balancers.with_streaming_response.update(
@@ -592,7 +592,7 @@ def test_streaming_response_update_overload_2(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_update_overload_2(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
@@ -608,13 +608,13 @@ def test_path_params_update_overload_2(self, client: Gradient) -> None:
],
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
load_balancer = client.gpu_droplets.load_balancers.list()
assert_matches_type(LoadBalancerListResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
load_balancer = client.gpu_droplets.load_balancers.list(
@@ -623,7 +623,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(LoadBalancerListResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.gpu_droplets.load_balancers.with_raw_response.list()
@@ -633,7 +633,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
load_balancer = response.parse()
assert_matches_type(LoadBalancerListResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.gpu_droplets.load_balancers.with_streaming_response.list() as response:
@@ -645,7 +645,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete(self, client: Gradient) -> None:
load_balancer = client.gpu_droplets.load_balancers.delete(
@@ -653,7 +653,7 @@ def test_method_delete(self, client: Gradient) -> None:
)
assert load_balancer is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete(self, client: Gradient) -> None:
response = client.gpu_droplets.load_balancers.with_raw_response.delete(
@@ -665,7 +665,7 @@ def test_raw_response_delete(self, client: Gradient) -> None:
load_balancer = response.parse()
assert load_balancer is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete(self, client: Gradient) -> None:
with client.gpu_droplets.load_balancers.with_streaming_response.delete(
@@ -679,7 +679,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_delete(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
@@ -687,7 +687,7 @@ def test_path_params_delete(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete_cache(self, client: Gradient) -> None:
load_balancer = client.gpu_droplets.load_balancers.delete_cache(
@@ -695,7 +695,7 @@ def test_method_delete_cache(self, client: Gradient) -> None:
)
assert load_balancer is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete_cache(self, client: Gradient) -> None:
response = client.gpu_droplets.load_balancers.with_raw_response.delete_cache(
@@ -707,7 +707,7 @@ def test_raw_response_delete_cache(self, client: Gradient) -> None:
load_balancer = response.parse()
assert load_balancer is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete_cache(self, client: Gradient) -> None:
with client.gpu_droplets.load_balancers.with_streaming_response.delete_cache(
@@ -721,7 +721,7 @@ def test_streaming_response_delete_cache(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_delete_cache(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
@@ -735,7 +735,7 @@ class TestAsyncLoadBalancers:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_overload_1(self, async_client: AsyncGradient) -> None:
load_balancer = await async_client.gpu_droplets.load_balancers.create(
@@ -750,7 +750,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradient) -> No
)
assert_matches_type(LoadBalancerCreateResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params_overload_1(self, async_client: AsyncGradient) -> None:
load_balancer = await async_client.gpu_droplets.load_balancers.create(
@@ -821,7 +821,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn
)
assert_matches_type(LoadBalancerCreateResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.load_balancers.with_raw_response.create(
@@ -840,7 +840,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradient)
load_balancer = await response.parse()
assert_matches_type(LoadBalancerCreateResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create_overload_1(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.load_balancers.with_streaming_response.create(
@@ -861,7 +861,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_overload_2(self, async_client: AsyncGradient) -> None:
load_balancer = await async_client.gpu_droplets.load_balancers.create(
@@ -876,7 +876,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradient) -> No
)
assert_matches_type(LoadBalancerCreateResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradient) -> None:
load_balancer = await async_client.gpu_droplets.load_balancers.create(
@@ -947,7 +947,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
)
assert_matches_type(LoadBalancerCreateResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.load_balancers.with_raw_response.create(
@@ -966,7 +966,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradient)
load_balancer = await response.parse()
assert_matches_type(LoadBalancerCreateResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create_overload_2(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.load_balancers.with_streaming_response.create(
@@ -987,7 +987,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncGra
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
load_balancer = await async_client.gpu_droplets.load_balancers.retrieve(
@@ -995,7 +995,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(LoadBalancerRetrieveResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.load_balancers.with_raw_response.retrieve(
@@ -1007,7 +1007,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
load_balancer = await response.parse()
assert_matches_type(LoadBalancerRetrieveResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.load_balancers.with_streaming_response.retrieve(
@@ -1021,7 +1021,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
@@ -1029,7 +1029,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update_overload_1(self, async_client: AsyncGradient) -> None:
load_balancer = await async_client.gpu_droplets.load_balancers.update(
@@ -1045,7 +1045,7 @@ async def test_method_update_overload_1(self, async_client: AsyncGradient) -> No
)
assert_matches_type(LoadBalancerUpdateResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update_with_all_params_overload_1(self, async_client: AsyncGradient) -> None:
load_balancer = await async_client.gpu_droplets.load_balancers.update(
@@ -1117,7 +1117,7 @@ async def test_method_update_with_all_params_overload_1(self, async_client: Asyn
)
assert_matches_type(LoadBalancerUpdateResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_update_overload_1(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.load_balancers.with_raw_response.update(
@@ -1137,7 +1137,7 @@ async def test_raw_response_update_overload_1(self, async_client: AsyncGradient)
load_balancer = await response.parse()
assert_matches_type(LoadBalancerUpdateResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_update_overload_1(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.load_balancers.with_streaming_response.update(
@@ -1159,7 +1159,7 @@ async def test_streaming_response_update_overload_1(self, async_client: AsyncGra
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_update_overload_1(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
@@ -1175,7 +1175,7 @@ async def test_path_params_update_overload_1(self, async_client: AsyncGradient)
],
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update_overload_2(self, async_client: AsyncGradient) -> None:
load_balancer = await async_client.gpu_droplets.load_balancers.update(
@@ -1191,7 +1191,7 @@ async def test_method_update_overload_2(self, async_client: AsyncGradient) -> No
)
assert_matches_type(LoadBalancerUpdateResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update_with_all_params_overload_2(self, async_client: AsyncGradient) -> None:
load_balancer = await async_client.gpu_droplets.load_balancers.update(
@@ -1263,7 +1263,7 @@ async def test_method_update_with_all_params_overload_2(self, async_client: Asyn
)
assert_matches_type(LoadBalancerUpdateResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_update_overload_2(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.load_balancers.with_raw_response.update(
@@ -1283,7 +1283,7 @@ async def test_raw_response_update_overload_2(self, async_client: AsyncGradient)
load_balancer = await response.parse()
assert_matches_type(LoadBalancerUpdateResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_update_overload_2(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.load_balancers.with_streaming_response.update(
@@ -1305,7 +1305,7 @@ async def test_streaming_response_update_overload_2(self, async_client: AsyncGra
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_update_overload_2(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
@@ -1321,13 +1321,13 @@ async def test_path_params_update_overload_2(self, async_client: AsyncGradient)
],
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
load_balancer = await async_client.gpu_droplets.load_balancers.list()
assert_matches_type(LoadBalancerListResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
load_balancer = await async_client.gpu_droplets.load_balancers.list(
@@ -1336,7 +1336,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(LoadBalancerListResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.load_balancers.with_raw_response.list()
@@ -1346,7 +1346,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
load_balancer = await response.parse()
assert_matches_type(LoadBalancerListResponse, load_balancer, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.load_balancers.with_streaming_response.list() as response:
@@ -1358,7 +1358,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete(self, async_client: AsyncGradient) -> None:
load_balancer = await async_client.gpu_droplets.load_balancers.delete(
@@ -1366,7 +1366,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None:
)
assert load_balancer is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.load_balancers.with_raw_response.delete(
@@ -1378,7 +1378,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
load_balancer = await response.parse()
assert load_balancer is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.load_balancers.with_streaming_response.delete(
@@ -1392,7 +1392,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
@@ -1400,7 +1400,7 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete_cache(self, async_client: AsyncGradient) -> None:
load_balancer = await async_client.gpu_droplets.load_balancers.delete_cache(
@@ -1408,7 +1408,7 @@ async def test_method_delete_cache(self, async_client: AsyncGradient) -> None:
)
assert load_balancer is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete_cache(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.load_balancers.with_raw_response.delete_cache(
@@ -1420,7 +1420,7 @@ async def test_raw_response_delete_cache(self, async_client: AsyncGradient) -> N
load_balancer = await response.parse()
assert load_balancer is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete_cache(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.load_balancers.with_streaming_response.delete_cache(
@@ -1434,7 +1434,7 @@ async def test_streaming_response_delete_cache(self, async_client: AsyncGradient
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_delete_cache(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
diff --git a/tests/api_resources/gpu_droplets/test_sizes.py b/tests/api_resources/gpu_droplets/test_sizes.py
index 7fc4fe80..308694ac 100644
--- a/tests/api_resources/gpu_droplets/test_sizes.py
+++ b/tests/api_resources/gpu_droplets/test_sizes.py
@@ -17,13 +17,13 @@
class TestSizes:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
size = client.gpu_droplets.sizes.list()
assert_matches_type(SizeListResponse, size, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
size = client.gpu_droplets.sizes.list(
@@ -32,7 +32,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(SizeListResponse, size, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.gpu_droplets.sizes.with_raw_response.list()
@@ -42,7 +42,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
size = response.parse()
assert_matches_type(SizeListResponse, size, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.gpu_droplets.sizes.with_streaming_response.list() as response:
@@ -60,13 +60,13 @@ class TestAsyncSizes:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
size = await async_client.gpu_droplets.sizes.list()
assert_matches_type(SizeListResponse, size, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
size = await async_client.gpu_droplets.sizes.list(
@@ -75,7 +75,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(SizeListResponse, size, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.sizes.with_raw_response.list()
@@ -85,7 +85,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
size = await response.parse()
assert_matches_type(SizeListResponse, size, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.sizes.with_streaming_response.list() as response:
diff --git a/tests/api_resources/gpu_droplets/test_snapshots.py b/tests/api_resources/gpu_droplets/test_snapshots.py
index 5f8da45a..ca4d146f 100644
--- a/tests/api_resources/gpu_droplets/test_snapshots.py
+++ b/tests/api_resources/gpu_droplets/test_snapshots.py
@@ -17,7 +17,7 @@
class TestSnapshots:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
snapshot = client.gpu_droplets.snapshots.retrieve(
@@ -25,7 +25,7 @@ def test_method_retrieve(self, client: Gradient) -> None:
)
assert_matches_type(SnapshotRetrieveResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.gpu_droplets.snapshots.with_raw_response.retrieve(
@@ -37,7 +37,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
snapshot = response.parse()
assert_matches_type(SnapshotRetrieveResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.gpu_droplets.snapshots.with_streaming_response.retrieve(
@@ -51,13 +51,13 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
snapshot = client.gpu_droplets.snapshots.list()
assert_matches_type(SnapshotListResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
snapshot = client.gpu_droplets.snapshots.list(
@@ -67,7 +67,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(SnapshotListResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.gpu_droplets.snapshots.with_raw_response.list()
@@ -77,7 +77,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
snapshot = response.parse()
assert_matches_type(SnapshotListResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.gpu_droplets.snapshots.with_streaming_response.list() as response:
@@ -89,7 +89,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete(self, client: Gradient) -> None:
snapshot = client.gpu_droplets.snapshots.delete(
@@ -97,7 +97,7 @@ def test_method_delete(self, client: Gradient) -> None:
)
assert snapshot is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete(self, client: Gradient) -> None:
response = client.gpu_droplets.snapshots.with_raw_response.delete(
@@ -109,7 +109,7 @@ def test_raw_response_delete(self, client: Gradient) -> None:
snapshot = response.parse()
assert snapshot is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete(self, client: Gradient) -> None:
with client.gpu_droplets.snapshots.with_streaming_response.delete(
@@ -129,7 +129,7 @@ class TestAsyncSnapshots:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
snapshot = await async_client.gpu_droplets.snapshots.retrieve(
@@ -137,7 +137,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(SnapshotRetrieveResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.snapshots.with_raw_response.retrieve(
@@ -149,7 +149,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
snapshot = await response.parse()
assert_matches_type(SnapshotRetrieveResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.snapshots.with_streaming_response.retrieve(
@@ -163,13 +163,13 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
snapshot = await async_client.gpu_droplets.snapshots.list()
assert_matches_type(SnapshotListResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
snapshot = await async_client.gpu_droplets.snapshots.list(
@@ -179,7 +179,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(SnapshotListResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.snapshots.with_raw_response.list()
@@ -189,7 +189,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
snapshot = await response.parse()
assert_matches_type(SnapshotListResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.snapshots.with_streaming_response.list() as response:
@@ -201,7 +201,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete(self, async_client: AsyncGradient) -> None:
snapshot = await async_client.gpu_droplets.snapshots.delete(
@@ -209,7 +209,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None:
)
assert snapshot is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.snapshots.with_raw_response.delete(
@@ -221,7 +221,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
snapshot = await response.parse()
assert snapshot is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.snapshots.with_streaming_response.delete(
diff --git a/tests/api_resources/gpu_droplets/test_volumes.py b/tests/api_resources/gpu_droplets/test_volumes.py
index 8243625d..c0d83f63 100644
--- a/tests/api_resources/gpu_droplets/test_volumes.py
+++ b/tests/api_resources/gpu_droplets/test_volumes.py
@@ -21,7 +21,7 @@
class TestVolumes:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_overload_1(self, client: Gradient) -> None:
volume = client.gpu_droplets.volumes.create(
@@ -31,7 +31,7 @@ def test_method_create_overload_1(self, client: Gradient) -> None:
)
assert_matches_type(VolumeCreateResponse, volume, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params_overload_1(self, client: Gradient) -> None:
volume = client.gpu_droplets.volumes.create(
@@ -46,7 +46,7 @@ def test_method_create_with_all_params_overload_1(self, client: Gradient) -> Non
)
assert_matches_type(VolumeCreateResponse, volume, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create_overload_1(self, client: Gradient) -> None:
response = client.gpu_droplets.volumes.with_raw_response.create(
@@ -60,7 +60,7 @@ def test_raw_response_create_overload_1(self, client: Gradient) -> None:
volume = response.parse()
assert_matches_type(VolumeCreateResponse, volume, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create_overload_1(self, client: Gradient) -> None:
with client.gpu_droplets.volumes.with_streaming_response.create(
@@ -76,7 +76,7 @@ def test_streaming_response_create_overload_1(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_overload_2(self, client: Gradient) -> None:
volume = client.gpu_droplets.volumes.create(
@@ -86,7 +86,7 @@ def test_method_create_overload_2(self, client: Gradient) -> None:
)
assert_matches_type(VolumeCreateResponse, volume, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params_overload_2(self, client: Gradient) -> None:
volume = client.gpu_droplets.volumes.create(
@@ -101,7 +101,7 @@ def test_method_create_with_all_params_overload_2(self, client: Gradient) -> Non
)
assert_matches_type(VolumeCreateResponse, volume, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create_overload_2(self, client: Gradient) -> None:
response = client.gpu_droplets.volumes.with_raw_response.create(
@@ -115,7 +115,7 @@ def test_raw_response_create_overload_2(self, client: Gradient) -> None:
volume = response.parse()
assert_matches_type(VolumeCreateResponse, volume, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create_overload_2(self, client: Gradient) -> None:
with client.gpu_droplets.volumes.with_streaming_response.create(
@@ -131,7 +131,7 @@ def test_streaming_response_create_overload_2(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
volume = client.gpu_droplets.volumes.retrieve(
@@ -139,7 +139,7 @@ def test_method_retrieve(self, client: Gradient) -> None:
)
assert_matches_type(VolumeRetrieveResponse, volume, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.gpu_droplets.volumes.with_raw_response.retrieve(
@@ -151,7 +151,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
volume = response.parse()
assert_matches_type(VolumeRetrieveResponse, volume, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.gpu_droplets.volumes.with_streaming_response.retrieve(
@@ -165,7 +165,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_retrieve(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
@@ -173,13 +173,13 @@ def test_path_params_retrieve(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
volume = client.gpu_droplets.volumes.list()
assert_matches_type(VolumeListResponse, volume, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
volume = client.gpu_droplets.volumes.list(
@@ -190,7 +190,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(VolumeListResponse, volume, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.gpu_droplets.volumes.with_raw_response.list()
@@ -200,7 +200,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
volume = response.parse()
assert_matches_type(VolumeListResponse, volume, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.gpu_droplets.volumes.with_streaming_response.list() as response:
@@ -212,7 +212,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete(self, client: Gradient) -> None:
volume = client.gpu_droplets.volumes.delete(
@@ -220,7 +220,7 @@ def test_method_delete(self, client: Gradient) -> None:
)
assert volume is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete(self, client: Gradient) -> None:
response = client.gpu_droplets.volumes.with_raw_response.delete(
@@ -232,7 +232,7 @@ def test_raw_response_delete(self, client: Gradient) -> None:
volume = response.parse()
assert volume is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete(self, client: Gradient) -> None:
with client.gpu_droplets.volumes.with_streaming_response.delete(
@@ -246,7 +246,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_delete(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
@@ -254,13 +254,13 @@ def test_path_params_delete(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete_by_name(self, client: Gradient) -> None:
volume = client.gpu_droplets.volumes.delete_by_name()
assert volume is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete_by_name_with_all_params(self, client: Gradient) -> None:
volume = client.gpu_droplets.volumes.delete_by_name(
@@ -269,7 +269,7 @@ def test_method_delete_by_name_with_all_params(self, client: Gradient) -> None:
)
assert volume is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete_by_name(self, client: Gradient) -> None:
response = client.gpu_droplets.volumes.with_raw_response.delete_by_name()
@@ -279,7 +279,7 @@ def test_raw_response_delete_by_name(self, client: Gradient) -> None:
volume = response.parse()
assert volume is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete_by_name(self, client: Gradient) -> None:
with client.gpu_droplets.volumes.with_streaming_response.delete_by_name() as response:
@@ -297,7 +297,7 @@ class TestAsyncVolumes:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_overload_1(self, async_client: AsyncGradient) -> None:
volume = await async_client.gpu_droplets.volumes.create(
@@ -307,7 +307,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradient) -> No
)
assert_matches_type(VolumeCreateResponse, volume, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params_overload_1(self, async_client: AsyncGradient) -> None:
volume = await async_client.gpu_droplets.volumes.create(
@@ -322,7 +322,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn
)
assert_matches_type(VolumeCreateResponse, volume, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.volumes.with_raw_response.create(
@@ -336,7 +336,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradient)
volume = await response.parse()
assert_matches_type(VolumeCreateResponse, volume, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create_overload_1(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.volumes.with_streaming_response.create(
@@ -352,7 +352,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_overload_2(self, async_client: AsyncGradient) -> None:
volume = await async_client.gpu_droplets.volumes.create(
@@ -362,7 +362,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradient) -> No
)
assert_matches_type(VolumeCreateResponse, volume, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradient) -> None:
volume = await async_client.gpu_droplets.volumes.create(
@@ -377,7 +377,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
)
assert_matches_type(VolumeCreateResponse, volume, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.volumes.with_raw_response.create(
@@ -391,7 +391,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradient)
volume = await response.parse()
assert_matches_type(VolumeCreateResponse, volume, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create_overload_2(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.volumes.with_streaming_response.create(
@@ -407,7 +407,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncGra
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
volume = await async_client.gpu_droplets.volumes.retrieve(
@@ -415,7 +415,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(VolumeRetrieveResponse, volume, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.volumes.with_raw_response.retrieve(
@@ -427,7 +427,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
volume = await response.parse()
assert_matches_type(VolumeRetrieveResponse, volume, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.volumes.with_streaming_response.retrieve(
@@ -441,7 +441,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
@@ -449,13 +449,13 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
volume = await async_client.gpu_droplets.volumes.list()
assert_matches_type(VolumeListResponse, volume, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
volume = await async_client.gpu_droplets.volumes.list(
@@ -466,7 +466,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(VolumeListResponse, volume, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.volumes.with_raw_response.list()
@@ -476,7 +476,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
volume = await response.parse()
assert_matches_type(VolumeListResponse, volume, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.volumes.with_streaming_response.list() as response:
@@ -488,7 +488,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete(self, async_client: AsyncGradient) -> None:
volume = await async_client.gpu_droplets.volumes.delete(
@@ -496,7 +496,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None:
)
assert volume is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.volumes.with_raw_response.delete(
@@ -508,7 +508,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
volume = await response.parse()
assert volume is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.volumes.with_streaming_response.delete(
@@ -522,7 +522,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
@@ -530,13 +530,13 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete_by_name(self, async_client: AsyncGradient) -> None:
volume = await async_client.gpu_droplets.volumes.delete_by_name()
assert volume is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete_by_name_with_all_params(self, async_client: AsyncGradient) -> None:
volume = await async_client.gpu_droplets.volumes.delete_by_name(
@@ -545,7 +545,7 @@ async def test_method_delete_by_name_with_all_params(self, async_client: AsyncGr
)
assert volume is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete_by_name(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.volumes.with_raw_response.delete_by_name()
@@ -555,7 +555,7 @@ async def test_raw_response_delete_by_name(self, async_client: AsyncGradient) ->
volume = await response.parse()
assert volume is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete_by_name(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.volumes.with_streaming_response.delete_by_name() as response:
diff --git a/tests/api_resources/gpu_droplets/volumes/test_actions.py b/tests/api_resources/gpu_droplets/volumes/test_actions.py
index 7159db48..f1bb3a21 100644
--- a/tests/api_resources/gpu_droplets/volumes/test_actions.py
+++ b/tests/api_resources/gpu_droplets/volumes/test_actions.py
@@ -22,7 +22,7 @@
class TestActions:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
action = client.gpu_droplets.volumes.actions.retrieve(
@@ -31,7 +31,7 @@ def test_method_retrieve(self, client: Gradient) -> None:
)
assert_matches_type(ActionRetrieveResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve_with_all_params(self, client: Gradient) -> None:
action = client.gpu_droplets.volumes.actions.retrieve(
@@ -42,7 +42,7 @@ def test_method_retrieve_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(ActionRetrieveResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.gpu_droplets.volumes.actions.with_raw_response.retrieve(
@@ -55,7 +55,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
action = response.parse()
assert_matches_type(ActionRetrieveResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.gpu_droplets.volumes.actions.with_streaming_response.retrieve(
@@ -70,7 +70,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_retrieve(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
@@ -79,7 +79,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None:
volume_id="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
action = client.gpu_droplets.volumes.actions.list(
@@ -87,7 +87,7 @@ def test_method_list(self, client: Gradient) -> None:
)
assert_matches_type(ActionListResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
action = client.gpu_droplets.volumes.actions.list(
@@ -97,7 +97,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(ActionListResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.gpu_droplets.volumes.actions.with_raw_response.list(
@@ -109,7 +109,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
action = response.parse()
assert_matches_type(ActionListResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.gpu_droplets.volumes.actions.with_streaming_response.list(
@@ -123,7 +123,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_list(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
@@ -131,7 +131,7 @@ def test_path_params_list(self, client: Gradient) -> None:
volume_id="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_by_id_overload_1(self, client: Gradient) -> None:
action = client.gpu_droplets.volumes.actions.initiate_by_id(
@@ -141,7 +141,7 @@ def test_method_initiate_by_id_overload_1(self, client: Gradient) -> None:
)
assert_matches_type(ActionInitiateByIDResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_by_id_with_all_params_overload_1(self, client: Gradient) -> None:
action = client.gpu_droplets.volumes.actions.initiate_by_id(
@@ -155,7 +155,7 @@ def test_method_initiate_by_id_with_all_params_overload_1(self, client: Gradient
)
assert_matches_type(ActionInitiateByIDResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_initiate_by_id_overload_1(self, client: Gradient) -> None:
response = client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id(
@@ -169,7 +169,7 @@ def test_raw_response_initiate_by_id_overload_1(self, client: Gradient) -> None:
action = response.parse()
assert_matches_type(ActionInitiateByIDResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_initiate_by_id_overload_1(self, client: Gradient) -> None:
with client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_id(
@@ -185,7 +185,7 @@ def test_streaming_response_initiate_by_id_overload_1(self, client: Gradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_initiate_by_id_overload_1(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
@@ -195,7 +195,7 @@ def test_path_params_initiate_by_id_overload_1(self, client: Gradient) -> None:
type="attach",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_by_id_overload_2(self, client: Gradient) -> None:
action = client.gpu_droplets.volumes.actions.initiate_by_id(
@@ -205,7 +205,7 @@ def test_method_initiate_by_id_overload_2(self, client: Gradient) -> None:
)
assert_matches_type(ActionInitiateByIDResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_by_id_with_all_params_overload_2(self, client: Gradient) -> None:
action = client.gpu_droplets.volumes.actions.initiate_by_id(
@@ -218,7 +218,7 @@ def test_method_initiate_by_id_with_all_params_overload_2(self, client: Gradient
)
assert_matches_type(ActionInitiateByIDResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_initiate_by_id_overload_2(self, client: Gradient) -> None:
response = client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id(
@@ -232,7 +232,7 @@ def test_raw_response_initiate_by_id_overload_2(self, client: Gradient) -> None:
action = response.parse()
assert_matches_type(ActionInitiateByIDResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_initiate_by_id_overload_2(self, client: Gradient) -> None:
with client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_id(
@@ -248,7 +248,7 @@ def test_streaming_response_initiate_by_id_overload_2(self, client: Gradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_initiate_by_id_overload_2(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
@@ -258,7 +258,7 @@ def test_path_params_initiate_by_id_overload_2(self, client: Gradient) -> None:
type="attach",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_by_id_overload_3(self, client: Gradient) -> None:
action = client.gpu_droplets.volumes.actions.initiate_by_id(
@@ -268,7 +268,7 @@ def test_method_initiate_by_id_overload_3(self, client: Gradient) -> None:
)
assert_matches_type(ActionInitiateByIDResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_by_id_with_all_params_overload_3(self, client: Gradient) -> None:
action = client.gpu_droplets.volumes.actions.initiate_by_id(
@@ -281,7 +281,7 @@ def test_method_initiate_by_id_with_all_params_overload_3(self, client: Gradient
)
assert_matches_type(ActionInitiateByIDResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_initiate_by_id_overload_3(self, client: Gradient) -> None:
response = client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id(
@@ -295,7 +295,7 @@ def test_raw_response_initiate_by_id_overload_3(self, client: Gradient) -> None:
action = response.parse()
assert_matches_type(ActionInitiateByIDResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_initiate_by_id_overload_3(self, client: Gradient) -> None:
with client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_id(
@@ -311,7 +311,7 @@ def test_streaming_response_initiate_by_id_overload_3(self, client: Gradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_initiate_by_id_overload_3(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
@@ -321,7 +321,7 @@ def test_path_params_initiate_by_id_overload_3(self, client: Gradient) -> None:
type="attach",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_by_name_overload_1(self, client: Gradient) -> None:
action = client.gpu_droplets.volumes.actions.initiate_by_name(
@@ -330,7 +330,7 @@ def test_method_initiate_by_name_overload_1(self, client: Gradient) -> None:
)
assert_matches_type(ActionInitiateByNameResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_by_name_with_all_params_overload_1(self, client: Gradient) -> None:
action = client.gpu_droplets.volumes.actions.initiate_by_name(
@@ -343,7 +343,7 @@ def test_method_initiate_by_name_with_all_params_overload_1(self, client: Gradie
)
assert_matches_type(ActionInitiateByNameResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_initiate_by_name_overload_1(self, client: Gradient) -> None:
response = client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_name(
@@ -356,7 +356,7 @@ def test_raw_response_initiate_by_name_overload_1(self, client: Gradient) -> Non
action = response.parse()
assert_matches_type(ActionInitiateByNameResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_initiate_by_name_overload_1(self, client: Gradient) -> None:
with client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_name(
@@ -371,7 +371,7 @@ def test_streaming_response_initiate_by_name_overload_1(self, client: Gradient)
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_by_name_overload_2(self, client: Gradient) -> None:
action = client.gpu_droplets.volumes.actions.initiate_by_name(
@@ -380,7 +380,7 @@ def test_method_initiate_by_name_overload_2(self, client: Gradient) -> None:
)
assert_matches_type(ActionInitiateByNameResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_by_name_with_all_params_overload_2(self, client: Gradient) -> None:
action = client.gpu_droplets.volumes.actions.initiate_by_name(
@@ -392,7 +392,7 @@ def test_method_initiate_by_name_with_all_params_overload_2(self, client: Gradie
)
assert_matches_type(ActionInitiateByNameResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_initiate_by_name_overload_2(self, client: Gradient) -> None:
response = client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_name(
@@ -405,7 +405,7 @@ def test_raw_response_initiate_by_name_overload_2(self, client: Gradient) -> Non
action = response.parse()
assert_matches_type(ActionInitiateByNameResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_initiate_by_name_overload_2(self, client: Gradient) -> None:
with client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_name(
@@ -426,7 +426,7 @@ class TestAsyncActions:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.volumes.actions.retrieve(
@@ -435,7 +435,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(ActionRetrieveResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve_with_all_params(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.volumes.actions.retrieve(
@@ -446,7 +446,7 @@ async def test_method_retrieve_with_all_params(self, async_client: AsyncGradient
)
assert_matches_type(ActionRetrieveResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.volumes.actions.with_raw_response.retrieve(
@@ -459,7 +459,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
action = await response.parse()
assert_matches_type(ActionRetrieveResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.volumes.actions.with_streaming_response.retrieve(
@@ -474,7 +474,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
@@ -483,7 +483,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
volume_id="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.volumes.actions.list(
@@ -491,7 +491,7 @@ async def test_method_list(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(ActionListResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.volumes.actions.list(
@@ -501,7 +501,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(ActionListResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.volumes.actions.with_raw_response.list(
@@ -513,7 +513,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
action = await response.parse()
assert_matches_type(ActionListResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.volumes.actions.with_streaming_response.list(
@@ -527,7 +527,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_list(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
@@ -535,7 +535,7 @@ async def test_path_params_list(self, async_client: AsyncGradient) -> None:
volume_id="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_by_id_overload_1(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.volumes.actions.initiate_by_id(
@@ -545,7 +545,7 @@ async def test_method_initiate_by_id_overload_1(self, async_client: AsyncGradien
)
assert_matches_type(ActionInitiateByIDResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_by_id_with_all_params_overload_1(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.volumes.actions.initiate_by_id(
@@ -559,7 +559,7 @@ async def test_method_initiate_by_id_with_all_params_overload_1(self, async_clie
)
assert_matches_type(ActionInitiateByIDResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_initiate_by_id_overload_1(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id(
@@ -573,7 +573,7 @@ async def test_raw_response_initiate_by_id_overload_1(self, async_client: AsyncG
action = await response.parse()
assert_matches_type(ActionInitiateByIDResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_initiate_by_id_overload_1(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_id(
@@ -589,7 +589,7 @@ async def test_streaming_response_initiate_by_id_overload_1(self, async_client:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_initiate_by_id_overload_1(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
@@ -599,7 +599,7 @@ async def test_path_params_initiate_by_id_overload_1(self, async_client: AsyncGr
type="attach",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_by_id_overload_2(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.volumes.actions.initiate_by_id(
@@ -609,7 +609,7 @@ async def test_method_initiate_by_id_overload_2(self, async_client: AsyncGradien
)
assert_matches_type(ActionInitiateByIDResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_by_id_with_all_params_overload_2(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.volumes.actions.initiate_by_id(
@@ -622,7 +622,7 @@ async def test_method_initiate_by_id_with_all_params_overload_2(self, async_clie
)
assert_matches_type(ActionInitiateByIDResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_initiate_by_id_overload_2(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id(
@@ -636,7 +636,7 @@ async def test_raw_response_initiate_by_id_overload_2(self, async_client: AsyncG
action = await response.parse()
assert_matches_type(ActionInitiateByIDResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_initiate_by_id_overload_2(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_id(
@@ -652,7 +652,7 @@ async def test_streaming_response_initiate_by_id_overload_2(self, async_client:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_initiate_by_id_overload_2(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
@@ -662,7 +662,7 @@ async def test_path_params_initiate_by_id_overload_2(self, async_client: AsyncGr
type="attach",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_by_id_overload_3(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.volumes.actions.initiate_by_id(
@@ -672,7 +672,7 @@ async def test_method_initiate_by_id_overload_3(self, async_client: AsyncGradien
)
assert_matches_type(ActionInitiateByIDResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_by_id_with_all_params_overload_3(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.volumes.actions.initiate_by_id(
@@ -685,7 +685,7 @@ async def test_method_initiate_by_id_with_all_params_overload_3(self, async_clie
)
assert_matches_type(ActionInitiateByIDResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_initiate_by_id_overload_3(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id(
@@ -699,7 +699,7 @@ async def test_raw_response_initiate_by_id_overload_3(self, async_client: AsyncG
action = await response.parse()
assert_matches_type(ActionInitiateByIDResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_initiate_by_id_overload_3(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_id(
@@ -715,7 +715,7 @@ async def test_streaming_response_initiate_by_id_overload_3(self, async_client:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_initiate_by_id_overload_3(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
@@ -725,7 +725,7 @@ async def test_path_params_initiate_by_id_overload_3(self, async_client: AsyncGr
type="attach",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_by_name_overload_1(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.volumes.actions.initiate_by_name(
@@ -734,7 +734,7 @@ async def test_method_initiate_by_name_overload_1(self, async_client: AsyncGradi
)
assert_matches_type(ActionInitiateByNameResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_by_name_with_all_params_overload_1(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.volumes.actions.initiate_by_name(
@@ -747,7 +747,7 @@ async def test_method_initiate_by_name_with_all_params_overload_1(self, async_cl
)
assert_matches_type(ActionInitiateByNameResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_initiate_by_name_overload_1(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_name(
@@ -760,7 +760,7 @@ async def test_raw_response_initiate_by_name_overload_1(self, async_client: Asyn
action = await response.parse()
assert_matches_type(ActionInitiateByNameResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_initiate_by_name_overload_1(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_name(
@@ -775,7 +775,7 @@ async def test_streaming_response_initiate_by_name_overload_1(self, async_client
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_by_name_overload_2(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.volumes.actions.initiate_by_name(
@@ -784,7 +784,7 @@ async def test_method_initiate_by_name_overload_2(self, async_client: AsyncGradi
)
assert_matches_type(ActionInitiateByNameResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_by_name_with_all_params_overload_2(self, async_client: AsyncGradient) -> None:
action = await async_client.gpu_droplets.volumes.actions.initiate_by_name(
@@ -796,7 +796,7 @@ async def test_method_initiate_by_name_with_all_params_overload_2(self, async_cl
)
assert_matches_type(ActionInitiateByNameResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_initiate_by_name_overload_2(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_name(
@@ -809,7 +809,7 @@ async def test_raw_response_initiate_by_name_overload_2(self, async_client: Asyn
action = await response.parse()
assert_matches_type(ActionInitiateByNameResponse, action, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_initiate_by_name_overload_2(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_name(
diff --git a/tests/api_resources/gpu_droplets/volumes/test_snapshots.py b/tests/api_resources/gpu_droplets/volumes/test_snapshots.py
index ec157513..ae47fc90 100644
--- a/tests/api_resources/gpu_droplets/volumes/test_snapshots.py
+++ b/tests/api_resources/gpu_droplets/volumes/test_snapshots.py
@@ -21,7 +21,7 @@
class TestSnapshots:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create(self, client: Gradient) -> None:
snapshot = client.gpu_droplets.volumes.snapshots.create(
@@ -30,7 +30,7 @@ def test_method_create(self, client: Gradient) -> None:
)
assert_matches_type(SnapshotCreateResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params(self, client: Gradient) -> None:
snapshot = client.gpu_droplets.volumes.snapshots.create(
@@ -40,7 +40,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(SnapshotCreateResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create(self, client: Gradient) -> None:
response = client.gpu_droplets.volumes.snapshots.with_raw_response.create(
@@ -53,7 +53,7 @@ def test_raw_response_create(self, client: Gradient) -> None:
snapshot = response.parse()
assert_matches_type(SnapshotCreateResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create(self, client: Gradient) -> None:
with client.gpu_droplets.volumes.snapshots.with_streaming_response.create(
@@ -68,7 +68,7 @@ def test_streaming_response_create(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_create(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
@@ -77,7 +77,7 @@ def test_path_params_create(self, client: Gradient) -> None:
name="big-data-snapshot1475261774",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
snapshot = client.gpu_droplets.volumes.snapshots.retrieve(
@@ -85,7 +85,7 @@ def test_method_retrieve(self, client: Gradient) -> None:
)
assert_matches_type(SnapshotRetrieveResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.gpu_droplets.volumes.snapshots.with_raw_response.retrieve(
@@ -97,7 +97,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
snapshot = response.parse()
assert_matches_type(SnapshotRetrieveResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.gpu_droplets.volumes.snapshots.with_streaming_response.retrieve(
@@ -111,7 +111,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_retrieve(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `snapshot_id` but received ''"):
@@ -119,7 +119,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
snapshot = client.gpu_droplets.volumes.snapshots.list(
@@ -127,7 +127,7 @@ def test_method_list(self, client: Gradient) -> None:
)
assert_matches_type(SnapshotListResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
snapshot = client.gpu_droplets.volumes.snapshots.list(
@@ -137,7 +137,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(SnapshotListResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.gpu_droplets.volumes.snapshots.with_raw_response.list(
@@ -149,7 +149,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
snapshot = response.parse()
assert_matches_type(SnapshotListResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.gpu_droplets.volumes.snapshots.with_streaming_response.list(
@@ -163,7 +163,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_list(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
@@ -171,7 +171,7 @@ def test_path_params_list(self, client: Gradient) -> None:
volume_id="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete(self, client: Gradient) -> None:
snapshot = client.gpu_droplets.volumes.snapshots.delete(
@@ -179,7 +179,7 @@ def test_method_delete(self, client: Gradient) -> None:
)
assert snapshot is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete(self, client: Gradient) -> None:
response = client.gpu_droplets.volumes.snapshots.with_raw_response.delete(
@@ -191,7 +191,7 @@ def test_raw_response_delete(self, client: Gradient) -> None:
snapshot = response.parse()
assert snapshot is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete(self, client: Gradient) -> None:
with client.gpu_droplets.volumes.snapshots.with_streaming_response.delete(
@@ -205,7 +205,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_delete(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `snapshot_id` but received ''"):
@@ -219,7 +219,7 @@ class TestAsyncSnapshots:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create(self, async_client: AsyncGradient) -> None:
snapshot = await async_client.gpu_droplets.volumes.snapshots.create(
@@ -228,7 +228,7 @@ async def test_method_create(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(SnapshotCreateResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None:
snapshot = await async_client.gpu_droplets.volumes.snapshots.create(
@@ -238,7 +238,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(SnapshotCreateResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.volumes.snapshots.with_raw_response.create(
@@ -251,7 +251,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
snapshot = await response.parse()
assert_matches_type(SnapshotCreateResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.volumes.snapshots.with_streaming_response.create(
@@ -266,7 +266,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_create(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
@@ -275,7 +275,7 @@ async def test_path_params_create(self, async_client: AsyncGradient) -> None:
name="big-data-snapshot1475261774",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
snapshot = await async_client.gpu_droplets.volumes.snapshots.retrieve(
@@ -283,7 +283,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(SnapshotRetrieveResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.volumes.snapshots.with_raw_response.retrieve(
@@ -295,7 +295,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
snapshot = await response.parse()
assert_matches_type(SnapshotRetrieveResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.volumes.snapshots.with_streaming_response.retrieve(
@@ -309,7 +309,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `snapshot_id` but received ''"):
@@ -317,7 +317,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
snapshot = await async_client.gpu_droplets.volumes.snapshots.list(
@@ -325,7 +325,7 @@ async def test_method_list(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(SnapshotListResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
snapshot = await async_client.gpu_droplets.volumes.snapshots.list(
@@ -335,7 +335,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(SnapshotListResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.volumes.snapshots.with_raw_response.list(
@@ -347,7 +347,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
snapshot = await response.parse()
assert_matches_type(SnapshotListResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.volumes.snapshots.with_streaming_response.list(
@@ -361,7 +361,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_list(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
@@ -369,7 +369,7 @@ async def test_path_params_list(self, async_client: AsyncGradient) -> None:
volume_id="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete(self, async_client: AsyncGradient) -> None:
snapshot = await async_client.gpu_droplets.volumes.snapshots.delete(
@@ -377,7 +377,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None:
)
assert snapshot is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.volumes.snapshots.with_raw_response.delete(
@@ -389,7 +389,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
snapshot = await response.parse()
assert snapshot is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.volumes.snapshots.with_streaming_response.delete(
@@ -403,7 +403,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `snapshot_id` but received ''"):
diff --git a/tests/api_resources/inference/test_api_keys.py b/tests/api_resources/inference/test_api_keys.py
index 0bbfa00f..99a9e553 100644
--- a/tests/api_resources/inference/test_api_keys.py
+++ b/tests/api_resources/inference/test_api_keys.py
@@ -23,13 +23,13 @@
class TestAPIKeys:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create(self, client: Gradient) -> None:
api_key = client.inference.api_keys.create()
assert_matches_type(APIKeyCreateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params(self, client: Gradient) -> None:
api_key = client.inference.api_keys.create(
@@ -37,7 +37,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(APIKeyCreateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create(self, client: Gradient) -> None:
response = client.inference.api_keys.with_raw_response.create()
@@ -47,7 +47,7 @@ def test_raw_response_create(self, client: Gradient) -> None:
api_key = response.parse()
assert_matches_type(APIKeyCreateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create(self, client: Gradient) -> None:
with client.inference.api_keys.with_streaming_response.create() as response:
@@ -59,7 +59,7 @@ def test_streaming_response_create(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update(self, client: Gradient) -> None:
api_key = client.inference.api_keys.update(
@@ -67,7 +67,7 @@ def test_method_update(self, client: Gradient) -> None:
)
assert_matches_type(APIKeyUpdateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update_with_all_params(self, client: Gradient) -> None:
api_key = client.inference.api_keys.update(
@@ -77,7 +77,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(APIKeyUpdateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_update(self, client: Gradient) -> None:
response = client.inference.api_keys.with_raw_response.update(
@@ -89,7 +89,7 @@ def test_raw_response_update(self, client: Gradient) -> None:
api_key = response.parse()
assert_matches_type(APIKeyUpdateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_update(self, client: Gradient) -> None:
with client.inference.api_keys.with_streaming_response.update(
@@ -103,7 +103,7 @@ def test_streaming_response_update(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_update(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"):
@@ -111,13 +111,13 @@ def test_path_params_update(self, client: Gradient) -> None:
path_api_key_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
api_key = client.inference.api_keys.list()
assert_matches_type(APIKeyListResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
api_key = client.inference.api_keys.list(
@@ -126,7 +126,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(APIKeyListResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.inference.api_keys.with_raw_response.list()
@@ -136,7 +136,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
api_key = response.parse()
assert_matches_type(APIKeyListResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.inference.api_keys.with_streaming_response.list() as response:
@@ -148,7 +148,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete(self, client: Gradient) -> None:
api_key = client.inference.api_keys.delete(
@@ -156,7 +156,7 @@ def test_method_delete(self, client: Gradient) -> None:
)
assert_matches_type(APIKeyDeleteResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete(self, client: Gradient) -> None:
response = client.inference.api_keys.with_raw_response.delete(
@@ -168,7 +168,7 @@ def test_raw_response_delete(self, client: Gradient) -> None:
api_key = response.parse()
assert_matches_type(APIKeyDeleteResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete(self, client: Gradient) -> None:
with client.inference.api_keys.with_streaming_response.delete(
@@ -182,7 +182,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_delete(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
@@ -190,7 +190,7 @@ def test_path_params_delete(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update_regenerate(self, client: Gradient) -> None:
api_key = client.inference.api_keys.update_regenerate(
@@ -198,7 +198,7 @@ def test_method_update_regenerate(self, client: Gradient) -> None:
)
assert_matches_type(APIKeyUpdateRegenerateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_update_regenerate(self, client: Gradient) -> None:
response = client.inference.api_keys.with_raw_response.update_regenerate(
@@ -210,7 +210,7 @@ def test_raw_response_update_regenerate(self, client: Gradient) -> None:
api_key = response.parse()
assert_matches_type(APIKeyUpdateRegenerateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_update_regenerate(self, client: Gradient) -> None:
with client.inference.api_keys.with_streaming_response.update_regenerate(
@@ -224,7 +224,7 @@ def test_streaming_response_update_regenerate(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_update_regenerate(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
@@ -238,13 +238,13 @@ class TestAsyncAPIKeys:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create(self, async_client: AsyncGradient) -> None:
api_key = await async_client.inference.api_keys.create()
assert_matches_type(APIKeyCreateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None:
api_key = await async_client.inference.api_keys.create(
@@ -252,7 +252,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(APIKeyCreateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
response = await async_client.inference.api_keys.with_raw_response.create()
@@ -262,7 +262,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
api_key = await response.parse()
assert_matches_type(APIKeyCreateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradient) -> None:
async with async_client.inference.api_keys.with_streaming_response.create() as response:
@@ -274,7 +274,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update(self, async_client: AsyncGradient) -> None:
api_key = await async_client.inference.api_keys.update(
@@ -282,7 +282,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(APIKeyUpdateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None:
api_key = await async_client.inference.api_keys.update(
@@ -292,7 +292,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(APIKeyUpdateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
response = await async_client.inference.api_keys.with_raw_response.update(
@@ -304,7 +304,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
api_key = await response.parse()
assert_matches_type(APIKeyUpdateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_update(self, async_client: AsyncGradient) -> None:
async with async_client.inference.api_keys.with_streaming_response.update(
@@ -318,7 +318,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_update(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"):
@@ -326,13 +326,13 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None:
path_api_key_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
api_key = await async_client.inference.api_keys.list()
assert_matches_type(APIKeyListResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
api_key = await async_client.inference.api_keys.list(
@@ -341,7 +341,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(APIKeyListResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.inference.api_keys.with_raw_response.list()
@@ -351,7 +351,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
api_key = await response.parse()
assert_matches_type(APIKeyListResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.inference.api_keys.with_streaming_response.list() as response:
@@ -363,7 +363,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete(self, async_client: AsyncGradient) -> None:
api_key = await async_client.inference.api_keys.delete(
@@ -371,7 +371,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(APIKeyDeleteResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
response = await async_client.inference.api_keys.with_raw_response.delete(
@@ -383,7 +383,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
api_key = await response.parse()
assert_matches_type(APIKeyDeleteResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None:
async with async_client.inference.api_keys.with_streaming_response.delete(
@@ -397,7 +397,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
@@ -405,7 +405,7 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update_regenerate(self, async_client: AsyncGradient) -> None:
api_key = await async_client.inference.api_keys.update_regenerate(
@@ -413,7 +413,7 @@ async def test_method_update_regenerate(self, async_client: AsyncGradient) -> No
)
assert_matches_type(APIKeyUpdateRegenerateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_update_regenerate(self, async_client: AsyncGradient) -> None:
response = await async_client.inference.api_keys.with_raw_response.update_regenerate(
@@ -425,7 +425,7 @@ async def test_raw_response_update_regenerate(self, async_client: AsyncGradient)
api_key = await response.parse()
assert_matches_type(APIKeyUpdateRegenerateResponse, api_key, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_update_regenerate(self, async_client: AsyncGradient) -> None:
async with async_client.inference.api_keys.with_streaming_response.update_regenerate(
@@ -439,7 +439,7 @@ async def test_streaming_response_update_regenerate(self, async_client: AsyncGra
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_update_regenerate(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
diff --git a/tests/api_resources/knowledge_bases/test_data_sources.py b/tests/api_resources/knowledge_bases/test_data_sources.py
index d28fd409..e8f430b4 100644
--- a/tests/api_resources/knowledge_bases/test_data_sources.py
+++ b/tests/api_resources/knowledge_bases/test_data_sources.py
@@ -23,7 +23,7 @@
class TestDataSources:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create(self, client: Gradient) -> None:
data_source = client.knowledge_bases.data_sources.create(
@@ -31,7 +31,7 @@ def test_method_create(self, client: Gradient) -> None:
)
assert_matches_type(DataSourceCreateResponse, data_source, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params(self, client: Gradient) -> None:
data_source = client.knowledge_bases.data_sources.create(
@@ -65,7 +65,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(DataSourceCreateResponse, data_source, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create(self, client: Gradient) -> None:
response = client.knowledge_bases.data_sources.with_raw_response.create(
@@ -77,7 +77,7 @@ def test_raw_response_create(self, client: Gradient) -> None:
data_source = response.parse()
assert_matches_type(DataSourceCreateResponse, data_source, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create(self, client: Gradient) -> None:
with client.knowledge_bases.data_sources.with_streaming_response.create(
@@ -91,7 +91,7 @@ def test_streaming_response_create(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_create(self, client: Gradient) -> None:
with pytest.raises(
@@ -101,7 +101,7 @@ def test_path_params_create(self, client: Gradient) -> None:
path_knowledge_base_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update(self, client: Gradient) -> None:
data_source = client.knowledge_bases.data_sources.update(
@@ -110,7 +110,7 @@ def test_method_update(self, client: Gradient) -> None:
)
assert_matches_type(DataSourceUpdateResponse, data_source, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update_with_all_params(self, client: Gradient) -> None:
data_source = client.knowledge_bases.data_sources.update(
@@ -128,7 +128,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(DataSourceUpdateResponse, data_source, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_update(self, client: Gradient) -> None:
response = client.knowledge_bases.data_sources.with_raw_response.update(
@@ -141,7 +141,7 @@ def test_raw_response_update(self, client: Gradient) -> None:
data_source = response.parse()
assert_matches_type(DataSourceUpdateResponse, data_source, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_update(self, client: Gradient) -> None:
with client.knowledge_bases.data_sources.with_streaming_response.update(
@@ -156,7 +156,7 @@ def test_streaming_response_update(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_update(self, client: Gradient) -> None:
with pytest.raises(
@@ -173,7 +173,7 @@ def test_path_params_update(self, client: Gradient) -> None:
path_knowledge_base_uuid="123e4567-e89b-12d3-a456-426614174000",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
data_source = client.knowledge_bases.data_sources.list(
@@ -181,7 +181,7 @@ def test_method_list(self, client: Gradient) -> None:
)
assert_matches_type(DataSourceListResponse, data_source, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
data_source = client.knowledge_bases.data_sources.list(
@@ -191,7 +191,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(DataSourceListResponse, data_source, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.knowledge_bases.data_sources.with_raw_response.list(
@@ -203,7 +203,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
data_source = response.parse()
assert_matches_type(DataSourceListResponse, data_source, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.knowledge_bases.data_sources.with_streaming_response.list(
@@ -217,7 +217,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_list(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `knowledge_base_uuid` but received ''"):
@@ -225,7 +225,7 @@ def test_path_params_list(self, client: Gradient) -> None:
knowledge_base_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete(self, client: Gradient) -> None:
data_source = client.knowledge_bases.data_sources.delete(
@@ -234,7 +234,7 @@ def test_method_delete(self, client: Gradient) -> None:
)
assert_matches_type(DataSourceDeleteResponse, data_source, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete(self, client: Gradient) -> None:
response = client.knowledge_bases.data_sources.with_raw_response.delete(
@@ -247,7 +247,7 @@ def test_raw_response_delete(self, client: Gradient) -> None:
data_source = response.parse()
assert_matches_type(DataSourceDeleteResponse, data_source, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete(self, client: Gradient) -> None:
with client.knowledge_bases.data_sources.with_streaming_response.delete(
@@ -262,7 +262,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_delete(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `knowledge_base_uuid` but received ''"):
@@ -277,13 +277,13 @@ def test_path_params_delete(self, client: Gradient) -> None:
knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"',
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_presigned_urls(self, client: Gradient) -> None:
data_source = client.knowledge_bases.data_sources.create_presigned_urls()
assert_matches_type(DataSourceCreatePresignedURLsResponse, data_source, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_presigned_urls_with_all_params(self, client: Gradient) -> None:
data_source = client.knowledge_bases.data_sources.create_presigned_urls(
@@ -296,7 +296,7 @@ def test_method_create_presigned_urls_with_all_params(self, client: Gradient) ->
)
assert_matches_type(DataSourceCreatePresignedURLsResponse, data_source, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create_presigned_urls(self, client: Gradient) -> None:
response = client.knowledge_bases.data_sources.with_raw_response.create_presigned_urls()
@@ -306,7 +306,7 @@ def test_raw_response_create_presigned_urls(self, client: Gradient) -> None:
data_source = response.parse()
assert_matches_type(DataSourceCreatePresignedURLsResponse, data_source, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create_presigned_urls(self, client: Gradient) -> None:
with client.knowledge_bases.data_sources.with_streaming_response.create_presigned_urls() as response:
@@ -324,7 +324,7 @@ class TestAsyncDataSources:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create(self, async_client: AsyncGradient) -> None:
data_source = await async_client.knowledge_bases.data_sources.create(
@@ -332,7 +332,7 @@ async def test_method_create(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(DataSourceCreateResponse, data_source, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None:
data_source = await async_client.knowledge_bases.data_sources.create(
@@ -366,7 +366,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(DataSourceCreateResponse, data_source, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
response = await async_client.knowledge_bases.data_sources.with_raw_response.create(
@@ -378,7 +378,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
data_source = await response.parse()
assert_matches_type(DataSourceCreateResponse, data_source, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradient) -> None:
async with async_client.knowledge_bases.data_sources.with_streaming_response.create(
@@ -392,7 +392,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_create(self, async_client: AsyncGradient) -> None:
with pytest.raises(
@@ -402,7 +402,7 @@ async def test_path_params_create(self, async_client: AsyncGradient) -> None:
path_knowledge_base_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update(self, async_client: AsyncGradient) -> None:
data_source = await async_client.knowledge_bases.data_sources.update(
@@ -411,7 +411,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(DataSourceUpdateResponse, data_source, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None:
data_source = await async_client.knowledge_bases.data_sources.update(
@@ -429,7 +429,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(DataSourceUpdateResponse, data_source, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
response = await async_client.knowledge_bases.data_sources.with_raw_response.update(
@@ -442,7 +442,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
data_source = await response.parse()
assert_matches_type(DataSourceUpdateResponse, data_source, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_update(self, async_client: AsyncGradient) -> None:
async with async_client.knowledge_bases.data_sources.with_streaming_response.update(
@@ -457,7 +457,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_update(self, async_client: AsyncGradient) -> None:
with pytest.raises(
@@ -474,7 +474,7 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None:
path_knowledge_base_uuid="123e4567-e89b-12d3-a456-426614174000",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
data_source = await async_client.knowledge_bases.data_sources.list(
@@ -482,7 +482,7 @@ async def test_method_list(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(DataSourceListResponse, data_source, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
data_source = await async_client.knowledge_bases.data_sources.list(
@@ -492,7 +492,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(DataSourceListResponse, data_source, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.knowledge_bases.data_sources.with_raw_response.list(
@@ -504,7 +504,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
data_source = await response.parse()
assert_matches_type(DataSourceListResponse, data_source, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.knowledge_bases.data_sources.with_streaming_response.list(
@@ -518,7 +518,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_list(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `knowledge_base_uuid` but received ''"):
@@ -526,7 +526,7 @@ async def test_path_params_list(self, async_client: AsyncGradient) -> None:
knowledge_base_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete(self, async_client: AsyncGradient) -> None:
data_source = await async_client.knowledge_bases.data_sources.delete(
@@ -535,7 +535,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(DataSourceDeleteResponse, data_source, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
response = await async_client.knowledge_bases.data_sources.with_raw_response.delete(
@@ -548,7 +548,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
data_source = await response.parse()
assert_matches_type(DataSourceDeleteResponse, data_source, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None:
async with async_client.knowledge_bases.data_sources.with_streaming_response.delete(
@@ -563,7 +563,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `knowledge_base_uuid` but received ''"):
@@ -578,13 +578,13 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"',
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_presigned_urls(self, async_client: AsyncGradient) -> None:
data_source = await async_client.knowledge_bases.data_sources.create_presigned_urls()
assert_matches_type(DataSourceCreatePresignedURLsResponse, data_source, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_presigned_urls_with_all_params(self, async_client: AsyncGradient) -> None:
data_source = await async_client.knowledge_bases.data_sources.create_presigned_urls(
@@ -597,7 +597,7 @@ async def test_method_create_presigned_urls_with_all_params(self, async_client:
)
assert_matches_type(DataSourceCreatePresignedURLsResponse, data_source, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create_presigned_urls(self, async_client: AsyncGradient) -> None:
response = await async_client.knowledge_bases.data_sources.with_raw_response.create_presigned_urls()
@@ -607,7 +607,7 @@ async def test_raw_response_create_presigned_urls(self, async_client: AsyncGradi
data_source = await response.parse()
assert_matches_type(DataSourceCreatePresignedURLsResponse, data_source, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create_presigned_urls(self, async_client: AsyncGradient) -> None:
async with (
diff --git a/tests/api_resources/knowledge_bases/test_indexing_jobs.py b/tests/api_resources/knowledge_bases/test_indexing_jobs.py
index 231aceff..516250be 100644
--- a/tests/api_resources/knowledge_bases/test_indexing_jobs.py
+++ b/tests/api_resources/knowledge_bases/test_indexing_jobs.py
@@ -25,13 +25,13 @@
class TestIndexingJobs:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create(self, client: Gradient) -> None:
indexing_job = client.knowledge_bases.indexing_jobs.create()
assert_matches_type(IndexingJobCreateResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params(self, client: Gradient) -> None:
indexing_job = client.knowledge_bases.indexing_jobs.create(
@@ -40,7 +40,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(IndexingJobCreateResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create(self, client: Gradient) -> None:
response = client.knowledge_bases.indexing_jobs.with_raw_response.create()
@@ -50,7 +50,7 @@ def test_raw_response_create(self, client: Gradient) -> None:
indexing_job = response.parse()
assert_matches_type(IndexingJobCreateResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create(self, client: Gradient) -> None:
with client.knowledge_bases.indexing_jobs.with_streaming_response.create() as response:
@@ -62,7 +62,7 @@ def test_streaming_response_create(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
indexing_job = client.knowledge_bases.indexing_jobs.retrieve(
@@ -70,7 +70,7 @@ def test_method_retrieve(self, client: Gradient) -> None:
)
assert_matches_type(IndexingJobRetrieveResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.knowledge_bases.indexing_jobs.with_raw_response.retrieve(
@@ -82,7 +82,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
indexing_job = response.parse()
assert_matches_type(IndexingJobRetrieveResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.knowledge_bases.indexing_jobs.with_streaming_response.retrieve(
@@ -96,7 +96,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_retrieve(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
@@ -104,13 +104,13 @@ def test_path_params_retrieve(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
indexing_job = client.knowledge_bases.indexing_jobs.list()
assert_matches_type(IndexingJobListResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
indexing_job = client.knowledge_bases.indexing_jobs.list(
@@ -119,7 +119,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(IndexingJobListResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.knowledge_bases.indexing_jobs.with_raw_response.list()
@@ -129,7 +129,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
indexing_job = response.parse()
assert_matches_type(IndexingJobListResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.knowledge_bases.indexing_jobs.with_streaming_response.list() as response:
@@ -141,7 +141,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve_data_sources(self, client: Gradient) -> None:
indexing_job = client.knowledge_bases.indexing_jobs.retrieve_data_sources(
@@ -149,7 +149,7 @@ def test_method_retrieve_data_sources(self, client: Gradient) -> None:
)
assert_matches_type(IndexingJobRetrieveDataSourcesResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve_data_sources(self, client: Gradient) -> None:
response = client.knowledge_bases.indexing_jobs.with_raw_response.retrieve_data_sources(
@@ -161,7 +161,7 @@ def test_raw_response_retrieve_data_sources(self, client: Gradient) -> None:
indexing_job = response.parse()
assert_matches_type(IndexingJobRetrieveDataSourcesResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve_data_sources(self, client: Gradient) -> None:
with client.knowledge_bases.indexing_jobs.with_streaming_response.retrieve_data_sources(
@@ -175,7 +175,7 @@ def test_streaming_response_retrieve_data_sources(self, client: Gradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_retrieve_data_sources(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `indexing_job_uuid` but received ''"):
@@ -183,7 +183,7 @@ def test_path_params_retrieve_data_sources(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve_signed_url(self, client: Gradient) -> None:
indexing_job = client.knowledge_bases.indexing_jobs.retrieve_signed_url(
@@ -191,7 +191,7 @@ def test_method_retrieve_signed_url(self, client: Gradient) -> None:
)
assert_matches_type(IndexingJobRetrieveSignedURLResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve_signed_url(self, client: Gradient) -> None:
response = client.knowledge_bases.indexing_jobs.with_raw_response.retrieve_signed_url(
@@ -203,7 +203,7 @@ def test_raw_response_retrieve_signed_url(self, client: Gradient) -> None:
indexing_job = response.parse()
assert_matches_type(IndexingJobRetrieveSignedURLResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve_signed_url(self, client: Gradient) -> None:
with client.knowledge_bases.indexing_jobs.with_streaming_response.retrieve_signed_url(
@@ -217,7 +217,7 @@ def test_streaming_response_retrieve_signed_url(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_retrieve_signed_url(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `indexing_job_uuid` but received ''"):
@@ -225,7 +225,7 @@ def test_path_params_retrieve_signed_url(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update_cancel(self, client: Gradient) -> None:
indexing_job = client.knowledge_bases.indexing_jobs.update_cancel(
@@ -233,7 +233,7 @@ def test_method_update_cancel(self, client: Gradient) -> None:
)
assert_matches_type(IndexingJobUpdateCancelResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update_cancel_with_all_params(self, client: Gradient) -> None:
indexing_job = client.knowledge_bases.indexing_jobs.update_cancel(
@@ -242,7 +242,7 @@ def test_method_update_cancel_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(IndexingJobUpdateCancelResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_update_cancel(self, client: Gradient) -> None:
response = client.knowledge_bases.indexing_jobs.with_raw_response.update_cancel(
@@ -254,7 +254,7 @@ def test_raw_response_update_cancel(self, client: Gradient) -> None:
indexing_job = response.parse()
assert_matches_type(IndexingJobUpdateCancelResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_update_cancel(self, client: Gradient) -> None:
with client.knowledge_bases.indexing_jobs.with_streaming_response.update_cancel(
@@ -268,7 +268,7 @@ def test_streaming_response_update_cancel(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_update_cancel(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"):
@@ -401,13 +401,13 @@ class TestAsyncIndexingJobs:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create(self, async_client: AsyncGradient) -> None:
indexing_job = await async_client.knowledge_bases.indexing_jobs.create()
assert_matches_type(IndexingJobCreateResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None:
indexing_job = await async_client.knowledge_bases.indexing_jobs.create(
@@ -416,7 +416,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(IndexingJobCreateResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
response = await async_client.knowledge_bases.indexing_jobs.with_raw_response.create()
@@ -426,7 +426,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
indexing_job = await response.parse()
assert_matches_type(IndexingJobCreateResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradient) -> None:
async with async_client.knowledge_bases.indexing_jobs.with_streaming_response.create() as response:
@@ -438,7 +438,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
indexing_job = await async_client.knowledge_bases.indexing_jobs.retrieve(
@@ -446,7 +446,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(IndexingJobRetrieveResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.knowledge_bases.indexing_jobs.with_raw_response.retrieve(
@@ -458,7 +458,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
indexing_job = await response.parse()
assert_matches_type(IndexingJobRetrieveResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.knowledge_bases.indexing_jobs.with_streaming_response.retrieve(
@@ -472,7 +472,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
@@ -480,13 +480,13 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
indexing_job = await async_client.knowledge_bases.indexing_jobs.list()
assert_matches_type(IndexingJobListResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
indexing_job = await async_client.knowledge_bases.indexing_jobs.list(
@@ -495,7 +495,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(IndexingJobListResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.knowledge_bases.indexing_jobs.with_raw_response.list()
@@ -505,7 +505,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
indexing_job = await response.parse()
assert_matches_type(IndexingJobListResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.knowledge_bases.indexing_jobs.with_streaming_response.list() as response:
@@ -517,7 +517,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve_data_sources(self, async_client: AsyncGradient) -> None:
indexing_job = await async_client.knowledge_bases.indexing_jobs.retrieve_data_sources(
@@ -525,7 +525,7 @@ async def test_method_retrieve_data_sources(self, async_client: AsyncGradient) -
)
assert_matches_type(IndexingJobRetrieveDataSourcesResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve_data_sources(self, async_client: AsyncGradient) -> None:
response = await async_client.knowledge_bases.indexing_jobs.with_raw_response.retrieve_data_sources(
@@ -537,7 +537,7 @@ async def test_raw_response_retrieve_data_sources(self, async_client: AsyncGradi
indexing_job = await response.parse()
assert_matches_type(IndexingJobRetrieveDataSourcesResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve_data_sources(self, async_client: AsyncGradient) -> None:
async with async_client.knowledge_bases.indexing_jobs.with_streaming_response.retrieve_data_sources(
@@ -551,7 +551,7 @@ async def test_streaming_response_retrieve_data_sources(self, async_client: Asyn
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_retrieve_data_sources(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `indexing_job_uuid` but received ''"):
@@ -559,7 +559,7 @@ async def test_path_params_retrieve_data_sources(self, async_client: AsyncGradie
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve_signed_url(self, async_client: AsyncGradient) -> None:
indexing_job = await async_client.knowledge_bases.indexing_jobs.retrieve_signed_url(
@@ -567,7 +567,7 @@ async def test_method_retrieve_signed_url(self, async_client: AsyncGradient) ->
)
assert_matches_type(IndexingJobRetrieveSignedURLResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve_signed_url(self, async_client: AsyncGradient) -> None:
response = await async_client.knowledge_bases.indexing_jobs.with_raw_response.retrieve_signed_url(
@@ -579,7 +579,7 @@ async def test_raw_response_retrieve_signed_url(self, async_client: AsyncGradien
indexing_job = await response.parse()
assert_matches_type(IndexingJobRetrieveSignedURLResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve_signed_url(self, async_client: AsyncGradient) -> None:
async with async_client.knowledge_bases.indexing_jobs.with_streaming_response.retrieve_signed_url(
@@ -593,7 +593,7 @@ async def test_streaming_response_retrieve_signed_url(self, async_client: AsyncG
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_retrieve_signed_url(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `indexing_job_uuid` but received ''"):
@@ -601,7 +601,7 @@ async def test_path_params_retrieve_signed_url(self, async_client: AsyncGradient
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update_cancel(self, async_client: AsyncGradient) -> None:
indexing_job = await async_client.knowledge_bases.indexing_jobs.update_cancel(
@@ -609,7 +609,7 @@ async def test_method_update_cancel(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(IndexingJobUpdateCancelResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update_cancel_with_all_params(self, async_client: AsyncGradient) -> None:
indexing_job = await async_client.knowledge_bases.indexing_jobs.update_cancel(
@@ -618,7 +618,7 @@ async def test_method_update_cancel_with_all_params(self, async_client: AsyncGra
)
assert_matches_type(IndexingJobUpdateCancelResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_update_cancel(self, async_client: AsyncGradient) -> None:
response = await async_client.knowledge_bases.indexing_jobs.with_raw_response.update_cancel(
@@ -630,7 +630,7 @@ async def test_raw_response_update_cancel(self, async_client: AsyncGradient) ->
indexing_job = await response.parse()
assert_matches_type(IndexingJobUpdateCancelResponse, indexing_job, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_update_cancel(self, async_client: AsyncGradient) -> None:
async with async_client.knowledge_bases.indexing_jobs.with_streaming_response.update_cancel(
@@ -644,7 +644,7 @@ async def test_streaming_response_update_cancel(self, async_client: AsyncGradien
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_update_cancel(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"):
diff --git a/tests/api_resources/models/providers/test_anthropic.py b/tests/api_resources/models/providers/test_anthropic.py
index b0aeb37c..d0e8209c 100644
--- a/tests/api_resources/models/providers/test_anthropic.py
+++ b/tests/api_resources/models/providers/test_anthropic.py
@@ -24,13 +24,13 @@
class TestAnthropic:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create(self, client: Gradient) -> None:
anthropic = client.models.providers.anthropic.create()
assert_matches_type(AnthropicCreateResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params(self, client: Gradient) -> None:
anthropic = client.models.providers.anthropic.create(
@@ -39,7 +39,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(AnthropicCreateResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create(self, client: Gradient) -> None:
response = client.models.providers.anthropic.with_raw_response.create()
@@ -49,7 +49,7 @@ def test_raw_response_create(self, client: Gradient) -> None:
anthropic = response.parse()
assert_matches_type(AnthropicCreateResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create(self, client: Gradient) -> None:
with client.models.providers.anthropic.with_streaming_response.create() as response:
@@ -61,7 +61,7 @@ def test_streaming_response_create(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
anthropic = client.models.providers.anthropic.retrieve(
@@ -69,7 +69,7 @@ def test_method_retrieve(self, client: Gradient) -> None:
)
assert_matches_type(AnthropicRetrieveResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.models.providers.anthropic.with_raw_response.retrieve(
@@ -81,7 +81,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
anthropic = response.parse()
assert_matches_type(AnthropicRetrieveResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.models.providers.anthropic.with_streaming_response.retrieve(
@@ -95,7 +95,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_retrieve(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
@@ -103,7 +103,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update(self, client: Gradient) -> None:
anthropic = client.models.providers.anthropic.update(
@@ -111,7 +111,7 @@ def test_method_update(self, client: Gradient) -> None:
)
assert_matches_type(AnthropicUpdateResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update_with_all_params(self, client: Gradient) -> None:
anthropic = client.models.providers.anthropic.update(
@@ -122,7 +122,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(AnthropicUpdateResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_update(self, client: Gradient) -> None:
response = client.models.providers.anthropic.with_raw_response.update(
@@ -134,7 +134,7 @@ def test_raw_response_update(self, client: Gradient) -> None:
anthropic = response.parse()
assert_matches_type(AnthropicUpdateResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_update(self, client: Gradient) -> None:
with client.models.providers.anthropic.with_streaming_response.update(
@@ -148,7 +148,7 @@ def test_streaming_response_update(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_update(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"):
@@ -156,13 +156,13 @@ def test_path_params_update(self, client: Gradient) -> None:
path_api_key_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
anthropic = client.models.providers.anthropic.list()
assert_matches_type(AnthropicListResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
anthropic = client.models.providers.anthropic.list(
@@ -171,7 +171,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(AnthropicListResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.models.providers.anthropic.with_raw_response.list()
@@ -181,7 +181,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
anthropic = response.parse()
assert_matches_type(AnthropicListResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.models.providers.anthropic.with_streaming_response.list() as response:
@@ -193,7 +193,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete(self, client: Gradient) -> None:
anthropic = client.models.providers.anthropic.delete(
@@ -201,7 +201,7 @@ def test_method_delete(self, client: Gradient) -> None:
)
assert_matches_type(AnthropicDeleteResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete(self, client: Gradient) -> None:
response = client.models.providers.anthropic.with_raw_response.delete(
@@ -213,7 +213,7 @@ def test_raw_response_delete(self, client: Gradient) -> None:
anthropic = response.parse()
assert_matches_type(AnthropicDeleteResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete(self, client: Gradient) -> None:
with client.models.providers.anthropic.with_streaming_response.delete(
@@ -227,7 +227,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_delete(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
@@ -235,7 +235,7 @@ def test_path_params_delete(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_agents(self, client: Gradient) -> None:
anthropic = client.models.providers.anthropic.list_agents(
@@ -243,7 +243,7 @@ def test_method_list_agents(self, client: Gradient) -> None:
)
assert_matches_type(AnthropicListAgentsResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_agents_with_all_params(self, client: Gradient) -> None:
anthropic = client.models.providers.anthropic.list_agents(
@@ -253,7 +253,7 @@ def test_method_list_agents_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(AnthropicListAgentsResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list_agents(self, client: Gradient) -> None:
response = client.models.providers.anthropic.with_raw_response.list_agents(
@@ -265,7 +265,7 @@ def test_raw_response_list_agents(self, client: Gradient) -> None:
anthropic = response.parse()
assert_matches_type(AnthropicListAgentsResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list_agents(self, client: Gradient) -> None:
with client.models.providers.anthropic.with_streaming_response.list_agents(
@@ -279,7 +279,7 @@ def test_streaming_response_list_agents(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_list_agents(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
@@ -293,13 +293,13 @@ class TestAsyncAnthropic:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create(self, async_client: AsyncGradient) -> None:
anthropic = await async_client.models.providers.anthropic.create()
assert_matches_type(AnthropicCreateResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None:
anthropic = await async_client.models.providers.anthropic.create(
@@ -308,7 +308,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(AnthropicCreateResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
response = await async_client.models.providers.anthropic.with_raw_response.create()
@@ -318,7 +318,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
anthropic = await response.parse()
assert_matches_type(AnthropicCreateResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradient) -> None:
async with async_client.models.providers.anthropic.with_streaming_response.create() as response:
@@ -330,7 +330,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
anthropic = await async_client.models.providers.anthropic.retrieve(
@@ -338,7 +338,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(AnthropicRetrieveResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.models.providers.anthropic.with_raw_response.retrieve(
@@ -350,7 +350,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
anthropic = await response.parse()
assert_matches_type(AnthropicRetrieveResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.models.providers.anthropic.with_streaming_response.retrieve(
@@ -364,7 +364,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
@@ -372,7 +372,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update(self, async_client: AsyncGradient) -> None:
anthropic = await async_client.models.providers.anthropic.update(
@@ -380,7 +380,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(AnthropicUpdateResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None:
anthropic = await async_client.models.providers.anthropic.update(
@@ -391,7 +391,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(AnthropicUpdateResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
response = await async_client.models.providers.anthropic.with_raw_response.update(
@@ -403,7 +403,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
anthropic = await response.parse()
assert_matches_type(AnthropicUpdateResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_update(self, async_client: AsyncGradient) -> None:
async with async_client.models.providers.anthropic.with_streaming_response.update(
@@ -417,7 +417,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_update(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"):
@@ -425,13 +425,13 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None:
path_api_key_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
anthropic = await async_client.models.providers.anthropic.list()
assert_matches_type(AnthropicListResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
anthropic = await async_client.models.providers.anthropic.list(
@@ -440,7 +440,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(AnthropicListResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.models.providers.anthropic.with_raw_response.list()
@@ -450,7 +450,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
anthropic = await response.parse()
assert_matches_type(AnthropicListResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.models.providers.anthropic.with_streaming_response.list() as response:
@@ -462,7 +462,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete(self, async_client: AsyncGradient) -> None:
anthropic = await async_client.models.providers.anthropic.delete(
@@ -470,7 +470,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(AnthropicDeleteResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
response = await async_client.models.providers.anthropic.with_raw_response.delete(
@@ -482,7 +482,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
anthropic = await response.parse()
assert_matches_type(AnthropicDeleteResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None:
async with async_client.models.providers.anthropic.with_streaming_response.delete(
@@ -496,7 +496,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
@@ -504,7 +504,7 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_agents(self, async_client: AsyncGradient) -> None:
anthropic = await async_client.models.providers.anthropic.list_agents(
@@ -512,7 +512,7 @@ async def test_method_list_agents(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(AnthropicListAgentsResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_agents_with_all_params(self, async_client: AsyncGradient) -> None:
anthropic = await async_client.models.providers.anthropic.list_agents(
@@ -522,7 +522,7 @@ async def test_method_list_agents_with_all_params(self, async_client: AsyncGradi
)
assert_matches_type(AnthropicListAgentsResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list_agents(self, async_client: AsyncGradient) -> None:
response = await async_client.models.providers.anthropic.with_raw_response.list_agents(
@@ -534,7 +534,7 @@ async def test_raw_response_list_agents(self, async_client: AsyncGradient) -> No
anthropic = await response.parse()
assert_matches_type(AnthropicListAgentsResponse, anthropic, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list_agents(self, async_client: AsyncGradient) -> None:
async with async_client.models.providers.anthropic.with_streaming_response.list_agents(
@@ -548,7 +548,7 @@ async def test_streaming_response_list_agents(self, async_client: AsyncGradient)
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_list_agents(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
diff --git a/tests/api_resources/models/providers/test_openai.py b/tests/api_resources/models/providers/test_openai.py
index c5780e05..14ef2c24 100644
--- a/tests/api_resources/models/providers/test_openai.py
+++ b/tests/api_resources/models/providers/test_openai.py
@@ -24,13 +24,13 @@
class TestOpenAI:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create(self, client: Gradient) -> None:
openai = client.models.providers.openai.create()
assert_matches_type(OpenAICreateResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params(self, client: Gradient) -> None:
openai = client.models.providers.openai.create(
@@ -39,7 +39,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(OpenAICreateResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create(self, client: Gradient) -> None:
response = client.models.providers.openai.with_raw_response.create()
@@ -49,7 +49,7 @@ def test_raw_response_create(self, client: Gradient) -> None:
openai = response.parse()
assert_matches_type(OpenAICreateResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create(self, client: Gradient) -> None:
with client.models.providers.openai.with_streaming_response.create() as response:
@@ -61,7 +61,7 @@ def test_streaming_response_create(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
openai = client.models.providers.openai.retrieve(
@@ -69,7 +69,7 @@ def test_method_retrieve(self, client: Gradient) -> None:
)
assert_matches_type(OpenAIRetrieveResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.models.providers.openai.with_raw_response.retrieve(
@@ -81,7 +81,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
openai = response.parse()
assert_matches_type(OpenAIRetrieveResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.models.providers.openai.with_streaming_response.retrieve(
@@ -95,7 +95,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_retrieve(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
@@ -103,7 +103,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update(self, client: Gradient) -> None:
openai = client.models.providers.openai.update(
@@ -111,7 +111,7 @@ def test_method_update(self, client: Gradient) -> None:
)
assert_matches_type(OpenAIUpdateResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update_with_all_params(self, client: Gradient) -> None:
openai = client.models.providers.openai.update(
@@ -122,7 +122,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(OpenAIUpdateResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_update(self, client: Gradient) -> None:
response = client.models.providers.openai.with_raw_response.update(
@@ -134,7 +134,7 @@ def test_raw_response_update(self, client: Gradient) -> None:
openai = response.parse()
assert_matches_type(OpenAIUpdateResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_update(self, client: Gradient) -> None:
with client.models.providers.openai.with_streaming_response.update(
@@ -148,7 +148,7 @@ def test_streaming_response_update(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_update(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"):
@@ -156,13 +156,13 @@ def test_path_params_update(self, client: Gradient) -> None:
path_api_key_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
openai = client.models.providers.openai.list()
assert_matches_type(OpenAIListResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
openai = client.models.providers.openai.list(
@@ -171,7 +171,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(OpenAIListResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.models.providers.openai.with_raw_response.list()
@@ -181,7 +181,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
openai = response.parse()
assert_matches_type(OpenAIListResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.models.providers.openai.with_streaming_response.list() as response:
@@ -193,7 +193,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete(self, client: Gradient) -> None:
openai = client.models.providers.openai.delete(
@@ -201,7 +201,7 @@ def test_method_delete(self, client: Gradient) -> None:
)
assert_matches_type(OpenAIDeleteResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete(self, client: Gradient) -> None:
response = client.models.providers.openai.with_raw_response.delete(
@@ -213,7 +213,7 @@ def test_raw_response_delete(self, client: Gradient) -> None:
openai = response.parse()
assert_matches_type(OpenAIDeleteResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete(self, client: Gradient) -> None:
with client.models.providers.openai.with_streaming_response.delete(
@@ -227,7 +227,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_delete(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
@@ -235,7 +235,7 @@ def test_path_params_delete(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve_agents(self, client: Gradient) -> None:
openai = client.models.providers.openai.retrieve_agents(
@@ -243,7 +243,7 @@ def test_method_retrieve_agents(self, client: Gradient) -> None:
)
assert_matches_type(OpenAIRetrieveAgentsResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve_agents_with_all_params(self, client: Gradient) -> None:
openai = client.models.providers.openai.retrieve_agents(
@@ -253,7 +253,7 @@ def test_method_retrieve_agents_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(OpenAIRetrieveAgentsResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve_agents(self, client: Gradient) -> None:
response = client.models.providers.openai.with_raw_response.retrieve_agents(
@@ -265,7 +265,7 @@ def test_raw_response_retrieve_agents(self, client: Gradient) -> None:
openai = response.parse()
assert_matches_type(OpenAIRetrieveAgentsResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve_agents(self, client: Gradient) -> None:
with client.models.providers.openai.with_streaming_response.retrieve_agents(
@@ -279,7 +279,7 @@ def test_streaming_response_retrieve_agents(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_retrieve_agents(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
@@ -293,13 +293,13 @@ class TestAsyncOpenAI:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create(self, async_client: AsyncGradient) -> None:
openai = await async_client.models.providers.openai.create()
assert_matches_type(OpenAICreateResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None:
openai = await async_client.models.providers.openai.create(
@@ -308,7 +308,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(OpenAICreateResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
response = await async_client.models.providers.openai.with_raw_response.create()
@@ -318,7 +318,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
openai = await response.parse()
assert_matches_type(OpenAICreateResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradient) -> None:
async with async_client.models.providers.openai.with_streaming_response.create() as response:
@@ -330,7 +330,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
openai = await async_client.models.providers.openai.retrieve(
@@ -338,7 +338,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(OpenAIRetrieveResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.models.providers.openai.with_raw_response.retrieve(
@@ -350,7 +350,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
openai = await response.parse()
assert_matches_type(OpenAIRetrieveResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.models.providers.openai.with_streaming_response.retrieve(
@@ -364,7 +364,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
@@ -372,7 +372,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update(self, async_client: AsyncGradient) -> None:
openai = await async_client.models.providers.openai.update(
@@ -380,7 +380,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(OpenAIUpdateResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None:
openai = await async_client.models.providers.openai.update(
@@ -391,7 +391,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(OpenAIUpdateResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
response = await async_client.models.providers.openai.with_raw_response.update(
@@ -403,7 +403,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
openai = await response.parse()
assert_matches_type(OpenAIUpdateResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_update(self, async_client: AsyncGradient) -> None:
async with async_client.models.providers.openai.with_streaming_response.update(
@@ -417,7 +417,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_update(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"):
@@ -425,13 +425,13 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None:
path_api_key_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
openai = await async_client.models.providers.openai.list()
assert_matches_type(OpenAIListResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
openai = await async_client.models.providers.openai.list(
@@ -440,7 +440,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(OpenAIListResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.models.providers.openai.with_raw_response.list()
@@ -450,7 +450,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
openai = await response.parse()
assert_matches_type(OpenAIListResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.models.providers.openai.with_streaming_response.list() as response:
@@ -462,7 +462,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete(self, async_client: AsyncGradient) -> None:
openai = await async_client.models.providers.openai.delete(
@@ -470,7 +470,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(OpenAIDeleteResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
response = await async_client.models.providers.openai.with_raw_response.delete(
@@ -482,7 +482,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
openai = await response.parse()
assert_matches_type(OpenAIDeleteResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None:
async with async_client.models.providers.openai.with_streaming_response.delete(
@@ -496,7 +496,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
@@ -504,7 +504,7 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve_agents(self, async_client: AsyncGradient) -> None:
openai = await async_client.models.providers.openai.retrieve_agents(
@@ -512,7 +512,7 @@ async def test_method_retrieve_agents(self, async_client: AsyncGradient) -> None
)
assert_matches_type(OpenAIRetrieveAgentsResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve_agents_with_all_params(self, async_client: AsyncGradient) -> None:
openai = await async_client.models.providers.openai.retrieve_agents(
@@ -522,7 +522,7 @@ async def test_method_retrieve_agents_with_all_params(self, async_client: AsyncG
)
assert_matches_type(OpenAIRetrieveAgentsResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve_agents(self, async_client: AsyncGradient) -> None:
response = await async_client.models.providers.openai.with_raw_response.retrieve_agents(
@@ -534,7 +534,7 @@ async def test_raw_response_retrieve_agents(self, async_client: AsyncGradient) -
openai = await response.parse()
assert_matches_type(OpenAIRetrieveAgentsResponse, openai, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve_agents(self, async_client: AsyncGradient) -> None:
async with async_client.models.providers.openai.with_streaming_response.retrieve_agents(
@@ -548,7 +548,7 @@ async def test_streaming_response_retrieve_agents(self, async_client: AsyncGradi
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_retrieve_agents(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
diff --git a/tests/api_resources/nfs/test_snapshots.py b/tests/api_resources/nfs/test_snapshots.py
index e17265f3..5b229811 100644
--- a/tests/api_resources/nfs/test_snapshots.py
+++ b/tests/api_resources/nfs/test_snapshots.py
@@ -20,7 +20,7 @@
class TestSnapshots:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
snapshot = client.nfs.snapshots.retrieve(
@@ -29,7 +29,7 @@ def test_method_retrieve(self, client: Gradient) -> None:
)
assert_matches_type(SnapshotRetrieveResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.nfs.snapshots.with_raw_response.retrieve(
@@ -42,7 +42,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
snapshot = response.parse()
assert_matches_type(SnapshotRetrieveResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.nfs.snapshots.with_streaming_response.retrieve(
@@ -57,7 +57,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_retrieve(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_snapshot_id` but received ''"):
@@ -66,7 +66,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None:
region="region",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
snapshot = client.nfs.snapshots.list(
@@ -74,7 +74,7 @@ def test_method_list(self, client: Gradient) -> None:
)
assert_matches_type(SnapshotListResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
snapshot = client.nfs.snapshots.list(
@@ -83,7 +83,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(SnapshotListResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.nfs.snapshots.with_raw_response.list(
@@ -95,7 +95,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
snapshot = response.parse()
assert_matches_type(SnapshotListResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.nfs.snapshots.with_streaming_response.list(
@@ -109,7 +109,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete(self, client: Gradient) -> None:
snapshot = client.nfs.snapshots.delete(
@@ -118,7 +118,7 @@ def test_method_delete(self, client: Gradient) -> None:
)
assert snapshot is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete(self, client: Gradient) -> None:
response = client.nfs.snapshots.with_raw_response.delete(
@@ -131,7 +131,7 @@ def test_raw_response_delete(self, client: Gradient) -> None:
snapshot = response.parse()
assert snapshot is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete(self, client: Gradient) -> None:
with client.nfs.snapshots.with_streaming_response.delete(
@@ -146,7 +146,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_delete(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_snapshot_id` but received ''"):
@@ -161,7 +161,7 @@ class TestAsyncSnapshots:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
snapshot = await async_client.nfs.snapshots.retrieve(
@@ -170,7 +170,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(SnapshotRetrieveResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.nfs.snapshots.with_raw_response.retrieve(
@@ -183,7 +183,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
snapshot = await response.parse()
assert_matches_type(SnapshotRetrieveResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.nfs.snapshots.with_streaming_response.retrieve(
@@ -198,7 +198,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_snapshot_id` but received ''"):
@@ -207,7 +207,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
region="region",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
snapshot = await async_client.nfs.snapshots.list(
@@ -215,7 +215,7 @@ async def test_method_list(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(SnapshotListResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
snapshot = await async_client.nfs.snapshots.list(
@@ -224,7 +224,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(SnapshotListResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.nfs.snapshots.with_raw_response.list(
@@ -236,7 +236,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
snapshot = await response.parse()
assert_matches_type(SnapshotListResponse, snapshot, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.nfs.snapshots.with_streaming_response.list(
@@ -250,7 +250,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete(self, async_client: AsyncGradient) -> None:
snapshot = await async_client.nfs.snapshots.delete(
@@ -259,7 +259,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None:
)
assert snapshot is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
response = await async_client.nfs.snapshots.with_raw_response.delete(
@@ -272,7 +272,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
snapshot = await response.parse()
assert snapshot is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None:
async with async_client.nfs.snapshots.with_streaming_response.delete(
@@ -287,7 +287,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_snapshot_id` but received ''"):
diff --git a/tests/api_resources/test_agents.py b/tests/api_resources/test_agents.py
index c9e59e32..0c1833f6 100644
--- a/tests/api_resources/test_agents.py
+++ b/tests/api_resources/test_agents.py
@@ -26,13 +26,13 @@
class TestAgents:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create(self, client: Gradient) -> None:
agent = client.agents.create()
assert_matches_type(AgentCreateResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params(self, client: Gradient) -> None:
agent = client.agents.create(
@@ -51,7 +51,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(AgentCreateResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create(self, client: Gradient) -> None:
response = client.agents.with_raw_response.create()
@@ -61,7 +61,7 @@ def test_raw_response_create(self, client: Gradient) -> None:
agent = response.parse()
assert_matches_type(AgentCreateResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create(self, client: Gradient) -> None:
with client.agents.with_streaming_response.create() as response:
@@ -73,7 +73,7 @@ def test_streaming_response_create(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
agent = client.agents.retrieve(
@@ -81,7 +81,7 @@ def test_method_retrieve(self, client: Gradient) -> None:
)
assert_matches_type(AgentRetrieveResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.agents.with_raw_response.retrieve(
@@ -93,7 +93,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
agent = response.parse()
assert_matches_type(AgentRetrieveResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.agents.with_streaming_response.retrieve(
@@ -107,7 +107,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_retrieve(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
@@ -115,7 +115,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update(self, client: Gradient) -> None:
agent = client.agents.update(
@@ -123,7 +123,7 @@ def test_method_update(self, client: Gradient) -> None:
)
assert_matches_type(AgentUpdateResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update_with_all_params(self, client: Gradient) -> None:
agent = client.agents.update(
@@ -150,7 +150,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(AgentUpdateResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_update(self, client: Gradient) -> None:
response = client.agents.with_raw_response.update(
@@ -162,7 +162,7 @@ def test_raw_response_update(self, client: Gradient) -> None:
agent = response.parse()
assert_matches_type(AgentUpdateResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_update(self, client: Gradient) -> None:
with client.agents.with_streaming_response.update(
@@ -176,7 +176,7 @@ def test_streaming_response_update(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_update(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"):
@@ -184,13 +184,13 @@ def test_path_params_update(self, client: Gradient) -> None:
path_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
agent = client.agents.list()
assert_matches_type(AgentListResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
agent = client.agents.list(
@@ -200,7 +200,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(AgentListResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.agents.with_raw_response.list()
@@ -210,7 +210,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
agent = response.parse()
assert_matches_type(AgentListResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.agents.with_streaming_response.list() as response:
@@ -222,7 +222,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete(self, client: Gradient) -> None:
agent = client.agents.delete(
@@ -230,7 +230,7 @@ def test_method_delete(self, client: Gradient) -> None:
)
assert_matches_type(AgentDeleteResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete(self, client: Gradient) -> None:
response = client.agents.with_raw_response.delete(
@@ -242,7 +242,7 @@ def test_raw_response_delete(self, client: Gradient) -> None:
agent = response.parse()
assert_matches_type(AgentDeleteResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete(self, client: Gradient) -> None:
with client.agents.with_streaming_response.delete(
@@ -256,7 +256,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_delete(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
@@ -264,7 +264,7 @@ def test_path_params_delete(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve_usage(self, client: Gradient) -> None:
agent = client.agents.retrieve_usage(
@@ -272,7 +272,7 @@ def test_method_retrieve_usage(self, client: Gradient) -> None:
)
assert_matches_type(AgentRetrieveUsageResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve_usage_with_all_params(self, client: Gradient) -> None:
agent = client.agents.retrieve_usage(
@@ -282,7 +282,7 @@ def test_method_retrieve_usage_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(AgentRetrieveUsageResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve_usage(self, client: Gradient) -> None:
response = client.agents.with_raw_response.retrieve_usage(
@@ -294,7 +294,7 @@ def test_raw_response_retrieve_usage(self, client: Gradient) -> None:
agent = response.parse()
assert_matches_type(AgentRetrieveUsageResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve_usage(self, client: Gradient) -> None:
with client.agents.with_streaming_response.retrieve_usage(
@@ -308,7 +308,7 @@ def test_streaming_response_retrieve_usage(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_retrieve_usage(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
@@ -316,7 +316,7 @@ def test_path_params_retrieve_usage(self, client: Gradient) -> None:
uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update_status(self, client: Gradient) -> None:
agent = client.agents.update_status(
@@ -324,7 +324,7 @@ def test_method_update_status(self, client: Gradient) -> None:
)
assert_matches_type(AgentUpdateStatusResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update_status_with_all_params(self, client: Gradient) -> None:
agent = client.agents.update_status(
@@ -334,7 +334,7 @@ def test_method_update_status_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(AgentUpdateStatusResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_update_status(self, client: Gradient) -> None:
response = client.agents.with_raw_response.update_status(
@@ -346,7 +346,7 @@ def test_raw_response_update_status(self, client: Gradient) -> None:
agent = response.parse()
assert_matches_type(AgentUpdateStatusResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_update_status(self, client: Gradient) -> None:
with client.agents.with_streaming_response.update_status(
@@ -360,7 +360,7 @@ def test_streaming_response_update_status(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_update_status(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"):
@@ -473,13 +473,13 @@ class TestAsyncAgents:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create(self, async_client: AsyncGradient) -> None:
agent = await async_client.agents.create()
assert_matches_type(AgentCreateResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None:
agent = await async_client.agents.create(
@@ -498,7 +498,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(AgentCreateResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.with_raw_response.create()
@@ -508,7 +508,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
agent = await response.parse()
assert_matches_type(AgentCreateResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradient) -> None:
async with async_client.agents.with_streaming_response.create() as response:
@@ -520,7 +520,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
agent = await async_client.agents.retrieve(
@@ -528,7 +528,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(AgentRetrieveResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.with_raw_response.retrieve(
@@ -540,7 +540,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
agent = await response.parse()
assert_matches_type(AgentRetrieveResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.agents.with_streaming_response.retrieve(
@@ -554,7 +554,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
@@ -562,7 +562,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update(self, async_client: AsyncGradient) -> None:
agent = await async_client.agents.update(
@@ -570,7 +570,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(AgentUpdateResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None:
agent = await async_client.agents.update(
@@ -597,7 +597,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(AgentUpdateResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.with_raw_response.update(
@@ -609,7 +609,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
agent = await response.parse()
assert_matches_type(AgentUpdateResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_update(self, async_client: AsyncGradient) -> None:
async with async_client.agents.with_streaming_response.update(
@@ -623,7 +623,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_update(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"):
@@ -631,13 +631,13 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None:
path_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
agent = await async_client.agents.list()
assert_matches_type(AgentListResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
agent = await async_client.agents.list(
@@ -647,7 +647,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(AgentListResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.with_raw_response.list()
@@ -657,7 +657,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
agent = await response.parse()
assert_matches_type(AgentListResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.agents.with_streaming_response.list() as response:
@@ -669,7 +669,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete(self, async_client: AsyncGradient) -> None:
agent = await async_client.agents.delete(
@@ -677,7 +677,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(AgentDeleteResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.with_raw_response.delete(
@@ -689,7 +689,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
agent = await response.parse()
assert_matches_type(AgentDeleteResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None:
async with async_client.agents.with_streaming_response.delete(
@@ -703,7 +703,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
@@ -711,7 +711,7 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve_usage(self, async_client: AsyncGradient) -> None:
agent = await async_client.agents.retrieve_usage(
@@ -719,7 +719,7 @@ async def test_method_retrieve_usage(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(AgentRetrieveUsageResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve_usage_with_all_params(self, async_client: AsyncGradient) -> None:
agent = await async_client.agents.retrieve_usage(
@@ -729,7 +729,7 @@ async def test_method_retrieve_usage_with_all_params(self, async_client: AsyncGr
)
assert_matches_type(AgentRetrieveUsageResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve_usage(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.with_raw_response.retrieve_usage(
@@ -741,7 +741,7 @@ async def test_raw_response_retrieve_usage(self, async_client: AsyncGradient) ->
agent = await response.parse()
assert_matches_type(AgentRetrieveUsageResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve_usage(self, async_client: AsyncGradient) -> None:
async with async_client.agents.with_streaming_response.retrieve_usage(
@@ -755,7 +755,7 @@ async def test_streaming_response_retrieve_usage(self, async_client: AsyncGradie
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_retrieve_usage(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
@@ -763,7 +763,7 @@ async def test_path_params_retrieve_usage(self, async_client: AsyncGradient) ->
uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update_status(self, async_client: AsyncGradient) -> None:
agent = await async_client.agents.update_status(
@@ -771,7 +771,7 @@ async def test_method_update_status(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(AgentUpdateStatusResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update_status_with_all_params(self, async_client: AsyncGradient) -> None:
agent = await async_client.agents.update_status(
@@ -781,7 +781,7 @@ async def test_method_update_status_with_all_params(self, async_client: AsyncGra
)
assert_matches_type(AgentUpdateStatusResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_update_status(self, async_client: AsyncGradient) -> None:
response = await async_client.agents.with_raw_response.update_status(
@@ -793,7 +793,7 @@ async def test_raw_response_update_status(self, async_client: AsyncGradient) ->
agent = await response.parse()
assert_matches_type(AgentUpdateStatusResponse, agent, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_update_status(self, async_client: AsyncGradient) -> None:
async with async_client.agents.with_streaming_response.update_status(
@@ -807,7 +807,7 @@ async def test_streaming_response_update_status(self, async_client: AsyncGradien
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_update_status(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"):
diff --git a/tests/api_resources/test_billing.py b/tests/api_resources/test_billing.py
index 59181b15..9bcd29e0 100644
--- a/tests/api_resources/test_billing.py
+++ b/tests/api_resources/test_billing.py
@@ -18,7 +18,7 @@
class TestBilling:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_insights(self, client: Gradient) -> None:
billing = client.billing.list_insights(
@@ -28,7 +28,7 @@ def test_method_list_insights(self, client: Gradient) -> None:
)
assert_matches_type(BillingListInsightsResponse, billing, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_insights_with_all_params(self, client: Gradient) -> None:
billing = client.billing.list_insights(
@@ -40,7 +40,7 @@ def test_method_list_insights_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(BillingListInsightsResponse, billing, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list_insights(self, client: Gradient) -> None:
response = client.billing.with_raw_response.list_insights(
@@ -54,7 +54,7 @@ def test_raw_response_list_insights(self, client: Gradient) -> None:
billing = response.parse()
assert_matches_type(BillingListInsightsResponse, billing, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list_insights(self, client: Gradient) -> None:
with client.billing.with_streaming_response.list_insights(
@@ -70,7 +70,7 @@ def test_streaming_response_list_insights(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_list_insights(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_urn` but received ''"):
@@ -100,7 +100,7 @@ class TestAsyncBilling:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_insights(self, async_client: AsyncGradient) -> None:
billing = await async_client.billing.list_insights(
@@ -110,7 +110,7 @@ async def test_method_list_insights(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(BillingListInsightsResponse, billing, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_insights_with_all_params(self, async_client: AsyncGradient) -> None:
billing = await async_client.billing.list_insights(
@@ -122,7 +122,7 @@ async def test_method_list_insights_with_all_params(self, async_client: AsyncGra
)
assert_matches_type(BillingListInsightsResponse, billing, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list_insights(self, async_client: AsyncGradient) -> None:
response = await async_client.billing.with_raw_response.list_insights(
@@ -136,7 +136,7 @@ async def test_raw_response_list_insights(self, async_client: AsyncGradient) ->
billing = await response.parse()
assert_matches_type(BillingListInsightsResponse, billing, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list_insights(self, async_client: AsyncGradient) -> None:
async with async_client.billing.with_streaming_response.list_insights(
@@ -152,7 +152,7 @@ async def test_streaming_response_list_insights(self, async_client: AsyncGradien
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_list_insights(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `account_urn` but received ''"):
diff --git a/tests/api_resources/test_gpu_droplets.py b/tests/api_resources/test_gpu_droplets.py
index 7d50c037..32a26a9a 100644
--- a/tests/api_resources/test_gpu_droplets.py
+++ b/tests/api_resources/test_gpu_droplets.py
@@ -25,7 +25,7 @@
class TestGPUDroplets:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_overload_1(self, client: Gradient) -> None:
gpu_droplet = client.gpu_droplets.create(
@@ -35,7 +35,7 @@ def test_method_create_overload_1(self, client: Gradient) -> None:
)
assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params_overload_1(self, client: Gradient) -> None:
gpu_droplet = client.gpu_droplets.create(
@@ -61,7 +61,7 @@ def test_method_create_with_all_params_overload_1(self, client: Gradient) -> Non
)
assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create_overload_1(self, client: Gradient) -> None:
response = client.gpu_droplets.with_raw_response.create(
@@ -75,7 +75,7 @@ def test_raw_response_create_overload_1(self, client: Gradient) -> None:
gpu_droplet = response.parse()
assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create_overload_1(self, client: Gradient) -> None:
with client.gpu_droplets.with_streaming_response.create(
@@ -91,7 +91,7 @@ def test_streaming_response_create_overload_1(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_overload_2(self, client: Gradient) -> None:
gpu_droplet = client.gpu_droplets.create(
@@ -101,7 +101,7 @@ def test_method_create_overload_2(self, client: Gradient) -> None:
)
assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params_overload_2(self, client: Gradient) -> None:
gpu_droplet = client.gpu_droplets.create(
@@ -127,7 +127,7 @@ def test_method_create_with_all_params_overload_2(self, client: Gradient) -> Non
)
assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create_overload_2(self, client: Gradient) -> None:
response = client.gpu_droplets.with_raw_response.create(
@@ -141,7 +141,7 @@ def test_raw_response_create_overload_2(self, client: Gradient) -> None:
gpu_droplet = response.parse()
assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create_overload_2(self, client: Gradient) -> None:
with client.gpu_droplets.with_streaming_response.create(
@@ -157,7 +157,7 @@ def test_streaming_response_create_overload_2(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
gpu_droplet = client.gpu_droplets.retrieve(
@@ -165,7 +165,7 @@ def test_method_retrieve(self, client: Gradient) -> None:
)
assert_matches_type(GPUDropletRetrieveResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.gpu_droplets.with_raw_response.retrieve(
@@ -177,7 +177,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
gpu_droplet = response.parse()
assert_matches_type(GPUDropletRetrieveResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.gpu_droplets.with_streaming_response.retrieve(
@@ -191,13 +191,13 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
gpu_droplet = client.gpu_droplets.list()
assert_matches_type(GPUDropletListResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
gpu_droplet = client.gpu_droplets.list(
@@ -209,7 +209,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(GPUDropletListResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.gpu_droplets.with_raw_response.list()
@@ -219,7 +219,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
gpu_droplet = response.parse()
assert_matches_type(GPUDropletListResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.gpu_droplets.with_streaming_response.list() as response:
@@ -231,7 +231,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete(self, client: Gradient) -> None:
gpu_droplet = client.gpu_droplets.delete(
@@ -239,7 +239,7 @@ def test_method_delete(self, client: Gradient) -> None:
)
assert gpu_droplet is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete(self, client: Gradient) -> None:
response = client.gpu_droplets.with_raw_response.delete(
@@ -251,7 +251,7 @@ def test_raw_response_delete(self, client: Gradient) -> None:
gpu_droplet = response.parse()
assert gpu_droplet is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete(self, client: Gradient) -> None:
with client.gpu_droplets.with_streaming_response.delete(
@@ -265,7 +265,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete_by_tag(self, client: Gradient) -> None:
gpu_droplet = client.gpu_droplets.delete_by_tag(
@@ -273,7 +273,7 @@ def test_method_delete_by_tag(self, client: Gradient) -> None:
)
assert gpu_droplet is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete_by_tag(self, client: Gradient) -> None:
response = client.gpu_droplets.with_raw_response.delete_by_tag(
@@ -285,7 +285,7 @@ def test_raw_response_delete_by_tag(self, client: Gradient) -> None:
gpu_droplet = response.parse()
assert gpu_droplet is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete_by_tag(self, client: Gradient) -> None:
with client.gpu_droplets.with_streaming_response.delete_by_tag(
@@ -299,7 +299,7 @@ def test_streaming_response_delete_by_tag(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_firewalls(self, client: Gradient) -> None:
gpu_droplet = client.gpu_droplets.list_firewalls(
@@ -307,7 +307,7 @@ def test_method_list_firewalls(self, client: Gradient) -> None:
)
assert_matches_type(GPUDropletListFirewallsResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_firewalls_with_all_params(self, client: Gradient) -> None:
gpu_droplet = client.gpu_droplets.list_firewalls(
@@ -317,7 +317,7 @@ def test_method_list_firewalls_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(GPUDropletListFirewallsResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list_firewalls(self, client: Gradient) -> None:
response = client.gpu_droplets.with_raw_response.list_firewalls(
@@ -329,7 +329,7 @@ def test_raw_response_list_firewalls(self, client: Gradient) -> None:
gpu_droplet = response.parse()
assert_matches_type(GPUDropletListFirewallsResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list_firewalls(self, client: Gradient) -> None:
with client.gpu_droplets.with_streaming_response.list_firewalls(
@@ -343,7 +343,7 @@ def test_streaming_response_list_firewalls(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_kernels(self, client: Gradient) -> None:
gpu_droplet = client.gpu_droplets.list_kernels(
@@ -351,7 +351,7 @@ def test_method_list_kernels(self, client: Gradient) -> None:
)
assert_matches_type(GPUDropletListKernelsResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_kernels_with_all_params(self, client: Gradient) -> None:
gpu_droplet = client.gpu_droplets.list_kernels(
@@ -361,7 +361,7 @@ def test_method_list_kernels_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(GPUDropletListKernelsResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list_kernels(self, client: Gradient) -> None:
response = client.gpu_droplets.with_raw_response.list_kernels(
@@ -373,7 +373,7 @@ def test_raw_response_list_kernels(self, client: Gradient) -> None:
gpu_droplet = response.parse()
assert_matches_type(GPUDropletListKernelsResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list_kernels(self, client: Gradient) -> None:
with client.gpu_droplets.with_streaming_response.list_kernels(
@@ -387,7 +387,7 @@ def test_streaming_response_list_kernels(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_neighbors(self, client: Gradient) -> None:
gpu_droplet = client.gpu_droplets.list_neighbors(
@@ -395,7 +395,7 @@ def test_method_list_neighbors(self, client: Gradient) -> None:
)
assert_matches_type(GPUDropletListNeighborsResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list_neighbors(self, client: Gradient) -> None:
response = client.gpu_droplets.with_raw_response.list_neighbors(
@@ -407,7 +407,7 @@ def test_raw_response_list_neighbors(self, client: Gradient) -> None:
gpu_droplet = response.parse()
assert_matches_type(GPUDropletListNeighborsResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list_neighbors(self, client: Gradient) -> None:
with client.gpu_droplets.with_streaming_response.list_neighbors(
@@ -421,7 +421,7 @@ def test_streaming_response_list_neighbors(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_snapshots(self, client: Gradient) -> None:
gpu_droplet = client.gpu_droplets.list_snapshots(
@@ -429,7 +429,7 @@ def test_method_list_snapshots(self, client: Gradient) -> None:
)
assert_matches_type(GPUDropletListSnapshotsResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_snapshots_with_all_params(self, client: Gradient) -> None:
gpu_droplet = client.gpu_droplets.list_snapshots(
@@ -439,7 +439,7 @@ def test_method_list_snapshots_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(GPUDropletListSnapshotsResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list_snapshots(self, client: Gradient) -> None:
response = client.gpu_droplets.with_raw_response.list_snapshots(
@@ -451,7 +451,7 @@ def test_raw_response_list_snapshots(self, client: Gradient) -> None:
gpu_droplet = response.parse()
assert_matches_type(GPUDropletListSnapshotsResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list_snapshots(self, client: Gradient) -> None:
with client.gpu_droplets.with_streaming_response.list_snapshots(
@@ -471,7 +471,7 @@ class TestAsyncGPUDroplets:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_overload_1(self, async_client: AsyncGradient) -> None:
gpu_droplet = await async_client.gpu_droplets.create(
@@ -481,7 +481,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradient) -> No
)
assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params_overload_1(self, async_client: AsyncGradient) -> None:
gpu_droplet = await async_client.gpu_droplets.create(
@@ -507,7 +507,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn
)
assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.with_raw_response.create(
@@ -521,7 +521,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradient)
gpu_droplet = await response.parse()
assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create_overload_1(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.with_streaming_response.create(
@@ -537,7 +537,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_overload_2(self, async_client: AsyncGradient) -> None:
gpu_droplet = await async_client.gpu_droplets.create(
@@ -547,7 +547,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradient) -> No
)
assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradient) -> None:
gpu_droplet = await async_client.gpu_droplets.create(
@@ -573,7 +573,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
)
assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.with_raw_response.create(
@@ -587,7 +587,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradient)
gpu_droplet = await response.parse()
assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create_overload_2(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.with_streaming_response.create(
@@ -603,7 +603,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncGra
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
gpu_droplet = await async_client.gpu_droplets.retrieve(
@@ -611,7 +611,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(GPUDropletRetrieveResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.with_raw_response.retrieve(
@@ -623,7 +623,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
gpu_droplet = await response.parse()
assert_matches_type(GPUDropletRetrieveResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.with_streaming_response.retrieve(
@@ -637,13 +637,13 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
gpu_droplet = await async_client.gpu_droplets.list()
assert_matches_type(GPUDropletListResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
gpu_droplet = await async_client.gpu_droplets.list(
@@ -655,7 +655,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(GPUDropletListResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.with_raw_response.list()
@@ -665,7 +665,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
gpu_droplet = await response.parse()
assert_matches_type(GPUDropletListResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.with_streaming_response.list() as response:
@@ -677,7 +677,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete(self, async_client: AsyncGradient) -> None:
gpu_droplet = await async_client.gpu_droplets.delete(
@@ -685,7 +685,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None:
)
assert gpu_droplet is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.with_raw_response.delete(
@@ -697,7 +697,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
gpu_droplet = await response.parse()
assert gpu_droplet is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.with_streaming_response.delete(
@@ -711,7 +711,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete_by_tag(self, async_client: AsyncGradient) -> None:
gpu_droplet = await async_client.gpu_droplets.delete_by_tag(
@@ -719,7 +719,7 @@ async def test_method_delete_by_tag(self, async_client: AsyncGradient) -> None:
)
assert gpu_droplet is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete_by_tag(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.with_raw_response.delete_by_tag(
@@ -731,7 +731,7 @@ async def test_raw_response_delete_by_tag(self, async_client: AsyncGradient) ->
gpu_droplet = await response.parse()
assert gpu_droplet is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete_by_tag(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.with_streaming_response.delete_by_tag(
@@ -745,7 +745,7 @@ async def test_streaming_response_delete_by_tag(self, async_client: AsyncGradien
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_firewalls(self, async_client: AsyncGradient) -> None:
gpu_droplet = await async_client.gpu_droplets.list_firewalls(
@@ -753,7 +753,7 @@ async def test_method_list_firewalls(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(GPUDropletListFirewallsResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_firewalls_with_all_params(self, async_client: AsyncGradient) -> None:
gpu_droplet = await async_client.gpu_droplets.list_firewalls(
@@ -763,7 +763,7 @@ async def test_method_list_firewalls_with_all_params(self, async_client: AsyncGr
)
assert_matches_type(GPUDropletListFirewallsResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list_firewalls(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.with_raw_response.list_firewalls(
@@ -775,7 +775,7 @@ async def test_raw_response_list_firewalls(self, async_client: AsyncGradient) ->
gpu_droplet = await response.parse()
assert_matches_type(GPUDropletListFirewallsResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list_firewalls(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.with_streaming_response.list_firewalls(
@@ -789,7 +789,7 @@ async def test_streaming_response_list_firewalls(self, async_client: AsyncGradie
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_kernels(self, async_client: AsyncGradient) -> None:
gpu_droplet = await async_client.gpu_droplets.list_kernels(
@@ -797,7 +797,7 @@ async def test_method_list_kernels(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(GPUDropletListKernelsResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_kernels_with_all_params(self, async_client: AsyncGradient) -> None:
gpu_droplet = await async_client.gpu_droplets.list_kernels(
@@ -807,7 +807,7 @@ async def test_method_list_kernels_with_all_params(self, async_client: AsyncGrad
)
assert_matches_type(GPUDropletListKernelsResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list_kernels(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.with_raw_response.list_kernels(
@@ -819,7 +819,7 @@ async def test_raw_response_list_kernels(self, async_client: AsyncGradient) -> N
gpu_droplet = await response.parse()
assert_matches_type(GPUDropletListKernelsResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list_kernels(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.with_streaming_response.list_kernels(
@@ -833,7 +833,7 @@ async def test_streaming_response_list_kernels(self, async_client: AsyncGradient
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_neighbors(self, async_client: AsyncGradient) -> None:
gpu_droplet = await async_client.gpu_droplets.list_neighbors(
@@ -841,7 +841,7 @@ async def test_method_list_neighbors(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(GPUDropletListNeighborsResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list_neighbors(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.with_raw_response.list_neighbors(
@@ -853,7 +853,7 @@ async def test_raw_response_list_neighbors(self, async_client: AsyncGradient) ->
gpu_droplet = await response.parse()
assert_matches_type(GPUDropletListNeighborsResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list_neighbors(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.with_streaming_response.list_neighbors(
@@ -867,7 +867,7 @@ async def test_streaming_response_list_neighbors(self, async_client: AsyncGradie
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_snapshots(self, async_client: AsyncGradient) -> None:
gpu_droplet = await async_client.gpu_droplets.list_snapshots(
@@ -875,7 +875,7 @@ async def test_method_list_snapshots(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(GPUDropletListSnapshotsResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_snapshots_with_all_params(self, async_client: AsyncGradient) -> None:
gpu_droplet = await async_client.gpu_droplets.list_snapshots(
@@ -885,7 +885,7 @@ async def test_method_list_snapshots_with_all_params(self, async_client: AsyncGr
)
assert_matches_type(GPUDropletListSnapshotsResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list_snapshots(self, async_client: AsyncGradient) -> None:
response = await async_client.gpu_droplets.with_raw_response.list_snapshots(
@@ -897,7 +897,7 @@ async def test_raw_response_list_snapshots(self, async_client: AsyncGradient) ->
gpu_droplet = await response.parse()
assert_matches_type(GPUDropletListSnapshotsResponse, gpu_droplet, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list_snapshots(self, async_client: AsyncGradient) -> None:
async with async_client.gpu_droplets.with_streaming_response.list_snapshots(
diff --git a/tests/api_resources/test_images.py b/tests/api_resources/test_images.py
index 47428d02..981570c2 100644
--- a/tests/api_resources/test_images.py
+++ b/tests/api_resources/test_images.py
@@ -17,7 +17,7 @@
class TestImages:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_generate_overload_1(self, client: Gradient) -> None:
image = client.images.generate(
@@ -25,7 +25,7 @@ def test_method_generate_overload_1(self, client: Gradient) -> None:
)
assert_matches_type(ImageGenerateResponse, image, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_generate_with_all_params_overload_1(self, client: Gradient) -> None:
image = client.images.generate(
@@ -44,7 +44,7 @@ def test_method_generate_with_all_params_overload_1(self, client: Gradient) -> N
)
assert_matches_type(ImageGenerateResponse, image, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_generate_overload_1(self, client: Gradient) -> None:
response = client.images.with_raw_response.generate(
@@ -56,7 +56,7 @@ def test_raw_response_generate_overload_1(self, client: Gradient) -> None:
image = response.parse()
assert_matches_type(ImageGenerateResponse, image, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_generate_overload_1(self, client: Gradient) -> None:
with client.images.with_streaming_response.generate(
@@ -70,7 +70,7 @@ def test_streaming_response_generate_overload_1(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_generate_overload_2(self, client: Gradient) -> None:
image_stream = client.images.generate(
@@ -79,7 +79,7 @@ def test_method_generate_overload_2(self, client: Gradient) -> None:
)
image_stream.response.close()
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_generate_with_all_params_overload_2(self, client: Gradient) -> None:
image_stream = client.images.generate(
@@ -98,7 +98,7 @@ def test_method_generate_with_all_params_overload_2(self, client: Gradient) -> N
)
image_stream.response.close()
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_generate_overload_2(self, client: Gradient) -> None:
response = client.images.with_raw_response.generate(
@@ -110,7 +110,7 @@ def test_raw_response_generate_overload_2(self, client: Gradient) -> None:
stream = response.parse()
stream.close()
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_generate_overload_2(self, client: Gradient) -> None:
with client.images.with_streaming_response.generate(
@@ -131,7 +131,7 @@ class TestAsyncImages:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_generate_overload_1(self, async_client: AsyncGradient) -> None:
image = await async_client.images.generate(
@@ -139,7 +139,7 @@ async def test_method_generate_overload_1(self, async_client: AsyncGradient) ->
)
assert_matches_type(ImageGenerateResponse, image, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_generate_with_all_params_overload_1(self, async_client: AsyncGradient) -> None:
image = await async_client.images.generate(
@@ -158,7 +158,7 @@ async def test_method_generate_with_all_params_overload_1(self, async_client: As
)
assert_matches_type(ImageGenerateResponse, image, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_generate_overload_1(self, async_client: AsyncGradient) -> None:
response = await async_client.images.with_raw_response.generate(
@@ -170,7 +170,7 @@ async def test_raw_response_generate_overload_1(self, async_client: AsyncGradien
image = await response.parse()
assert_matches_type(ImageGenerateResponse, image, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_generate_overload_1(self, async_client: AsyncGradient) -> None:
async with async_client.images.with_streaming_response.generate(
@@ -184,7 +184,7 @@ async def test_streaming_response_generate_overload_1(self, async_client: AsyncG
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_generate_overload_2(self, async_client: AsyncGradient) -> None:
image_stream = await async_client.images.generate(
@@ -193,7 +193,7 @@ async def test_method_generate_overload_2(self, async_client: AsyncGradient) ->
)
await image_stream.response.aclose()
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_generate_with_all_params_overload_2(self, async_client: AsyncGradient) -> None:
image_stream = await async_client.images.generate(
@@ -212,7 +212,7 @@ async def test_method_generate_with_all_params_overload_2(self, async_client: As
)
await image_stream.response.aclose()
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_generate_overload_2(self, async_client: AsyncGradient) -> None:
response = await async_client.images.with_raw_response.generate(
@@ -224,7 +224,7 @@ async def test_raw_response_generate_overload_2(self, async_client: AsyncGradien
stream = await response.parse()
await stream.close()
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_generate_overload_2(self, async_client: AsyncGradient) -> None:
async with async_client.images.with_streaming_response.generate(
diff --git a/tests/api_resources/test_knowledge_bases.py b/tests/api_resources/test_knowledge_bases.py
index 9ce9785d..a7c0d5e4 100644
--- a/tests/api_resources/test_knowledge_bases.py
+++ b/tests/api_resources/test_knowledge_bases.py
@@ -24,13 +24,13 @@
class TestKnowledgeBases:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create(self, client: Gradient) -> None:
knowledge_base = client.knowledge_bases.create()
assert_matches_type(KnowledgeBaseCreateResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params(self, client: Gradient) -> None:
knowledge_base = client.knowledge_bases.create(
@@ -89,7 +89,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(KnowledgeBaseCreateResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create(self, client: Gradient) -> None:
response = client.knowledge_bases.with_raw_response.create()
@@ -99,7 +99,7 @@ def test_raw_response_create(self, client: Gradient) -> None:
knowledge_base = response.parse()
assert_matches_type(KnowledgeBaseCreateResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create(self, client: Gradient) -> None:
with client.knowledge_bases.with_streaming_response.create() as response:
@@ -111,7 +111,7 @@ def test_streaming_response_create(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
knowledge_base = client.knowledge_bases.retrieve(
@@ -119,7 +119,7 @@ def test_method_retrieve(self, client: Gradient) -> None:
)
assert_matches_type(KnowledgeBaseRetrieveResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.knowledge_bases.with_raw_response.retrieve(
@@ -131,7 +131,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
knowledge_base = response.parse()
assert_matches_type(KnowledgeBaseRetrieveResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.knowledge_bases.with_streaming_response.retrieve(
@@ -145,7 +145,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_retrieve(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
@@ -153,7 +153,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update(self, client: Gradient) -> None:
knowledge_base = client.knowledge_bases.update(
@@ -161,7 +161,7 @@ def test_method_update(self, client: Gradient) -> None:
)
assert_matches_type(KnowledgeBaseUpdateResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_update_with_all_params(self, client: Gradient) -> None:
knowledge_base = client.knowledge_bases.update(
@@ -175,7 +175,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(KnowledgeBaseUpdateResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_update(self, client: Gradient) -> None:
response = client.knowledge_bases.with_raw_response.update(
@@ -187,7 +187,7 @@ def test_raw_response_update(self, client: Gradient) -> None:
knowledge_base = response.parse()
assert_matches_type(KnowledgeBaseUpdateResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_update(self, client: Gradient) -> None:
with client.knowledge_bases.with_streaming_response.update(
@@ -201,7 +201,7 @@ def test_streaming_response_update(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_update(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"):
@@ -209,13 +209,13 @@ def test_path_params_update(self, client: Gradient) -> None:
path_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
knowledge_base = client.knowledge_bases.list()
assert_matches_type(KnowledgeBaseListResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
knowledge_base = client.knowledge_bases.list(
@@ -224,7 +224,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(KnowledgeBaseListResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.knowledge_bases.with_raw_response.list()
@@ -234,7 +234,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
knowledge_base = response.parse()
assert_matches_type(KnowledgeBaseListResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.knowledge_bases.with_streaming_response.list() as response:
@@ -246,7 +246,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete(self, client: Gradient) -> None:
knowledge_base = client.knowledge_bases.delete(
@@ -254,7 +254,7 @@ def test_method_delete(self, client: Gradient) -> None:
)
assert_matches_type(KnowledgeBaseDeleteResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete(self, client: Gradient) -> None:
response = client.knowledge_bases.with_raw_response.delete(
@@ -266,7 +266,7 @@ def test_raw_response_delete(self, client: Gradient) -> None:
knowledge_base = response.parse()
assert_matches_type(KnowledgeBaseDeleteResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete(self, client: Gradient) -> None:
with client.knowledge_bases.with_streaming_response.delete(
@@ -280,7 +280,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_delete(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
@@ -384,7 +384,7 @@ def test_path_params_wait_for_database(self, client: Gradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_indexing_jobs(self, client: Gradient) -> None:
knowledge_base = client.knowledge_bases.list_indexing_jobs(
@@ -392,7 +392,7 @@ def test_method_list_indexing_jobs(self, client: Gradient) -> None:
)
assert_matches_type(KnowledgeBaseListIndexingJobsResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list_indexing_jobs(self, client: Gradient) -> None:
response = client.knowledge_bases.with_raw_response.list_indexing_jobs(
@@ -404,7 +404,7 @@ def test_raw_response_list_indexing_jobs(self, client: Gradient) -> None:
knowledge_base = response.parse()
assert_matches_type(KnowledgeBaseListIndexingJobsResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list_indexing_jobs(self, client: Gradient) -> None:
with client.knowledge_bases.with_streaming_response.list_indexing_jobs(
@@ -418,7 +418,7 @@ def test_streaming_response_list_indexing_jobs(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_list_indexing_jobs(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `knowledge_base_uuid` but received ''"):
@@ -432,13 +432,13 @@ class TestAsyncKnowledgeBases:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create(self, async_client: AsyncGradient) -> None:
knowledge_base = await async_client.knowledge_bases.create()
assert_matches_type(KnowledgeBaseCreateResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None:
knowledge_base = await async_client.knowledge_bases.create(
@@ -497,7 +497,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(KnowledgeBaseCreateResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
response = await async_client.knowledge_bases.with_raw_response.create()
@@ -507,7 +507,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
knowledge_base = await response.parse()
assert_matches_type(KnowledgeBaseCreateResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradient) -> None:
async with async_client.knowledge_bases.with_streaming_response.create() as response:
@@ -519,7 +519,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
knowledge_base = await async_client.knowledge_bases.retrieve(
@@ -527,7 +527,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(KnowledgeBaseRetrieveResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.knowledge_bases.with_raw_response.retrieve(
@@ -539,7 +539,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
knowledge_base = await response.parse()
assert_matches_type(KnowledgeBaseRetrieveResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.knowledge_bases.with_streaming_response.retrieve(
@@ -553,7 +553,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
@@ -561,7 +561,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update(self, async_client: AsyncGradient) -> None:
knowledge_base = await async_client.knowledge_bases.update(
@@ -569,7 +569,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(KnowledgeBaseUpdateResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None:
knowledge_base = await async_client.knowledge_bases.update(
@@ -583,7 +583,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient)
)
assert_matches_type(KnowledgeBaseUpdateResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
response = await async_client.knowledge_bases.with_raw_response.update(
@@ -595,7 +595,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
knowledge_base = await response.parse()
assert_matches_type(KnowledgeBaseUpdateResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_update(self, async_client: AsyncGradient) -> None:
async with async_client.knowledge_bases.with_streaming_response.update(
@@ -609,7 +609,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_update(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"):
@@ -617,13 +617,13 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None:
path_uuid="",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
knowledge_base = await async_client.knowledge_bases.list()
assert_matches_type(KnowledgeBaseListResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
knowledge_base = await async_client.knowledge_bases.list(
@@ -632,7 +632,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(KnowledgeBaseListResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.knowledge_bases.with_raw_response.list()
@@ -642,7 +642,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
knowledge_base = await response.parse()
assert_matches_type(KnowledgeBaseListResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.knowledge_bases.with_streaming_response.list() as response:
@@ -654,7 +654,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete(self, async_client: AsyncGradient) -> None:
knowledge_base = await async_client.knowledge_bases.delete(
@@ -662,7 +662,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(KnowledgeBaseDeleteResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
response = await async_client.knowledge_bases.with_raw_response.delete(
@@ -674,7 +674,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
knowledge_base = await response.parse()
assert_matches_type(KnowledgeBaseDeleteResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None:
async with async_client.knowledge_bases.with_streaming_response.delete(
@@ -688,7 +688,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
@@ -792,7 +792,7 @@ async def test_path_params_wait_for_database(self, async_client: AsyncGradient)
"",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_indexing_jobs(self, async_client: AsyncGradient) -> None:
knowledge_base = await async_client.knowledge_bases.list_indexing_jobs(
@@ -800,7 +800,7 @@ async def test_method_list_indexing_jobs(self, async_client: AsyncGradient) -> N
)
assert_matches_type(KnowledgeBaseListIndexingJobsResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list_indexing_jobs(self, async_client: AsyncGradient) -> None:
response = await async_client.knowledge_bases.with_raw_response.list_indexing_jobs(
@@ -812,7 +812,7 @@ async def test_raw_response_list_indexing_jobs(self, async_client: AsyncGradient
knowledge_base = await response.parse()
assert_matches_type(KnowledgeBaseListIndexingJobsResponse, knowledge_base, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list_indexing_jobs(self, async_client: AsyncGradient) -> None:
async with async_client.knowledge_bases.with_streaming_response.list_indexing_jobs(
@@ -826,7 +826,7 @@ async def test_streaming_response_list_indexing_jobs(self, async_client: AsyncGr
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_list_indexing_jobs(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `knowledge_base_uuid` but received ''"):
diff --git a/tests/api_resources/test_models.py b/tests/api_resources/test_models.py
index 8e6edaef..d2dc075a 100644
--- a/tests/api_resources/test_models.py
+++ b/tests/api_resources/test_models.py
@@ -17,13 +17,13 @@
class TestModels:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
model = client.models.list()
assert_matches_type(ModelListResponse, model, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
model = client.models.list(
@@ -34,7 +34,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(ModelListResponse, model, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.models.with_raw_response.list()
@@ -44,7 +44,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
model = response.parse()
assert_matches_type(ModelListResponse, model, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.models.with_streaming_response.list() as response:
@@ -62,13 +62,13 @@ class TestAsyncModels:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
model = await async_client.models.list()
assert_matches_type(ModelListResponse, model, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
model = await async_client.models.list(
@@ -79,7 +79,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(ModelListResponse, model, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.models.with_raw_response.list()
@@ -89,7 +89,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
model = await response.parse()
assert_matches_type(ModelListResponse, model, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.models.with_streaming_response.list() as response:
diff --git a/tests/api_resources/test_nfs.py b/tests/api_resources/test_nfs.py
index 6969ee96..e60033eb 100644
--- a/tests/api_resources/test_nfs.py
+++ b/tests/api_resources/test_nfs.py
@@ -22,7 +22,7 @@
class TestNfs:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create(self, client: Gradient) -> None:
nf = client.nfs.create(
@@ -33,7 +33,7 @@ def test_method_create(self, client: Gradient) -> None:
)
assert_matches_type(NfCreateResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create(self, client: Gradient) -> None:
response = client.nfs.with_raw_response.create(
@@ -48,7 +48,7 @@ def test_raw_response_create(self, client: Gradient) -> None:
nf = response.parse()
assert_matches_type(NfCreateResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create(self, client: Gradient) -> None:
with client.nfs.with_streaming_response.create(
@@ -65,7 +65,7 @@ def test_streaming_response_create(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
nf = client.nfs.retrieve(
@@ -74,7 +74,7 @@ def test_method_retrieve(self, client: Gradient) -> None:
)
assert_matches_type(NfRetrieveResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.nfs.with_raw_response.retrieve(
@@ -87,7 +87,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
nf = response.parse()
assert_matches_type(NfRetrieveResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.nfs.with_streaming_response.retrieve(
@@ -102,7 +102,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_retrieve(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_id` but received ''"):
@@ -111,7 +111,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None:
region="region",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
nf = client.nfs.list(
@@ -119,7 +119,7 @@ def test_method_list(self, client: Gradient) -> None:
)
assert_matches_type(NfListResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.nfs.with_raw_response.list(
@@ -131,7 +131,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
nf = response.parse()
assert_matches_type(NfListResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.nfs.with_streaming_response.list(
@@ -145,7 +145,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete(self, client: Gradient) -> None:
nf = client.nfs.delete(
@@ -154,7 +154,7 @@ def test_method_delete(self, client: Gradient) -> None:
)
assert nf is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_delete(self, client: Gradient) -> None:
response = client.nfs.with_raw_response.delete(
@@ -167,7 +167,7 @@ def test_raw_response_delete(self, client: Gradient) -> None:
nf = response.parse()
assert nf is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_delete(self, client: Gradient) -> None:
with client.nfs.with_streaming_response.delete(
@@ -182,7 +182,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_delete(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_id` but received ''"):
@@ -191,7 +191,7 @@ def test_path_params_delete(self, client: Gradient) -> None:
region="region",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_action_overload_1(self, client: Gradient) -> None:
nf = client.nfs.initiate_action(
@@ -201,7 +201,7 @@ def test_method_initiate_action_overload_1(self, client: Gradient) -> None:
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_action_with_all_params_overload_1(self, client: Gradient) -> None:
nf = client.nfs.initiate_action(
@@ -212,7 +212,7 @@ def test_method_initiate_action_with_all_params_overload_1(self, client: Gradien
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_initiate_action_overload_1(self, client: Gradient) -> None:
response = client.nfs.with_raw_response.initiate_action(
@@ -226,7 +226,7 @@ def test_raw_response_initiate_action_overload_1(self, client: Gradient) -> None
nf = response.parse()
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_initiate_action_overload_1(self, client: Gradient) -> None:
with client.nfs.with_streaming_response.initiate_action(
@@ -242,7 +242,7 @@ def test_streaming_response_initiate_action_overload_1(self, client: Gradient) -
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_initiate_action_overload_1(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_id` but received ''"):
@@ -252,7 +252,7 @@ def test_path_params_initiate_action_overload_1(self, client: Gradient) -> None:
type="resize",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_action_overload_2(self, client: Gradient) -> None:
nf = client.nfs.initiate_action(
@@ -262,7 +262,7 @@ def test_method_initiate_action_overload_2(self, client: Gradient) -> None:
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_action_with_all_params_overload_2(self, client: Gradient) -> None:
nf = client.nfs.initiate_action(
@@ -273,7 +273,7 @@ def test_method_initiate_action_with_all_params_overload_2(self, client: Gradien
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_initiate_action_overload_2(self, client: Gradient) -> None:
response = client.nfs.with_raw_response.initiate_action(
@@ -287,7 +287,7 @@ def test_raw_response_initiate_action_overload_2(self, client: Gradient) -> None
nf = response.parse()
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_initiate_action_overload_2(self, client: Gradient) -> None:
with client.nfs.with_streaming_response.initiate_action(
@@ -303,7 +303,7 @@ def test_streaming_response_initiate_action_overload_2(self, client: Gradient) -
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_initiate_action_overload_2(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_id` but received ''"):
@@ -313,7 +313,7 @@ def test_path_params_initiate_action_overload_2(self, client: Gradient) -> None:
type="resize",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_action_overload_3(self, client: Gradient) -> None:
nf = client.nfs.initiate_action(
@@ -323,7 +323,7 @@ def test_method_initiate_action_overload_3(self, client: Gradient) -> None:
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_action_with_all_params_overload_3(self, client: Gradient) -> None:
nf = client.nfs.initiate_action(
@@ -334,7 +334,7 @@ def test_method_initiate_action_with_all_params_overload_3(self, client: Gradien
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_initiate_action_overload_3(self, client: Gradient) -> None:
response = client.nfs.with_raw_response.initiate_action(
@@ -348,7 +348,7 @@ def test_raw_response_initiate_action_overload_3(self, client: Gradient) -> None
nf = response.parse()
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_initiate_action_overload_3(self, client: Gradient) -> None:
with client.nfs.with_streaming_response.initiate_action(
@@ -364,7 +364,7 @@ def test_streaming_response_initiate_action_overload_3(self, client: Gradient) -
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_initiate_action_overload_3(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_id` but received ''"):
@@ -374,7 +374,7 @@ def test_path_params_initiate_action_overload_3(self, client: Gradient) -> None:
type="resize",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_action_overload_4(self, client: Gradient) -> None:
nf = client.nfs.initiate_action(
@@ -384,7 +384,7 @@ def test_method_initiate_action_overload_4(self, client: Gradient) -> None:
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_initiate_action_with_all_params_overload_4(self, client: Gradient) -> None:
nf = client.nfs.initiate_action(
@@ -395,7 +395,7 @@ def test_method_initiate_action_with_all_params_overload_4(self, client: Gradien
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_initiate_action_overload_4(self, client: Gradient) -> None:
response = client.nfs.with_raw_response.initiate_action(
@@ -409,7 +409,7 @@ def test_raw_response_initiate_action_overload_4(self, client: Gradient) -> None
nf = response.parse()
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_initiate_action_overload_4(self, client: Gradient) -> None:
with client.nfs.with_streaming_response.initiate_action(
@@ -425,7 +425,7 @@ def test_streaming_response_initiate_action_overload_4(self, client: Gradient) -
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_initiate_action_overload_4(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_id` but received ''"):
@@ -441,7 +441,7 @@ class TestAsyncNfs:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create(self, async_client: AsyncGradient) -> None:
nf = await async_client.nfs.create(
@@ -452,7 +452,7 @@ async def test_method_create(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(NfCreateResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
response = await async_client.nfs.with_raw_response.create(
@@ -467,7 +467,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
nf = await response.parse()
assert_matches_type(NfCreateResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradient) -> None:
async with async_client.nfs.with_streaming_response.create(
@@ -484,7 +484,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
nf = await async_client.nfs.retrieve(
@@ -493,7 +493,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(NfRetrieveResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.nfs.with_raw_response.retrieve(
@@ -506,7 +506,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
nf = await response.parse()
assert_matches_type(NfRetrieveResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.nfs.with_streaming_response.retrieve(
@@ -521,7 +521,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_id` but received ''"):
@@ -530,7 +530,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
region="region",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
nf = await async_client.nfs.list(
@@ -538,7 +538,7 @@ async def test_method_list(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(NfListResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.nfs.with_raw_response.list(
@@ -550,7 +550,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
nf = await response.parse()
assert_matches_type(NfListResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.nfs.with_streaming_response.list(
@@ -564,7 +564,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete(self, async_client: AsyncGradient) -> None:
nf = await async_client.nfs.delete(
@@ -573,7 +573,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None:
)
assert nf is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
response = await async_client.nfs.with_raw_response.delete(
@@ -586,7 +586,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
nf = await response.parse()
assert nf is None
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None:
async with async_client.nfs.with_streaming_response.delete(
@@ -601,7 +601,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_id` but received ''"):
@@ -610,7 +610,7 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
region="region",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_action_overload_1(self, async_client: AsyncGradient) -> None:
nf = await async_client.nfs.initiate_action(
@@ -620,7 +620,7 @@ async def test_method_initiate_action_overload_1(self, async_client: AsyncGradie
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_action_with_all_params_overload_1(self, async_client: AsyncGradient) -> None:
nf = await async_client.nfs.initiate_action(
@@ -631,7 +631,7 @@ async def test_method_initiate_action_with_all_params_overload_1(self, async_cli
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_initiate_action_overload_1(self, async_client: AsyncGradient) -> None:
response = await async_client.nfs.with_raw_response.initiate_action(
@@ -645,7 +645,7 @@ async def test_raw_response_initiate_action_overload_1(self, async_client: Async
nf = await response.parse()
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_initiate_action_overload_1(self, async_client: AsyncGradient) -> None:
async with async_client.nfs.with_streaming_response.initiate_action(
@@ -661,7 +661,7 @@ async def test_streaming_response_initiate_action_overload_1(self, async_client:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_initiate_action_overload_1(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_id` but received ''"):
@@ -671,7 +671,7 @@ async def test_path_params_initiate_action_overload_1(self, async_client: AsyncG
type="resize",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_action_overload_2(self, async_client: AsyncGradient) -> None:
nf = await async_client.nfs.initiate_action(
@@ -681,7 +681,7 @@ async def test_method_initiate_action_overload_2(self, async_client: AsyncGradie
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_action_with_all_params_overload_2(self, async_client: AsyncGradient) -> None:
nf = await async_client.nfs.initiate_action(
@@ -692,7 +692,7 @@ async def test_method_initiate_action_with_all_params_overload_2(self, async_cli
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_initiate_action_overload_2(self, async_client: AsyncGradient) -> None:
response = await async_client.nfs.with_raw_response.initiate_action(
@@ -706,7 +706,7 @@ async def test_raw_response_initiate_action_overload_2(self, async_client: Async
nf = await response.parse()
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_initiate_action_overload_2(self, async_client: AsyncGradient) -> None:
async with async_client.nfs.with_streaming_response.initiate_action(
@@ -722,7 +722,7 @@ async def test_streaming_response_initiate_action_overload_2(self, async_client:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_initiate_action_overload_2(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_id` but received ''"):
@@ -732,7 +732,7 @@ async def test_path_params_initiate_action_overload_2(self, async_client: AsyncG
type="resize",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_action_overload_3(self, async_client: AsyncGradient) -> None:
nf = await async_client.nfs.initiate_action(
@@ -742,7 +742,7 @@ async def test_method_initiate_action_overload_3(self, async_client: AsyncGradie
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_action_with_all_params_overload_3(self, async_client: AsyncGradient) -> None:
nf = await async_client.nfs.initiate_action(
@@ -753,7 +753,7 @@ async def test_method_initiate_action_with_all_params_overload_3(self, async_cli
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_initiate_action_overload_3(self, async_client: AsyncGradient) -> None:
response = await async_client.nfs.with_raw_response.initiate_action(
@@ -767,7 +767,7 @@ async def test_raw_response_initiate_action_overload_3(self, async_client: Async
nf = await response.parse()
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_initiate_action_overload_3(self, async_client: AsyncGradient) -> None:
async with async_client.nfs.with_streaming_response.initiate_action(
@@ -783,7 +783,7 @@ async def test_streaming_response_initiate_action_overload_3(self, async_client:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_initiate_action_overload_3(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_id` but received ''"):
@@ -793,7 +793,7 @@ async def test_path_params_initiate_action_overload_3(self, async_client: AsyncG
type="resize",
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_action_overload_4(self, async_client: AsyncGradient) -> None:
nf = await async_client.nfs.initiate_action(
@@ -803,7 +803,7 @@ async def test_method_initiate_action_overload_4(self, async_client: AsyncGradie
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_initiate_action_with_all_params_overload_4(self, async_client: AsyncGradient) -> None:
nf = await async_client.nfs.initiate_action(
@@ -814,7 +814,7 @@ async def test_method_initiate_action_with_all_params_overload_4(self, async_cli
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_initiate_action_overload_4(self, async_client: AsyncGradient) -> None:
response = await async_client.nfs.with_raw_response.initiate_action(
@@ -828,7 +828,7 @@ async def test_raw_response_initiate_action_overload_4(self, async_client: Async
nf = await response.parse()
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_initiate_action_overload_4(self, async_client: AsyncGradient) -> None:
async with async_client.nfs.with_streaming_response.initiate_action(
@@ -844,7 +844,7 @@ async def test_streaming_response_initiate_action_overload_4(self, async_client:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_initiate_action_overload_4(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_id` but received ''"):
diff --git a/tests/api_resources/test_regions.py b/tests/api_resources/test_regions.py
index 8cbf6afb..1ba008bb 100644
--- a/tests/api_resources/test_regions.py
+++ b/tests/api_resources/test_regions.py
@@ -17,13 +17,13 @@
class TestRegions:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
region = client.regions.list()
assert_matches_type(RegionListResponse, region, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list_with_all_params(self, client: Gradient) -> None:
region = client.regions.list(
@@ -32,7 +32,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(RegionListResponse, region, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
response = client.regions.with_raw_response.list()
@@ -42,7 +42,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
region = response.parse()
assert_matches_type(RegionListResponse, region, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
with client.regions.with_streaming_response.list() as response:
@@ -60,13 +60,13 @@ class TestAsyncRegions:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
region = await async_client.regions.list()
assert_matches_type(RegionListResponse, region, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
region = await async_client.regions.list(
@@ -75,7 +75,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
)
assert_matches_type(RegionListResponse, region, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
response = await async_client.regions.with_raw_response.list()
@@ -85,7 +85,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
region = await response.parse()
assert_matches_type(RegionListResponse, region, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
async with async_client.regions.with_streaming_response.list() as response:
diff --git a/tests/api_resources/test_responses.py b/tests/api_resources/test_responses.py
index 0d8d7acf..fc6cd4fe 100644
--- a/tests/api_resources/test_responses.py
+++ b/tests/api_resources/test_responses.py
@@ -17,7 +17,7 @@
class TestResponses:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_overload_1(self, client: Gradient) -> None:
response = client.responses.create(
@@ -26,7 +26,7 @@ def test_method_create_overload_1(self, client: Gradient) -> None:
)
assert_matches_type(CreateResponseResponse, response, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params_overload_1(self, client: Gradient) -> None:
response = client.responses.create(
@@ -56,7 +56,7 @@ def test_method_create_with_all_params_overload_1(self, client: Gradient) -> Non
)
assert_matches_type(CreateResponseResponse, response, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create_overload_1(self, client: Gradient) -> None:
http_response = client.responses.with_raw_response.create(
@@ -69,7 +69,7 @@ def test_raw_response_create_overload_1(self, client: Gradient) -> None:
response = http_response.parse()
assert_matches_type(CreateResponseResponse, response, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create_overload_1(self, client: Gradient) -> None:
with client.responses.with_streaming_response.create(
@@ -84,7 +84,7 @@ def test_streaming_response_create_overload_1(self, client: Gradient) -> None:
assert cast(Any, http_response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_overload_2(self, client: Gradient) -> None:
response_stream = client.responses.create(
@@ -94,7 +94,7 @@ def test_method_create_overload_2(self, client: Gradient) -> None:
)
response_stream.response.close()
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_create_with_all_params_overload_2(self, client: Gradient) -> None:
response_stream = client.responses.create(
@@ -124,7 +124,7 @@ def test_method_create_with_all_params_overload_2(self, client: Gradient) -> Non
)
response_stream.response.close()
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create_overload_2(self, client: Gradient) -> None:
response = client.responses.with_raw_response.create(
@@ -137,7 +137,7 @@ def test_raw_response_create_overload_2(self, client: Gradient) -> None:
stream = response.parse()
stream.close()
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_create_overload_2(self, client: Gradient) -> None:
with client.responses.with_streaming_response.create(
@@ -159,7 +159,7 @@ class TestAsyncResponses:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_overload_1(self, async_client: AsyncGradient) -> None:
response = await async_client.responses.create(
@@ -168,7 +168,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradient) -> No
)
assert_matches_type(CreateResponseResponse, response, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params_overload_1(self, async_client: AsyncGradient) -> None:
response = await async_client.responses.create(
@@ -198,7 +198,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn
)
assert_matches_type(CreateResponseResponse, response, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) -> None:
http_response = await async_client.responses.with_raw_response.create(
@@ -211,7 +211,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradient)
response = await http_response.parse()
assert_matches_type(CreateResponseResponse, response, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create_overload_1(self, async_client: AsyncGradient) -> None:
async with async_client.responses.with_streaming_response.create(
@@ -226,7 +226,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra
assert cast(Any, http_response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_overload_2(self, async_client: AsyncGradient) -> None:
response_stream = await async_client.responses.create(
@@ -236,7 +236,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradient) -> No
)
await response_stream.response.aclose()
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradient) -> None:
response_stream = await async_client.responses.create(
@@ -266,7 +266,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
)
await response_stream.response.aclose()
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) -> None:
response = await async_client.responses.with_raw_response.create(
@@ -279,7 +279,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradient)
stream = await response.parse()
await stream.close()
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_create_overload_2(self, async_client: AsyncGradient) -> None:
async with async_client.responses.with_streaming_response.create(
diff --git a/tests/api_resources/test_retrieve.py b/tests/api_resources/test_retrieve.py
index 167d2a96..8b5ed252 100644
--- a/tests/api_resources/test_retrieve.py
+++ b/tests/api_resources/test_retrieve.py
@@ -17,7 +17,7 @@
class TestRetrieve:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_documents(self, client: Gradient) -> None:
retrieve = client.retrieve.documents(
@@ -27,7 +27,7 @@ def test_method_documents(self, client: Gradient) -> None:
)
assert_matches_type(RetrieveDocumentsResponse, retrieve, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_documents_with_all_params(self, client: Gradient) -> None:
retrieve = client.retrieve.documents(
@@ -61,7 +61,7 @@ def test_method_documents_with_all_params(self, client: Gradient) -> None:
)
assert_matches_type(RetrieveDocumentsResponse, retrieve, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_documents(self, client: Gradient) -> None:
response = client.retrieve.with_raw_response.documents(
@@ -75,7 +75,7 @@ def test_raw_response_documents(self, client: Gradient) -> None:
retrieve = response.parse()
assert_matches_type(RetrieveDocumentsResponse, retrieve, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_documents(self, client: Gradient) -> None:
with client.retrieve.with_streaming_response.documents(
@@ -91,7 +91,7 @@ def test_streaming_response_documents(self, client: Gradient) -> None:
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_path_params_documents(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `knowledge_base_id` but received ''"):
@@ -107,7 +107,7 @@ class TestAsyncRetrieve:
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_documents(self, async_client: AsyncGradient) -> None:
retrieve = await async_client.retrieve.documents(
@@ -117,7 +117,7 @@ async def test_method_documents(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(RetrieveDocumentsResponse, retrieve, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_documents_with_all_params(self, async_client: AsyncGradient) -> None:
retrieve = await async_client.retrieve.documents(
@@ -151,7 +151,7 @@ async def test_method_documents_with_all_params(self, async_client: AsyncGradien
)
assert_matches_type(RetrieveDocumentsResponse, retrieve, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_documents(self, async_client: AsyncGradient) -> None:
response = await async_client.retrieve.with_raw_response.documents(
@@ -165,7 +165,7 @@ async def test_raw_response_documents(self, async_client: AsyncGradient) -> None
retrieve = await response.parse()
assert_matches_type(RetrieveDocumentsResponse, retrieve, path=["response"])
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_documents(self, async_client: AsyncGradient) -> None:
async with async_client.retrieve.with_streaming_response.documents(
@@ -181,7 +181,7 @@ async def test_streaming_response_documents(self, async_client: AsyncGradient) -
assert cast(Any, response.is_closed) is True
- @pytest.mark.skip(reason="Prism tests are disabled")
+ @pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_path_params_documents(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `knowledge_base_id` but received ''"):
From 17b88d90dafe6a1d9db3f4ea15f17c7cdf838f81 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
<142633134+stainless-app[bot]@users.noreply.github.com>
Date: Mon, 23 Feb 2026 13:28:21 +0000
Subject: [PATCH 08/17] chore(internal): add request options to SSE classes
---
src/gradient/_response.py | 3 +++
src/gradient/_streaming.py | 11 ++++++++---
2 files changed, 11 insertions(+), 3 deletions(-)
diff --git a/src/gradient/_response.py b/src/gradient/_response.py
index 4702edaf..0e9dc172 100644
--- a/src/gradient/_response.py
+++ b/src/gradient/_response.py
@@ -152,6 +152,7 @@ def _parse(self, *, to: type[_T] | None = None) -> R | _T:
),
response=self.http_response,
client=cast(Any, self._client),
+ options=self._options,
),
)
@@ -162,6 +163,7 @@ def _parse(self, *, to: type[_T] | None = None) -> R | _T:
cast_to=extract_stream_chunk_type(self._stream_cls),
response=self.http_response,
client=cast(Any, self._client),
+ options=self._options,
),
)
@@ -175,6 +177,7 @@ def _parse(self, *, to: type[_T] | None = None) -> R | _T:
cast_to=cast_to,
response=self.http_response,
client=cast(Any, self._client),
+ options=self._options,
),
)
diff --git a/src/gradient/_streaming.py b/src/gradient/_streaming.py
index f0516264..92ce2af2 100644
--- a/src/gradient/_streaming.py
+++ b/src/gradient/_streaming.py
@@ -4,7 +4,7 @@
import json
import inspect
from types import TracebackType
-from typing import TYPE_CHECKING, Any, Generic, TypeVar, Iterator, AsyncIterator, cast
+from typing import TYPE_CHECKING, Any, Generic, TypeVar, Iterator, Optional, AsyncIterator, cast
from typing_extensions import Self, Protocol, TypeGuard, override, get_origin, runtime_checkable
import httpx
@@ -14,6 +14,7 @@
if TYPE_CHECKING:
from ._client import Gradient, AsyncGradient
+ from ._models import FinalRequestOptions
_T = TypeVar("_T")
@@ -23,7 +24,7 @@ class Stream(Generic[_T]):
"""Provides the core interface to iterate over a synchronous stream response."""
response: httpx.Response
-
+ _options: Optional[FinalRequestOptions] = None
_decoder: SSEBytesDecoder
def __init__(
@@ -32,10 +33,12 @@ def __init__(
cast_to: type[_T],
response: httpx.Response,
client: Gradient,
+ options: Optional[FinalRequestOptions] = None,
) -> None:
self.response = response
self._cast_to = cast_to
self._client = client
+ self._options = options
self._decoder = client._make_sse_decoder()
self._iterator = self.__stream__()
@@ -104,7 +107,7 @@ class AsyncStream(Generic[_T]):
"""Provides the core interface to iterate over an asynchronous stream response."""
response: httpx.Response
-
+ _options: Optional[FinalRequestOptions] = None
_decoder: SSEDecoder | SSEBytesDecoder
def __init__(
@@ -113,10 +116,12 @@ def __init__(
cast_to: type[_T],
response: httpx.Response,
client: AsyncGradient,
+ options: Optional[FinalRequestOptions] = None,
) -> None:
self.response = response
self._cast_to = cast_to
self._client = client
+ self._options = options
self._decoder = client._make_sse_decoder()
self._iterator = self.__stream__()
From 09cfd991d0442077c4f66b47f4d9da34f608692d Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
<142633134+stainless-app[bot]@users.noreply.github.com>
Date: Mon, 23 Feb 2026 20:26:12 +0000
Subject: [PATCH 09/17] chore(internal): make
`test_proxy_environment_variables` more resilient
---
tests/test_client.py | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/tests/test_client.py b/tests/test_client.py
index d5f1bbe6..a0d6e3d8 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -1171,6 +1171,8 @@ def retry_handler(_request: httpx.Request) -> httpx.Response:
def test_proxy_environment_variables(self, monkeypatch: pytest.MonkeyPatch) -> None:
# Test that the proxy environment variables are set correctly
monkeypatch.setenv("HTTPS_PROXY", "https://example.org")
+ # Delete in case our environment has this set
+ monkeypatch.delenv("HTTP_PROXY", raising=False)
client = DefaultHttpxClient()
@@ -2302,6 +2304,8 @@ async def test_get_platform(self) -> None:
async def test_proxy_environment_variables(self, monkeypatch: pytest.MonkeyPatch) -> None:
# Test that the proxy environment variables are set correctly
monkeypatch.setenv("HTTPS_PROXY", "https://example.org")
+ # Delete in case our environment has this set
+ monkeypatch.delenv("HTTP_PROXY", raising=False)
client = DefaultAsyncHttpxClient()
From f3aa13b09ca2c1f565cc3277530bfa1775d02dd0 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
<142633134+stainless-app[bot]@users.noreply.github.com>
Date: Tue, 24 Feb 2026 17:42:33 +0000
Subject: [PATCH 10/17] chore(internal): make
`test_proxy_environment_variables` more resilient to env
---
tests/test_client.py | 16 ++++++++++++++--
1 file changed, 14 insertions(+), 2 deletions(-)
diff --git a/tests/test_client.py b/tests/test_client.py
index a0d6e3d8..f0605826 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -1171,8 +1171,14 @@ def retry_handler(_request: httpx.Request) -> httpx.Response:
def test_proxy_environment_variables(self, monkeypatch: pytest.MonkeyPatch) -> None:
# Test that the proxy environment variables are set correctly
monkeypatch.setenv("HTTPS_PROXY", "https://example.org")
- # Delete in case our environment has this set
+ # Delete in case our environment has any proxy env vars set
monkeypatch.delenv("HTTP_PROXY", raising=False)
+ monkeypatch.delenv("ALL_PROXY", raising=False)
+ monkeypatch.delenv("NO_PROXY", raising=False)
+ monkeypatch.delenv("http_proxy", raising=False)
+ monkeypatch.delenv("https_proxy", raising=False)
+ monkeypatch.delenv("all_proxy", raising=False)
+ monkeypatch.delenv("no_proxy", raising=False)
client = DefaultHttpxClient()
@@ -2304,8 +2310,14 @@ async def test_get_platform(self) -> None:
async def test_proxy_environment_variables(self, monkeypatch: pytest.MonkeyPatch) -> None:
# Test that the proxy environment variables are set correctly
monkeypatch.setenv("HTTPS_PROXY", "https://example.org")
- # Delete in case our environment has this set
+ # Delete in case our environment has any proxy env vars set
monkeypatch.delenv("HTTP_PROXY", raising=False)
+ monkeypatch.delenv("ALL_PROXY", raising=False)
+ monkeypatch.delenv("NO_PROXY", raising=False)
+ monkeypatch.delenv("http_proxy", raising=False)
+ monkeypatch.delenv("https_proxy", raising=False)
+ monkeypatch.delenv("all_proxy", raising=False)
+ monkeypatch.delenv("no_proxy", raising=False)
client = DefaultAsyncHttpxClient()
From 865fb4cadfe0ce791ecc4ad38f50ce25b00477ee Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
<142633134+stainless-app[bot]@users.noreply.github.com>
Date: Tue, 3 Mar 2026 01:25:54 +0000
Subject: [PATCH 11/17] chore(internal): codegen related update
---
src/gradient/_client.py | 300 +++++++
src/gradient/resources/agents/agents.py | 170 ++++
src/gradient/resources/agents/api_keys.py | 8 +
src/gradient/resources/agents/chat/chat.py | 18 +
.../resources/agents/chat/completions.py | 8 +
.../resources/agents/evaluation_datasets.py | 8 +
.../evaluation_metrics/anthropic/anthropic.py | 18 +
.../evaluation_metrics/anthropic/keys.py | 8 +
.../evaluation_metrics/evaluation_metrics.py | 62 ++
.../evaluation_metrics/oauth2/dropbox.py | 8 +
.../evaluation_metrics/oauth2/oauth2.py | 26 +
.../agents/evaluation_metrics/openai/keys.py | 8 +
.../evaluation_metrics/openai/openai.py | 18 +
.../evaluation_metrics/scheduled_indexing.py | 8 +
.../evaluation_metrics/workspaces/agents.py | 8 +
.../workspaces/workspaces.py | 26 +
.../resources/agents/evaluation_runs.py | 8 +
.../resources/agents/evaluation_test_cases.py | 8 +
src/gradient/resources/agents/functions.py | 8 +
.../resources/agents/knowledge_bases.py | 8 +
src/gradient/resources/agents/routes.py | 8 +
src/gradient/resources/agents/versions.py | 8 +
src/gradient/resources/apps/apps.py | 72 ++
.../resources/apps/job_invocations.py | 26 +
src/gradient/resources/billing.py | 58 ++
src/gradient/resources/chat/chat.py | 18 +
src/gradient/resources/chat/completions.py | 8 +
.../databases/schema_registry/config.py | 52 ++
.../schema_registry/schema_registry.py | 150 ++++
.../resources/gpu_droplets/account/account.py | 6 +
.../resources/gpu_droplets/account/keys.py | 4 +
.../resources/gpu_droplets/actions.py | 42 +
.../resources/gpu_droplets/autoscale.py | 8 +
.../resources/gpu_droplets/backups.py | 22 +
.../destroy_with_associated_resources.py | 22 +
.../gpu_droplets/firewalls/droplets.py | 16 +
.../gpu_droplets/firewalls/firewalls.py | 142 +++
.../resources/gpu_droplets/firewalls/rules.py | 16 +
.../resources/gpu_droplets/firewalls/tags.py | 16 +
.../gpu_droplets/floating_ips/actions.py | 44 +
.../gpu_droplets/floating_ips/floating_ips.py | 170 ++++
.../resources/gpu_droplets/gpu_droplets.py | 850 ++++++++++++++++++
.../resources/gpu_droplets/images/actions.py | 20 +
.../resources/gpu_droplets/images/images.py | 108 +++
.../gpu_droplets/load_balancers/droplets.py | 16 +
.../load_balancers/forwarding_rules.py | 16 +
.../load_balancers/load_balancers.py | 100 +++
src/gradient/resources/gpu_droplets/sizes.py | 22 +
.../resources/gpu_droplets/snapshots.py | 24 +
.../resources/gpu_droplets/volumes/actions.py | 20 +
.../gpu_droplets/volumes/snapshots.py | 28 +
.../resources/gpu_droplets/volumes/volumes.py | 160 ++++
src/gradient/resources/images.py | 4 +
src/gradient/resources/inference/api_keys.py | 8 +
src/gradient/resources/inference/inference.py | 18 +
.../resources/knowledge_bases/data_sources.py | 8 +
.../knowledge_bases/indexing_jobs.py | 8 +
.../knowledge_bases/knowledge_bases.py | 44 +
src/gradient/resources/models/models.py | 8 +
.../resources/models/providers/anthropic.py | 8 +
.../resources/models/providers/openai.py | 8 +
.../resources/models/providers/providers.py | 36 +
src/gradient/resources/nfs/nfs.py | 24 +
src/gradient/resources/nfs/snapshots.py | 10 +
src/gradient/resources/regions.py | 4 +
src/gradient/resources/responses.py | 4 +
66 files changed, 3198 insertions(+)
diff --git a/src/gradient/_client.py b/src/gradient/_client.py
index 42867c7d..30e58802 100644
--- a/src/gradient/_client.py
+++ b/src/gradient/_client.py
@@ -188,6 +188,9 @@ def agent_endpoint(self) -> str:
@cached_property
def agents(self) -> AgentsResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
from .resources.agents import AgentsResource
return AgentsResource(self)
@@ -200,18 +203,30 @@ def chat(self) -> ChatResource:
@cached_property
def images(self) -> ImagesResource:
+ """Generate images from text prompts using various AI models."""
from .resources.images import ImagesResource
return ImagesResource(self)
@cached_property
def responses(self) -> ResponsesResource:
+ """Generate text-to-text responses from text prompts."""
from .resources.responses import ResponsesResource
return ResponsesResource(self)
@cached_property
def gpu_droplets(self) -> GPUDropletsResource:
+ """
+ A [Droplet](https://docs.digitalocean.com/products/droplets/) is a DigitalOcean
+ virtual machine. By sending requests to the Droplet endpoint, you can
+ list, create, or delete Droplets.
+
+ Some of the attributes will have an object value. The `region` and `image`
+ objects will all contain the standard attributes of their associated
+ types. Find more information about each of these objects in their
+ respective sections.
+ """
from .resources.gpu_droplets import GPUDropletsResource
return GPUDropletsResource(self)
@@ -224,18 +239,25 @@ def inference(self) -> InferenceResource:
@cached_property
def knowledge_bases(self) -> KnowledgeBasesResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
from .resources.knowledge_bases import KnowledgeBasesResource
return KnowledgeBasesResource(self)
@cached_property
def models(self) -> ModelsResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
from .resources.models import ModelsResource
return ModelsResource(self)
@cached_property
def regions(self) -> RegionsResource:
+ """Provides information about DigitalOcean data center regions."""
from .resources.regions import RegionsResource
return RegionsResource(self)
@@ -266,6 +288,34 @@ def apps(self) -> AppsResource:
@cached_property
def billing(self) -> BillingResource:
+ """
+ The billing endpoints allow you to retrieve your account balance, invoices,
+ billing history, and insights.
+
+ **Balance:** By sending requests to the `/v2/customers/my/balance` endpoint, you can
+ retrieve the balance information for the requested customer account.
+
+ **Invoices:** [Invoices](https://docs.digitalocean.com/platform/billing/invoices/)
+ are generated on the first of each month for every DigitalOcean
+ customer. An invoice preview is generated daily, which can be accessed
+ with the `preview` keyword in place of `$INVOICE_UUID`. To interact with
+ invoices, you will generally send requests to the invoices endpoint at
+ `/v2/customers/my/invoices`.
+
+ **Billing History:** Billing history is a record of billing events for your account.
+ For example, entries may include events like payments made, invoices
+ issued, or credits granted. To interact with invoices, you
+ will generally send requests to the invoices endpoint at
+ `/v2/customers/my/billing_history`.
+
+ **Billing Insights:** Day-over-day changes in billing resource usage based on nightly invoice items,
+ including total amount, region, SKU, and description for a specified date range.
+ It is important to note that the daily resource usage may not reflect month-end billing totals when totaled for
+ a given month as nightly invoice items do not necessarily encompass all invoicing factors for the entire month.
+ `v2/billing/{account_urn}/insights/{start_date}/{end_date}` where account_urn is the URN of the customer
+ account, can be a team (do:team:uuid) or an organization (do:teamgroup:uuid). The date range specified by
+ start_date and end_date must be in YYYY-MM-DD format.
+ """
from .resources.billing import BillingResource
return BillingResource(self)
@@ -560,6 +610,9 @@ def agent_endpoint(self) -> str:
@cached_property
def agents(self) -> AsyncAgentsResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
from .resources.agents import AsyncAgentsResource
return AsyncAgentsResource(self)
@@ -572,18 +625,30 @@ def chat(self) -> AsyncChatResource:
@cached_property
def images(self) -> AsyncImagesResource:
+ """Generate images from text prompts using various AI models."""
from .resources.images import AsyncImagesResource
return AsyncImagesResource(self)
@cached_property
def responses(self) -> AsyncResponsesResource:
+ """Generate text-to-text responses from text prompts."""
from .resources.responses import AsyncResponsesResource
return AsyncResponsesResource(self)
@cached_property
def gpu_droplets(self) -> AsyncGPUDropletsResource:
+ """
+ A [Droplet](https://docs.digitalocean.com/products/droplets/) is a DigitalOcean
+ virtual machine. By sending requests to the Droplet endpoint, you can
+ list, create, or delete Droplets.
+
+ Some of the attributes will have an object value. The `region` and `image`
+ objects will all contain the standard attributes of their associated
+ types. Find more information about each of these objects in their
+ respective sections.
+ """
from .resources.gpu_droplets import AsyncGPUDropletsResource
return AsyncGPUDropletsResource(self)
@@ -596,18 +661,25 @@ def inference(self) -> AsyncInferenceResource:
@cached_property
def knowledge_bases(self) -> AsyncKnowledgeBasesResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
from .resources.knowledge_bases import AsyncKnowledgeBasesResource
return AsyncKnowledgeBasesResource(self)
@cached_property
def models(self) -> AsyncModelsResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
from .resources.models import AsyncModelsResource
return AsyncModelsResource(self)
@cached_property
def regions(self) -> AsyncRegionsResource:
+ """Provides information about DigitalOcean data center regions."""
from .resources.regions import AsyncRegionsResource
return AsyncRegionsResource(self)
@@ -638,6 +710,34 @@ def apps(self) -> AsyncAppsResource:
@cached_property
def billing(self) -> AsyncBillingResource:
+ """
+ The billing endpoints allow you to retrieve your account balance, invoices,
+ billing history, and insights.
+
+ **Balance:** By sending requests to the `/v2/customers/my/balance` endpoint, you can
+ retrieve the balance information for the requested customer account.
+
+ **Invoices:** [Invoices](https://docs.digitalocean.com/platform/billing/invoices/)
+ are generated on the first of each month for every DigitalOcean
+ customer. An invoice preview is generated daily, which can be accessed
+ with the `preview` keyword in place of `$INVOICE_UUID`. To interact with
+ invoices, you will generally send requests to the invoices endpoint at
+ `/v2/customers/my/invoices`.
+
+ **Billing History:** Billing history is a record of billing events for your account.
+ For example, entries may include events like payments made, invoices
+ issued, or credits granted. To interact with invoices, you
+ will generally send requests to the invoices endpoint at
+ `/v2/customers/my/billing_history`.
+
+ **Billing Insights:** Day-over-day changes in billing resource usage based on nightly invoice items,
+ including total amount, region, SKU, and description for a specified date range.
+ It is important to note that the daily resource usage may not reflect month-end billing totals when totaled for
+ a given month as nightly invoice items do not necessarily encompass all invoicing factors for the entire month.
+ `v2/billing/{account_urn}/insights/{start_date}/{end_date}` where account_urn is the URN of the customer
+ account, can be a team (do:team:uuid) or an organization (do:teamgroup:uuid). The date range specified by
+ start_date and end_date must be in YYYY-MM-DD format.
+ """
from .resources.billing import AsyncBillingResource
return AsyncBillingResource(self)
@@ -830,6 +930,9 @@ def __init__(self, client: Gradient) -> None:
@cached_property
def agents(self) -> agents.AgentsResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
from .resources.agents import AgentsResourceWithRawResponse
return AgentsResourceWithRawResponse(self._client.agents)
@@ -842,18 +945,30 @@ def chat(self) -> chat.ChatResourceWithRawResponse:
@cached_property
def images(self) -> images.ImagesResourceWithRawResponse:
+ """Generate images from text prompts using various AI models."""
from .resources.images import ImagesResourceWithRawResponse
return ImagesResourceWithRawResponse(self._client.images)
@cached_property
def responses(self) -> responses.ResponsesResourceWithRawResponse:
+ """Generate text-to-text responses from text prompts."""
from .resources.responses import ResponsesResourceWithRawResponse
return ResponsesResourceWithRawResponse(self._client.responses)
@cached_property
def gpu_droplets(self) -> gpu_droplets.GPUDropletsResourceWithRawResponse:
+ """
+ A [Droplet](https://docs.digitalocean.com/products/droplets/) is a DigitalOcean
+ virtual machine. By sending requests to the Droplet endpoint, you can
+ list, create, or delete Droplets.
+
+ Some of the attributes will have an object value. The `region` and `image`
+ objects will all contain the standard attributes of their associated
+ types. Find more information about each of these objects in their
+ respective sections.
+ """
from .resources.gpu_droplets import GPUDropletsResourceWithRawResponse
return GPUDropletsResourceWithRawResponse(self._client.gpu_droplets)
@@ -866,18 +981,25 @@ def inference(self) -> inference.InferenceResourceWithRawResponse:
@cached_property
def knowledge_bases(self) -> knowledge_bases.KnowledgeBasesResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
from .resources.knowledge_bases import KnowledgeBasesResourceWithRawResponse
return KnowledgeBasesResourceWithRawResponse(self._client.knowledge_bases)
@cached_property
def models(self) -> models.ModelsResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
from .resources.models import ModelsResourceWithRawResponse
return ModelsResourceWithRawResponse(self._client.models)
@cached_property
def regions(self) -> regions.RegionsResourceWithRawResponse:
+ """Provides information about DigitalOcean data center regions."""
from .resources.regions import RegionsResourceWithRawResponse
return RegionsResourceWithRawResponse(self._client.regions)
@@ -908,6 +1030,34 @@ def apps(self) -> apps.AppsResourceWithRawResponse:
@cached_property
def billing(self) -> billing.BillingResourceWithRawResponse:
+ """
+ The billing endpoints allow you to retrieve your account balance, invoices,
+ billing history, and insights.
+
+ **Balance:** By sending requests to the `/v2/customers/my/balance` endpoint, you can
+ retrieve the balance information for the requested customer account.
+
+ **Invoices:** [Invoices](https://docs.digitalocean.com/platform/billing/invoices/)
+ are generated on the first of each month for every DigitalOcean
+ customer. An invoice preview is generated daily, which can be accessed
+ with the `preview` keyword in place of `$INVOICE_UUID`. To interact with
+ invoices, you will generally send requests to the invoices endpoint at
+ `/v2/customers/my/invoices`.
+
+ **Billing History:** Billing history is a record of billing events for your account.
+ For example, entries may include events like payments made, invoices
+ issued, or credits granted. To interact with invoices, you
+ will generally send requests to the invoices endpoint at
+ `/v2/customers/my/billing_history`.
+
+ **Billing Insights:** Day-over-day changes in billing resource usage based on nightly invoice items,
+ including total amount, region, SKU, and description for a specified date range.
+ It is important to note that the daily resource usage may not reflect month-end billing totals when totaled for
+ a given month as nightly invoice items do not necessarily encompass all invoicing factors for the entire month.
+ `v2/billing/{account_urn}/insights/{start_date}/{end_date}` where account_urn is the URN of the customer
+ account, can be a team (do:team:uuid) or an organization (do:teamgroup:uuid). The date range specified by
+ start_date and end_date must be in YYYY-MM-DD format.
+ """
from .resources.billing import BillingResourceWithRawResponse
return BillingResourceWithRawResponse(self._client.billing)
@@ -921,6 +1071,9 @@ def __init__(self, client: AsyncGradient) -> None:
@cached_property
def agents(self) -> agents.AsyncAgentsResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
from .resources.agents import AsyncAgentsResourceWithRawResponse
return AsyncAgentsResourceWithRawResponse(self._client.agents)
@@ -933,18 +1086,30 @@ def chat(self) -> chat.AsyncChatResourceWithRawResponse:
@cached_property
def images(self) -> images.AsyncImagesResourceWithRawResponse:
+ """Generate images from text prompts using various AI models."""
from .resources.images import AsyncImagesResourceWithRawResponse
return AsyncImagesResourceWithRawResponse(self._client.images)
@cached_property
def responses(self) -> responses.AsyncResponsesResourceWithRawResponse:
+ """Generate text-to-text responses from text prompts."""
from .resources.responses import AsyncResponsesResourceWithRawResponse
return AsyncResponsesResourceWithRawResponse(self._client.responses)
@cached_property
def gpu_droplets(self) -> gpu_droplets.AsyncGPUDropletsResourceWithRawResponse:
+ """
+ A [Droplet](https://docs.digitalocean.com/products/droplets/) is a DigitalOcean
+ virtual machine. By sending requests to the Droplet endpoint, you can
+ list, create, or delete Droplets.
+
+ Some of the attributes will have an object value. The `region` and `image`
+ objects will all contain the standard attributes of their associated
+ types. Find more information about each of these objects in their
+ respective sections.
+ """
from .resources.gpu_droplets import AsyncGPUDropletsResourceWithRawResponse
return AsyncGPUDropletsResourceWithRawResponse(self._client.gpu_droplets)
@@ -959,6 +1124,9 @@ def inference(self) -> inference.AsyncInferenceResourceWithRawResponse:
def knowledge_bases(
self,
) -> knowledge_bases.AsyncKnowledgeBasesResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
from .resources.knowledge_bases import (
AsyncKnowledgeBasesResourceWithRawResponse,
)
@@ -967,12 +1135,16 @@ def knowledge_bases(
@cached_property
def models(self) -> models.AsyncModelsResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
from .resources.models import AsyncModelsResourceWithRawResponse
return AsyncModelsResourceWithRawResponse(self._client.models)
@cached_property
def regions(self) -> regions.AsyncRegionsResourceWithRawResponse:
+ """Provides information about DigitalOcean data center regions."""
from .resources.regions import AsyncRegionsResourceWithRawResponse
return AsyncRegionsResourceWithRawResponse(self._client.regions)
@@ -1003,6 +1175,34 @@ def apps(self) -> apps.AsyncAppsResourceWithRawResponse:
@cached_property
def billing(self) -> billing.AsyncBillingResourceWithRawResponse:
+ """
+ The billing endpoints allow you to retrieve your account balance, invoices,
+ billing history, and insights.
+
+ **Balance:** By sending requests to the `/v2/customers/my/balance` endpoint, you can
+ retrieve the balance information for the requested customer account.
+
+ **Invoices:** [Invoices](https://docs.digitalocean.com/platform/billing/invoices/)
+ are generated on the first of each month for every DigitalOcean
+ customer. An invoice preview is generated daily, which can be accessed
+ with the `preview` keyword in place of `$INVOICE_UUID`. To interact with
+ invoices, you will generally send requests to the invoices endpoint at
+ `/v2/customers/my/invoices`.
+
+ **Billing History:** Billing history is a record of billing events for your account.
+ For example, entries may include events like payments made, invoices
+ issued, or credits granted. To interact with billing history, you
+ will generally send requests to the billing history endpoint at
+ `/v2/customers/my/billing_history`.
+
+ **Billing Insights:** Day-over-day changes in billing resource usage based on nightly invoice items,
+ including total amount, region, SKU, and description for a specified date range.
+ It is important to note that the daily resource usage may not reflect month-end billing totals when totaled for
+ a given month as nightly invoice items do not necessarily encompass all invoicing factors for the entire month.
+ `/v2/billing/{account_urn}/insights/{start_date}/{end_date}` where account_urn is the URN of the customer
+ account, can be a team (do:team:uuid) or an organization (do:teamgroup:uuid). The date range specified by
+ start_date and end_date must be in YYYY-MM-DD format.
+ """
from .resources.billing import AsyncBillingResourceWithRawResponse
return AsyncBillingResourceWithRawResponse(self._client.billing)
@@ -1016,6 +1216,9 @@ def __init__(self, client: Gradient) -> None:
@cached_property
def agents(self) -> agents.AgentsResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
from .resources.agents import AgentsResourceWithStreamingResponse
return AgentsResourceWithStreamingResponse(self._client.agents)
@@ -1028,18 +1231,30 @@ def chat(self) -> chat.ChatResourceWithStreamingResponse:
@cached_property
def images(self) -> images.ImagesResourceWithStreamingResponse:
+ """Generate images from text prompts using various AI models."""
from .resources.images import ImagesResourceWithStreamingResponse
return ImagesResourceWithStreamingResponse(self._client.images)
@cached_property
def responses(self) -> responses.ResponsesResourceWithStreamingResponse:
+ """Generate text-to-text responses from text prompts."""
from .resources.responses import ResponsesResourceWithStreamingResponse
return ResponsesResourceWithStreamingResponse(self._client.responses)
@cached_property
def gpu_droplets(self) -> gpu_droplets.GPUDropletsResourceWithStreamingResponse:
+ """
+ A [Droplet](https://docs.digitalocean.com/products/droplets/) is a DigitalOcean
+ virtual machine. By sending requests to the Droplet endpoint, you can
+ list, create, or delete Droplets.
+
+ Some of the attributes will have an object value. The `region` and `image`
+ objects will all contain the standard attributes of their associated
+ types. Find more information about each of these objects in their
+ respective sections.
+ """
from .resources.gpu_droplets import GPUDropletsResourceWithStreamingResponse
return GPUDropletsResourceWithStreamingResponse(self._client.gpu_droplets)
@@ -1054,6 +1269,9 @@ def inference(self) -> inference.InferenceResourceWithStreamingResponse:
def knowledge_bases(
self,
) -> knowledge_bases.KnowledgeBasesResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
from .resources.knowledge_bases import (
KnowledgeBasesResourceWithStreamingResponse,
)
@@ -1062,12 +1280,16 @@ def knowledge_bases(
@cached_property
def models(self) -> models.ModelsResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
from .resources.models import ModelsResourceWithStreamingResponse
return ModelsResourceWithStreamingResponse(self._client.models)
@cached_property
def regions(self) -> regions.RegionsResourceWithStreamingResponse:
+ """Provides information about DigitalOcean data center regions."""
from .resources.regions import RegionsResourceWithStreamingResponse
return RegionsResourceWithStreamingResponse(self._client.regions)
@@ -1098,6 +1320,34 @@ def apps(self) -> apps.AppsResourceWithStreamingResponse:
@cached_property
def billing(self) -> billing.BillingResourceWithStreamingResponse:
+ """
+ The billing endpoints allow you to retrieve your account balance, invoices,
+ billing history, and insights.
+
+ **Balance:** By sending requests to the `/v2/customers/my/balance` endpoint, you can
+ retrieve the balance information for the requested customer account.
+
+ **Invoices:** [Invoices](https://docs.digitalocean.com/platform/billing/invoices/)
+ are generated on the first of each month for every DigitalOcean
+ customer. An invoice preview is generated daily, which can be accessed
+ with the `preview` keyword in place of `$INVOICE_UUID`. To interact with
+ invoices, you will generally send requests to the invoices endpoint at
+ `/v2/customers/my/invoices`.
+
+ **Billing History:** Billing history is a record of billing events for your account.
+ For example, entries may include events like payments made, invoices
+ issued, or credits granted. To interact with billing history, you
+ will generally send requests to the billing history endpoint at
+ `/v2/customers/my/billing_history`.
+
+ **Billing Insights:** Day-over-day changes in billing resource usage based on nightly invoice items,
+ including total amount, region, SKU, and description for a specified date range.
+ It is important to note that the daily resource usage may not reflect month-end billing totals when totaled for
+ a given month as nightly invoice items do not necessarily encompass all invoicing factors for the entire month.
+ `/v2/billing/{account_urn}/insights/{start_date}/{end_date}` where account_urn is the URN of the customer
+ account, can be a team (do:team:uuid) or an organization (do:teamgroup:uuid). The date range specified by
+ start_date and end_date must be in YYYY-MM-DD format.
+ """
from .resources.billing import BillingResourceWithStreamingResponse
return BillingResourceWithStreamingResponse(self._client.billing)
@@ -1111,6 +1361,9 @@ def __init__(self, client: AsyncGradient) -> None:
@cached_property
def agents(self) -> agents.AsyncAgentsResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
from .resources.agents import AsyncAgentsResourceWithStreamingResponse
return AsyncAgentsResourceWithStreamingResponse(self._client.agents)
@@ -1123,12 +1376,14 @@ def chat(self) -> chat.AsyncChatResourceWithStreamingResponse:
@cached_property
def images(self) -> images.AsyncImagesResourceWithStreamingResponse:
+ """Generate images from text prompts using various AI models."""
from .resources.images import AsyncImagesResourceWithStreamingResponse
return AsyncImagesResourceWithStreamingResponse(self._client.images)
@cached_property
def responses(self) -> responses.AsyncResponsesResourceWithStreamingResponse:
+ """Generate text-to-text responses from text prompts."""
from .resources.responses import AsyncResponsesResourceWithStreamingResponse
return AsyncResponsesResourceWithStreamingResponse(self._client.responses)
@@ -1137,6 +1392,16 @@ def responses(self) -> responses.AsyncResponsesResourceWithStreamingResponse:
def gpu_droplets(
self,
) -> gpu_droplets.AsyncGPUDropletsResourceWithStreamingResponse:
+ """
+ A [Droplet](https://docs.digitalocean.com/products/droplets/) is a DigitalOcean
+ virtual machine. By sending requests to the Droplet endpoint, you can
+ list, create, or delete Droplets.
+
+ Some of the attributes will have an object value. The `region` and `image`
+ objects will all contain the standard attributes of their associated
+ types. Find more information about each of these objects in their
+ respective sections.
+ """
from .resources.gpu_droplets import (
AsyncGPUDropletsResourceWithStreamingResponse,
)
@@ -1153,6 +1418,9 @@ def inference(self) -> inference.AsyncInferenceResourceWithStreamingResponse:
def knowledge_bases(
self,
) -> knowledge_bases.AsyncKnowledgeBasesResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
from .resources.knowledge_bases import (
AsyncKnowledgeBasesResourceWithStreamingResponse,
)
@@ -1161,12 +1429,16 @@ def knowledge_bases(
@cached_property
def models(self) -> models.AsyncModelsResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
from .resources.models import AsyncModelsResourceWithStreamingResponse
return AsyncModelsResourceWithStreamingResponse(self._client.models)
@cached_property
def regions(self) -> regions.AsyncRegionsResourceWithStreamingResponse:
+ """Provides information about DigitalOcean data center regions."""
from .resources.regions import AsyncRegionsResourceWithStreamingResponse
return AsyncRegionsResourceWithStreamingResponse(self._client.regions)
@@ -1197,6 +1469,34 @@ def apps(self) -> apps.AsyncAppsResourceWithStreamingResponse:
@cached_property
def billing(self) -> billing.AsyncBillingResourceWithStreamingResponse:
+ """
+ The billing endpoints allow you to retrieve your account balance, invoices,
+ billing history, and insights.
+
+ **Balance:** By sending requests to the `/v2/customers/my/balance` endpoint, you can
+ retrieve the balance information for the requested customer account.
+
+ **Invoices:** [Invoices](https://docs.digitalocean.com/platform/billing/invoices/)
+ are generated on the first of each month for every DigitalOcean
+ customer. An invoice preview is generated daily, which can be accessed
+ with the `preview` keyword in place of `$INVOICE_UUID`. To interact with
+ invoices, you will generally send requests to the invoices endpoint at
+ `/v2/customers/my/invoices`.
+
+ **Billing History:** Billing history is a record of billing events for your account.
+ For example, entries may include events like payments made, invoices
+ issued, or credits granted. To interact with billing history, you
+ will generally send requests to the billing history endpoint at
+ `/v2/customers/my/billing_history`.
+
+ **Billing Insights:** Day-over-day changes in billing resource usage based on nightly invoice items,
+ including total amount, region, SKU, and description for a specified date range.
+ It is important to note that the daily resource usage may not reflect month-end billing totals when totaled for
+ a given month as nightly invoice items do not necessarily encompass all invoicing factors for the entire month.
+ `/v2/billing/{account_urn}/insights/{start_date}/{end_date}` where account_urn is the URN of the customer
+ account, can be a team (do:team:uuid) or an organization (do:teamgroup:uuid). The date range specified by
+ start_date and end_date must be in YYYY-MM-DD format.
+ """
from .resources.billing import AsyncBillingResourceWithStreamingResponse
return AsyncBillingResourceWithStreamingResponse(self._client.billing)
diff --git a/src/gradient/resources/agents/agents.py b/src/gradient/resources/agents/agents.py
index 33a59788..e6790b29 100644
--- a/src/gradient/resources/agents/agents.py
+++ b/src/gradient/resources/agents/agents.py
@@ -119,8 +119,15 @@
class AgentsResource(SyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def api_keys(self) -> APIKeysResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return APIKeysResource(self._client)
@cached_property
@@ -129,34 +136,58 @@ def chat(self) -> ChatResource:
@cached_property
def evaluation_metrics(self) -> EvaluationMetricsResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return EvaluationMetricsResource(self._client)
@cached_property
def evaluation_runs(self) -> EvaluationRunsResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return EvaluationRunsResource(self._client)
@cached_property
def evaluation_test_cases(self) -> EvaluationTestCasesResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return EvaluationTestCasesResource(self._client)
@cached_property
def evaluation_datasets(self) -> EvaluationDatasetsResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return EvaluationDatasetsResource(self._client)
@cached_property
def functions(self) -> FunctionsResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return FunctionsResource(self._client)
@cached_property
def versions(self) -> VersionsResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return VersionsResource(self._client)
@cached_property
def knowledge_bases(self) -> KnowledgeBasesResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return KnowledgeBasesResource(self._client)
@cached_property
def routes(self) -> RoutesResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return RoutesResource(self._client)
@cached_property
@@ -716,8 +747,15 @@ def wait_until_ready(
class AsyncAgentsResource(AsyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def api_keys(self) -> AsyncAPIKeysResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncAPIKeysResource(self._client)
@cached_property
@@ -726,34 +764,58 @@ def chat(self) -> AsyncChatResource:
@cached_property
def evaluation_metrics(self) -> AsyncEvaluationMetricsResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncEvaluationMetricsResource(self._client)
@cached_property
def evaluation_runs(self) -> AsyncEvaluationRunsResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncEvaluationRunsResource(self._client)
@cached_property
def evaluation_test_cases(self) -> AsyncEvaluationTestCasesResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncEvaluationTestCasesResource(self._client)
@cached_property
def evaluation_datasets(self) -> AsyncEvaluationDatasetsResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncEvaluationDatasetsResource(self._client)
@cached_property
def functions(self) -> AsyncFunctionsResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncFunctionsResource(self._client)
@cached_property
def versions(self) -> AsyncVersionsResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncVersionsResource(self._client)
@cached_property
def knowledge_bases(self) -> AsyncKnowledgeBasesResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncKnowledgeBasesResource(self._client)
@cached_property
def routes(self) -> AsyncRoutesResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncRoutesResource(self._client)
@cached_property
@@ -1345,6 +1407,9 @@ def __init__(self, agents: AgentsResource) -> None:
@cached_property
def api_keys(self) -> APIKeysResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return APIKeysResourceWithRawResponse(self._agents.api_keys)
@cached_property
@@ -1353,34 +1418,58 @@ def chat(self) -> ChatResourceWithRawResponse:
@cached_property
def evaluation_metrics(self) -> EvaluationMetricsResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return EvaluationMetricsResourceWithRawResponse(self._agents.evaluation_metrics)
@cached_property
def evaluation_runs(self) -> EvaluationRunsResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return EvaluationRunsResourceWithRawResponse(self._agents.evaluation_runs)
@cached_property
def evaluation_test_cases(self) -> EvaluationTestCasesResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return EvaluationTestCasesResourceWithRawResponse(self._agents.evaluation_test_cases)
@cached_property
def evaluation_datasets(self) -> EvaluationDatasetsResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return EvaluationDatasetsResourceWithRawResponse(self._agents.evaluation_datasets)
@cached_property
def functions(self) -> FunctionsResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return FunctionsResourceWithRawResponse(self._agents.functions)
@cached_property
def versions(self) -> VersionsResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return VersionsResourceWithRawResponse(self._agents.versions)
@cached_property
def knowledge_bases(self) -> KnowledgeBasesResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return KnowledgeBasesResourceWithRawResponse(self._agents.knowledge_bases)
@cached_property
def routes(self) -> RoutesResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return RoutesResourceWithRawResponse(self._agents.routes)
@@ -1415,6 +1504,9 @@ def __init__(self, agents: AsyncAgentsResource) -> None:
@cached_property
def api_keys(self) -> AsyncAPIKeysResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncAPIKeysResourceWithRawResponse(self._agents.api_keys)
@cached_property
@@ -1423,34 +1515,58 @@ def chat(self) -> AsyncChatResourceWithRawResponse:
@cached_property
def evaluation_metrics(self) -> AsyncEvaluationMetricsResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncEvaluationMetricsResourceWithRawResponse(self._agents.evaluation_metrics)
@cached_property
def evaluation_runs(self) -> AsyncEvaluationRunsResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncEvaluationRunsResourceWithRawResponse(self._agents.evaluation_runs)
@cached_property
def evaluation_test_cases(self) -> AsyncEvaluationTestCasesResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncEvaluationTestCasesResourceWithRawResponse(self._agents.evaluation_test_cases)
@cached_property
def evaluation_datasets(self) -> AsyncEvaluationDatasetsResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncEvaluationDatasetsResourceWithRawResponse(self._agents.evaluation_datasets)
@cached_property
def functions(self) -> AsyncFunctionsResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncFunctionsResourceWithRawResponse(self._agents.functions)
@cached_property
def versions(self) -> AsyncVersionsResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncVersionsResourceWithRawResponse(self._agents.versions)
@cached_property
def knowledge_bases(self) -> AsyncKnowledgeBasesResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncKnowledgeBasesResourceWithRawResponse(self._agents.knowledge_bases)
@cached_property
def routes(self) -> AsyncRoutesResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncRoutesResourceWithRawResponse(self._agents.routes)
@@ -1485,6 +1601,9 @@ def __init__(self, agents: AgentsResource) -> None:
@cached_property
def api_keys(self) -> APIKeysResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return APIKeysResourceWithStreamingResponse(self._agents.api_keys)
@cached_property
@@ -1493,34 +1612,58 @@ def chat(self) -> ChatResourceWithStreamingResponse:
@cached_property
def evaluation_metrics(self) -> EvaluationMetricsResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return EvaluationMetricsResourceWithStreamingResponse(self._agents.evaluation_metrics)
@cached_property
def evaluation_runs(self) -> EvaluationRunsResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return EvaluationRunsResourceWithStreamingResponse(self._agents.evaluation_runs)
@cached_property
def evaluation_test_cases(self) -> EvaluationTestCasesResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return EvaluationTestCasesResourceWithStreamingResponse(self._agents.evaluation_test_cases)
@cached_property
def evaluation_datasets(self) -> EvaluationDatasetsResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return EvaluationDatasetsResourceWithStreamingResponse(self._agents.evaluation_datasets)
@cached_property
def functions(self) -> FunctionsResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return FunctionsResourceWithStreamingResponse(self._agents.functions)
@cached_property
def versions(self) -> VersionsResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return VersionsResourceWithStreamingResponse(self._agents.versions)
@cached_property
def knowledge_bases(self) -> KnowledgeBasesResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return KnowledgeBasesResourceWithStreamingResponse(self._agents.knowledge_bases)
@cached_property
def routes(self) -> RoutesResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return RoutesResourceWithStreamingResponse(self._agents.routes)
@@ -1555,6 +1698,9 @@ def __init__(self, agents: AsyncAgentsResource) -> None:
@cached_property
def api_keys(self) -> AsyncAPIKeysResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncAPIKeysResourceWithStreamingResponse(self._agents.api_keys)
@cached_property
@@ -1563,32 +1709,56 @@ def chat(self) -> AsyncChatResourceWithStreamingResponse:
@cached_property
def evaluation_metrics(self) -> AsyncEvaluationMetricsResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncEvaluationMetricsResourceWithStreamingResponse(self._agents.evaluation_metrics)
@cached_property
def evaluation_runs(self) -> AsyncEvaluationRunsResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncEvaluationRunsResourceWithStreamingResponse(self._agents.evaluation_runs)
@cached_property
def evaluation_test_cases(self) -> AsyncEvaluationTestCasesResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncEvaluationTestCasesResourceWithStreamingResponse(self._agents.evaluation_test_cases)
@cached_property
def evaluation_datasets(self) -> AsyncEvaluationDatasetsResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncEvaluationDatasetsResourceWithStreamingResponse(self._agents.evaluation_datasets)
@cached_property
def functions(self) -> AsyncFunctionsResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncFunctionsResourceWithStreamingResponse(self._agents.functions)
@cached_property
def versions(self) -> AsyncVersionsResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncVersionsResourceWithStreamingResponse(self._agents.versions)
@cached_property
def knowledge_bases(self) -> AsyncKnowledgeBasesResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncKnowledgeBasesResourceWithStreamingResponse(self._agents.knowledge_bases)
@cached_property
def routes(self) -> AsyncRoutesResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncRoutesResourceWithStreamingResponse(self._agents.routes)
diff --git a/src/gradient/resources/agents/api_keys.py b/src/gradient/resources/agents/api_keys.py
index 174ebf60..8a045851 100644
--- a/src/gradient/resources/agents/api_keys.py
+++ b/src/gradient/resources/agents/api_keys.py
@@ -26,6 +26,10 @@
class APIKeysResource(SyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> APIKeysResourceWithRawResponse:
"""
@@ -282,6 +286,10 @@ def regenerate(
class AsyncAPIKeysResource(AsyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncAPIKeysResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/agents/chat/chat.py b/src/gradient/resources/agents/chat/chat.py
index 80947cfb..a087c84d 100644
--- a/src/gradient/resources/agents/chat/chat.py
+++ b/src/gradient/resources/agents/chat/chat.py
@@ -19,6 +19,9 @@
class ChatResource(SyncAPIResource):
@cached_property
def completions(self) -> CompletionsResource:
+ """
+ Given a list of messages comprising a conversation, the model will return a response.
+ """
return CompletionsResource(self._client)
@cached_property
@@ -44,6 +47,9 @@ def with_streaming_response(self) -> ChatResourceWithStreamingResponse:
class AsyncChatResource(AsyncAPIResource):
@cached_property
def completions(self) -> AsyncCompletionsResource:
+ """
+ Given a list of messages comprising a conversation, the model will return a response.
+ """
return AsyncCompletionsResource(self._client)
@cached_property
@@ -72,6 +78,9 @@ def __init__(self, chat: ChatResource) -> None:
@cached_property
def completions(self) -> CompletionsResourceWithRawResponse:
+ """
+ Given a list of messages comprising a conversation, the model will return a response.
+ """
return CompletionsResourceWithRawResponse(self._chat.completions)
@@ -81,6 +90,9 @@ def __init__(self, chat: AsyncChatResource) -> None:
@cached_property
def completions(self) -> AsyncCompletionsResourceWithRawResponse:
+ """
+ Given a list of messages comprising a conversation, the model will return a response.
+ """
return AsyncCompletionsResourceWithRawResponse(self._chat.completions)
@@ -90,6 +102,9 @@ def __init__(self, chat: ChatResource) -> None:
@cached_property
def completions(self) -> CompletionsResourceWithStreamingResponse:
+ """
+ Given a list of messages comprising a conversation, the model will return a response.
+ """
return CompletionsResourceWithStreamingResponse(self._chat.completions)
@@ -99,4 +114,7 @@ def __init__(self, chat: AsyncChatResource) -> None:
@cached_property
def completions(self) -> AsyncCompletionsResourceWithStreamingResponse:
+ """
+ Given a list of messages comprising a conversation, the model will return a response.
+ """
return AsyncCompletionsResourceWithStreamingResponse(self._chat.completions)
diff --git a/src/gradient/resources/agents/chat/completions.py b/src/gradient/resources/agents/chat/completions.py
index 374fdc16..fdad67bb 100644
--- a/src/gradient/resources/agents/chat/completions.py
+++ b/src/gradient/resources/agents/chat/completions.py
@@ -27,6 +27,10 @@
class CompletionsResource(SyncAPIResource):
+ """
+ Given a list of messages comprising a conversation, the model will return a response.
+ """
+
@cached_property
def with_raw_response(self) -> CompletionsResourceWithRawResponse:
"""
@@ -516,6 +520,10 @@ def create(
class AsyncCompletionsResource(AsyncAPIResource):
+ """
+ Given a list of messages comprising a conversation, the model will return a response.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncCompletionsResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/agents/evaluation_datasets.py b/src/gradient/resources/agents/evaluation_datasets.py
index db9b473d..47ba1ba1 100644
--- a/src/gradient/resources/agents/evaluation_datasets.py
+++ b/src/gradient/resources/agents/evaluation_datasets.py
@@ -32,6 +32,10 @@
class EvaluationDatasetsResource(SyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> EvaluationDatasetsResourceWithRawResponse:
"""
@@ -144,6 +148,10 @@ def create_file_upload_presigned_urls(
class AsyncEvaluationDatasetsResource(AsyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncEvaluationDatasetsResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/agents/evaluation_metrics/anthropic/anthropic.py b/src/gradient/resources/agents/evaluation_metrics/anthropic/anthropic.py
index 0079d59b..20e6e5b1 100644
--- a/src/gradient/resources/agents/evaluation_metrics/anthropic/anthropic.py
+++ b/src/gradient/resources/agents/evaluation_metrics/anthropic/anthropic.py
@@ -19,6 +19,9 @@
class AnthropicResource(SyncAPIResource):
@cached_property
def keys(self) -> KeysResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return KeysResource(self._client)
@cached_property
@@ -44,6 +47,9 @@ def with_streaming_response(self) -> AnthropicResourceWithStreamingResponse:
class AsyncAnthropicResource(AsyncAPIResource):
@cached_property
def keys(self) -> AsyncKeysResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncKeysResource(self._client)
@cached_property
@@ -72,6 +78,9 @@ def __init__(self, anthropic: AnthropicResource) -> None:
@cached_property
def keys(self) -> KeysResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return KeysResourceWithRawResponse(self._anthropic.keys)
@@ -81,6 +90,9 @@ def __init__(self, anthropic: AsyncAnthropicResource) -> None:
@cached_property
def keys(self) -> AsyncKeysResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncKeysResourceWithRawResponse(self._anthropic.keys)
@@ -90,6 +102,9 @@ def __init__(self, anthropic: AnthropicResource) -> None:
@cached_property
def keys(self) -> KeysResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return KeysResourceWithStreamingResponse(self._anthropic.keys)
@@ -99,4 +114,7 @@ def __init__(self, anthropic: AsyncAnthropicResource) -> None:
@cached_property
def keys(self) -> AsyncKeysResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncKeysResourceWithStreamingResponse(self._anthropic.keys)
diff --git a/src/gradient/resources/agents/evaluation_metrics/anthropic/keys.py b/src/gradient/resources/agents/evaluation_metrics/anthropic/keys.py
index e015bf5c..195d8f88 100644
--- a/src/gradient/resources/agents/evaluation_metrics/anthropic/keys.py
+++ b/src/gradient/resources/agents/evaluation_metrics/anthropic/keys.py
@@ -32,6 +32,10 @@
class KeysResource(SyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> KeysResourceWithRawResponse:
"""
@@ -324,6 +328,10 @@ def list_agents(
class AsyncKeysResource(AsyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncKeysResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/agents/evaluation_metrics/evaluation_metrics.py b/src/gradient/resources/agents/evaluation_metrics/evaluation_metrics.py
index b9080132..1b081c70 100644
--- a/src/gradient/resources/agents/evaluation_metrics/evaluation_metrics.py
+++ b/src/gradient/resources/agents/evaluation_metrics/evaluation_metrics.py
@@ -63,8 +63,15 @@
class EvaluationMetricsResource(SyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def workspaces(self) -> WorkspacesResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return WorkspacesResource(self._client)
@cached_property
@@ -77,10 +84,16 @@ def openai(self) -> OpenAIResource:
@cached_property
def oauth2(self) -> Oauth2Resource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return Oauth2Resource(self._client)
@cached_property
def scheduled_indexing(self) -> ScheduledIndexingResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return ScheduledIndexingResource(self._client)
@cached_property
@@ -176,8 +189,15 @@ def list_regions(
class AsyncEvaluationMetricsResource(AsyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def workspaces(self) -> AsyncWorkspacesResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncWorkspacesResource(self._client)
@cached_property
@@ -190,10 +210,16 @@ def openai(self) -> AsyncOpenAIResource:
@cached_property
def oauth2(self) -> AsyncOauth2Resource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncOauth2Resource(self._client)
@cached_property
def scheduled_indexing(self) -> AsyncScheduledIndexingResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncScheduledIndexingResource(self._client)
@cached_property
@@ -301,6 +327,9 @@ def __init__(self, evaluation_metrics: EvaluationMetricsResource) -> None:
@cached_property
def workspaces(self) -> WorkspacesResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return WorkspacesResourceWithRawResponse(self._evaluation_metrics.workspaces)
@cached_property
@@ -313,10 +342,16 @@ def openai(self) -> OpenAIResourceWithRawResponse:
@cached_property
def oauth2(self) -> Oauth2ResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return Oauth2ResourceWithRawResponse(self._evaluation_metrics.oauth2)
@cached_property
def scheduled_indexing(self) -> ScheduledIndexingResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return ScheduledIndexingResourceWithRawResponse(self._evaluation_metrics.scheduled_indexing)
@@ -333,6 +368,9 @@ def __init__(self, evaluation_metrics: AsyncEvaluationMetricsResource) -> None:
@cached_property
def workspaces(self) -> AsyncWorkspacesResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncWorkspacesResourceWithRawResponse(self._evaluation_metrics.workspaces)
@cached_property
@@ -345,10 +383,16 @@ def openai(self) -> AsyncOpenAIResourceWithRawResponse:
@cached_property
def oauth2(self) -> AsyncOauth2ResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncOauth2ResourceWithRawResponse(self._evaluation_metrics.oauth2)
@cached_property
def scheduled_indexing(self) -> AsyncScheduledIndexingResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncScheduledIndexingResourceWithRawResponse(self._evaluation_metrics.scheduled_indexing)
@@ -365,6 +409,9 @@ def __init__(self, evaluation_metrics: EvaluationMetricsResource) -> None:
@cached_property
def workspaces(self) -> WorkspacesResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return WorkspacesResourceWithStreamingResponse(self._evaluation_metrics.workspaces)
@cached_property
@@ -377,10 +424,16 @@ def openai(self) -> OpenAIResourceWithStreamingResponse:
@cached_property
def oauth2(self) -> Oauth2ResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return Oauth2ResourceWithStreamingResponse(self._evaluation_metrics.oauth2)
@cached_property
def scheduled_indexing(self) -> ScheduledIndexingResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return ScheduledIndexingResourceWithStreamingResponse(self._evaluation_metrics.scheduled_indexing)
@@ -397,6 +450,9 @@ def __init__(self, evaluation_metrics: AsyncEvaluationMetricsResource) -> None:
@cached_property
def workspaces(self) -> AsyncWorkspacesResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncWorkspacesResourceWithStreamingResponse(self._evaluation_metrics.workspaces)
@cached_property
@@ -409,8 +465,14 @@ def openai(self) -> AsyncOpenAIResourceWithStreamingResponse:
@cached_property
def oauth2(self) -> AsyncOauth2ResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncOauth2ResourceWithStreamingResponse(self._evaluation_metrics.oauth2)
@cached_property
def scheduled_indexing(self) -> AsyncScheduledIndexingResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncScheduledIndexingResourceWithStreamingResponse(self._evaluation_metrics.scheduled_indexing)
diff --git a/src/gradient/resources/agents/evaluation_metrics/oauth2/dropbox.py b/src/gradient/resources/agents/evaluation_metrics/oauth2/dropbox.py
index 256040ba..137aa164 100644
--- a/src/gradient/resources/agents/evaluation_metrics/oauth2/dropbox.py
+++ b/src/gradient/resources/agents/evaluation_metrics/oauth2/dropbox.py
@@ -22,6 +22,10 @@
class DropboxResource(SyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> DropboxResourceWithRawResponse:
"""
@@ -90,6 +94,10 @@ def create_tokens(
class AsyncDropboxResource(AsyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncDropboxResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/agents/evaluation_metrics/oauth2/oauth2.py b/src/gradient/resources/agents/evaluation_metrics/oauth2/oauth2.py
index 335e58d7..0cf47ca6 100644
--- a/src/gradient/resources/agents/evaluation_metrics/oauth2/oauth2.py
+++ b/src/gradient/resources/agents/evaluation_metrics/oauth2/oauth2.py
@@ -30,8 +30,15 @@
class Oauth2Resource(SyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def dropbox(self) -> DropboxResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return DropboxResource(self._client)
@cached_property
@@ -104,8 +111,15 @@ def generate_url(
class AsyncOauth2Resource(AsyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def dropbox(self) -> AsyncDropboxResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncDropboxResource(self._client)
@cached_property
@@ -187,6 +201,9 @@ def __init__(self, oauth2: Oauth2Resource) -> None:
@cached_property
def dropbox(self) -> DropboxResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return DropboxResourceWithRawResponse(self._oauth2.dropbox)
@@ -200,6 +217,9 @@ def __init__(self, oauth2: AsyncOauth2Resource) -> None:
@cached_property
def dropbox(self) -> AsyncDropboxResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncDropboxResourceWithRawResponse(self._oauth2.dropbox)
@@ -213,6 +233,9 @@ def __init__(self, oauth2: Oauth2Resource) -> None:
@cached_property
def dropbox(self) -> DropboxResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return DropboxResourceWithStreamingResponse(self._oauth2.dropbox)
@@ -226,4 +249,7 @@ def __init__(self, oauth2: AsyncOauth2Resource) -> None:
@cached_property
def dropbox(self) -> AsyncDropboxResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncDropboxResourceWithStreamingResponse(self._oauth2.dropbox)
diff --git a/src/gradient/resources/agents/evaluation_metrics/openai/keys.py b/src/gradient/resources/agents/evaluation_metrics/openai/keys.py
index 9ab5cbad..ffe4992d 100644
--- a/src/gradient/resources/agents/evaluation_metrics/openai/keys.py
+++ b/src/gradient/resources/agents/evaluation_metrics/openai/keys.py
@@ -32,6 +32,10 @@
class KeysResource(SyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> KeysResourceWithRawResponse:
"""
@@ -322,6 +326,10 @@ def list_agents(
class AsyncKeysResource(AsyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncKeysResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/agents/evaluation_metrics/openai/openai.py b/src/gradient/resources/agents/evaluation_metrics/openai/openai.py
index 00fd8a7d..4963c752 100644
--- a/src/gradient/resources/agents/evaluation_metrics/openai/openai.py
+++ b/src/gradient/resources/agents/evaluation_metrics/openai/openai.py
@@ -19,6 +19,9 @@
class OpenAIResource(SyncAPIResource):
@cached_property
def keys(self) -> KeysResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return KeysResource(self._client)
@cached_property
@@ -44,6 +47,9 @@ def with_streaming_response(self) -> OpenAIResourceWithStreamingResponse:
class AsyncOpenAIResource(AsyncAPIResource):
@cached_property
def keys(self) -> AsyncKeysResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncKeysResource(self._client)
@cached_property
@@ -72,6 +78,9 @@ def __init__(self, openai: OpenAIResource) -> None:
@cached_property
def keys(self) -> KeysResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return KeysResourceWithRawResponse(self._openai.keys)
@@ -81,6 +90,9 @@ def __init__(self, openai: AsyncOpenAIResource) -> None:
@cached_property
def keys(self) -> AsyncKeysResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncKeysResourceWithRawResponse(self._openai.keys)
@@ -90,6 +102,9 @@ def __init__(self, openai: OpenAIResource) -> None:
@cached_property
def keys(self) -> KeysResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return KeysResourceWithStreamingResponse(self._openai.keys)
@@ -99,4 +114,7 @@ def __init__(self, openai: AsyncOpenAIResource) -> None:
@cached_property
def keys(self) -> AsyncKeysResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncKeysResourceWithStreamingResponse(self._openai.keys)
diff --git a/src/gradient/resources/agents/evaluation_metrics/scheduled_indexing.py b/src/gradient/resources/agents/evaluation_metrics/scheduled_indexing.py
index e346f7ae..b3da363c 100644
--- a/src/gradient/resources/agents/evaluation_metrics/scheduled_indexing.py
+++ b/src/gradient/resources/agents/evaluation_metrics/scheduled_indexing.py
@@ -26,6 +26,10 @@
class ScheduledIndexingResource(SyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> ScheduledIndexingResourceWithRawResponse:
"""
@@ -172,6 +176,10 @@ def delete(
class AsyncScheduledIndexingResource(AsyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncScheduledIndexingResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/agents/evaluation_metrics/workspaces/agents.py b/src/gradient/resources/agents/evaluation_metrics/workspaces/agents.py
index 7f9a766a..41f3f993 100644
--- a/src/gradient/resources/agents/evaluation_metrics/workspaces/agents.py
+++ b/src/gradient/resources/agents/evaluation_metrics/workspaces/agents.py
@@ -23,6 +23,10 @@
class AgentsResource(SyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> AgentsResourceWithRawResponse:
"""
@@ -151,6 +155,10 @@ def move(
class AsyncAgentsResource(AsyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncAgentsResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/agents/evaluation_metrics/workspaces/workspaces.py b/src/gradient/resources/agents/evaluation_metrics/workspaces/workspaces.py
index 73539bbd..7c2be668 100644
--- a/src/gradient/resources/agents/evaluation_metrics/workspaces/workspaces.py
+++ b/src/gradient/resources/agents/evaluation_metrics/workspaces/workspaces.py
@@ -37,8 +37,15 @@
class WorkspacesResource(SyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def agents(self) -> AgentsResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AgentsResource(self._client)
@cached_property
@@ -299,8 +306,15 @@ def list_evaluation_test_cases(
class AsyncWorkspacesResource(AsyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def agents(self) -> AsyncAgentsResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncAgentsResource(self._client)
@cached_property
@@ -585,6 +599,9 @@ def __init__(self, workspaces: WorkspacesResource) -> None:
@cached_property
def agents(self) -> AgentsResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AgentsResourceWithRawResponse(self._workspaces.agents)
@@ -613,6 +630,9 @@ def __init__(self, workspaces: AsyncWorkspacesResource) -> None:
@cached_property
def agents(self) -> AsyncAgentsResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncAgentsResourceWithRawResponse(self._workspaces.agents)
@@ -641,6 +661,9 @@ def __init__(self, workspaces: WorkspacesResource) -> None:
@cached_property
def agents(self) -> AgentsResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AgentsResourceWithStreamingResponse(self._workspaces.agents)
@@ -669,4 +692,7 @@ def __init__(self, workspaces: AsyncWorkspacesResource) -> None:
@cached_property
def agents(self) -> AsyncAgentsResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncAgentsResourceWithStreamingResponse(self._workspaces.agents)
diff --git a/src/gradient/resources/agents/evaluation_runs.py b/src/gradient/resources/agents/evaluation_runs.py
index 2b5745af..50d51156 100644
--- a/src/gradient/resources/agents/evaluation_runs.py
+++ b/src/gradient/resources/agents/evaluation_runs.py
@@ -25,6 +25,10 @@
class EvaluationRunsResource(SyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> EvaluationRunsResourceWithRawResponse:
"""
@@ -231,6 +235,10 @@ def retrieve_results(
class AsyncEvaluationRunsResource(AsyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncEvaluationRunsResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/agents/evaluation_test_cases.py b/src/gradient/resources/agents/evaluation_test_cases.py
index 0e8cce03..cb47155d 100644
--- a/src/gradient/resources/agents/evaluation_test_cases.py
+++ b/src/gradient/resources/agents/evaluation_test_cases.py
@@ -34,6 +34,10 @@
class EvaluationTestCasesResource(SyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> EvaluationTestCasesResourceWithRawResponse:
"""
@@ -298,6 +302,10 @@ def list_evaluation_runs(
class AsyncEvaluationTestCasesResource(AsyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncEvaluationTestCasesResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/agents/functions.py b/src/gradient/resources/agents/functions.py
index 3d995d24..fc58d899 100644
--- a/src/gradient/resources/agents/functions.py
+++ b/src/gradient/resources/agents/functions.py
@@ -24,6 +24,10 @@
class FunctionsResource(SyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> FunctionsResourceWithRawResponse:
"""
@@ -229,6 +233,10 @@ def delete(
class AsyncFunctionsResource(AsyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncFunctionsResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/agents/knowledge_bases.py b/src/gradient/resources/agents/knowledge_bases.py
index deefd123..57268294 100644
--- a/src/gradient/resources/agents/knowledge_bases.py
+++ b/src/gradient/resources/agents/knowledge_bases.py
@@ -21,6 +21,10 @@
class KnowledgeBasesResource(SyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> KnowledgeBasesResourceWithRawResponse:
"""
@@ -160,6 +164,10 @@ def detach(
class AsyncKnowledgeBasesResource(AsyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncKnowledgeBasesResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/agents/routes.py b/src/gradient/resources/agents/routes.py
index dc37b7d2..3f80e8e8 100644
--- a/src/gradient/resources/agents/routes.py
+++ b/src/gradient/resources/agents/routes.py
@@ -25,6 +25,10 @@
class RoutesResource(SyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> RoutesResourceWithRawResponse:
"""
@@ -251,6 +255,10 @@ def view(
class AsyncRoutesResource(AsyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncRoutesResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/agents/versions.py b/src/gradient/resources/agents/versions.py
index 0331344a..90b55087 100644
--- a/src/gradient/resources/agents/versions.py
+++ b/src/gradient/resources/agents/versions.py
@@ -23,6 +23,10 @@
class VersionsResource(SyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> VersionsResourceWithRawResponse:
"""
@@ -145,6 +149,10 @@ def list(
class AsyncVersionsResource(AsyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncVersionsResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/apps/apps.py b/src/gradient/resources/apps/apps.py
index 889f2406..1caa9ee7 100644
--- a/src/gradient/resources/apps/apps.py
+++ b/src/gradient/resources/apps/apps.py
@@ -19,6 +19,18 @@
class AppsResource(SyncAPIResource):
@cached_property
def job_invocations(self) -> JobInvocationsResource:
+ """
+ App Platform is a Platform-as-a-Service (PaaS) offering from DigitalOcean that allows
+ developers to publish code directly to DigitalOcean servers without worrying about the
+ underlying infrastructure.
+
+ Most API operations are centered around a few core object types. Following are the
+ definitions of these types. These definitions will be omitted from the operation-specific
+ documentation.
+
+ For documentation on app specifications (`AppSpec` objects), please refer to the
+ [product documentation](https://docs.digitalocean.com/products/app-platform/reference/app-spec/).
+ """
return JobInvocationsResource(self._client)
@cached_property
@@ -44,6 +56,18 @@ def with_streaming_response(self) -> AppsResourceWithStreamingResponse:
class AsyncAppsResource(AsyncAPIResource):
@cached_property
def job_invocations(self) -> AsyncJobInvocationsResource:
+ """
+ App Platform is a Platform-as-a-Service (PaaS) offering from DigitalOcean that allows
+ developers to publish code directly to DigitalOcean servers without worrying about the
+ underlying infrastructure.
+
+ Most API operations are centered around a few core object types. Following are the
+ definitions of these types. These definitions will be omitted from the operation-specific
+ documentation.
+
+ For documentation on app specifications (`AppSpec` objects), please refer to the
+ [product documentation](https://docs.digitalocean.com/products/app-platform/reference/app-spec/).
+ """
return AsyncJobInvocationsResource(self._client)
@cached_property
@@ -72,6 +96,18 @@ def __init__(self, apps: AppsResource) -> None:
@cached_property
def job_invocations(self) -> JobInvocationsResourceWithRawResponse:
+ """
+ App Platform is a Platform-as-a-Service (PaaS) offering from DigitalOcean that allows
+ developers to publish code directly to DigitalOcean servers without worrying about the
+ underlying infrastructure.
+
+ Most API operations are centered around a few core object types. Following are the
+ definitions of these types. These definitions will be omitted from the operation-specific
+ documentation.
+
+ For documentation on app specifications (`AppSpec` objects), please refer to the
+ [product documentation](https://docs.digitalocean.com/products/app-platform/reference/app-spec/).
+ """
return JobInvocationsResourceWithRawResponse(self._apps.job_invocations)
@@ -81,6 +117,18 @@ def __init__(self, apps: AsyncAppsResource) -> None:
@cached_property
def job_invocations(self) -> AsyncJobInvocationsResourceWithRawResponse:
+ """
+ App Platform is a Platform-as-a-Service (PaaS) offering from DigitalOcean that allows
+ developers to publish code directly to DigitalOcean servers without worrying about the
+ underlying infrastructure.
+
+ Most API operations are centered around a few core object types. Following are the
+ definitions of these types. These definitions will be omitted from the operation-specific
+ documentation.
+
+ For documentation on app specifications (`AppSpec` objects), please refer to the
+ [product documentation](https://docs.digitalocean.com/products/app-platform/reference/app-spec/).
+ """
return AsyncJobInvocationsResourceWithRawResponse(self._apps.job_invocations)
@@ -90,6 +138,18 @@ def __init__(self, apps: AppsResource) -> None:
@cached_property
def job_invocations(self) -> JobInvocationsResourceWithStreamingResponse:
+ """
+ App Platform is a Platform-as-a-Service (PaaS) offering from DigitalOcean that allows
+ developers to publish code directly to DigitalOcean servers without worrying about the
+ underlying infrastructure.
+
+ Most API operations are centered around a few core object types. Following are the
+ definitions of these types. These definitions will be omitted from the operation-specific
+ documentation.
+
+ For documentation on app specifications (`AppSpec` objects), please refer to the
+ [product documentation](https://docs.digitalocean.com/products/app-platform/reference/app-spec/).
+ """
return JobInvocationsResourceWithStreamingResponse(self._apps.job_invocations)
@@ -99,4 +159,16 @@ def __init__(self, apps: AsyncAppsResource) -> None:
@cached_property
def job_invocations(self) -> AsyncJobInvocationsResourceWithStreamingResponse:
+ """
+ App Platform is a Platform-as-a-Service (PaaS) offering from DigitalOcean that allows
+ developers to publish code directly to DigitalOcean servers without worrying about the
+ underlying infrastructure.
+
+ Most API operations are centered around a few core object types. Following are the
+ definitions of these types. These definitions will be omitted from the operation-specific
+ documentation.
+
+ For documentation on app specifications (`AppSpec` objects), please refer to the
+ [product documentation](https://docs.digitalocean.com/products/app-platform/reference/app-spec/).
+ """
return AsyncJobInvocationsResourceWithStreamingResponse(self._apps.job_invocations)
diff --git a/src/gradient/resources/apps/job_invocations.py b/src/gradient/resources/apps/job_invocations.py
index 449dd829..02bc5a95 100644
--- a/src/gradient/resources/apps/job_invocations.py
+++ b/src/gradient/resources/apps/job_invocations.py
@@ -22,6 +22,19 @@
class JobInvocationsResource(SyncAPIResource):
+ """
+ App Platform is a Platform-as-a-Service (PaaS) offering from DigitalOcean that allows
+ developers to publish code directly to DigitalOcean servers without worrying about the
+ underlying infrastructure.
+
+ Most API operations are centered around a few core object types. Following are the
+ definitions of these types. These definitions will be omitted from the operation-specific
+ documentation.
+
+ For documentation on app specifications (`AppSpec` objects), please refer to the
+ [product documentation](https://docs.digitalocean.com/products/app-platform/reference/app-spec/).
+ """
+
@cached_property
def with_raw_response(self) -> JobInvocationsResourceWithRawResponse:
"""
@@ -88,6 +101,19 @@ def cancel(
class AsyncJobInvocationsResource(AsyncAPIResource):
+ """
+ App Platform is a Platform-as-a-Service (PaaS) offering from DigitalOcean that allows
+ developers to publish code directly to DigitalOcean servers without worrying about the
+ underlying infrastructure.
+
+ Most API operations are centered around a few core object types. Following are the
+ definitions of these types. These definitions will be omitted from the operation-specific
+ documentation.
+
+ For documentation on app specifications (`AppSpec` objects), please refer to the
+ [product documentation](https://docs.digitalocean.com/products/app-platform/reference/app-spec/).
+ """
+
@cached_property
def with_raw_response(self) -> AsyncJobInvocationsResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/billing.py b/src/gradient/resources/billing.py
index f9f42aad..8261cfa9 100644
--- a/src/gradient/resources/billing.py
+++ b/src/gradient/resources/billing.py
@@ -25,6 +25,35 @@
class BillingResource(SyncAPIResource):
+ """
+ The billing endpoints allow you to retrieve your account balance, invoices,
+ billing history, and insights.
+
+ **Balance:** By sending requests to the `/v2/customers/my/balance` endpoint, you can
+ retrieve the balance information for the requested customer account.
+
+ **Invoices:** [Invoices](https://docs.digitalocean.com/platform/billing/invoices/)
+ are generated on the first of each month for every DigitalOcean
+ customer. An invoice preview is generated daily, which can be accessed
+ with the `preview` keyword in place of `$INVOICE_UUID`. To interact with
+ invoices, you will generally send requests to the invoices endpoint at
+ `/v2/customers/my/invoices`.
+
+ **Billing History:** Billing history is a record of billing events for your account.
+ For example, entries may include events like payments made, invoices
+ issued, or credits granted. To interact with billing history, you
+ will generally send requests to the billing history endpoint at
+ `/v2/customers/my/billing_history`.
+
+ **Billing Insights:** Day-over-day changes in billing resource usage based on nightly invoice items,
+ including total amount, region, SKU, and description for a specified date range.
+ It is important to note that the daily resource usage may not reflect month-end billing totals when totaled for
+ a given month as nightly invoice items do not necessarily encompass all invoicing factors for the entire month.
+ `v2/billing/{account_urn}/insights/{start_date}/{end_date}` where account_urn is the URN of the customer
+ account, can be a team (do:team:uuid) or an organization (do:teamgroup:uuid). The date range specified by
+ start_date and end_date must be in YYYY-MM-DD format.
+ """
+
@cached_property
def with_raw_response(self) -> BillingResourceWithRawResponse:
"""
@@ -108,6 +137,35 @@ def list_insights(
class AsyncBillingResource(AsyncAPIResource):
+ """
+ The billing endpoints allow you to retrieve your account balance, invoices,
+ billing history, and insights.
+
+ **Balance:** By sending requests to the `/v2/customers/my/balance` endpoint, you can
+ retrieve the balance information for the requested customer account.
+
+ **Invoices:** [Invoices](https://docs.digitalocean.com/platform/billing/invoices/)
+ are generated on the first of each month for every DigitalOcean
+ customer. An invoice preview is generated daily, which can be accessed
+ with the `preview` keyword in place of `$INVOICE_UUID`. To interact with
+ invoices, you will generally send requests to the invoices endpoint at
+ `/v2/customers/my/invoices`.
+
+ **Billing History:** Billing history is a record of billing events for your account.
+ For example, entries may include events like payments made, invoices
+ issued, or credits granted. To interact with billing history, you
+ will generally send requests to the billing history endpoint at
+ `/v2/customers/my/billing_history`.
+
+ **Billing Insights:** Day-over-day changes in billing resource usage based on nightly invoice items,
+ including total amount, region, SKU, and description for a specified date range.
+ It is important to note that the daily resource usage may not reflect month-end billing totals when totaled for
+ a given month as nightly invoice items do not necessarily encompass all invoicing factors for the entire month.
+ `v2/billing/{account_urn}/insights/{start_date}/{end_date}` where account_urn is the URN of the customer
+ account, can be a team (do:team:uuid) or an organization (do:teamgroup:uuid). The date range specified by
+ start_date and end_date must be in YYYY-MM-DD format.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncBillingResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/chat/chat.py b/src/gradient/resources/chat/chat.py
index ac933129..1175e6db 100644
--- a/src/gradient/resources/chat/chat.py
+++ b/src/gradient/resources/chat/chat.py
@@ -19,6 +19,9 @@
class ChatResource(SyncAPIResource):
@cached_property
def completions(self) -> CompletionsResource:
+ """
+ Given a list of messages comprising a conversation, the model will return a response.
+ """
return CompletionsResource(self._client)
@cached_property
@@ -44,6 +47,9 @@ def with_streaming_response(self) -> ChatResourceWithStreamingResponse:
class AsyncChatResource(AsyncAPIResource):
@cached_property
def completions(self) -> AsyncCompletionsResource:
+ """
+ Given a list of messages comprising a conversation, the model will return a response.
+ """
return AsyncCompletionsResource(self._client)
@cached_property
@@ -72,6 +78,9 @@ def __init__(self, chat: ChatResource) -> None:
@cached_property
def completions(self) -> CompletionsResourceWithRawResponse:
+ """
+ Given a list of messages comprising a conversation, the model will return a response.
+ """
return CompletionsResourceWithRawResponse(self._chat.completions)
@@ -81,6 +90,9 @@ def __init__(self, chat: AsyncChatResource) -> None:
@cached_property
def completions(self) -> AsyncCompletionsResourceWithRawResponse:
+ """
+ Given a list of messages comprising a conversation, the model will return a response.
+ """
return AsyncCompletionsResourceWithRawResponse(self._chat.completions)
@@ -90,6 +102,9 @@ def __init__(self, chat: ChatResource) -> None:
@cached_property
def completions(self) -> CompletionsResourceWithStreamingResponse:
+ """
+ Given a list of messages comprising a conversation, the model will return a response.
+ """
return CompletionsResourceWithStreamingResponse(self._chat.completions)
@@ -99,4 +114,7 @@ def __init__(self, chat: AsyncChatResource) -> None:
@cached_property
def completions(self) -> AsyncCompletionsResourceWithStreamingResponse:
+ """
+ Given a list of messages comprising a conversation, the model will return a response.
+ """
return AsyncCompletionsResourceWithStreamingResponse(self._chat.completions)
diff --git a/src/gradient/resources/chat/completions.py b/src/gradient/resources/chat/completions.py
index 779fffb0..d2ae1071 100644
--- a/src/gradient/resources/chat/completions.py
+++ b/src/gradient/resources/chat/completions.py
@@ -27,6 +27,10 @@
class CompletionsResource(SyncAPIResource):
+ """
+ Given a list of messages comprising a conversation, the model will return a response.
+ """
+
@cached_property
def with_raw_response(self) -> CompletionsResourceWithRawResponse:
"""
@@ -517,6 +521,10 @@ def create(
class AsyncCompletionsResource(AsyncAPIResource):
+ """
+ Given a list of messages comprising a conversation, the model will return a response.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncCompletionsResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/databases/schema_registry/config.py b/src/gradient/resources/databases/schema_registry/config.py
index e012dd77..825bd2ae 100644
--- a/src/gradient/resources/databases/schema_registry/config.py
+++ b/src/gradient/resources/databases/schema_registry/config.py
@@ -27,6 +27,32 @@
class ConfigResource(SyncAPIResource):
+ """
+ DigitalOcean's [managed database service](https://docs.digitalocean.com/products/databases)
+ simplifies the creation and management of highly available database clusters. Currently, it
+ offers support for [PostgreSQL](https://docs.digitalocean.com/products/databases/postgresql/),
+ [Caching](https://docs.digitalocean.com/products/databases/redis/),
+ [Valkey](https://docs.digitalocean.com/products/databases/valkey/),
+ [MySQL](https://docs.digitalocean.com/products/databases/mysql/),
+ [MongoDB](https://docs.digitalocean.com/products/databases/mongodb/), and
+ [OpenSearch](https://docs.digitalocean.com/products/databases/opensearch/).
+
+ By sending requests to the `/v2/databases` endpoint, you can list, create, or delete
+ database clusters as well as scale the size of a cluster, add or remove read-only replicas,
+ and manage other configuration details.
+
+ Database clusters may be deployed in a multi-node, high-availability configuration.
+ If your machine type is above the basic nodes, your node plan is above the smallest option,
+ or you are running MongoDB, you may additionally include up to two standby nodes in your cluster.
+
+ The size of individual nodes in a database cluster is represented by a human-readable slug,
+ which is used in some of the following requests. Each slug denotes the node's identifier,
+ CPU count, and amount of RAM, in that order.
+
+ For a list of currently available database slugs and options, use the `/v2/databases/options` endpoint or use the
+ `doctl databases options` [command](https://docs.digitalocean.com/reference/doctl/reference/databases/options).
+ """
+
@cached_property
def with_raw_response(self) -> ConfigResourceWithRawResponse:
"""
@@ -230,6 +256,32 @@ def update_subject(
class AsyncConfigResource(AsyncAPIResource):
+ """
+ DigitalOcean's [managed database service](https://docs.digitalocean.com/products/databases)
+ simplifies the creation and management of highly available database clusters. Currently, it
+ offers support for [PostgreSQL](https://docs.digitalocean.com/products/databases/postgresql/),
+ [Caching](https://docs.digitalocean.com/products/databases/redis/),
+ [Valkey](https://docs.digitalocean.com/products/databases/valkey/),
+ [MySQL](https://docs.digitalocean.com/products/databases/mysql/),
+ [MongoDB](https://docs.digitalocean.com/products/databases/mongodb/), and
+ [OpenSearch](https://docs.digitalocean.com/products/databases/opensearch/).
+
+ By sending requests to the `/v2/databases` endpoint, you can list, create, or delete
+ database clusters as well as scale the size of a cluster, add or remove read-only replicas,
+ and manage other configuration details.
+
+ Database clusters may be deployed in a multi-node, high-availability configuration.
+ If your machine type is above the basic nodes, your node plan is above the smallest option,
+ or you are running MongoDB, you may additionally include up to two standby nodes in your cluster.
+
+ The size of individual nodes in a database cluster is represented by a human-readable slug,
+ which is used in some of the following requests. Each slug denotes the node's identifier,
+ CPU count, and amount of RAM, in that order.
+
+ For a list of currently available database slugs and options, use the `/v2/databases/options` endpoint or use the
+ `doctl databases options` [command](https://docs.digitalocean.com/reference/doctl/reference/databases/options).
+ """
+
@cached_property
def with_raw_response(self) -> AsyncConfigResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/databases/schema_registry/schema_registry.py b/src/gradient/resources/databases/schema_registry/schema_registry.py
index dd7d3dbe..de047c50 100644
--- a/src/gradient/resources/databases/schema_registry/schema_registry.py
+++ b/src/gradient/resources/databases/schema_registry/schema_registry.py
@@ -19,6 +19,31 @@
class SchemaRegistryResource(SyncAPIResource):
@cached_property
def config(self) -> ConfigResource:
+ """
+ DigitalOcean's [managed database service](https://docs.digitalocean.com/products/databases)
+ simplifies the creation and management of highly available database clusters. Currently, it
+ offers support for [PostgreSQL](https://docs.digitalocean.com/products/databases/postgresql/),
+ [Caching](https://docs.digitalocean.com/products/databases/redis/),
+ [Valkey](https://docs.digitalocean.com/products/databases/valkey/),
+ [MySQL](https://docs.digitalocean.com/products/databases/mysql/),
+ [MongoDB](https://docs.digitalocean.com/products/databases/mongodb/), and
+ [OpenSearch](https://docs.digitalocean.com/products/databases/opensearch/).
+
+ By sending requests to the `/v2/databases` endpoint, you can list, create, or delete
+ database clusters as well as scale the size of a cluster, add or remove read-only replicas,
+ and manage other configuration details.
+
+ Database clusters may be deployed in a multi-node, high-availability configuration.
+ If your machine type is above the basic nodes, your node plan is above the smallest option,
+ or you are running MongoDB, you may additionally include up to two standby nodes in your cluster.
+
+ The size of individual nodes in a database cluster is represented by a human-readable slug,
+ which is used in some of the following requests. Each slug denotes the node's identifier,
+ CPU count, and amount of RAM, in that order.
+
+ For a list of currently available database slugs and options, use the `/v2/databases/options` endpoint or use the
+ `doctl databases options` [command](https://docs.digitalocean.com/reference/doctl/reference/databases/options).
+ """
return ConfigResource(self._client)
@cached_property
@@ -44,6 +69,31 @@ def with_streaming_response(self) -> SchemaRegistryResourceWithStreamingResponse
class AsyncSchemaRegistryResource(AsyncAPIResource):
@cached_property
def config(self) -> AsyncConfigResource:
+ """
+ DigitalOcean's [managed database service](https://docs.digitalocean.com/products/databases)
+ simplifies the creation and management of highly available database clusters. Currently, it
+ offers support for [PostgreSQL](http://docs.digitalocean.com/products/databases/postgresql/),
+ [Caching](https://docs.digitalocean.com/products/databases/redis/),
+ [Valkey](https://docs.digitalocean.com/products/databases/valkey/),
+ [MySQL](https://docs.digitalocean.com/products/databases/mysql/),
+ [MongoDB](https://docs.digitalocean.com/products/databases/mongodb/), and
+ [OpenSearch](https://docs.digitalocean.com/products/databases/opensearch/).
+
+ By sending requests to the `/v2/databases` endpoint, you can list, create, or delete
+ database clusters as well as scale the size of a cluster, add or remove read-only replicas,
+ and manage other configuration details.
+
+ Database clusters may be deployed in a multi-node, high-availability configuration.
+ If your machine type is above the basic nodes, your node plan is above the smallest option,
+ or you are running MongoDB, you may additionally include up to two standby nodes in your cluster.
+
+ The size of individual nodes in a database cluster is represented by a human-readable slug,
+ which is used in some of the following requests. Each slug denotes the node's identifier,
+ CPU count, and amount of RAM, in that order.
+
+ For a list of currently available database slugs and options, use the `/v2/databases/options` endpoint or use the
+ `doctl databases options` [command](https://docs.digitalocean.com/reference/doctl/reference/databases/options).
+ """
return AsyncConfigResource(self._client)
@cached_property
@@ -72,6 +122,31 @@ def __init__(self, schema_registry: SchemaRegistryResource) -> None:
@cached_property
def config(self) -> ConfigResourceWithRawResponse:
+ """
+ DigitalOcean's [managed database service](https://docs.digitalocean.com/products/databases)
+ simplifies the creation and management of highly available database clusters. Currently, it
+ offers support for [PostgreSQL](https://docs.digitalocean.com/products/databases/postgresql/),
+ [Caching](https://docs.digitalocean.com/products/databases/redis/),
+ [Valkey](https://docs.digitalocean.com/products/databases/valkey/),
+ [MySQL](https://docs.digitalocean.com/products/databases/mysql/),
+ [MongoDB](https://docs.digitalocean.com/products/databases/mongodb/), and
+ [OpenSearch](https://docs.digitalocean.com/products/databases/opensearch/).
+
+ By sending requests to the `/v2/databases` endpoint, you can list, create, or delete
+ database clusters as well as scale the size of a cluster, add or remove read-only replicas,
+ and manage other configuration details.
+
+ Database clusters may be deployed in a multi-node, high-availability configuration.
+ If your machine type is above the basic nodes, your node plan is above the smallest option,
+ or you are running MongoDB, you may additionally include up to two standby nodes in your cluster.
+
+ The size of individual nodes in a database cluster is represented by a human-readable slug,
+ which is used in some of the following requests. Each slug denotes the node's identifier,
+ CPU count, and amount of RAM, in that order.
+
+ For a list of currently available database slugs and options, use the `/v2/databases/options` endpoint or use the
+ `doctl databases options` [command](https://docs.digitalocean.com/reference/doctl/reference/databases/options).
+ """
return ConfigResourceWithRawResponse(self._schema_registry.config)
@@ -81,6 +156,31 @@ def __init__(self, schema_registry: AsyncSchemaRegistryResource) -> None:
@cached_property
def config(self) -> AsyncConfigResourceWithRawResponse:
+ """
+ DigitalOcean's [managed database service](https://docs.digitalocean.com/products/databases)
+ simplifies the creation and management of highly available database clusters. Currently, it
+ offers support for [PostgreSQL](http://docs.digitalocean.com/products/databases/postgresql/),
+ [Caching](https://docs.digitalocean.com/products/databases/redis/),
+ [Valkey](https://docs.digitalocean.com/products/databases/valkey/),
+ [MySQL](https://docs.digitalocean.com/products/databases/mysql/),
+ [MongoDB](https://docs.digitalocean.com/products/databases/mongodb/), and
+ [OpenSearch](https://docs.digitalocean.com/products/databases/opensearch/).
+
+ By sending requests to the `/v2/databases` endpoint, you can list, create, or delete
+ database clusters as well as scale the size of a cluster, add or remove read-only replicas,
+ and manage other configuration details.
+
+ Database clusters may be deployed in a multi-node, high-availability configuration.
+ If your machine type is above the basic nodes, your node plan is above the smallest option,
+ or you are running MongoDB, you may additionally include up to two standby nodes in your cluster.
+
+ The size of individual nodes in a database cluster is represented by a human-readable slug,
+ which is used in some of the following requests. Each slug denotes the node's identifier,
+ CPU count, and amount of RAM, in that order.
+
+ For a list of currently available database slugs and options, use the `/v2/databases/options` endpoint or use the
+ `doctl databases options` [command](https://docs.digitalocean.com/reference/doctl/reference/databases/options).
+ """
return AsyncConfigResourceWithRawResponse(self._schema_registry.config)
@@ -90,6 +190,31 @@ def __init__(self, schema_registry: SchemaRegistryResource) -> None:
@cached_property
def config(self) -> ConfigResourceWithStreamingResponse:
+ """
+ DigitalOcean's [managed database service](https://docs.digitalocean.com/products/databases)
+ simplifies the creation and management of highly available database clusters. Currently, it
+ offers support for [PostgreSQL](http://docs.digitalocean.com/products/databases/postgresql/),
+ [Caching](https://docs.digitalocean.com/products/databases/redis/),
+ [Valkey](https://docs.digitalocean.com/products/databases/valkey/),
+ [MySQL](https://docs.digitalocean.com/products/databases/mysql/),
+ [MongoDB](https://docs.digitalocean.com/products/databases/mongodb/), and
+ [OpenSearch](https://docs.digitalocean.com/products/databases/opensearch/).
+
+ By sending requests to the `/v2/databases` endpoint, you can list, create, or delete
+ database clusters as well as scale the size of a cluster, add or remove read-only replicas,
+ and manage other configuration details.
+
+ Database clusters may be deployed in a multi-node, high-availability configuration.
+ If your machine type is above the basic nodes, your node plan is above the smallest option,
+ or you are running MongoDB, you may additionally include up to two standby nodes in your cluster.
+
+ The size of individual nodes in a database cluster is represented by a human-readable slug,
+ which is used in some of the following requests. Each slug denotes the node's identifier,
+ CPU count, and amount of RAM, in that order.
+
+ For a list of currently available database slugs and options, use the `/v2/databases/options` endpoint or use the
+ `doctl databases options` [command](https://docs.digitalocean.com/reference/doctl/reference/databases/options).
+ """
return ConfigResourceWithStreamingResponse(self._schema_registry.config)
@@ -99,4 +224,29 @@ def __init__(self, schema_registry: AsyncSchemaRegistryResource) -> None:
@cached_property
def config(self) -> AsyncConfigResourceWithStreamingResponse:
+ """
+ DigitalOcean's [managed database service](https://docs.digitalocean.com/products/databases)
+ simplifies the creation and management of highly available database clusters. Currently, it
+ offers support for [PostgreSQL](https://docs.digitalocean.com/products/databases/postgresql/),
+ [Caching](https://docs.digitalocean.com/products/databases/redis/),
+ [Valkey](https://docs.digitalocean.com/products/databases/valkey/),
+ [MySQL](https://docs.digitalocean.com/products/databases/mysql/),
+ [MongoDB](https://docs.digitalocean.com/products/databases/mongodb/), and
+ [OpenSearch](https://docs.digitalocean.com/products/databases/opensearch/).
+
+ By sending requests to the `/v2/databases` endpoint, you can list, create, or delete
+ database clusters as well as scale the size of a cluster, add or remove read-only replicas,
+ and manage other configuration details.
+
+ Database clusters may be deployed in a multi-node, high-availability configuration.
+ If your machine type is above the basic nodes, your node plan is above the smallest option,
+ or you are running MongoDB, you may additionally include up to two standby nodes in your cluster.
+
+ The size of individual nodes in a database cluster is represented by a human-readable slug,
+ which is used in some of the following requests. Each slug denotes the node's identifier,
+ CPU count, and amount of RAM, in that order.
+
+ For a list of currently available database slugs and options, use the `/v2/databases/options` endpoint or use the
+ `doctl databases options` [command](https://docs.digitalocean.com/reference/doctl/reference/databases/options).
+ """
return AsyncConfigResourceWithStreamingResponse(self._schema_registry.config)
diff --git a/src/gradient/resources/gpu_droplets/account/account.py b/src/gradient/resources/gpu_droplets/account/account.py
index 5bcaf269..05f71ea4 100644
--- a/src/gradient/resources/gpu_droplets/account/account.py
+++ b/src/gradient/resources/gpu_droplets/account/account.py
@@ -19,6 +19,7 @@
class AccountResource(SyncAPIResource):
@cached_property
def keys(self) -> KeysResource:
+ """Manage SSH keys available on your account."""
return KeysResource(self._client)
@cached_property
@@ -44,6 +45,7 @@ def with_streaming_response(self) -> AccountResourceWithStreamingResponse:
class AsyncAccountResource(AsyncAPIResource):
@cached_property
def keys(self) -> AsyncKeysResource:
+ """Manage SSH keys available on your account."""
return AsyncKeysResource(self._client)
@cached_property
@@ -72,6 +74,7 @@ def __init__(self, account: AccountResource) -> None:
@cached_property
def keys(self) -> KeysResourceWithRawResponse:
+ """Manage SSH keys available on your account."""
return KeysResourceWithRawResponse(self._account.keys)
@@ -81,6 +84,7 @@ def __init__(self, account: AsyncAccountResource) -> None:
@cached_property
def keys(self) -> AsyncKeysResourceWithRawResponse:
+ """Manage SSH keys available on your account."""
return AsyncKeysResourceWithRawResponse(self._account.keys)
@@ -90,6 +94,7 @@ def __init__(self, account: AccountResource) -> None:
@cached_property
def keys(self) -> KeysResourceWithStreamingResponse:
+ """Manage SSH keys available on your account."""
return KeysResourceWithStreamingResponse(self._account.keys)
@@ -99,4 +104,5 @@ def __init__(self, account: AsyncAccountResource) -> None:
@cached_property
def keys(self) -> AsyncKeysResourceWithStreamingResponse:
+ """Manage SSH keys available on your account."""
return AsyncKeysResourceWithStreamingResponse(self._account.keys)
diff --git a/src/gradient/resources/gpu_droplets/account/keys.py b/src/gradient/resources/gpu_droplets/account/keys.py
index f50b9945..22aa6f0d 100644
--- a/src/gradient/resources/gpu_droplets/account/keys.py
+++ b/src/gradient/resources/gpu_droplets/account/keys.py
@@ -27,6 +27,8 @@
class KeysResource(SyncAPIResource):
+ """Manage SSH keys available on your account."""
+
@cached_property
def with_raw_response(self) -> KeysResourceWithRawResponse:
"""
@@ -266,6 +268,8 @@ def delete(
class AsyncKeysResource(AsyncAPIResource):
+ """Manage SSH keys available on your account."""
+
@cached_property
def with_raw_response(self) -> AsyncKeysResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/gpu_droplets/actions.py b/src/gradient/resources/gpu_droplets/actions.py
index a708fb67..dd8e55dc 100644
--- a/src/gradient/resources/gpu_droplets/actions.py
+++ b/src/gradient/resources/gpu_droplets/actions.py
@@ -29,6 +29,27 @@
class ActionsResource(SyncAPIResource):
+ """Droplet actions are tasks that can be executed on a Droplet.
+
+ These can be things like rebooting, resizing,
+ snapshotting, etc.
+
+ Droplet action requests are generally targeted at one of the "actions"
+ endpoints for a specific Droplet. The specific actions are usually
+ initiated by sending a POST request with the action and arguments as
+ parameters.
+
+ Droplet action requests create a Droplet actions object, which can be used
+ to get information about the status of an action. Creating a Droplet
+ action is asynchronous: the HTTP call will return the action object before
+ the action has finished processing on the Droplet. The current status of
+ an action can be retrieved from either the Droplet actions endpoint or the
+ global actions endpoint. If a Droplet action is uncompleted it may block
+ the creation of a subsequent action for that Droplet, the locked attribute
+ of the Droplet will be true and attempts to create a Droplet action will
+ fail with a status of 422.
+ """
+
@cached_property
def with_raw_response(self) -> ActionsResourceWithRawResponse:
"""
@@ -1002,6 +1023,27 @@ def initiate(
class AsyncActionsResource(AsyncAPIResource):
+ """Droplet actions are tasks that can be executed on a Droplet.
+
+ These can be things like rebooting, resizing,
+ snapshotting, etc.
+
+ Droplet action requests are generally targeted at one of the "actions"
+ endpoints for a specific Droplet. The specific actions are usually
+ initiated by sending a POST request with the action and arguments as
+ parameters.
+
+ Droplet action requests create a Droplet actions object, which can be used
+ to get information about the status of an action. Creating a Droplet
+ action is asynchronous: the HTTP call will return the action object before
+ the action has finished processing on the Droplet. The current status of
+ an action can be retrieved from either the Droplet actions endpoint or the
+ global actions endpoint. If a Droplet action is uncompleted it may block
+ the creation of a subsequent action for that Droplet, the locked attribute
+ of the Droplet will be true and attempts to create a Droplet action will
+ fail with a status of 422.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncActionsResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/gpu_droplets/autoscale.py b/src/gradient/resources/gpu_droplets/autoscale.py
index 8df17f7a..b529d9da 100644
--- a/src/gradient/resources/gpu_droplets/autoscale.py
+++ b/src/gradient/resources/gpu_droplets/autoscale.py
@@ -34,6 +34,10 @@
class AutoscaleResource(SyncAPIResource):
+ """
+ Droplet autoscale pools manage automatic horizontal scaling for your applications based on resource usage (CPU, memory, or both) or a static configuration.
+ """
+
@cached_property
def with_raw_response(self) -> AutoscaleResourceWithRawResponse:
"""
@@ -441,6 +445,10 @@ def list_members(
class AsyncAutoscaleResource(AsyncAPIResource):
+ """
+ Droplet autoscale pools manage automatic horizontal scaling for your applications based on resource usage (CPU, memory, or both) or a static configuration.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncAutoscaleResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/gpu_droplets/backups.py b/src/gradient/resources/gpu_droplets/backups.py
index 065aa3d1..a924c93b 100644
--- a/src/gradient/resources/gpu_droplets/backups.py
+++ b/src/gradient/resources/gpu_droplets/backups.py
@@ -25,6 +25,17 @@
class BackupsResource(SyncAPIResource):
+ """
+ A [Droplet](https://docs.digitalocean.com/products/droplets/) is a DigitalOcean
+ virtual machine. By sending requests to the Droplet endpoint, you can
+ list, create, or delete Droplets.
+
+ Some of the attributes will have an object value. The `region` and `image`
+ objects will all contain the standard attributes of their associated
+ types. Find more information about each of these objects in their
+ respective sections.
+ """
+
@cached_property
def with_raw_response(self) -> BackupsResourceWithRawResponse:
"""
@@ -207,6 +218,17 @@ def retrieve_policy(
class AsyncBackupsResource(AsyncAPIResource):
+ """
+ A [Droplet](https://docs.digitalocean.com/products/droplets/) is a DigitalOcean
+ virtual machine. By sending requests to the Droplet endpoint, you can
+ list, create, or delete Droplets.
+
+ Some of the attributes will have an object value. The `region` and `image`
+ objects will all contain the standard attributes of their associated
+ types. Find more information about each of these objects in their
+ respective sections.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncBackupsResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/gpu_droplets/destroy_with_associated_resources.py b/src/gradient/resources/gpu_droplets/destroy_with_associated_resources.py
index 2ccad852..04c8f5b3 100644
--- a/src/gradient/resources/gpu_droplets/destroy_with_associated_resources.py
+++ b/src/gradient/resources/gpu_droplets/destroy_with_associated_resources.py
@@ -27,6 +27,17 @@
class DestroyWithAssociatedResourcesResource(SyncAPIResource):
+ """
+ A [Droplet](https://docs.digitalocean.com/products/droplets/) is a DigitalOcean
+ virtual machine. By sending requests to the Droplet endpoint, you can
+ list, create, or delete Droplets.
+
+ Some of the attributes will have an object value. The `region` and `image`
+ objects will all contain the standard attributes of their associated
+ types. Find more information about each of these objects in their
+ respective sections.
+ """
+
@cached_property
def with_raw_response(self) -> DestroyWithAssociatedResourcesResourceWithRawResponse:
"""
@@ -283,6 +294,17 @@ def retry(
class AsyncDestroyWithAssociatedResourcesResource(AsyncAPIResource):
+ """
+ A [Droplet](https://docs.digitalocean.com/products/droplets/) is a DigitalOcean
+ virtual machine. By sending requests to the Droplet endpoint, you can
+ list, create, or delete Droplets.
+
+ Some of the attributes will have an object value. The `region` and `image`
+ objects will all contain the standard attributes of their associated
+ types. Find more information about each of these objects in their
+ respective sections.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncDestroyWithAssociatedResourcesResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/gpu_droplets/firewalls/droplets.py b/src/gradient/resources/gpu_droplets/firewalls/droplets.py
index 90bcb47e..b77bf1dc 100644
--- a/src/gradient/resources/gpu_droplets/firewalls/droplets.py
+++ b/src/gradient/resources/gpu_droplets/firewalls/droplets.py
@@ -23,6 +23,14 @@
class DropletsResource(SyncAPIResource):
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
+
@cached_property
def with_raw_response(self) -> DropletsResourceWithRawResponse:
"""
@@ -136,6 +144,14 @@ def remove(
class AsyncDropletsResource(AsyncAPIResource):
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncDropletsResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/gpu_droplets/firewalls/firewalls.py b/src/gradient/resources/gpu_droplets/firewalls/firewalls.py
index a5fee406..4367941c 100644
--- a/src/gradient/resources/gpu_droplets/firewalls/firewalls.py
+++ b/src/gradient/resources/gpu_droplets/firewalls/firewalls.py
@@ -50,16 +50,45 @@
class FirewallsResource(SyncAPIResource):
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
+
@cached_property
def droplets(self) -> DropletsResource:
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
return DropletsResource(self._client)
@cached_property
def tags(self) -> TagsResource:
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
return TagsResource(self._client)
@cached_property
def rules(self) -> RulesResource:
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
return RulesResource(self._client)
@cached_property
@@ -283,16 +312,45 @@ def delete(
class AsyncFirewallsResource(AsyncAPIResource):
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
+
@cached_property
def droplets(self) -> AsyncDropletsResource:
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
return AsyncDropletsResource(self._client)
@cached_property
def tags(self) -> AsyncTagsResource:
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
return AsyncTagsResource(self._client)
@cached_property
def rules(self) -> AsyncRulesResource:
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
return AsyncRulesResource(self._client)
@cached_property
@@ -537,14 +595,35 @@ def __init__(self, firewalls: FirewallsResource) -> None:
@cached_property
def droplets(self) -> DropletsResourceWithRawResponse:
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
return DropletsResourceWithRawResponse(self._firewalls.droplets)
@cached_property
def tags(self) -> TagsResourceWithRawResponse:
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
return TagsResourceWithRawResponse(self._firewalls.tags)
@cached_property
def rules(self) -> RulesResourceWithRawResponse:
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
return RulesResourceWithRawResponse(self._firewalls.rules)
@@ -570,14 +649,35 @@ def __init__(self, firewalls: AsyncFirewallsResource) -> None:
@cached_property
def droplets(self) -> AsyncDropletsResourceWithRawResponse:
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
return AsyncDropletsResourceWithRawResponse(self._firewalls.droplets)
@cached_property
def tags(self) -> AsyncTagsResourceWithRawResponse:
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
return AsyncTagsResourceWithRawResponse(self._firewalls.tags)
@cached_property
def rules(self) -> AsyncRulesResourceWithRawResponse:
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
return AsyncRulesResourceWithRawResponse(self._firewalls.rules)
@@ -603,14 +703,35 @@ def __init__(self, firewalls: FirewallsResource) -> None:
@cached_property
def droplets(self) -> DropletsResourceWithStreamingResponse:
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
return DropletsResourceWithStreamingResponse(self._firewalls.droplets)
@cached_property
def tags(self) -> TagsResourceWithStreamingResponse:
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
return TagsResourceWithStreamingResponse(self._firewalls.tags)
@cached_property
def rules(self) -> RulesResourceWithStreamingResponse:
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
return RulesResourceWithStreamingResponse(self._firewalls.rules)
@@ -636,12 +757,33 @@ def __init__(self, firewalls: AsyncFirewallsResource) -> None:
@cached_property
def droplets(self) -> AsyncDropletsResourceWithStreamingResponse:
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
return AsyncDropletsResourceWithStreamingResponse(self._firewalls.droplets)
@cached_property
def tags(self) -> AsyncTagsResourceWithStreamingResponse:
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
return AsyncTagsResourceWithStreamingResponse(self._firewalls.tags)
@cached_property
def rules(self) -> AsyncRulesResourceWithStreamingResponse:
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
return AsyncRulesResourceWithStreamingResponse(self._firewalls.rules)
diff --git a/src/gradient/resources/gpu_droplets/firewalls/rules.py b/src/gradient/resources/gpu_droplets/firewalls/rules.py
index f669fc6d..bea27fe4 100644
--- a/src/gradient/resources/gpu_droplets/firewalls/rules.py
+++ b/src/gradient/resources/gpu_droplets/firewalls/rules.py
@@ -23,6 +23,14 @@
class RulesResource(SyncAPIResource):
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
+
@cached_property
def with_raw_response(self) -> RulesResourceWithRawResponse:
"""
@@ -148,6 +156,14 @@ def remove(
class AsyncRulesResource(AsyncAPIResource):
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncRulesResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/gpu_droplets/firewalls/tags.py b/src/gradient/resources/gpu_droplets/firewalls/tags.py
index 82d613fb..c38788be 100644
--- a/src/gradient/resources/gpu_droplets/firewalls/tags.py
+++ b/src/gradient/resources/gpu_droplets/firewalls/tags.py
@@ -23,6 +23,14 @@
class TagsResource(SyncAPIResource):
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
+
@cached_property
def with_raw_response(self) -> TagsResourceWithRawResponse:
"""
@@ -142,6 +150,14 @@ def remove(
class AsyncTagsResource(AsyncAPIResource):
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncTagsResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/gpu_droplets/floating_ips/actions.py b/src/gradient/resources/gpu_droplets/floating_ips/actions.py
index f73d5707..83e0b918 100644
--- a/src/gradient/resources/gpu_droplets/floating_ips/actions.py
+++ b/src/gradient/resources/gpu_droplets/floating_ips/actions.py
@@ -26,6 +26,28 @@
class ActionsResource(SyncAPIResource):
+ """
+ As of 16 June 2022, we have renamed the Floating IP product to [Reserved IPs](https://docs.digitalocean.com/reference/api/api-reference/#tag/Reserved-IPs).
+ The Reserved IP product's endpoints function the exact same way as Floating IPs.
+ The only difference is the name change throughout the URLs and fields.
+ For example, the `floating_ips` field is now the `reserved_ips` field.
+ The Floating IP endpoints will remain active until fall 2023 before being
+ permanently deprecated.
+
+ With the exception of the [Projects API](https://docs.digitalocean.com/reference/api/api-reference/#tag/Projects),
+ we will reflect this change as an additional field in the responses across the API
+ where the `floating_ip` field is used. For example, the Droplet metadata response
+ will contain the field `reserved_ips` in addition to the `floating_ips` field.
+ Floating IPs retrieved using the Projects API will retain the original name.
+
+ Floating IP actions are commands that can be given to a DigitalOcean
+ floating IP. These requests are made on the actions endpoint of a specific
+ floating IP.
+
+ An action object is returned. These objects hold the current status of the
+ requested action.
+ """
+
@cached_property
def with_raw_response(self) -> ActionsResourceWithRawResponse:
"""
@@ -228,6 +250,28 @@ def list(
class AsyncActionsResource(AsyncAPIResource):
+ """
+ As of 16 June 2022, we have renamed the Floating IP product to [Reserved IPs](https://docs.digitalocean.com/reference/api/api-reference/#tag/Reserved-IPs).
+ The Reserved IP product's endpoints function the exact same way as Floating IPs.
+ The only difference is the name change throughout the URLs and fields.
+ For example, the `floating_ips` field is now the `reserved_ips` field.
+ The Floating IP endpoints will remain active until fall 2023 before being
+ permanently deprecated.
+
+ With the exception of the [Projects API](https://docs.digitalocean.com/reference/api/api-reference/#tag/Projects),
+ we will reflect this change as an additional field in the responses across the API
+ where the `floating_ip` field is used. For example, the Droplet metadata response
+ will contain the field `reserved_ips` in addition to the `floating_ips` field.
+ Floating IPs retrieved using the Projects API will retain the original name.
+
+ Floating IP actions are commands that can be given to a DigitalOcean
+ floating IP. These requests are made on the actions endpoint of a specific
+ floating IP.
+
+ An action object is returned. These objects hold the current status of the
+ requested action.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncActionsResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/gpu_droplets/floating_ips/floating_ips.py b/src/gradient/resources/gpu_droplets/floating_ips/floating_ips.py
index f55bfd41..0e65e6aa 100644
--- a/src/gradient/resources/gpu_droplets/floating_ips/floating_ips.py
+++ b/src/gradient/resources/gpu_droplets/floating_ips/floating_ips.py
@@ -34,8 +34,51 @@
class FloatingIPsResource(SyncAPIResource):
+ """
+ As of 16 June 2022, we have renamed the Floating IP product to [Reserved IPs](https://docs.digitalocean.com/reference/api/api-reference/#tag/Reserved-IPs).
+ The Reserved IP product's endpoints function the exact same way as Floating IPs.
+ The only difference is the name change throughout the URLs and fields.
+ For example, the `floating_ips` field is now the `reserved_ips` field.
+ The Floating IP endpoints will remain active until fall 2023 before being
+ permanently deprecated.
+
+ With the exception of the [Projects API](https://docs.digitalocean.com/reference/api/api-reference/#tag/Projects),
+ we will reflect this change as an additional field in the responses across the API
+ where the `floating_ip` field is used. For example, the Droplet metadata response
+ will contain the field `reserved_ips` in addition to the `floating_ips` field.
+ Floating IPs retrieved using the Projects API will retain the original name.
+
+ [DigitalOcean Floating IPs](https://docs.digitalocean.com/products/networking/reserved-ips/)
+ are publicly-accessible static IP addresses that can be mapped to one of
+ your Droplets. They can be used to create highly available setups or other
+ configurations requiring movable addresses.
+
+ Floating IPs are bound to a specific region.
+ """
+
@cached_property
def actions(self) -> ActionsResource:
+ """
+ As of 16 June 2022, we have renamed the Floating IP product to [Reserved IPs](https://docs.digitalocean.com/reference/api/api-reference/#tag/Reserved-IPs).
+ The Reserved IP product's endpoints function the exact same way as Floating IPs.
+ The only difference is the name change throughout the URLs and fields.
+ For example, the `floating_ips` field is now the `reserved_ips` field.
+ The Floating IP endpoints will remain active until fall 2023 before being
+ permanently deprecated.
+
+ With the exception of the [Projects API](https://docs.digitalocean.com/reference/api/api-reference/#tag/Projects),
+ we will reflect this change as an additional field in the responses across the API
+ where the `floating_ip` field is used. For example, the Droplet metadata response
+ will contain the field `reserved_ips` in addition to the `floating_ips` field.
+ Floating IPs retrieved using the Projects API will retain the original name.
+
+ Floating IP actions are commands that can be given to a DigitalOcean
+ floating IP. These requests are made on the actions endpoint of a specific
+ floating IP.
+
+ An action object is returned. These objects hold the current status of the
+ requested action.
+ """
return ActionsResource(self._client)
@cached_property
@@ -285,8 +328,51 @@ def delete(
class AsyncFloatingIPsResource(AsyncAPIResource):
+ """
+ As of 16 June 2022, we have renamed the Floating IP product to [Reserved IPs](https://docs.digitalocean.com/reference/api/api-reference/#tag/Reserved-IPs).
+ The Reserved IP product's endpoints function the exact same way as Floating IPs.
+ The only difference is the name change throughout the URLs and fields.
+ For example, the `floating_ips` field is now the `reserved_ips` field.
+ The Floating IP endpoints will remain active until fall 2023 before being
+ permanently deprecated.
+
+ With the exception of the [Projects API](https://docs.digitalocean.com/reference/api/api-reference/#tag/Projects),
+ we will reflect this change as an additional field in the responses across the API
+ where the `floating_ip` field is used. For example, the Droplet metadata response
+ will contain the field `reserved_ips` in addition to the `floating_ips` field.
+ Floating IPs retrieved using the Projects API will retain the original name.
+
+ [DigitalOcean Floating IPs](https://docs.digitalocean.com/products/networking/reserved-ips/)
+ are publicly-accessible static IP addresses that can be mapped to one of
+ your Droplets. They can be used to create highly available setups or other
+ configurations requiring movable addresses.
+
+ Floating IPs are bound to a specific region.
+ """
+
@cached_property
def actions(self) -> AsyncActionsResource:
+ """
+ As of 16 June 2022, we have renamed the Floating IP product to [Reserved IPs](https://docs.digitalocean.com/reference/api/api-reference/#tag/Reserved-IPs).
+ The Reserved IP product's endpoints function the exact same way as Floating IPs.
+ The only difference is the name change throughout the URLs and fields.
+ For example, the `floating_ips` field is now the `reserved_ips` field.
+ The Floating IP endpoints will remain active until fall 2023 before being
+ permanently deprecated.
+
+ With the exception of the [Projects API](https://docs.digitalocean.com/reference/api/api-reference/#tag/Projects),
+ we will reflect this change as an additional field in the responses across the API
+ where the `floating_ip` field is used. For example, the Droplet metadata response
+ will contain the field `reserved_ips` in addition to the `floating_ips` field.
+ Floating IPs retrieved using the Projects API will retain the original name.
+
+ Floating IP actions are commands that can be given to a DigitalOcean
+ floating IP. These requests are made on the actions endpoint of a specific
+ floating IP.
+
+ An action object is returned. These objects hold the current status of the
+ requested action.
+ """
return AsyncActionsResource(self._client)
@cached_property
@@ -554,6 +640,27 @@ def __init__(self, floating_ips: FloatingIPsResource) -> None:
@cached_property
def actions(self) -> ActionsResourceWithRawResponse:
+ """
+ As of 16 June 2022, we have renamed the Floating IP product to [Reserved IPs](https://docs.digitalocean.com/reference/api/api-reference/#tag/Reserved-IPs).
+ The Reserved IP product's endpoints function the exact same way as Floating IPs.
+ The only difference is the name change throughout the URLs and fields.
+ For example, the `floating_ips` field is now the `reserved_ips` field.
+ The Floating IP endpoints will remain active until fall 2023 before being
+ permanently deprecated.
+
+ With the exception of the [Projects API](https://docs.digitalocean.com/reference/api/api-reference/#tag/Projects),
+ we will reflect this change as an additional field in the responses across the API
+ where the `floating_ip` field is used. For example, the Droplet metadata response
+ will contain the field `reserved_ips` in addition to the `floating_ips` field.
+ Floating IPs retrieved using the Projects API will retain the original name.
+
+ Floating IP actions are commands that can be given to a DigitalOcean
+ floating IP. These requests are made on the actions endpoint of a specific
+ floating IP.
+
+ An action object is returned. These objects hold the current status of the
+ requested action.
+ """
return ActionsResourceWithRawResponse(self._floating_ips.actions)
@@ -576,6 +683,27 @@ def __init__(self, floating_ips: AsyncFloatingIPsResource) -> None:
@cached_property
def actions(self) -> AsyncActionsResourceWithRawResponse:
+ """
+ As of 16 June 2022, we have renamed the Floating IP product to [Reserved IPs](https://docs.digitalocean.com/reference/api/api-reference/#tag/Reserved-IPs).
+ The Reserved IP product's endpoints function the exact same way as Floating IPs.
+ The only difference is the name change throughout the URLs and fields.
+ For example, the `floating_ips` field is now the `reserved_ips` field.
+ The Floating IP endpoints will remain active until fall 2023 before being
+ permanently deprecated.
+
+ With the exception of the [Projects API](https://docs.digitalocean.com/reference/api/api-reference/#tag/Projects),
+ we will reflect this change as an additional field in the responses across the API
+ where the `floating_ip` field is used. For example, the Droplet metadata response
+ will contain the field `reserved_ips` in addition to the `floating_ips` field.
+ Floating IPs retrieved using the Projects API will retain the original name.
+
+ Floating IP actions are commands that can be given to a DigitalOcean
+ floating IP. These requests are made on the actions endpoint of a specific
+ floating IP.
+
+ An action object is returned. These objects hold the current status of the
+ requested action.
+ """
return AsyncActionsResourceWithRawResponse(self._floating_ips.actions)
@@ -598,6 +726,27 @@ def __init__(self, floating_ips: FloatingIPsResource) -> None:
@cached_property
def actions(self) -> ActionsResourceWithStreamingResponse:
+ """
+ As of 16 June 2022, we have renamed the Floating IP product to [Reserved IPs](https://docs.digitalocean.com/reference/api/api-reference/#tag/Reserved-IPs).
+ The Reserved IP product's endpoints function the exact same way as Floating IPs.
+ The only difference is the name change throughout the URLs and fields.
+ For example, the `floating_ips` field is now the `reserved_ips` field.
+ The Floating IP endpoints will remain active until fall 2023 before being
+ permanently deprecated.
+
+ With the exception of the [Projects API](https://docs.digitalocean.com/reference/api/api-reference/#tag/Projects),
+ we will reflect this change as an additional field in the responses across the API
+ where the `floating_ip` field is used. For example, the Droplet metadata response
+ will contain the field `reserved_ips` in addition to the `floating_ips` field.
+ Floating IPs retrieved using the Projects API will retain the original name.
+
+ Floating IP actions are commands that can be given to a DigitalOcean
+ floating IP. These requests are made on the actions endpoint of a specific
+ floating IP.
+
+ An action object is returned. These objects hold the current status of the
+ requested action.
+ """
return ActionsResourceWithStreamingResponse(self._floating_ips.actions)
@@ -620,4 +769,25 @@ def __init__(self, floating_ips: AsyncFloatingIPsResource) -> None:
@cached_property
def actions(self) -> AsyncActionsResourceWithStreamingResponse:
+ """
+ As of 16 June 2022, we have renamed the Floating IP product to [Reserved IPs](https://docs.digitalocean.com/reference/api/api-reference/#tag/Reserved-IPs).
+ The Reserved IP product's endpoints function the exact same way as Floating IPs.
+ The only difference is the name change throughout the URLs and fields.
+ For example, the `floating_ips` field is now the `reserved_ips` field.
+ The Floating IP endpoints will remain active until fall 2023 before being
+ permanently deprecated.
+
+ With the exception of the [Projects API](https://docs.digitalocean.com/reference/api/api-reference/#tag/Projects),
+ we will reflect this change as an additional field in the responses across the API
+ where the `floating_ip` field is used. For example, the Droplet metadata response
+ will contain the field `reserved_ips` in addition to the `floating_ips` field.
+ Floating IPs retrieved using the Projects API will retain the original name.
+
+ Floating IP actions are commands that can be given to a DigitalOcean
+ floating IP. These requests are made on the actions endpoint of a specific
+ floating IP.
+
+ An action object is returned. These objects hold the current status of the
+ requested action.
+ """
return AsyncActionsResourceWithStreamingResponse(self._floating_ips.actions)
diff --git a/src/gradient/resources/gpu_droplets/gpu_droplets.py b/src/gradient/resources/gpu_droplets/gpu_droplets.py
index c9f84747..5c0b1274 100644
--- a/src/gradient/resources/gpu_droplets/gpu_droplets.py
+++ b/src/gradient/resources/gpu_droplets/gpu_droplets.py
@@ -135,48 +135,197 @@
class GPUDropletsResource(SyncAPIResource):
+ """
+ A [Droplet](https://docs.digitalocean.com/products/droplets/) is a DigitalOcean
+ virtual machine. By sending requests to the Droplet endpoint, you can
+ list, create, or delete Droplets.
+
+ Some of the attributes will have an object value. The `region` and `image`
+ objects will all contain the standard attributes of their associated
+ types. Find more information about each of these objects in their
+ respective sections.
+ """
+
@cached_property
def backups(self) -> BackupsResource:
+ """
+ A [Droplet](https://docs.digitalocean.com/products/droplets/) is a DigitalOcean
+ virtual machine. By sending requests to the Droplet endpoint, you can
+ list, create, or delete Droplets.
+
+ Some of the attributes will have an object value. The `region` and `image`
+ objects will all contain the standard attributes of their associated
+ types. Find more information about each of these objects in their
+ respective sections.
+ """
return BackupsResource(self._client)
@cached_property
def actions(self) -> ActionsResource:
+ """Droplet actions are tasks that can be executed on a Droplet.
+
+ These can be
+ things like rebooting, resizing, snapshotting, etc.
+
+ Droplet action requests are generally targeted at one of the "actions"
+ endpoints for a specific Droplet. The specific actions are usually
+ initiated by sending a POST request with the action and arguments as
+ parameters.
+
+ Droplet action requests create a Droplet actions object, which can be used
+ to get information about the status of an action. Creating a Droplet
+ action is asynchronous: the HTTP call will return the action object before
+ the action has finished processing on the Droplet. The current status of
+ an action can be retrieved from either the Droplet actions endpoint or the
+ global actions endpoint. If a Droplet action is uncompleted it may block
+ the creation of a subsequent action for that Droplet, the locked attribute
+ of the Droplet will be true and attempts to create a Droplet action will
+ fail with a status of 422.
+ """
return ActionsResource(self._client)
@cached_property
def destroy_with_associated_resources(self) -> DestroyWithAssociatedResourcesResource:
+ """
+ A [Droplet](https://docs.digitalocean.com/products/droplets/) is a DigitalOcean
+ virtual machine. By sending requests to the Droplet endpoint, you can
+ list, create, or delete Droplets.
+
+ Some of the attributes will have an object value. The `region` and `image`
+ objects will all contain the standard attributes of their associated
+ types. Find more information about each of these objects in their
+ respective sections.
+ """
return DestroyWithAssociatedResourcesResource(self._client)
@cached_property
def autoscale(self) -> AutoscaleResource:
+ """
+ Droplet autoscale pools manage automatic horizontal scaling for your applications based on resource usage (CPU, memory, or both) or a static configuration.
+ """
return AutoscaleResource(self._client)
@cached_property
def firewalls(self) -> FirewallsResource:
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
return FirewallsResource(self._client)
@cached_property
def floating_ips(self) -> FloatingIPsResource:
+ """
+ As of 16 June 2022, we have renamed the Floating IP product to [Reserved IPs](https://docs.digitalocean.com/reference/api/api-reference/#tag/Reserved-IPs).
+ The Reserved IP product's endpoints function the exact same way as Floating IPs.
+ The only difference is the name change throughout the URLs and fields.
+ For example, the `floating_ips` field is now the `reserved_ips` field.
+ The Floating IP endpoints will remain active until fall 2023 before being
+ permanently deprecated.
+
+ With the exception of the [Projects API](https://docs.digitalocean.com/reference/api/api-reference/#tag/Projects),
+ we will reflect this change as an additional field in the responses across the API
+ where the `floating_ip` field is used. For example, the Droplet metadata response
+ will contain the field `reserved_ips` in addition to the `floating_ips` field.
+ Floating IPs retrieved using the Projects API will retain the original name.
+
+ [DigitalOcean Floating IPs](https://docs.digitalocean.com/products/networking/reserved-ips/)
+ are publicly-accessible static IP addresses that can be mapped to one of
+ your Droplets. They can be used to create highly available setups or other
+ configurations requiring movable addresses.
+
+ Floating IPs are bound to a specific region.
+ """
return FloatingIPsResource(self._client)
@cached_property
def images(self) -> ImagesResource:
+ """
+ A DigitalOcean [image](https://docs.digitalocean.com/products/images/) can be
+ used to create a Droplet and may come in a number of flavors. Currently,
+ there are five types of images: snapshots, backups, applications,
+ distributions, and custom images.
+
+ * [Snapshots](https://docs.digitalocean.com/products/snapshots/) provide
+ a full copy of an existing Droplet instance taken on demand.
+
+ * [Backups](https://docs.digitalocean.com/products/backups/) are similar
+ to snapshots but are created automatically at regular intervals when
+ enabled for a Droplet.
+
+ * [Custom images](https://docs.digitalocean.com/products/custom-images/)
+ are Linux-based virtual machine images (raw, qcow2, vhdx, vdi, and vmdk
+ formats are supported) that you may upload for use on DigitalOcean.
+
+ * Distributions are the public Linux distributions that are available to
+ be used as a base to create Droplets.
+
+ * Applications, or [1-Click Apps](https://docs.digitalocean.com/products/marketplace/),
+ are distributions pre-configured with additional software.
+
+ To interact with images, you will generally send requests to the images
+ endpoint at /v2/images.
+ """
return ImagesResource(self._client)
@cached_property
def load_balancers(self) -> LoadBalancersResource:
+ """
+ [DigitalOcean Load Balancers](https://docs.digitalocean.com/products/networking/load-balancers/)
+ provide a way to distribute traffic across multiple Droplets. By sending
+ requests to the `/v2/load_balancers` endpoint, you can list, create, or
+ delete load balancers as well as add or remove Droplets, forwarding rules,
+ and other configuration details.
+ """
return LoadBalancersResource(self._client)
@cached_property
def sizes(self) -> SizesResource:
+ """
+ The sizes objects represent different packages of hardware resources that
+ can be used for Droplets. When a Droplet is created, a size must be
+ selected so that the correct resources can be allocated.
+
+ Each size represents a plan that bundles together specific sets of
+ resources. This includes the amount of RAM, the number of virtual CPUs,
+ disk space, and transfer. The size object also includes the pricing
+ details and the regions that the size is available in.
+ """
return SizesResource(self._client)
@cached_property
def snapshots(self) -> SnapshotsResource:
+ """
+ [Snapshots](https://docs.digitalocean.com/products/snapshots/) are saved
+ instances of a Droplet or a block storage volume, which is reflected in
+ the `resource_type` attribute. In order to avoid problems with compressing
+ filesystems, each defines a `min_disk_size` attribute which is the minimum
+ size of the Droplet or volume disk when creating a new resource from the
+ saved snapshot.
+
+ To interact with snapshots, you will generally send requests to the
+ snapshots endpoint at `/v2/snapshots`.
+ """
return SnapshotsResource(self._client)
@cached_property
def volumes(self) -> VolumesResource:
+ """
+ [DigitalOcean Block Storage Volumes](https://docs.digitalocean.com/products/volumes/)
+ provide expanded storage capacity for your Droplets and can be moved
+ between Droplets within a specific region.
+
+ Volumes function as raw block devices, meaning they appear to the
+ operating system as locally attached storage which can be formatted using
+ any file system supported by the OS. They may be created in sizes from
+ 1GiB to 16TiB.
+
+ By sending requests to the `/v2/volumes` endpoint, you can list, create, or
+ delete volumes as well as attach and detach them from Droplets.
+ """
return VolumesResource(self._client)
@cached_property
@@ -906,48 +1055,197 @@ def list_snapshots(
class AsyncGPUDropletsResource(AsyncAPIResource):
+ """
+ A [Droplet](https://docs.digitalocean.com/products/droplets/) is a DigitalOcean
+ virtual machine. By sending requests to the Droplet endpoint, you can
+ list, create, or delete Droplets.
+
+ Some of the attributes will have an object value. The `region` and `image`
+ objects will all contain the standard attributes of their associated
+ types. Find more information about each of these objects in their
+ respective sections.
+ """
+
@cached_property
def backups(self) -> AsyncBackupsResource:
+ """
+ A [Droplet](https://docs.digitalocean.com/products/droplets/) is a DigitalOcean
+ virtual machine. By sending requests to the Droplet endpoint, you can
+ list, create, or delete Droplets.
+
+ Some of the attributes will have an object value. The `region` and `image`
+ objects will all contain the standard attributes of their associated
+ types. Find more information about each of these objects in their
+ respective sections.
+ """
return AsyncBackupsResource(self._client)
@cached_property
def actions(self) -> AsyncActionsResource:
+ """Droplet actions are tasks that can be executed on a Droplet.
+
+ These can be
+ things like rebooting, resizing, snapshotting, etc.
+
+ Droplet action requests are generally targeted at one of the "actions"
+ endpoints for a specific Droplet. The specific actions are usually
+ initiated by sending a POST request with the action and arguments as
+ parameters.
+
+ Droplet action requests create a Droplet actions object, which can be used
+ to get information about the status of an action. Creating a Droplet
+ action is asynchronous: the HTTP call will return the action object before
+ the action has finished processing on the Droplet. The current status of
+ an action can be retrieved from either the Droplet actions endpoint or the
+ global actions endpoint. If a Droplet action is uncompleted it may block
+ the creation of a subsequent action for that Droplet, the locked attribute
+ of the Droplet will be true and attempts to create a Droplet action will
+ fail with a status of 422.
+ """
return AsyncActionsResource(self._client)
@cached_property
def destroy_with_associated_resources(self) -> AsyncDestroyWithAssociatedResourcesResource:
+ """
+ A [Droplet](https://docs.digitalocean.com/products/droplets/) is a DigitalOcean
+ virtual machine. By sending requests to the Droplet endpoint, you can
+ list, create, or delete Droplets.
+
+ Some of the attributes will have an object value. The `region` and `image`
+ objects will all contain the standard attributes of their associated
+ types. Find more information about each of these objects in their
+ respective sections.
+ """
return AsyncDestroyWithAssociatedResourcesResource(self._client)
@cached_property
def autoscale(self) -> AsyncAutoscaleResource:
+ """
+ Droplet autoscale pools manage automatic horizontal scaling for your applications based on resource usage (CPU, memory, or both) or a static configuration.
+ """
return AsyncAutoscaleResource(self._client)
@cached_property
def firewalls(self) -> AsyncFirewallsResource:
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
return AsyncFirewallsResource(self._client)
@cached_property
def floating_ips(self) -> AsyncFloatingIPsResource:
+ """
+ As of 16 June 2022, we have renamed the Floating IP product to [Reserved IPs](https://docs.digitalocean.com/reference/api/api-reference/#tag/Reserved-IPs).
+ The Reserved IP product's endpoints function the exact same way as Floating IPs.
+ The only difference is the name change throughout the URLs and fields.
+ For example, the `floating_ips` field is now the `reserved_ips` field.
+ The Floating IP endpoints will remain active until fall 2023 before being
+ permanently deprecated.
+
+ With the exception of the [Projects API](https://docs.digitalocean.com/reference/api/api-reference/#tag/Projects),
+ we will reflect this change as an additional field in the responses across the API
+ where the `floating_ip` field is used. For example, the Droplet metadata response
+ will contain the field `reserved_ips` in addition to the `floating_ips` field.
+ Floating IPs retrieved using the Projects API will retain the original name.
+
+ [DigitalOcean Floating IPs](https://docs.digitalocean.com/products/networking/reserved-ips/)
+ are publicly-accessible static IP addresses that can be mapped to one of
+ your Droplets. They can be used to create highly available setups or other
+ configurations requiring movable addresses.
+
+ Floating IPs are bound to a specific region.
+ """
return AsyncFloatingIPsResource(self._client)
@cached_property
def images(self) -> AsyncImagesResource:
+ """
+ A DigitalOcean [image](https://docs.digitalocean.com/products/images/) can be
+ used to create a Droplet and may come in a number of flavors. Currently,
+ there are five types of images: snapshots, backups, applications,
+ distributions, and custom images.
+
+ * [Snapshots](https://docs.digitalocean.com/products/snapshots/) provide
+ a full copy of an existing Droplet instance taken on demand.
+
+ * [Backups](https://docs.digitalocean.com/products/backups/) are similar
+ to snapshots but are created automatically at regular intervals when
+ enabled for a Droplet.
+
+ * [Custom images](https://docs.digitalocean.com/products/custom-images/)
+ are Linux-based virtual machine images (raw, qcow2, vhdx, vdi, and vmdk
+ formats are supported) that you may upload for use on DigitalOcean.
+
+ * Distributions are the public Linux distributions that are available to
+ be used as a base to create Droplets.
+
+ * Applications, or [1-Click Apps](https://docs.digitalocean.com/products/marketplace/),
+ are distributions pre-configured with additional software.
+
+ To interact with images, you will generally send requests to the images
+ endpoint at /v2/images.
+ """
return AsyncImagesResource(self._client)
@cached_property
def load_balancers(self) -> AsyncLoadBalancersResource:
+ """
+ [DigitalOcean Load Balancers](https://docs.digitalocean.com/products/networking/load-balancers/)
+ provide a way to distribute traffic across multiple Droplets. By sending
+ requests to the `/v2/load_balancers` endpoint, you can list, create, or
+ delete load balancers as well as add or remove Droplets, forwarding rules,
+ and other configuration details.
+ """
return AsyncLoadBalancersResource(self._client)
@cached_property
def sizes(self) -> AsyncSizesResource:
+ """
+ The sizes objects represent different packages of hardware resources that
+ can be used for Droplets. When a Droplet is created, a size must be
+ selected so that the correct resources can be allocated.
+
+ Each size represents a plan that bundles together specific sets of
+ resources. This includes the amount of RAM, the number of virtual CPUs,
+ disk space, and transfer. The size object also includes the pricing
+ details and the regions that the size is available in.
+ """
return AsyncSizesResource(self._client)
@cached_property
def snapshots(self) -> AsyncSnapshotsResource:
+ """
+ [Snapshots](https://docs.digitalocean.com/products/snapshots/) are saved
+ instances of a Droplet or a block storage volume, which is reflected in
+ the `resource_type` attribute. In order to avoid problems with compressing
+ filesystems, each defines a `min_disk_size` attribute which is the minimum
+ size of the Droplet or volume disk when creating a new resource from the
+ saved snapshot.
+
+ To interact with snapshots, you will generally send requests to the
+ snapshots endpoint at `/v2/snapshots`.
+ """
return AsyncSnapshotsResource(self._client)
@cached_property
def volumes(self) -> AsyncVolumesResource:
+ """
+ [DigitalOcean Block Storage Volumes](https://docs.digitalocean.com/products/volumes/)
+ provide expanded storage capacity for your Droplets and can be moved
+ between Droplets within a specific region.
+
+ Volumes function as raw block devices, meaning they appear to the
+ operating system as locally attached storage which can be formatted using
+ any file system supported by the OS. They may be created in sizes from
+ 1GiB to 16TiB.
+
+ By sending requests to the `/v2/volumes` endpoint, you can list, create, or
+ delete volumes as well as attach and detach them from Droplets.
+ """
return AsyncVolumesResource(self._client)
@cached_property
@@ -1710,48 +2008,186 @@ def __init__(self, gpu_droplets: GPUDropletsResource) -> None:
@cached_property
def backups(self) -> BackupsResourceWithRawResponse:
+ """
+ A [Droplet](https://docs.digitalocean.com/products/droplets/) is a DigitalOcean
+ virtual machine. By sending requests to the Droplet endpoint, you can
+ list, create, or delete Droplets.
+
+ Some of the attributes will have an object value. The `region` and `image`
+ objects will all contain the standard attributes of their associated
+ types. Find more information about each of these objects in their
+ respective sections.
+ """
return BackupsResourceWithRawResponse(self._gpu_droplets.backups)
@cached_property
def actions(self) -> ActionsResourceWithRawResponse:
+ """Droplet actions are tasks that can be executed on a Droplet.
+
+ These can be
+ things like rebooting, resizing, snapshotting, etc.
+
+ Droplet action requests are generally targeted at one of the "actions"
+ endpoints for a specific Droplet. The specific actions are usually
+ initiated by sending a POST request with the action and arguments as
+ parameters.
+
+ Droplet action requests create a Droplet actions object, which can be used
+ to get information about the status of an action. Creating a Droplet
+ action is asynchronous: the HTTP call will return the action object before
+ the action has finished processing on the Droplet. The current status of
+ an action can be retrieved from either the Droplet actions endpoint or the
+ global actions endpoint. If a Droplet action is uncompleted it may block
+ the creation of a subsequent action for that Droplet, the locked attribute
+ of the Droplet will be true and attempts to create a Droplet action will
+ fail with a status of 422.
+ """
return ActionsResourceWithRawResponse(self._gpu_droplets.actions)
@cached_property
def destroy_with_associated_resources(self) -> DestroyWithAssociatedResourcesResourceWithRawResponse:
+ """
+ A [Droplet](https://docs.digitalocean.com/products/droplets/) is a DigitalOcean
+ virtual machine. By sending requests to the Droplet endpoint, you can
+ list, create, or delete Droplets.
+
+ Some of the attributes will have an object value. The `region` and `image`
+ objects will all contain the standard attributes of their associated
+ types. Find more information about each of these objects in their
+ respective sections.
+ """
return DestroyWithAssociatedResourcesResourceWithRawResponse(
self._gpu_droplets.destroy_with_associated_resources
)
@cached_property
def autoscale(self) -> AutoscaleResourceWithRawResponse:
+ """
+ Droplet autoscale pools manage automatic horizontal scaling for your applications based on resource usage (CPU, memory, or both) or a static configuration.
+ """
return AutoscaleResourceWithRawResponse(self._gpu_droplets.autoscale)
@cached_property
def firewalls(self) -> FirewallsResourceWithRawResponse:
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
return FirewallsResourceWithRawResponse(self._gpu_droplets.firewalls)
@cached_property
def floating_ips(self) -> FloatingIPsResourceWithRawResponse:
+ """
+ As of 16 June 2022, we have renamed the Floating IP product to [Reserved IPs](https://docs.digitalocean.com/reference/api/api-reference/#tag/Reserved-IPs).
+ The Reserved IP product's endpoints function the exact same way as Floating IPs.
+ The only difference is the name change throughout the URLs and fields.
+ For example, the `floating_ips` field is now the `reserved_ips` field.
+ The Floating IP endpoints will remain active until fall 2023 before being
+ permanently deprecated.
+
+ With the exception of the [Projects API](https://docs.digitalocean.com/reference/api/api-reference/#tag/Projects),
+ we will reflect this change as an additional field in the responses across the API
+ where the `floating_ip` field is used. For example, the Droplet metadata response
+ will contain the field `reserved_ips` in addition to the `floating_ips` field.
+ Floating IPs retrieved using the Projects API will retain the original name.
+
+ [DigitalOcean Floating IPs](https://docs.digitalocean.com/products/networking/reserved-ips/)
+ are publicly-accessible static IP addresses that can be mapped to one of
+ your Droplets. They can be used to create highly available setups or other
+ configurations requiring movable addresses.
+
+ Floating IPs are bound to a specific region.
+ """
return FloatingIPsResourceWithRawResponse(self._gpu_droplets.floating_ips)
@cached_property
def images(self) -> ImagesResourceWithRawResponse:
+ """
+ A DigitalOcean [image](https://docs.digitalocean.com/products/images/) can be
+ used to create a Droplet and may come in a number of flavors. Currently,
+ there are five types of images: snapshots, backups, applications,
+ distributions, and custom images.
+
+ * [Snapshots](https://docs.digitalocean.com/products/snapshots/) provide
+ a full copy of an existing Droplet instance taken on demand.
+
+ * [Backups](https://docs.digitalocean.com/products/backups/) are similar
+ to snapshots but are created automatically at regular intervals when
+ enabled for a Droplet.
+
+ * [Custom images](https://docs.digitalocean.com/products/custom-images/)
+ are Linux-based virtual machine images (raw, qcow2, vhdx, vdi, and vmdk
+ formats are supported) that you may upload for use on DigitalOcean.
+
+ * Distributions are the public Linux distributions that are available to
+ be used as a base to create Droplets.
+
+ * Applications, or [1-Click Apps](https://docs.digitalocean.com/products/marketplace/),
+ are distributions pre-configured with additional software.
+
+ To interact with images, you will generally send requests to the images
+ endpoint at /v2/images.
+ """
return ImagesResourceWithRawResponse(self._gpu_droplets.images)
@cached_property
def load_balancers(self) -> LoadBalancersResourceWithRawResponse:
+ """
+ [DigitalOcean Load Balancers](https://docs.digitalocean.com/products/networking/load-balancers/)
+ provide a way to distribute traffic across multiple Droplets. By sending
+ requests to the `/v2/load_balancers` endpoint, you can list, create, or
+ delete load balancers as well as add or remove Droplets, forwarding rules,
+ and other configuration details.
+ """
return LoadBalancersResourceWithRawResponse(self._gpu_droplets.load_balancers)
@cached_property
def sizes(self) -> SizesResourceWithRawResponse:
+ """
+ The sizes objects represent different packages of hardware resources that
+ can be used for Droplets. When a Droplet is created, a size must be
+ selected so that the correct resources can be allocated.
+
+ Each size represents a plan that bundles together specific sets of
+ resources. This includes the amount of RAM, the number of virtual CPUs,
+ disk space, and transfer. The size object also includes the pricing
+ details and the regions that the size is available in.
+ """
return SizesResourceWithRawResponse(self._gpu_droplets.sizes)
@cached_property
def snapshots(self) -> SnapshotsResourceWithRawResponse:
+ """
+ [Snapshots](https://docs.digitalocean.com/products/snapshots/) are saved
+ instances of a Droplet or a block storage volume, which is reflected in
+ the `resource_type` attribute. In order to avoid problems with compressing
+ filesystems, each defines a `min_disk_size` attribute which is the minimum
+ size of the Droplet or volume disk when creating a new resource from the
+ saved snapshot.
+
+ To interact with snapshots, you will generally send requests to the
+ snapshots endpoint at `/v2/snapshots`.
+ """
return SnapshotsResourceWithRawResponse(self._gpu_droplets.snapshots)
@cached_property
def volumes(self) -> VolumesResourceWithRawResponse:
+ """
+ [DigitalOcean Block Storage Volumes](https://docs.digitalocean.com/products/volumes/)
+ provide expanded storage capacity for your Droplets and can be moved
+ between Droplets within a specific region.
+
+ Volumes function as raw block devices, meaning they appear to the
+ operating system as locally attached storage which can be formatted using
+ any file system supported by the OS. They may be created in sizes from
+ 1GiB to 16TiB.
+
+ By sending requests to the `/v2/volumes` endpoint, you can list, create, or
+ delete volumes as well as attach and detach them from Droplets.
+ """
return VolumesResourceWithRawResponse(self._gpu_droplets.volumes)
@cached_property
@@ -1793,48 +2229,186 @@ def __init__(self, gpu_droplets: AsyncGPUDropletsResource) -> None:
@cached_property
def backups(self) -> AsyncBackupsResourceWithRawResponse:
+ """
+ A [Droplet](https://docs.digitalocean.com/products/droplets/) is a DigitalOcean
+ virtual machine. By sending requests to the Droplet endpoint, you can
+ list, create, or delete Droplets.
+
+ Some of the attributes will have an object value. The `region` and `image`
+ objects will all contain the standard attributes of their associated
+ types. Find more information about each of these objects in their
+ respective sections.
+ """
return AsyncBackupsResourceWithRawResponse(self._gpu_droplets.backups)
@cached_property
def actions(self) -> AsyncActionsResourceWithRawResponse:
+ """Droplet actions are tasks that can be executed on a Droplet.
+
+ These can be
+ things like rebooting, resizing, snapshotting, etc.
+
+ Droplet action requests are generally targeted at one of the "actions"
+ endpoints for a specific Droplet. The specific actions are usually
+ initiated by sending a POST request with the action and arguments as
+ parameters.
+
+ Droplet action requests create a Droplet actions object, which can be used
+ to get information about the status of an action. Creating a Droplet
+ action is asynchronous: the HTTP call will return the action object before
+ the action has finished processing on the Droplet. The current status of
+ an action can be retrieved from either the Droplet actions endpoint or the
+ global actions endpoint. If a Droplet action is uncompleted it may block
+ the creation of a subsequent action for that Droplet, the locked attribute
+ of the Droplet will be true and attempts to create a Droplet action will
+ fail with a status of 422.
+ """
return AsyncActionsResourceWithRawResponse(self._gpu_droplets.actions)
@cached_property
def destroy_with_associated_resources(self) -> AsyncDestroyWithAssociatedResourcesResourceWithRawResponse:
+ """
+ A [Droplet](https://docs.digitalocean.com/products/droplets/) is a DigitalOcean
+ virtual machine. By sending requests to the Droplet endpoint, you can
+ list, create, or delete Droplets.
+
+ Some of the attributes will have an object value. The `region` and `image`
+ objects will all contain the standard attributes of their associated
+ types. Find more information about each of these objects in their
+ respective sections.
+ """
return AsyncDestroyWithAssociatedResourcesResourceWithRawResponse(
self._gpu_droplets.destroy_with_associated_resources
)
@cached_property
def autoscale(self) -> AsyncAutoscaleResourceWithRawResponse:
+ """
+ Droplet autoscale pools manage automatic horizontal scaling for your applications based on resource usage (CPU, memory, or both) or a static configuration.
+ """
return AsyncAutoscaleResourceWithRawResponse(self._gpu_droplets.autoscale)
@cached_property
def firewalls(self) -> AsyncFirewallsResourceWithRawResponse:
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
return AsyncFirewallsResourceWithRawResponse(self._gpu_droplets.firewalls)
@cached_property
def floating_ips(self) -> AsyncFloatingIPsResourceWithRawResponse:
+ """
+ As of 16 June 2022, we have renamed the Floating IP product to [Reserved IPs](https://docs.digitalocean.com/reference/api/api-reference/#tag/Reserved-IPs).
+ The Reserved IP product's endpoints function the exact same way as Floating IPs.
+ The only difference is the name change throughout the URLs and fields.
+ For example, the `floating_ips` field is now the `reserved_ips` field.
+ The Floating IP endpoints will remain active until fall 2023 before being
+ permanently deprecated.
+
+ With the exception of the [Projects API](https://docs.digitalocean.com/reference/api/api-reference/#tag/Projects),
+ we will reflect this change as an additional field in the responses across the API
+ where the `floating_ip` field is used. For example, the Droplet metadata response
+ will contain the field `reserved_ips` in addition to the `floating_ips` field.
+ Floating IPs retrieved using the Projects API will retain the original name.
+
+ [DigitalOcean Floating IPs](https://docs.digitalocean.com/products/networking/reserved-ips/)
+ are publicly-accessible static IP addresses that can be mapped to one of
+ your Droplets. They can be used to create highly available setups or other
+ configurations requiring movable addresses.
+
+ Floating IPs are bound to a specific region.
+ """
return AsyncFloatingIPsResourceWithRawResponse(self._gpu_droplets.floating_ips)
@cached_property
def images(self) -> AsyncImagesResourceWithRawResponse:
+ """
+ A DigitalOcean [image](https://docs.digitalocean.com/products/images/) can be
+ used to create a Droplet and may come in a number of flavors. Currently,
+ there are five types of images: snapshots, backups, applications,
+ distributions, and custom images.
+
+ * [Snapshots](https://docs.digitalocean.com/products/snapshots/) provide
+ a full copy of an existing Droplet instance taken on demand.
+
+ * [Backups](https://docs.digitalocean.com/products/backups/) are similar
+ to snapshots but are created automatically at regular intervals when
+ enabled for a Droplet.
+
+ * [Custom images](https://docs.digitalocean.com/products/custom-images/)
+ are Linux-based virtual machine images (raw, qcow2, vhdx, vdi, and vmdk
+ formats are supported) that you may upload for use on DigitalOcean.
+
+ * Distributions are the public Linux distributions that are available to
+ be used as a base to create Droplets.
+
+ * Applications, or [1-Click Apps](https://docs.digitalocean.com/products/marketplace/),
+ are distributions pre-configured with additional software.
+
+ To interact with images, you will generally send requests to the images
+ endpoint at `/v2/images`.
+ """
return AsyncImagesResourceWithRawResponse(self._gpu_droplets.images)
@cached_property
def load_balancers(self) -> AsyncLoadBalancersResourceWithRawResponse:
+ """
+ [DigitalOcean Load Balancers](https://docs.digitalocean.com/products/networking/load-balancers/)
+ provide a way to distribute traffic across multiple Droplets. By sending
+ requests to the `/v2/load_balancers` endpoint, you can list, create, or
+ delete load balancers as well as add or remove Droplets, forwarding rules,
+ and other configuration details.
+ """
return AsyncLoadBalancersResourceWithRawResponse(self._gpu_droplets.load_balancers)
@cached_property
def sizes(self) -> AsyncSizesResourceWithRawResponse:
+ """
+ The sizes objects represent different packages of hardware resources that
+ can be used for Droplets. When a Droplet is created, a size must be
+ selected so that the correct resources can be allocated.
+
+ Each size represents a plan that bundles together specific sets of
+ resources. This includes the amount of RAM, the number of virtual CPUs,
+ disk space, and transfer. The size object also includes the pricing
+ details and the regions that the size is available in.
+ """
return AsyncSizesResourceWithRawResponse(self._gpu_droplets.sizes)
@cached_property
def snapshots(self) -> AsyncSnapshotsResourceWithRawResponse:
+ """
+ [Snapshots](https://docs.digitalocean.com/products/snapshots/) are saved
+ instances of a Droplet or a block storage volume, which is reflected in
+ the `resource_type` attribute. In order to avoid problems with compressing
+ filesystems, each defines a `min_disk_size` attribute which is the minimum
+ size of the Droplet or volume disk when creating a new resource from the
+ saved snapshot.
+
+ To interact with snapshots, you will generally send requests to the
+ snapshots endpoint at `/v2/snapshots`.
+ """
return AsyncSnapshotsResourceWithRawResponse(self._gpu_droplets.snapshots)
@cached_property
def volumes(self) -> AsyncVolumesResourceWithRawResponse:
+ """
+ [DigitalOcean Block Storage Volumes](https://docs.digitalocean.com/products/volumes/)
+ provide expanded storage capacity for your Droplets and can be moved
+ between Droplets within a specific region.
+
+ Volumes function as raw block devices, meaning they appear to the
+ operating system as locally attached storage which can be formatted using
+ any file system supported by the OS. They may be created in sizes from
+ 1GiB to 16TiB.
+
+ By sending requests to the `/v2/volumes` endpoint, you can list, create, or
+ delete volumes as well as attach and detach them from Droplets.
+ """
return AsyncVolumesResourceWithRawResponse(self._gpu_droplets.volumes)
@cached_property
@@ -1876,48 +2450,186 @@ def __init__(self, gpu_droplets: GPUDropletsResource) -> None:
@cached_property
def backups(self) -> BackupsResourceWithStreamingResponse:
+ """
+ A [Droplet](https://docs.digitalocean.com/products/droplets/) is a DigitalOcean
+ virtual machine. By sending requests to the Droplet endpoint, you can
+ list, create, or delete Droplets.
+
+ Some of the attributes will have an object value. The `region` and `image`
+ objects will all contain the standard attributes of their associated
+ types. Find more information about each of these objects in their
+ respective sections.
+ """
return BackupsResourceWithStreamingResponse(self._gpu_droplets.backups)
@cached_property
def actions(self) -> ActionsResourceWithStreamingResponse:
+ """Droplet actions are tasks that can be executed on a Droplet.
+
+ These can be
+ things like rebooting, resizing, snapshotting, etc.
+
+ Droplet action requests are generally targeted at one of the "actions"
+ endpoints for a specific Droplet. The specific actions are usually
+ initiated by sending a POST request with the action and arguments as
+ parameters.
+
+ Droplet action requests create a Droplet actions object, which can be used
+ to get information about the status of an action. Creating a Droplet
+ action is asynchronous: the HTTP call will return the action object before
+ the action has finished processing on the Droplet. The current status of
+ an action can be retrieved from either the Droplet actions endpoint or the
+ global actions endpoint. If a Droplet action is uncompleted it may block
+ the creation of a subsequent action for that Droplet, the locked attribute
+ of the Droplet will be true and attempts to create a Droplet action will
+ fail with a status of 422.
+ """
return ActionsResourceWithStreamingResponse(self._gpu_droplets.actions)
@cached_property
def destroy_with_associated_resources(self) -> DestroyWithAssociatedResourcesResourceWithStreamingResponse:
+ """
+ A [Droplet](https://docs.digitalocean.com/products/droplets/) is a DigitalOcean
+ virtual machine. By sending requests to the Droplet endpoint, you can
+ list, create, or delete Droplets.
+
+ Some of the attributes will have an object value. The `region` and `image`
+ objects will all contain the standard attributes of their associated
+ types. Find more information about each of these objects in their
+ respective sections.
+ """
return DestroyWithAssociatedResourcesResourceWithStreamingResponse(
self._gpu_droplets.destroy_with_associated_resources
)
@cached_property
def autoscale(self) -> AutoscaleResourceWithStreamingResponse:
+ """
+ Droplet autoscale pools manage automatic horizontal scaling for your applications based on resource usage (CPU, memory, or both) or a static configuration.
+ """
return AutoscaleResourceWithStreamingResponse(self._gpu_droplets.autoscale)
@cached_property
def firewalls(self) -> FirewallsResourceWithStreamingResponse:
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
return FirewallsResourceWithStreamingResponse(self._gpu_droplets.firewalls)
@cached_property
def floating_ips(self) -> FloatingIPsResourceWithStreamingResponse:
+ """
+ As of 16 June 2022, we have renamed the Floating IP product to [Reserved IPs](https://docs.digitalocean.com/reference/api/api-reference/#tag/Reserved-IPs).
+ The Reserved IP product's endpoints function the exact same way as Floating IPs.
+ The only difference is the name change throughout the URLs and fields.
+ For example, the `floating_ips` field is now the `reserved_ips` field.
+ The Floating IP endpoints will remain active until fall 2023 before being
+ permanently deprecated.
+
+ With the exception of the [Projects API](https://docs.digitalocean.com/reference/api/api-reference/#tag/Projects),
+ we will reflect this change as an additional field in the responses across the API
+ where the `floating_ip` field is used. For example, the Droplet metadata response
+ will contain the field `reserved_ips` in addition to the `floating_ips` field.
+ Floating IPs retrieved using the Projects API will retain the original name.
+
+ [DigitalOcean Floating IPs](https://docs.digitalocean.com/products/networking/reserved-ips/)
+ are publicly-accessible static IP addresses that can be mapped to one of
+ your Droplets. They can be used to create highly available setups or other
+ configurations requiring movable addresses.
+
+ Floating IPs are bound to a specific region.
+ """
return FloatingIPsResourceWithStreamingResponse(self._gpu_droplets.floating_ips)
@cached_property
def images(self) -> ImagesResourceWithStreamingResponse:
+ """
+ A DigitalOcean [image](https://docs.digitalocean.com/products/images/) can be
+ used to create a Droplet and may come in a number of flavors. Currently,
+ there are five types of images: snapshots, backups, applications,
+ distributions, and custom images.
+
+ * [Snapshots](https://docs.digitalocean.com/products/snapshots/) provide
+ a full copy of an existing Droplet instance taken on demand.
+
+ * [Backups](https://docs.digitalocean.com/products/backups/) are similar
+ to snapshots but are created automatically at regular intervals when
+ enabled for a Droplet.
+
+ * [Custom images](https://docs.digitalocean.com/products/custom-images/)
+ are Linux-based virtual machine images (raw, qcow2, vhdx, vdi, and vmdk
+ formats are supported) that you may upload for use on DigitalOcean.
+
+ * Distributions are the public Linux distributions that are available to
+ be used as a base to create Droplets.
+
+ * Applications, or [1-Click Apps](https://docs.digitalocean.com/products/marketplace/),
+ are distributions pre-configured with additional software.
+
+ To interact with images, you will generally send requests to the images
+ endpoint at `/v2/images`.
+ """
return ImagesResourceWithStreamingResponse(self._gpu_droplets.images)
@cached_property
def load_balancers(self) -> LoadBalancersResourceWithStreamingResponse:
+ """
+ [DigitalOcean Load Balancers](https://docs.digitalocean.com/products/networking/load-balancers/)
+ provide a way to distribute traffic across multiple Droplets. By sending
+ requests to the `/v2/load_balancers` endpoint, you can list, create, or
+ delete load balancers as well as add or remove Droplets, forwarding rules,
+ and other configuration details.
+ """
return LoadBalancersResourceWithStreamingResponse(self._gpu_droplets.load_balancers)
@cached_property
def sizes(self) -> SizesResourceWithStreamingResponse:
+ """
+ The sizes objects represent different packages of hardware resources that
+ can be used for Droplets. When a Droplet is created, a size must be
+ selected so that the correct resources can be allocated.
+
+ Each size represents a plan that bundles together specific sets of
+ resources. This includes the amount of RAM, the number of virtual CPUs,
+ disk space, and transfer. The size object also includes the pricing
+ details and the regions that the size is available in.
+ """
return SizesResourceWithStreamingResponse(self._gpu_droplets.sizes)
@cached_property
def snapshots(self) -> SnapshotsResourceWithStreamingResponse:
+ """
+ [Snapshots](https://docs.digitalocean.com/products/snapshots/) are saved
+ instances of a Droplet or a block storage volume, which is reflected in
+ the `resource_type` attribute. In order to avoid problems with compressing
+ filesystems, each defines a `min_disk_size` attribute which is the minimum
+ size of the Droplet or volume disk when creating a new resource from the
+ saved snapshot.
+
+ To interact with snapshots, you will generally send requests to the
+ snapshots endpoint at `/v2/snapshots`.
+ """
return SnapshotsResourceWithStreamingResponse(self._gpu_droplets.snapshots)
@cached_property
def volumes(self) -> VolumesResourceWithStreamingResponse:
+ """
+ [DigitalOcean Block Storage Volumes](https://docs.digitalocean.com/products/volumes/)
+ provide expanded storage capacity for your Droplets and can be moved
+ between Droplets within a specific region.
+
+ Volumes function as raw block devices, meaning they appear to the
+ operating system as locally attached storage which can be formatted using
+ any file system supported by the OS. They may be created in sizes from
+ 1GiB to 16TiB.
+
+ By sending requests to the `/v2/volumes` endpoint, you can list, create, or
+ delete volumes as well as attach and detach them from Droplets.
+ """
return VolumesResourceWithStreamingResponse(self._gpu_droplets.volumes)
@cached_property
@@ -1959,48 +2671,186 @@ def __init__(self, gpu_droplets: AsyncGPUDropletsResource) -> None:
@cached_property
def backups(self) -> AsyncBackupsResourceWithStreamingResponse:
+ """
+ A [Droplet](https://docs.digitalocean.com/products/droplets/) is a DigitalOcean
+ virtual machine. By sending requests to the Droplet endpoint, you can
+ list, create, or delete Droplets.
+
+ Some of the attributes will have an object value. The `region` and `image`
+ objects will all contain the standard attributes of their associated
+ types. Find more information about each of these objects in their
+ respective sections.
+ """
return AsyncBackupsResourceWithStreamingResponse(self._gpu_droplets.backups)
@cached_property
def actions(self) -> AsyncActionsResourceWithStreamingResponse:
+ """Droplet actions are tasks that can be executed on a Droplet.
+
+ These can be
+ things like rebooting, resizing, snapshotting, etc.
+
+ Droplet action requests are generally targeted at one of the "actions"
+ endpoints for a specific Droplet. The specific actions are usually
+ initiated by sending a POST request with the action and arguments as
+ parameters.
+
+ Droplet action requests create a Droplet actions object, which can be used
+ to get information about the status of an action. Creating a Droplet
+ action is asynchronous: the HTTP call will return the action object before
+ the action has finished processing on the Droplet. The current status of
+ an action can be retrieved from either the Droplet actions endpoint or the
+ global actions endpoint. If a Droplet action is uncompleted it may block
+ the creation of a subsequent action for that Droplet, the locked attribute
+ of the Droplet will be true and attempts to create a Droplet action will
+ fail with a status of 422.
+ """
return AsyncActionsResourceWithStreamingResponse(self._gpu_droplets.actions)
@cached_property
def destroy_with_associated_resources(self) -> AsyncDestroyWithAssociatedResourcesResourceWithStreamingResponse:
+ """
+ A [Droplet](https://docs.digitalocean.com/products/droplets/) is a DigitalOcean
+ virtual machine. By sending requests to the Droplet endpoint, you can
+ list, create, or delete Droplets.
+
+ Some of the attributes will have an object value. The `region` and `image`
+ objects will all contain the standard attributes of their associated
+ types. Find more information about each of these objects in their
+ respective sections.
+ """
return AsyncDestroyWithAssociatedResourcesResourceWithStreamingResponse(
self._gpu_droplets.destroy_with_associated_resources
)
@cached_property
def autoscale(self) -> AsyncAutoscaleResourceWithStreamingResponse:
+ """
+ Droplet autoscale pools manage automatic horizontal scaling for your applications based on resource usage (CPU, memory, or both) or a static configuration.
+ """
return AsyncAutoscaleResourceWithStreamingResponse(self._gpu_droplets.autoscale)
@cached_property
def firewalls(self) -> AsyncFirewallsResourceWithStreamingResponse:
+ """
+ [DigitalOcean Cloud Firewalls](https://docs.digitalocean.com/products/networking/firewalls/)
+ provide the ability to restrict network access to and from a Droplet
+ allowing you to define which ports will accept inbound or outbound
+ connections. By sending requests to the `/v2/firewalls` endpoint, you can
+ list, create, or delete firewalls as well as modify access rules.
+ """
return AsyncFirewallsResourceWithStreamingResponse(self._gpu_droplets.firewalls)
@cached_property
def floating_ips(self) -> AsyncFloatingIPsResourceWithStreamingResponse:
+ """
+ As of 16 June 2022, we have renamed the Floating IP product to [Reserved IPs](https://docs.digitalocean.com/reference/api/api-reference/#tag/Reserved-IPs).
+ The Reserved IP product's endpoints function the exact same way as Floating IPs.
+ The only difference is the name change throughout the URLs and fields.
+ For example, the `floating_ips` field is now the `reserved_ips` field.
+ The Floating IP endpoints will remain active until fall 2023 before being
+ permanently deprecated.
+
+ With the exception of the [Projects API](https://docs.digitalocean.com/reference/api/api-reference/#tag/Projects),
+ we will reflect this change as an additional field in the responses across the API
+ where the `floating_ip` field is used. For example, the Droplet metadata response
+ will contain the field `reserved_ips` in addition to the `floating_ips` field.
+ Floating IPs retrieved using the Projects API will retain the original name.
+
+ [DigitalOcean Floating IPs](https://docs.digitalocean.com/products/networking/reserved-ips/)
+ are publicly-accessible static IP addresses that can be mapped to one of
+ your Droplets. They can be used to create highly available setups or other
+ configurations requiring movable addresses.
+
+ Floating IPs are bound to a specific region.
+ """
return AsyncFloatingIPsResourceWithStreamingResponse(self._gpu_droplets.floating_ips)
@cached_property
def images(self) -> AsyncImagesResourceWithStreamingResponse:
+ """
+ A DigitalOcean [image](https://docs.digitalocean.com/products/images/) can be
+ used to create a Droplet and may come in a number of flavors. Currently,
+ there are five types of images: snapshots, backups, applications,
+ distributions, and custom images.
+
+ * [Snapshots](https://docs.digitalocean.com/products/snapshots/) provide
+ a full copy of an existing Droplet instance taken on demand.
+
+ * [Backups](https://docs.digitalocean.com/products/backups/) are similar
+ to snapshots but are created automatically at regular intervals when
+ enabled for a Droplet.
+
+ * [Custom images](https://docs.digitalocean.com/products/custom-images/)
+ are Linux-based virtual machine images (raw, qcow2, vhdx, vdi, and vmdk
+ formats are supported) that you may upload for use on DigitalOcean.
+
+ * Distributions are the public Linux distributions that are available to
+ be used as a base to create Droplets.
+
+ * Applications, or [1-Click Apps](https://docs.digitalocean.com/products/marketplace/),
+ are distributions pre-configured with additional software.
+
+ To interact with images, you will generally send requests to the images
+ endpoint at `/v2/images`.
+ """
return AsyncImagesResourceWithStreamingResponse(self._gpu_droplets.images)
@cached_property
def load_balancers(self) -> AsyncLoadBalancersResourceWithStreamingResponse:
+ """
+ [DigitalOcean Load Balancers](https://docs.digitalocean.com/products/networking/load-balancers/)
+ provide a way to distribute traffic across multiple Droplets. By sending
+ requests to the `/v2/load_balancers` endpoint, you can list, create, or
+ delete load balancers as well as add or remove Droplets, forwarding rules,
+ and other configuration details.
+ """
return AsyncLoadBalancersResourceWithStreamingResponse(self._gpu_droplets.load_balancers)
@cached_property
def sizes(self) -> AsyncSizesResourceWithStreamingResponse:
+ """
+ The sizes objects represent different packages of hardware resources that
+ can be used for Droplets. When a Droplet is created, a size must be
+ selected so that the correct resources can be allocated.
+
+ Each size represents a plan that bundles together specific sets of
+ resources. This includes the amount of RAM, the number of virtual CPUs,
+ disk space, and transfer. The size object also includes the pricing
+ details and the regions that the size is available in.
+ """
return AsyncSizesResourceWithStreamingResponse(self._gpu_droplets.sizes)
@cached_property
def snapshots(self) -> AsyncSnapshotsResourceWithStreamingResponse:
+ """
+ [Snapshots](https://docs.digitalocean.com/products/snapshots/) are saved
+ instances of a Droplet or a block storage volume, which is reflected in
+ the `resource_type` attribute. In order to avoid problems with compressing
+ filesystems, each defines a `min_disk_size` attribute which is the minimum
+ size of the Droplet or volume disk when creating a new resource from the
+ saved snapshot.
+
+ To interact with snapshots, you will generally send requests to the
+ snapshots endpoint at `/v2/snapshots`.
+ """
return AsyncSnapshotsResourceWithStreamingResponse(self._gpu_droplets.snapshots)
@cached_property
def volumes(self) -> AsyncVolumesResourceWithStreamingResponse:
+ """
+ [DigitalOcean Block Storage Volumes](https://docs.digitalocean.com/products/volumes/)
+ provide expanded storage capacity for your Droplets and can be moved
+ between Droplets within a specific region.
+
+ Volumes function as raw block devices, meaning they appear to the
+ operating system as locally attached storage which can be formatted using
+ any file system supported by the OS. They may be created in sizes from
+ 1GiB to 16TiB.
+
+ By sending requests to the `/v2/volumes` endpoint, you can list, create, or
+ delete volumes as well as attach and detach them from Droplets.
+ """
return AsyncVolumesResourceWithStreamingResponse(self._gpu_droplets.volumes)
@cached_property
diff --git a/src/gradient/resources/gpu_droplets/images/actions.py b/src/gradient/resources/gpu_droplets/images/actions.py
index d2d33f11..19c70af8 100644
--- a/src/gradient/resources/gpu_droplets/images/actions.py
+++ b/src/gradient/resources/gpu_droplets/images/actions.py
@@ -25,6 +25,16 @@
class ActionsResource(SyncAPIResource):
+ """Image actions are commands that can be given to a DigitalOcean image.
+
+ In
+ general, these requests are made on the actions endpoint of a specific
+ image.
+
+ An image action object is returned. These objects hold the current status
+ of the requested action.
+ """
+
@cached_property
def with_raw_response(self) -> ActionsResourceWithRawResponse:
"""
@@ -263,6 +273,16 @@ def list(
class AsyncActionsResource(AsyncAPIResource):
+ """Image actions are commands that can be given to a DigitalOcean image.
+
+ In
+ general, these requests are made on the actions endpoint of a specific
+ image.
+
+ An image action object is returned. These objects hold the current status
+ of the requested action.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncActionsResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/gpu_droplets/images/images.py b/src/gradient/resources/gpu_droplets/images/images.py
index 83e04d13..d9a904cf 100644
--- a/src/gradient/resources/gpu_droplets/images/images.py
+++ b/src/gradient/resources/gpu_droplets/images/images.py
@@ -36,8 +36,44 @@
class ImagesResource(SyncAPIResource):
+ """
+ A DigitalOcean [image](https://docs.digitalocean.com/products/images/) can be
+ used to create a Droplet and may come in a number of flavors. Currently,
+ there are five types of images: snapshots, backups, applications,
+ distributions, and custom images.
+
+ * [Snapshots](https://docs.digitalocean.com/products/snapshots/) provide
+ a full copy of an existing Droplet instance taken on demand.
+
+ * [Backups](https://docs.digitalocean.com/products/backups/) are similar
+ to snapshots but are created automatically at regular intervals when
+ enabled for a Droplet.
+
+ * [Custom images](https://docs.digitalocean.com/products/custom-images/)
+ are Linux-based virtual machine images (raw, qcow2, vhdx, vdi, and vmdk
+ formats are supported) that you may upload for use on DigitalOcean.
+
+ * Distributions are the public Linux distributions that are available to
+ be used as a base to create Droplets.
+
+ * Applications, or [1-Click Apps](https://docs.digitalocean.com/products/marketplace/),
+ are distributions pre-configured with additional software.
+
+ To interact with images, you will generally send requests to the images
+ endpoint at `/v2/images`.
+ """
+
@cached_property
def actions(self) -> ActionsResource:
+ """Image actions are commands that can be given to a DigitalOcean image.
+
+ In
+ general, these requests are made on the actions endpoint of a specific
+ image.
+
+ An image action object is returned. These objects hold the current status
+ of the requested action.
+ """
return ActionsResource(self._client)
@cached_property
@@ -402,8 +438,44 @@ def delete(
class AsyncImagesResource(AsyncAPIResource):
+ """
+ A DigitalOcean [image](https://docs.digitalocean.com/products/images/) can be
+ used to create a Droplet and may come in a number of flavors. Currently,
+ there are five types of images: snapshots, backups, applications,
+ distributions, and custom images.
+
+ * [Snapshots](https://docs.digitalocean.com/products/snapshots/) provide
+ a full copy of an existing Droplet instance taken on demand.
+
+ * [Backups](https://docs.digitalocean.com/products/backups/) are similar
+ to snapshots but are created automatically at regular intervals when
+ enabled for a Droplet.
+
+ * [Custom images](https://docs.digitalocean.com/products/custom-images/)
+ are Linux-based virtual machine images (raw, qcow2, vhdx, vdi, and vmdk
+ formats are supported) that you may upload for use on DigitalOcean.
+
+ * Distributions are the public Linux distributions that are available to
+ be used as a base to create Droplets.
+
+ * Applications, or [1-Click Apps](https://docs.digitalocean.com/products/marketplace/),
+ are distributions pre-configured with additional software.
+
+ To interact with images, you will generally send requests to the images
+ endpoint at `/v2/images`.
+ """
+
@cached_property
def actions(self) -> AsyncActionsResource:
+ """Image actions are commands that can be given to a DigitalOcean image.
+
+ In
+ general, these requests are made on the actions endpoint of a specific
+ image.
+
+ An image action object is returned. These objects hold the current status
+ of the requested action.
+ """
return AsyncActionsResource(self._client)
@cached_property
@@ -789,6 +861,15 @@ def __init__(self, images: ImagesResource) -> None:
@cached_property
def actions(self) -> ActionsResourceWithRawResponse:
+ """Image actions are commands that can be given to a DigitalOcean image.
+
+ In
+ general, these requests are made on the actions endpoint of a specific
+ image.
+
+ An image action object is returned. These objects hold the current status
+ of the requested action.
+ """
return ActionsResourceWithRawResponse(self._images.actions)
@@ -814,6 +895,15 @@ def __init__(self, images: AsyncImagesResource) -> None:
@cached_property
def actions(self) -> AsyncActionsResourceWithRawResponse:
+ """Image actions are commands that can be given to a DigitalOcean image.
+
+ In
+ general, these requests are made on the actions endpoint of a specific
+ image.
+
+ An image action object is returned. These objects hold the current status
+ of the requested action.
+ """
return AsyncActionsResourceWithRawResponse(self._images.actions)
@@ -839,6 +929,15 @@ def __init__(self, images: ImagesResource) -> None:
@cached_property
def actions(self) -> ActionsResourceWithStreamingResponse:
+ """Image actions are commands that can be given to a DigitalOcean image.
+
+ In
+ general, these requests are made on the actions endpoint of a specific
+ image.
+
+ An image action object is returned. These objects hold the current status
+ of the requested action.
+ """
return ActionsResourceWithStreamingResponse(self._images.actions)
@@ -864,4 +963,13 @@ def __init__(self, images: AsyncImagesResource) -> None:
@cached_property
def actions(self) -> AsyncActionsResourceWithStreamingResponse:
+ """Image actions are commands that can be given to a DigitalOcean image.
+
+ In
+ general, these requests are made on the actions endpoint of a specific
+ image.
+
+ An image action object is returned. These objects hold the current status
+ of the requested action.
+ """
return AsyncActionsResourceWithStreamingResponse(self._images.actions)
diff --git a/src/gradient/resources/gpu_droplets/load_balancers/droplets.py b/src/gradient/resources/gpu_droplets/load_balancers/droplets.py
index ddcdc63a..9f4b3e5e 100644
--- a/src/gradient/resources/gpu_droplets/load_balancers/droplets.py
+++ b/src/gradient/resources/gpu_droplets/load_balancers/droplets.py
@@ -23,6 +23,14 @@
class DropletsResource(SyncAPIResource):
+ """
+ [DigitalOcean Load Balancers](https://docs.digitalocean.com/products/networking/load-balancers/)
+ provide a way to distribute traffic across multiple Droplets. By sending
+ requests to the `/v2/load_balancers` endpoint, you can list, create, or
+ delete load balancers as well as add or remove Droplets, forwarding rules,
+ and other configuration details.
+ """
+
@cached_property
def with_raw_response(self) -> DropletsResourceWithRawResponse:
"""
@@ -139,6 +147,14 @@ def remove(
class AsyncDropletsResource(AsyncAPIResource):
+ """
+ [DigitalOcean Load Balancers](https://docs.digitalocean.com/products/networking/load-balancers/)
+ provide a way to distribute traffic across multiple Droplets. By sending
+ requests to the `/v2/load_balancers` endpoint, you can list, create, or
+ delete load balancers as well as add or remove Droplets, forwarding rules,
+ and other configuration details.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncDropletsResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/gpu_droplets/load_balancers/forwarding_rules.py b/src/gradient/resources/gpu_droplets/load_balancers/forwarding_rules.py
index 8f9092e0..51842e8c 100644
--- a/src/gradient/resources/gpu_droplets/load_balancers/forwarding_rules.py
+++ b/src/gradient/resources/gpu_droplets/load_balancers/forwarding_rules.py
@@ -24,6 +24,14 @@
class ForwardingRulesResource(SyncAPIResource):
+ """
+ [DigitalOcean Load Balancers](https://docs.digitalocean.com/products/networking/load-balancers/)
+ provide a way to distribute traffic across multiple Droplets. By sending
+ requests to the `/v2/load_balancers` endpoint, you can list, create, or
+ delete load balancers as well as add or remove Droplets, forwarding rules,
+ and other configuration details.
+ """
+
@cached_property
def with_raw_response(self) -> ForwardingRulesResourceWithRawResponse:
"""
@@ -139,6 +147,14 @@ def remove(
class AsyncForwardingRulesResource(AsyncAPIResource):
+ """
+ [DigitalOcean Load Balancers](https://docs.digitalocean.com/products/networking/load-balancers/)
+ provide a way to distribute traffic across multiple Droplets. By sending
+ requests to the `/v2/load_balancers` endpoint, you can list, create, or
+ delete load balancers as well as add or remove Droplets, forwarding rules,
+ and other configuration details.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncForwardingRulesResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/gpu_droplets/load_balancers/load_balancers.py b/src/gradient/resources/gpu_droplets/load_balancers/load_balancers.py
index 2a1e52d9..1316036b 100644
--- a/src/gradient/resources/gpu_droplets/load_balancers/load_balancers.py
+++ b/src/gradient/resources/gpu_droplets/load_balancers/load_balancers.py
@@ -54,12 +54,34 @@
class LoadBalancersResource(SyncAPIResource):
+ """
+ [DigitalOcean Load Balancers](https://docs.digitalocean.com/products/networking/load-balancers/)
+ provide a way to distribute traffic across multiple Droplets. By sending
+ requests to the `/v2/load_balancers` endpoint, you can list, create, or
+ delete load balancers as well as add or remove Droplets, forwarding rules,
+ and other configuration details.
+ """
+
@cached_property
def droplets(self) -> DropletsResource:
+ """
+ [DigitalOcean Load Balancers](https://docs.digitalocean.com/products/networking/load-balancers/)
+ provide a way to distribute traffic across multiple Droplets. By sending
+ requests to the `/v2/load_balancers` endpoint, you can list, create, or
+ delete load balancers as well as add or remove Droplets, forwarding rules,
+ and other configuration details.
+ """
return DropletsResource(self._client)
@cached_property
def forwarding_rules(self) -> ForwardingRulesResource:
+ """
+ [DigitalOcean Load Balancers](https://docs.digitalocean.com/products/networking/load-balancers/)
+ provide a way to distribute traffic across multiple Droplets. By sending
+ requests to the `/v2/load_balancers` endpoint, you can list, create, or
+ delete load balancers as well as add or remove Droplets, forwarding rules,
+ and other configuration details.
+ """
return ForwardingRulesResource(self._client)
@cached_property
@@ -1066,12 +1088,34 @@ def delete_cache(
class AsyncLoadBalancersResource(AsyncAPIResource):
+ """
+ [DigitalOcean Load Balancers](https://docs.digitalocean.com/products/networking/load-balancers/)
+ provide a way to distribute traffic across multiple Droplets. By sending
+ requests to the `/v2/load_balancers` endpoint, you can list, create, or
+ delete load balancers as well as add or remove Droplets, forwarding rules,
+ and other configuration details.
+ """
+
@cached_property
def droplets(self) -> AsyncDropletsResource:
+ """
+ [DigitalOcean Load Balancers](https://docs.digitalocean.com/products/networking/load-balancers/)
+ provide a way to distribute traffic across multiple Droplets. By sending
+ requests to the `/v2/load_balancers` endpoint, you can list, create, or
+ delete load balancers as well as add or remove Droplets, forwarding rules,
+ and other configuration details.
+ """
return AsyncDropletsResource(self._client)
@cached_property
def forwarding_rules(self) -> AsyncForwardingRulesResource:
+ """
+ [DigitalOcean Load Balancers](https://docs.digitalocean.com/products/networking/load-balancers/)
+ provide a way to distribute traffic across multiple Droplets. By sending
+ requests to the `/v2/load_balancers` endpoint, you can list, create, or
+ delete load balancers as well as add or remove Droplets, forwarding rules,
+ and other configuration details.
+ """
return AsyncForwardingRulesResource(self._client)
@cached_property
@@ -2102,10 +2146,24 @@ def __init__(self, load_balancers: LoadBalancersResource) -> None:
@cached_property
def droplets(self) -> DropletsResourceWithRawResponse:
+ """
+ [DigitalOcean Load Balancers](https://docs.digitalocean.com/products/networking/load-balancers/)
+ provide a way to distribute traffic across multiple Droplets. By sending
+ requests to the `/v2/load_balancers` endpoint, you can list, create, or
+ delete load balancers as well as add or remove Droplets, forwarding rules,
+ and other configuration details.
+ """
return DropletsResourceWithRawResponse(self._load_balancers.droplets)
@cached_property
def forwarding_rules(self) -> ForwardingRulesResourceWithRawResponse:
+ """
+ [DigitalOcean Load Balancers](https://docs.digitalocean.com/products/networking/load-balancers/)
+ provide a way to distribute traffic across multiple Droplets. By sending
+ requests to the `/v2/load_balancers` endpoint, you can list, create, or
+ delete load balancers as well as add or remove Droplets, forwarding rules,
+ and other configuration details.
+ """
return ForwardingRulesResourceWithRawResponse(self._load_balancers.forwarding_rules)
@@ -2134,10 +2192,24 @@ def __init__(self, load_balancers: AsyncLoadBalancersResource) -> None:
@cached_property
def droplets(self) -> AsyncDropletsResourceWithRawResponse:
+ """
+ [DigitalOcean Load Balancers](https://docs.digitalocean.com/products/networking/load-balancers/)
+ provide a way to distribute traffic across multiple Droplets. By sending
+ requests to the `/v2/load_balancers` endpoint, you can list, create, or
+ delete load balancers as well as add or remove Droplets, forwarding rules,
+ and other configuration details.
+ """
return AsyncDropletsResourceWithRawResponse(self._load_balancers.droplets)
@cached_property
def forwarding_rules(self) -> AsyncForwardingRulesResourceWithRawResponse:
+ """
+ [DigitalOcean Load Balancers](https://docs.digitalocean.com/products/networking/load-balancers/)
+ provide a way to distribute traffic across multiple Droplets. By sending
+ requests to the `/v2/load_balancers` endpoint, you can list, create, or
+ delete load balancers as well as add or remove Droplets, forwarding rules,
+ and other configuration details.
+ """
return AsyncForwardingRulesResourceWithRawResponse(self._load_balancers.forwarding_rules)
@@ -2166,10 +2238,24 @@ def __init__(self, load_balancers: LoadBalancersResource) -> None:
@cached_property
def droplets(self) -> DropletsResourceWithStreamingResponse:
+ """
+ [DigitalOcean Load Balancers](https://docs.digitalocean.com/products/networking/load-balancers/)
+ provide a way to distribute traffic across multiple Droplets. By sending
+ requests to the `/v2/load_balancers` endpoint, you can list, create, or
+ delete load balancers as well as add or remove Droplets, forwarding rules,
+ and other configuration details.
+ """
return DropletsResourceWithStreamingResponse(self._load_balancers.droplets)
@cached_property
def forwarding_rules(self) -> ForwardingRulesResourceWithStreamingResponse:
+ """
+ [DigitalOcean Load Balancers](https://docs.digitalocean.com/products/networking/load-balancers/)
+ provide a way to distribute traffic across multiple Droplets. By sending
+ requests to the `/v2/load_balancers` endpoint, you can list, create, or
+ delete load balancers as well as add or remove Droplets, forwarding rules,
+ and other configuration details.
+ """
return ForwardingRulesResourceWithStreamingResponse(self._load_balancers.forwarding_rules)
@@ -2198,8 +2284,22 @@ def __init__(self, load_balancers: AsyncLoadBalancersResource) -> None:
@cached_property
def droplets(self) -> AsyncDropletsResourceWithStreamingResponse:
+ """
+ [DigitalOcean Load Balancers](https://docs.digitalocean.com/products/networking/load-balancers/)
+ provide a way to distribute traffic across multiple Droplets. By sending
+ requests to the `/v2/load_balancers` endpoint, you can list, create, or
+ delete load balancers as well as add or remove Droplets, forwarding rules,
+ and other configuration details.
+ """
return AsyncDropletsResourceWithStreamingResponse(self._load_balancers.droplets)
@cached_property
def forwarding_rules(self) -> AsyncForwardingRulesResourceWithStreamingResponse:
+ """
+ [DigitalOcean Load Balancers](https://docs.digitalocean.com/products/networking/load-balancers/)
+ provide a way to distribute traffic across multiple Droplets. By sending
+ requests to the `/v2/load_balancers` endpoint, you can list, create, or
+ delete load balancers as well as add or remove Droplets, forwarding rules,
+ and other configuration details.
+ """
return AsyncForwardingRulesResourceWithStreamingResponse(self._load_balancers.forwarding_rules)
diff --git a/src/gradient/resources/gpu_droplets/sizes.py b/src/gradient/resources/gpu_droplets/sizes.py
index 9893903f..83d77052 100644
--- a/src/gradient/resources/gpu_droplets/sizes.py
+++ b/src/gradient/resources/gpu_droplets/sizes.py
@@ -22,6 +22,17 @@
class SizesResource(SyncAPIResource):
+ """
+ The sizes objects represent different packages of hardware resources that
+ can be used for Droplets. When a Droplet is created, a size must be
+ selected so that the correct resources can be allocated.
+
+ Each size represents a plan that bundles together specific sets of
+ resources. This includes the amount of RAM, the number of virtual CPUs,
+ disk space, and transfer. The size object also includes the pricing
+ details and the regions that the size is available in.
+ """
+
@cached_property
def with_raw_response(self) -> SizesResourceWithRawResponse:
"""
@@ -93,6 +104,17 @@ def list(
class AsyncSizesResource(AsyncAPIResource):
+ """
+ The sizes objects represent different packages of hardware resources that
+ can be used for Droplets. When a Droplet is created, a size must be
+ selected so that the correct resources can be allocated.
+
+ Each size represents a plan that bundles together specific sets of
+ resources. This includes the amount of RAM, the number of virtual CPUs,
+ disk space, and transfer. The size object also includes the pricing
+ details and the regions that the size is available in.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncSizesResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/gpu_droplets/snapshots.py b/src/gradient/resources/gpu_droplets/snapshots.py
index 78bd01ac..12cf20da 100644
--- a/src/gradient/resources/gpu_droplets/snapshots.py
+++ b/src/gradient/resources/gpu_droplets/snapshots.py
@@ -26,6 +26,18 @@
class SnapshotsResource(SyncAPIResource):
+ """
+ [Snapshots](https://docs.digitalocean.com/products/snapshots/) are saved
+ instances of a Droplet or a block storage volume, which is reflected in
+ the `resource_type` attribute. In order to avoid problems with compressing
+ filesystems, each defines a `min_disk_size` attribute which is the minimum
+ size of the Droplet or volume disk when creating a new resource from the
+ saved snapshot.
+
+ To interact with snapshots, you will generally send requests to the
+ snapshots endpoint at `/v2/snapshots`.
+ """
+
@cached_property
def with_raw_response(self) -> SnapshotsResourceWithRawResponse:
"""
@@ -196,6 +208,18 @@ def delete(
class AsyncSnapshotsResource(AsyncAPIResource):
+ """
+ [Snapshots](https://docs.digitalocean.com/products/snapshots/) are saved
+ instances of a Droplet or a block storage volume, which is reflected in
+ the `resource_type` attribute. In order to avoid problems with compressing
+ filesystems, each defines a `min_disk_size` attribute which is the minimum
+ size of the Droplet or volume disk when creating a new resource from the
+ saved snapshot.
+
+ To interact with snapshots, you will generally send requests to the
+ snapshots endpoint at `/v2/snapshots`.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncSnapshotsResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/gpu_droplets/volumes/actions.py b/src/gradient/resources/gpu_droplets/volumes/actions.py
index 1c0c66a0..9b145567 100644
--- a/src/gradient/resources/gpu_droplets/volumes/actions.py
+++ b/src/gradient/resources/gpu_droplets/volumes/actions.py
@@ -33,6 +33,16 @@
class ActionsResource(SyncAPIResource):
+ """
+ Block storage actions are commands that can be given to a DigitalOcean
+ Block Storage Volume. An example would be detaching or attaching a volume
+ from a Droplet. These requests are made on the
+ `/v2/volumes/$VOLUME_ID/actions` endpoint.
+
+ An action object is returned. These objects hold the current status of the
+ requested action.
+ """
+
@cached_property
def with_raw_response(self) -> ActionsResourceWithRawResponse:
"""
@@ -758,6 +768,16 @@ def initiate_by_name(
class AsyncActionsResource(AsyncAPIResource):
+ """
+ Block storage actions are commands that can be given to a DigitalOcean
+ Block Storage Volume. An example would be detaching or attaching a volume
+ from a Droplet. These requests are made on the
+ `/v2/volumes/$VOLUME_ID/actions` endpoint.
+
+ An action object is returned. These objects hold the current status of the
+ requested action.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncActionsResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/gpu_droplets/volumes/snapshots.py b/src/gradient/resources/gpu_droplets/volumes/snapshots.py
index 694de074..8e2d6422 100644
--- a/src/gradient/resources/gpu_droplets/volumes/snapshots.py
+++ b/src/gradient/resources/gpu_droplets/volumes/snapshots.py
@@ -26,6 +26,20 @@
class SnapshotsResource(SyncAPIResource):
+ """
+ [DigitalOcean Block Storage Volumes](https://docs.digitalocean.com/products/volumes/)
+ provide expanded storage capacity for your Droplets and can be moved
+ between Droplets within a specific region.
+
+ Volumes function as raw block devices, meaning they appear to the
+ operating system as locally attached storage which can be formatted using
+ any file system supported by the OS. They may be created in sizes from
+ 1GiB to 16TiB.
+
+ By sending requests to the `/v2/volumes` endpoint, you can list, create, or
+ delete volumes as well as attach and detach them from Droplets
+ """
+
@cached_property
def with_raw_response(self) -> SnapshotsResourceWithRawResponse:
"""
@@ -227,6 +241,20 @@ def delete(
class AsyncSnapshotsResource(AsyncAPIResource):
+ """
+ [DigitalOcean Block Storage Volumes](https://docs.digitalocean.com/products/volumes/)
+ provide expanded storage capacity for your Droplets and can be moved
+ between Droplets within a specific region.
+
+ Volumes function as raw block devices, meaning they appear to the
+ operating system as locally attached storage which can be formatted using
+ any file system supported by the OS. They may be created in sizes from
+ 1GiB to 16TiB.
+
+ By sending requests to the `/v2/volumes` endpoint, you can list, create, or
+ delete volumes as well as attach and detach them from Droplets.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncSnapshotsResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/gpu_droplets/volumes/volumes.py b/src/gradient/resources/gpu_droplets/volumes/volumes.py
index fb86c288..76b1db6b 100644
--- a/src/gradient/resources/gpu_droplets/volumes/volumes.py
+++ b/src/gradient/resources/gpu_droplets/volumes/volumes.py
@@ -43,12 +43,48 @@
class VolumesResource(SyncAPIResource):
+ """
+ [DigitalOcean Block Storage Volumes](https://docs.digitalocean.com/products/volumes/)
+ provide expanded storage capacity for your Droplets and can be moved
+ between Droplets within a specific region.
+
+ Volumes function as raw block devices, meaning they appear to the
+ operating system as locally attached storage which can be formatted using
+ any file system supported by the OS. They may be created in sizes from
+ 1GiB to 16TiB.
+
+ By sending requests to the `/v2/volumes` endpoint, you can list, create, or
+ delete volumes as well as attach and detach them from Droplets.
+ """
+
@cached_property
def actions(self) -> ActionsResource:
+ """
+ Block storage actions are commands that can be given to a DigitalOcean
+ Block Storage Volume. An example would be detaching or attaching a volume
+ from a Droplet. These requests are made on the
+ `/v2/volumes/$VOLUME_ID/actions` endpoint.
+
+ An action object is returned. These objects hold the current status of the
+ requested action.
+ """
return ActionsResource(self._client)
@cached_property
def snapshots(self) -> SnapshotsResource:
+ """
+ [DigitalOcean Block Storage Volumes](https://docs.digitalocean.com/products/volumes/)
+ provide expanded storage capacity for your Droplets and can be moved
+ between Droplets within a specific region.
+
+ Volumes function as raw block devices, meaning they appear to the
+ operating system as locally attached storage which can be formatted using
+ any file system supported by the OS. They may be created in sizes from
+ 1GiB to 16TiB.
+
+ By sending requests to the `/v2/volumes` endpoint, you can list, create, or
+ delete volumes as well as attach and detach them from Droplets.
+ """
return SnapshotsResource(self._client)
@cached_property
@@ -536,12 +572,48 @@ def delete_by_name(
class AsyncVolumesResource(AsyncAPIResource):
+ """
+ [DigitalOcean Block Storage Volumes](https://docs.digitalocean.com/products/volumes/)
+ provide expanded storage capacity for your Droplets and can be moved
+ between Droplets within a specific region.
+
+ Volumes function as raw block devices, meaning they appear to the
+ operating system as locally attached storage which can be formatted using
+ any file system supported by the OS. They may be created in sizes from
+ 1GiB to 16TiB.
+
+ By sending requests to the `/v2/volumes` endpoint, you can list, create, or
+ delete volumes as well as attach and detach them from Droplets.
+ """
+
@cached_property
def actions(self) -> AsyncActionsResource:
+ """
+ Block storage actions are commands that can be given to a DigitalOcean
+ Block Storage Volume. An example would be detaching or attaching a volume
+ from a Droplet. These requests are made on the
+ `/v2/volumes/$VOLUME_ID/actions` endpoint.
+
+ An action object is returned. These objects hold the current status of the
+ requested action.
+ """
return AsyncActionsResource(self._client)
@cached_property
def snapshots(self) -> AsyncSnapshotsResource:
+ """
+ [DigitalOcean Block Storage Volumes](https://docs.digitalocean.com/products/volumes/)
+ provide expanded storage capacity for your Droplets and can be moved
+ between Droplets within a specific region.
+
+ Volumes function as raw block devices, meaning they appear to the
+ operating system as locally attached storage which can be formatted using
+ any file system supported by the OS. They may be created in sizes from
+ 1GiB to 16TiB.
+
+ By sending requests to the `/v2/volumes` endpoint, you can list, create, or
+ delete volumes as well as attach and detach them from Droplets.
+ """
return AsyncSnapshotsResource(self._client)
@cached_property
@@ -1050,10 +1122,32 @@ def __init__(self, volumes: VolumesResource) -> None:
@cached_property
def actions(self) -> ActionsResourceWithRawResponse:
+ """
+ Block storage actions are commands that can be given to a DigitalOcean
+ Block Storage Volume. An example would be detaching or attaching a volume
+ from a Droplet. These requests are made on the
+ `/v2/volumes/$VOLUME_ID/actions` endpoint.
+
+ An action object is returned. These objects hold the current status of the
+ requested action.
+ """
return ActionsResourceWithRawResponse(self._volumes.actions)
@cached_property
def snapshots(self) -> SnapshotsResourceWithRawResponse:
+ """
+ [DigitalOcean Block Storage Volumes](https://docs.digitalocean.com/products/volumes/)
+ provide expanded storage capacity for your Droplets and can be moved
+ between Droplets within a specific region.
+
+ Volumes function as raw block devices, meaning they appear to the
+ operating system as locally attached storage which can be formatted using
+ any file system supported by the OS. They may be created in sizes from
+ 1GiB to 16TiB.
+
+ By sending requests to the `/v2/volumes` endpoint, you can list, create, or
+ delete volumes as well as attach and detach them from Droplets.
+ """
return SnapshotsResourceWithRawResponse(self._volumes.snapshots)
@@ -1079,10 +1173,32 @@ def __init__(self, volumes: AsyncVolumesResource) -> None:
@cached_property
def actions(self) -> AsyncActionsResourceWithRawResponse:
+ """
+ Block storage actions are commands that can be given to a DigitalOcean
+ Block Storage Volume. An example would be detaching or attaching a volume
+ from a Droplet. These requests are made on the
+ `/v2/volumes/$VOLUME_ID/actions` endpoint.
+
+ An action object is returned. These objects hold the current status of the
+ requested action.
+ """
return AsyncActionsResourceWithRawResponse(self._volumes.actions)
@cached_property
def snapshots(self) -> AsyncSnapshotsResourceWithRawResponse:
+ """
+ [DigitalOcean Block Storage Volumes](https://docs.digitalocean.com/products/volumes/)
+ provide expanded storage capacity for your Droplets and can be moved
+ between Droplets within a specific region.
+
+ Volumes function as raw block devices, meaning they appear to the
+ operating system as locally attached storage which can be formatted using
+ any file system supported by the OS. They may be created in sizes from
+ 1GiB to 16TiB.
+
+ By sending requests to the `/v2/volumes` endpoint, you can list, create, or
+ delete volumes as well as attach and detach them from Droplets.
+ """
return AsyncSnapshotsResourceWithRawResponse(self._volumes.snapshots)
@@ -1108,10 +1224,32 @@ def __init__(self, volumes: VolumesResource) -> None:
@cached_property
def actions(self) -> ActionsResourceWithStreamingResponse:
+ """
+ Block storage actions are commands that can be given to a DigitalOcean
+ Block Storage Volume. An example would be detaching or attaching a volume
+ from a Droplet. These requests are made on the
+ `/v2/volumes/$VOLUME_ID/actions` endpoint.
+
+ An action object is returned. These objects hold the current status of the
+ requested action.
+ """
return ActionsResourceWithStreamingResponse(self._volumes.actions)
@cached_property
def snapshots(self) -> SnapshotsResourceWithStreamingResponse:
+ """
+ [DigitalOcean Block Storage Volumes](https://docs.digitalocean.com/products/volumes/)
+ provide expanded storage capacity for your Droplets and can be moved
+ between Droplets within a specific region.
+
+ Volumes function as raw block devices, meaning they appear to the
+ operating system as locally attached storage which can be formatted using
+ any file system supported by the OS. They may be created in sizes from
+ 1GiB to 16TiB.
+
+ By sending requests to the `/v2/volumes` endpoint, you can list, create, or
+ delete volumes as well as attach and detach them from Droplets.
+ """
return SnapshotsResourceWithStreamingResponse(self._volumes.snapshots)
@@ -1137,8 +1275,30 @@ def __init__(self, volumes: AsyncVolumesResource) -> None:
@cached_property
def actions(self) -> AsyncActionsResourceWithStreamingResponse:
+ """
+ Block storage actions are commands that can be given to a DigitalOcean
+ Block Storage Volume. An example would be detaching or attaching a volume
+ from a Droplet. These requests are made on the
+ `/v2/volumes/$VOLUME_ID/actions` endpoint.
+
+ An action object is returned. These objects hold the current status of the
+ requested action.
+ """
return AsyncActionsResourceWithStreamingResponse(self._volumes.actions)
@cached_property
def snapshots(self) -> AsyncSnapshotsResourceWithStreamingResponse:
+ """
+ [DigitalOcean Block Storage Volumes](https://docs.digitalocean.com/products/volumes/)
+ provide expanded storage capacity for your Droplets and can be moved
+ between Droplets within a specific region.
+
+ Volumes function as raw block devices, meaning they appear to the
+ operating system as locally attached storage which can be formatted using
+ any file system supported by the OS. They may be created in sizes from
+ 1GiB to 16TiB.
+
+ By sending requests to the `/v2/volumes` endpoint, you can list, create, or
+ delete volumes as well as attach and detach them from Droplets.
+ """
return AsyncSnapshotsResourceWithStreamingResponse(self._volumes.snapshots)
diff --git a/src/gradient/resources/images.py b/src/gradient/resources/images.py
index 14bbfeaa..c790345a 100644
--- a/src/gradient/resources/images.py
+++ b/src/gradient/resources/images.py
@@ -27,6 +27,8 @@
class ImagesResource(SyncAPIResource):
+ """Generate images from text prompts using various AI models."""
+
@cached_property
def with_raw_response(self) -> ImagesResourceWithRawResponse:
"""
@@ -349,6 +351,8 @@ def generate(
class AsyncImagesResource(AsyncAPIResource):
+ """Generate images from text prompts using various AI models."""
+
@cached_property
def with_raw_response(self) -> AsyncImagesResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/inference/api_keys.py b/src/gradient/resources/inference/api_keys.py
index 8dfa54e1..045c6f41 100644
--- a/src/gradient/resources/inference/api_keys.py
+++ b/src/gradient/resources/inference/api_keys.py
@@ -26,6 +26,10 @@
class APIKeysResource(SyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> APIKeysResourceWithRawResponse:
"""
@@ -252,6 +256,10 @@ def update_regenerate(
class AsyncAPIKeysResource(AsyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncAPIKeysResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/inference/inference.py b/src/gradient/resources/inference/inference.py
index d22543b3..1da78154 100644
--- a/src/gradient/resources/inference/inference.py
+++ b/src/gradient/resources/inference/inference.py
@@ -19,6 +19,9 @@
class InferenceResource(SyncAPIResource):
@cached_property
def api_keys(self) -> APIKeysResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return APIKeysResource(self._client)
@cached_property
@@ -44,6 +47,9 @@ def with_streaming_response(self) -> InferenceResourceWithStreamingResponse:
class AsyncInferenceResource(AsyncAPIResource):
@cached_property
def api_keys(self) -> AsyncAPIKeysResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncAPIKeysResource(self._client)
@cached_property
@@ -72,6 +78,9 @@ def __init__(self, inference: InferenceResource) -> None:
@cached_property
def api_keys(self) -> APIKeysResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return APIKeysResourceWithRawResponse(self._inference.api_keys)
@@ -81,6 +90,9 @@ def __init__(self, inference: AsyncInferenceResource) -> None:
@cached_property
def api_keys(self) -> AsyncAPIKeysResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncAPIKeysResourceWithRawResponse(self._inference.api_keys)
@@ -90,6 +102,9 @@ def __init__(self, inference: InferenceResource) -> None:
@cached_property
def api_keys(self) -> APIKeysResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return APIKeysResourceWithStreamingResponse(self._inference.api_keys)
@@ -99,4 +114,7 @@ def __init__(self, inference: AsyncInferenceResource) -> None:
@cached_property
def api_keys(self) -> AsyncAPIKeysResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncAPIKeysResourceWithStreamingResponse(self._inference.api_keys)
diff --git a/src/gradient/resources/knowledge_bases/data_sources.py b/src/gradient/resources/knowledge_bases/data_sources.py
index 6c339108..5c6eafd2 100644
--- a/src/gradient/resources/knowledge_bases/data_sources.py
+++ b/src/gradient/resources/knowledge_bases/data_sources.py
@@ -37,6 +37,10 @@
class DataSourcesResource(SyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> DataSourcesResourceWithRawResponse:
"""
@@ -350,6 +354,10 @@ def create_presigned_urls(
class AsyncDataSourcesResource(AsyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncDataSourcesResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/knowledge_bases/indexing_jobs.py b/src/gradient/resources/knowledge_bases/indexing_jobs.py
index 2759c3fa..47db5b10 100644
--- a/src/gradient/resources/knowledge_bases/indexing_jobs.py
+++ b/src/gradient/resources/knowledge_bases/indexing_jobs.py
@@ -35,6 +35,10 @@
class IndexingJobsResource(SyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> IndexingJobsResourceWithRawResponse:
"""
@@ -403,6 +407,10 @@ def wait_for_completion(
class AsyncIndexingJobsResource(AsyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncIndexingJobsResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/knowledge_bases/knowledge_bases.py b/src/gradient/resources/knowledge_bases/knowledge_bases.py
index 9c22a60c..dd4c3a8f 100644
--- a/src/gradient/resources/knowledge_bases/knowledge_bases.py
+++ b/src/gradient/resources/knowledge_bases/knowledge_bases.py
@@ -79,12 +79,22 @@ class KnowledgeBaseTimeoutError(Exception):
class KnowledgeBasesResource(SyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def data_sources(self) -> DataSourcesResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return DataSourcesResource(self._client)
@cached_property
def indexing_jobs(self) -> IndexingJobsResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return IndexingJobsResource(self._client)
@cached_property
@@ -507,12 +517,22 @@ def list_indexing_jobs(
class AsyncKnowledgeBasesResource(AsyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def data_sources(self) -> AsyncDataSourcesResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncDataSourcesResource(self._client)
@cached_property
def indexing_jobs(self) -> AsyncIndexingJobsResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncIndexingJobsResource(self._client)
@cached_property
@@ -964,10 +984,16 @@ def __init__(self, knowledge_bases: KnowledgeBasesResource) -> None:
@cached_property
def data_sources(self) -> DataSourcesResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return DataSourcesResourceWithRawResponse(self._knowledge_bases.data_sources)
@cached_property
def indexing_jobs(self) -> IndexingJobsResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return IndexingJobsResourceWithRawResponse(self._knowledge_bases.indexing_jobs)
@@ -999,10 +1025,16 @@ def __init__(self, knowledge_bases: AsyncKnowledgeBasesResource) -> None:
@cached_property
def data_sources(self) -> AsyncDataSourcesResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncDataSourcesResourceWithRawResponse(self._knowledge_bases.data_sources)
@cached_property
def indexing_jobs(self) -> AsyncIndexingJobsResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncIndexingJobsResourceWithRawResponse(self._knowledge_bases.indexing_jobs)
@@ -1034,10 +1066,16 @@ def __init__(self, knowledge_bases: KnowledgeBasesResource) -> None:
@cached_property
def data_sources(self) -> DataSourcesResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return DataSourcesResourceWithStreamingResponse(self._knowledge_bases.data_sources)
@cached_property
def indexing_jobs(self) -> IndexingJobsResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return IndexingJobsResourceWithStreamingResponse(self._knowledge_bases.indexing_jobs)
@@ -1069,8 +1107,14 @@ def __init__(self, knowledge_bases: AsyncKnowledgeBasesResource) -> None:
@cached_property
def data_sources(self) -> AsyncDataSourcesResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncDataSourcesResourceWithStreamingResponse(self._knowledge_bases.data_sources)
@cached_property
def indexing_jobs(self) -> AsyncIndexingJobsResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncIndexingJobsResourceWithStreamingResponse(self._knowledge_bases.indexing_jobs)
diff --git a/src/gradient/resources/models/models.py b/src/gradient/resources/models/models.py
index 650c49c9..568fc325 100644
--- a/src/gradient/resources/models/models.py
+++ b/src/gradient/resources/models/models.py
@@ -33,6 +33,10 @@
class ModelsResource(SyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def providers(self) -> ProvidersResource:
return ProvidersResource(self._client)
@@ -134,6 +138,10 @@ def list(
class AsyncModelsResource(AsyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def providers(self) -> AsyncProvidersResource:
return AsyncProvidersResource(self._client)
diff --git a/src/gradient/resources/models/providers/anthropic.py b/src/gradient/resources/models/providers/anthropic.py
index 33b2ec80..ce44fe06 100644
--- a/src/gradient/resources/models/providers/anthropic.py
+++ b/src/gradient/resources/models/providers/anthropic.py
@@ -32,6 +32,10 @@
class AnthropicResource(SyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> AnthropicResourceWithRawResponse:
"""
@@ -324,6 +328,10 @@ def list_agents(
class AsyncAnthropicResource(AsyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncAnthropicResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/models/providers/openai.py b/src/gradient/resources/models/providers/openai.py
index 5bdc3f20..e048a32f 100644
--- a/src/gradient/resources/models/providers/openai.py
+++ b/src/gradient/resources/models/providers/openai.py
@@ -32,6 +32,10 @@
class OpenAIResource(SyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> OpenAIResourceWithRawResponse:
"""
@@ -322,6 +326,10 @@ def retrieve_agents(
class AsyncOpenAIResource(AsyncAPIResource):
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncOpenAIResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/models/providers/providers.py b/src/gradient/resources/models/providers/providers.py
index efb71ec5..b77bf5af 100644
--- a/src/gradient/resources/models/providers/providers.py
+++ b/src/gradient/resources/models/providers/providers.py
@@ -27,10 +27,16 @@
class ProvidersResource(SyncAPIResource):
@cached_property
def anthropic(self) -> AnthropicResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AnthropicResource(self._client)
@cached_property
def openai(self) -> OpenAIResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return OpenAIResource(self._client)
@cached_property
@@ -56,10 +62,16 @@ def with_streaming_response(self) -> ProvidersResourceWithStreamingResponse:
class AsyncProvidersResource(AsyncAPIResource):
@cached_property
def anthropic(self) -> AsyncAnthropicResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncAnthropicResource(self._client)
@cached_property
def openai(self) -> AsyncOpenAIResource:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncOpenAIResource(self._client)
@cached_property
@@ -88,10 +100,16 @@ def __init__(self, providers: ProvidersResource) -> None:
@cached_property
def anthropic(self) -> AnthropicResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AnthropicResourceWithRawResponse(self._providers.anthropic)
@cached_property
def openai(self) -> OpenAIResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return OpenAIResourceWithRawResponse(self._providers.openai)
@@ -101,10 +119,16 @@ def __init__(self, providers: AsyncProvidersResource) -> None:
@cached_property
def anthropic(self) -> AsyncAnthropicResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncAnthropicResourceWithRawResponse(self._providers.anthropic)
@cached_property
def openai(self) -> AsyncOpenAIResourceWithRawResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncOpenAIResourceWithRawResponse(self._providers.openai)
@@ -114,10 +138,16 @@ def __init__(self, providers: ProvidersResource) -> None:
@cached_property
def anthropic(self) -> AnthropicResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AnthropicResourceWithStreamingResponse(self._providers.anthropic)
@cached_property
def openai(self) -> OpenAIResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return OpenAIResourceWithStreamingResponse(self._providers.openai)
@@ -127,8 +157,14 @@ def __init__(self, providers: AsyncProvidersResource) -> None:
@cached_property
def anthropic(self) -> AsyncAnthropicResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncAnthropicResourceWithStreamingResponse(self._providers.anthropic)
@cached_property
def openai(self) -> AsyncOpenAIResourceWithStreamingResponse:
+ """
+ The API lets you build GPU-powered AI agents with pre-built or custom foundation models, function and agent routes, and RAG pipelines with knowledge bases.
+ """
return AsyncOpenAIResourceWithStreamingResponse(self._providers.openai)
diff --git a/src/gradient/resources/nfs/nfs.py b/src/gradient/resources/nfs/nfs.py
index a46df265..a4e47d9c 100644
--- a/src/gradient/resources/nfs/nfs.py
+++ b/src/gradient/resources/nfs/nfs.py
@@ -37,6 +37,10 @@
class NfsResource(SyncAPIResource):
@cached_property
def snapshots(self) -> SnapshotsResource:
+ """
+ NFS lets you create fully managed, POSIX-compliant network file storage that delivers secure,
+ high-performance shared storage right inside your VPC. This enables seamless data sharing across Droplets in a VPC.
+ """
return SnapshotsResource(self._client)
@cached_property
@@ -450,6 +454,10 @@ def initiate_action(
class AsyncNfsResource(AsyncAPIResource):
@cached_property
def snapshots(self) -> AsyncSnapshotsResource:
+ """
+ NFS lets you create fully managed, POSIX-compliant network file storage that delivers secure,
+ high-performance shared storage right inside your VPC. This enables seamless data sharing across Droplets in a VPC.
+ """
return AsyncSnapshotsResource(self._client)
@cached_property
@@ -882,6 +890,10 @@ def __init__(self, nfs: NfsResource) -> None:
@cached_property
def snapshots(self) -> SnapshotsResourceWithRawResponse:
+ """
+ NFS lets you create fully managed, POSIX-compliant network file storage that delivers secure,
+ high-performance shared storage right inside your VPC. This enables seamless data sharing across Droplets in a VPC.
+ """
return SnapshotsResourceWithRawResponse(self._nfs.snapshots)
@@ -907,6 +919,10 @@ def __init__(self, nfs: AsyncNfsResource) -> None:
@cached_property
def snapshots(self) -> AsyncSnapshotsResourceWithRawResponse:
+ """
+ NFS lets you create fully managed, POSIX-compliant network file storage that delivers secure,
+ high-performance shared storage right inside your VPC. This enables seamless data sharing across Droplets in a VPC.
+ """
return AsyncSnapshotsResourceWithRawResponse(self._nfs.snapshots)
@@ -932,6 +948,10 @@ def __init__(self, nfs: NfsResource) -> None:
@cached_property
def snapshots(self) -> SnapshotsResourceWithStreamingResponse:
+ """
+ NFS lets you create fully managed, POSIX-compliant network file storage that delivers secure,
+ high-performance shared storage right inside your VPC. This enables seamless data sharing across Droplets in a VPC.
+ """
return SnapshotsResourceWithStreamingResponse(self._nfs.snapshots)
@@ -957,4 +977,8 @@ def __init__(self, nfs: AsyncNfsResource) -> None:
@cached_property
def snapshots(self) -> AsyncSnapshotsResourceWithStreamingResponse:
+ """
+ NFS lets you create fully managed, POSIX-compliant network file storage that delivers secure,
+ high-performance shared storage right inside your VPC. This enables seamless data sharing across Droplets in a VPC.
+ """
return AsyncSnapshotsResourceWithStreamingResponse(self._nfs.snapshots)
diff --git a/src/gradient/resources/nfs/snapshots.py b/src/gradient/resources/nfs/snapshots.py
index 65b56e03..4e694ce6 100644
--- a/src/gradient/resources/nfs/snapshots.py
+++ b/src/gradient/resources/nfs/snapshots.py
@@ -23,6 +23,11 @@
class SnapshotsResource(SyncAPIResource):
+ """
+ NFS lets you create fully managed, POSIX-compliant network file storage that delivers secure,
+ high-performance shared storage right inside your VPC. This enables seamless data sharing across Droplets in a VPC.
+ """
+
@cached_property
def with_raw_response(self) -> SnapshotsResourceWithRawResponse:
"""
@@ -191,6 +196,11 @@ def delete(
class AsyncSnapshotsResource(AsyncAPIResource):
+ """
+ NFS lets you create fully managed, POSIX-compliant network file storage that delivers secure,
+ high-performance shared storage right inside your VPC. This enables seamless data sharing across Droplets in a VPC.
+ """
+
@cached_property
def with_raw_response(self) -> AsyncSnapshotsResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/regions.py b/src/gradient/resources/regions.py
index 3b0f22fa..e89f7c0c 100644
--- a/src/gradient/resources/regions.py
+++ b/src/gradient/resources/regions.py
@@ -22,6 +22,8 @@
class RegionsResource(SyncAPIResource):
+ """Provides information about DigitalOcean data center regions."""
+
@cached_property
def with_raw_response(self) -> RegionsResourceWithRawResponse:
"""
@@ -92,6 +94,8 @@ def list(
class AsyncRegionsResource(AsyncAPIResource):
+ """Provides information about DigitalOcean data center regions."""
+
@cached_property
def with_raw_response(self) -> AsyncRegionsResourceWithRawResponse:
"""
diff --git a/src/gradient/resources/responses.py b/src/gradient/resources/responses.py
index d0892fa9..936a97a8 100644
--- a/src/gradient/resources/responses.py
+++ b/src/gradient/resources/responses.py
@@ -27,6 +27,8 @@
class ResponsesResource(SyncAPIResource):
+ """Generate text-to-text responses from text prompts."""
+
@cached_property
def with_raw_response(self) -> ResponsesResourceWithRawResponse:
"""
@@ -426,6 +428,8 @@ def create(
class AsyncResponsesResource(AsyncAPIResource):
+ """Generate text-to-text responses from text prompts."""
+
@cached_property
def with_raw_response(self) -> AsyncResponsesResourceWithRawResponse:
"""
From 7b8d01625e173bf89a15fdcf8eb665f126b1ecf4 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
<142633134+stainless-app[bot]@users.noreply.github.com>
Date: Tue, 3 Mar 2026 11:15:26 +0000
Subject: [PATCH 12/17] codegen metadata
---
.stats.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.stats.yml b/.stats.yml
index 0d591538..903f66b5 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
configured_endpoints: 193
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/digitalocean%2Fgradient-2344b44246a44d39ad5b74d3077bd2958745aad67feb15970756532fa0b3f9d6.yml
openapi_spec_hash: a1913979235ce152a8dc380fabe5362e
-config_hash: 6c9a04f3cc5dd88e1e4f0ae42d98ba9a
+config_hash: eba2c4c8469d877f806f66ac2a91f4ec
From 56acb2e47da30e09ab6cbf42c285bf6480a376fe Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
<142633134+stainless-app[bot]@users.noreply.github.com>
Date: Thu, 5 Mar 2026 13:00:53 +0000
Subject: [PATCH 13/17] chore(internal): codegen related update
---
src/gradient/types/response_create_params.py | 14 +++-----------
1 file changed, 3 insertions(+), 11 deletions(-)
diff --git a/src/gradient/types/response_create_params.py b/src/gradient/types/response_create_params.py
index 9b870f3c..42716bb3 100644
--- a/src/gradient/types/response_create_params.py
+++ b/src/gradient/types/response_create_params.py
@@ -130,7 +130,7 @@ class ResponseCreateParamsBase(TypedDict, total=False):
"""
-class InputUnionMember1UnionMember0ContentTyped(TypedDict, total=False):
+class InputUnionMember1UnionMember0Content(TypedDict, total=False, extra_items=object): # type: ignore[call-arg]
text: str
"""The reasoning text content"""
@@ -138,10 +138,7 @@ class InputUnionMember1UnionMember0ContentTyped(TypedDict, total=False):
"""The type of content"""
-InputUnionMember1UnionMember0Content: TypeAlias = Union[InputUnionMember1UnionMember0ContentTyped, Dict[str, object]]
-
-
-class InputUnionMember1UnionMember0Typed(TypedDict, total=False):
+class InputUnionMember1UnionMember0(TypedDict, total=False, extra_items=object): # type: ignore[call-arg]
type: Required[Literal["function_call", "function_call_output", "reasoning"]]
"""
The type of input item (must be function_call, function_call_output, or
@@ -176,9 +173,6 @@ class InputUnionMember1UnionMember0Typed(TypedDict, total=False):
"""Summary of the reasoning (optional for reasoning)"""
-InputUnionMember1UnionMember0: TypeAlias = Union[InputUnionMember1UnionMember0Typed, Dict[str, object]]
-
-
class InputUnionMember1UnionMember1ContentUnionMember1UnionMember0(TypedDict, total=False):
text: Required[str]
"""The text content"""
@@ -218,7 +212,7 @@ class InputUnionMember1UnionMember1ToolCall(TypedDict, total=False):
"""The type of the tool. Currently, only `function` is supported."""
-class InputUnionMember1UnionMember1Typed(TypedDict, total=False):
+class InputUnionMember1UnionMember1(TypedDict, total=False, extra_items=object): # type: ignore[call-arg]
content: Required[Union[str, Iterable[InputUnionMember1UnionMember1ContentUnionMember1]]]
"""The content of the message (string or content parts array)"""
@@ -235,8 +229,6 @@ class InputUnionMember1UnionMember1Typed(TypedDict, total=False):
"""Optional type identifier for message items (used by some clients like Codex)"""
-InputUnionMember1UnionMember1: TypeAlias = Union[InputUnionMember1UnionMember1Typed, Dict[str, object]]
-
InputUnionMember1: TypeAlias = Union[InputUnionMember1UnionMember0, InputUnionMember1UnionMember1]
From 6f506cb1dae5679b5ed992822658f333e382340e Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
<142633134+stainless-app[bot]@users.noreply.github.com>
Date: Sat, 7 Mar 2026 14:58:13 +0000
Subject: [PATCH 14/17] chore(internal): codegen related update
---
.github/workflows/ci.yml | 8 ++++++--
1 file changed, 6 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 9fb3fd00..d53cf87b 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -61,14 +61,18 @@ jobs:
run: rye build
- name: Get GitHub OIDC Token
- if: github.repository == 'stainless-sdks/gradient-python'
+ if: |-
+ github.repository == 'stainless-sdks/gradient-python' &&
+ !startsWith(github.ref, 'refs/heads/stl/')
id: github-oidc
uses: actions/github-script@v8
with:
script: core.setOutput('github_token', await core.getIDToken());
- name: Upload tarball
- if: github.repository == 'stainless-sdks/gradient-python'
+ if: |-
+ github.repository == 'stainless-sdks/gradient-python' &&
+ !startsWith(github.ref, 'refs/heads/stl/')
env:
URL: https://pkg.stainless.com/s
AUTH: ${{ steps.github-oidc.outputs.github_token }}
From 36b6036ffdb090434d0e0596b01baf0fe2bebe1e Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
<142633134+stainless-app[bot]@users.noreply.github.com>
Date: Mon, 9 Mar 2026 15:38:18 +0000
Subject: [PATCH 15/17] codegen metadata
---
.stats.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.stats.yml b/.stats.yml
index 903f66b5..ba3a611d 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
configured_endpoints: 193
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/digitalocean%2Fgradient-2344b44246a44d39ad5b74d3077bd2958745aad67feb15970756532fa0b3f9d6.yml
openapi_spec_hash: a1913979235ce152a8dc380fabe5362e
-config_hash: eba2c4c8469d877f806f66ac2a91f4ec
+config_hash: 13e570f98198e8fd1dfcb7ca59d73e0d
From 2d4da01e55e7f7a96aea98232ef05b28ebbcbada Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
<142633134+stainless-app[bot]@users.noreply.github.com>
Date: Wed, 11 Mar 2026 16:19:34 +0000
Subject: [PATCH 16/17] feat(api): update urls for endpoints
---
.stats.yml | 6 +-
.../resources/agents/chat/completions.py | 28 ++
src/gradient/resources/chat/completions.py | 28 ++
src/gradient/resources/nfs/nfs.py | 264 ++++++++++++------
src/gradient/resources/nfs/snapshots.py | 12 +-
.../agents/chat/completion_create_params.py | 112 +++++++-
.../types/chat/completion_create_params.py | 112 +++++++-
src/gradient/types/nf_create_params.py | 3 +
src/gradient/types/nf_delete_params.py | 4 +-
.../types/nf_initiate_action_params.py | 48 +++-
src/gradient/types/nf_list_params.py | 4 +-
src/gradient/types/nf_retrieve_params.py | 4 +-
.../types/nfs/snapshot_delete_params.py | 4 +-
.../types/nfs/snapshot_list_params.py | 4 +-
.../types/nfs/snapshot_retrieve_params.py | 4 +-
.../agents/chat/test_completions.py | 4 +
tests/api_resources/chat/test_completions.py | 4 +
tests/api_resources/nfs/test_snapshots.py | 68 +++--
tests/api_resources/test_nfs.py | 258 ++++++++++++-----
19 files changed, 761 insertions(+), 210 deletions(-)
diff --git a/.stats.yml b/.stats.yml
index ba3a611d..fa21fe25 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
configured_endpoints: 193
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/digitalocean%2Fgradient-2344b44246a44d39ad5b74d3077bd2958745aad67feb15970756532fa0b3f9d6.yml
-openapi_spec_hash: a1913979235ce152a8dc380fabe5362e
-config_hash: 13e570f98198e8fd1dfcb7ca59d73e0d
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/digitalocean%2Fgradient-93df5cb3d61c1bbcab2a4bbf3f0775454bd185a400bc140c61dbf36e108dbf89.yml
+openapi_spec_hash: 0126adf782c5feac3d8e682e466c3cf7
+config_hash: 3f968a57adb20643373c134efc9af01a
diff --git a/src/gradient/resources/agents/chat/completions.py b/src/gradient/resources/agents/chat/completions.py
index fdad67bb..619a2712 100644
--- a/src/gradient/resources/agents/chat/completions.py
+++ b/src/gradient/resources/agents/chat/completions.py
@@ -64,6 +64,7 @@ def create(
metadata: Optional[Dict[str, str]] | Omit = omit,
n: Optional[int] | Omit = omit,
presence_penalty: Optional[float] | Omit = omit,
+ reasoning_effort: Optional[Literal["none", "minimal", "low", "medium", "high", "xhigh"]] | Omit = omit,
stop: Union[Optional[str], SequenceNotStr[str], None] | Omit = omit,
stream: Optional[Literal[False]] | Omit = omit,
stream_options: Optional[completion_create_params.StreamOptions] | Omit = omit,
@@ -129,6 +130,9 @@ def create(
whether they appear in the text so far, increasing the model's likelihood to
talk about new topics.
+ reasoning_effort: Constrains effort on reasoning for reasoning models. Reducing reasoning effort
+ can result in faster responses and fewer tokens used on reasoning in a response.
+
stop: Up to 4 sequences where the API will stop generating further tokens. The
returned text will not contain the stop sequence.
@@ -193,6 +197,7 @@ def create(
metadata: Optional[Dict[str, str]] | Omit = omit,
n: Optional[int] | Omit = omit,
presence_penalty: Optional[float] | Omit = omit,
+ reasoning_effort: Optional[Literal["none", "minimal", "low", "medium", "high", "xhigh"]] | Omit = omit,
stop: Union[Optional[str], SequenceNotStr[str], None] | Omit = omit,
stream_options: Optional[completion_create_params.StreamOptions] | Omit = omit,
temperature: Optional[float] | Omit = omit,
@@ -260,6 +265,9 @@ def create(
whether they appear in the text so far, increasing the model's likelihood to
talk about new topics.
+ reasoning_effort: Constrains effort on reasoning for reasoning models. Reducing reasoning effort
+ can result in faster responses and fewer tokens used on reasoning in a response.
+
stop: Up to 4 sequences where the API will stop generating further tokens. The
returned text will not contain the stop sequence.
@@ -321,6 +329,7 @@ def create(
metadata: Optional[Dict[str, str]] | Omit = omit,
n: Optional[int] | Omit = omit,
presence_penalty: Optional[float] | Omit = omit,
+ reasoning_effort: Optional[Literal["none", "minimal", "low", "medium", "high", "xhigh"]] | Omit = omit,
stop: Union[Optional[str], SequenceNotStr[str], None] | Omit = omit,
stream_options: Optional[completion_create_params.StreamOptions] | Omit = omit,
temperature: Optional[float] | Omit = omit,
@@ -388,6 +397,9 @@ def create(
whether they appear in the text so far, increasing the model's likelihood to
talk about new topics.
+ reasoning_effort: Constrains effort on reasoning for reasoning models. Reducing reasoning effort
+ can result in faster responses and fewer tokens used on reasoning in a response.
+
stop: Up to 4 sequences where the API will stop generating further tokens. The
returned text will not contain the stop sequence.
@@ -451,6 +463,7 @@ def create(
metadata: Optional[Dict[str, str]] | Omit = omit,
n: Optional[int] | Omit = omit,
presence_penalty: Optional[float] | Omit = omit,
+ reasoning_effort: Optional[Literal["none", "minimal", "low", "medium", "high", "xhigh"]] | Omit = omit,
stop: Union[Optional[str], SequenceNotStr[str], None] | Omit = omit,
stream: Optional[Literal[False]] | Literal[True] | Omit = omit,
stream_options: Optional[completion_create_params.StreamOptions] | Omit = omit,
@@ -491,6 +504,7 @@ def create(
"metadata": metadata,
"n": n,
"presence_penalty": presence_penalty,
+ "reasoning_effort": reasoning_effort,
"stop": stop,
"stream": stream,
"stream_options": stream_options,
@@ -557,6 +571,7 @@ async def create(
metadata: Optional[Dict[str, str]] | Omit = omit,
n: Optional[int] | Omit = omit,
presence_penalty: Optional[float] | Omit = omit,
+ reasoning_effort: Optional[Literal["none", "minimal", "low", "medium", "high", "xhigh"]] | Omit = omit,
stop: Union[Optional[str], SequenceNotStr[str], None] | Omit = omit,
stream: Optional[Literal[False]] | Omit = omit,
stream_options: Optional[completion_create_params.StreamOptions] | Omit = omit,
@@ -622,6 +637,9 @@ async def create(
whether they appear in the text so far, increasing the model's likelihood to
talk about new topics.
+ reasoning_effort: Constrains effort on reasoning for reasoning models. Reducing reasoning effort
+ can result in faster responses and fewer tokens used on reasoning in a response.
+
stop: Up to 4 sequences where the API will stop generating further tokens. The
returned text will not contain the stop sequence.
@@ -686,6 +704,7 @@ async def create(
metadata: Optional[Dict[str, str]] | Omit = omit,
n: Optional[int] | Omit = omit,
presence_penalty: Optional[float] | Omit = omit,
+ reasoning_effort: Optional[Literal["none", "minimal", "low", "medium", "high", "xhigh"]] | Omit = omit,
stop: Union[Optional[str], SequenceNotStr[str], None] | Omit = omit,
stream_options: Optional[completion_create_params.StreamOptions] | Omit = omit,
temperature: Optional[float] | Omit = omit,
@@ -753,6 +772,9 @@ async def create(
whether they appear in the text so far, increasing the model's likelihood to
talk about new topics.
+ reasoning_effort: Constrains effort on reasoning for reasoning models. Reducing reasoning effort
+ can result in faster responses and fewer tokens used on reasoning in a response.
+
stop: Up to 4 sequences where the API will stop generating further tokens. The
returned text will not contain the stop sequence.
@@ -814,6 +836,7 @@ async def create(
metadata: Optional[Dict[str, str]] | Omit = omit,
n: Optional[int] | Omit = omit,
presence_penalty: Optional[float] | Omit = omit,
+ reasoning_effort: Optional[Literal["none", "minimal", "low", "medium", "high", "xhigh"]] | Omit = omit,
stop: Union[Optional[str], SequenceNotStr[str], None] | Omit = omit,
stream_options: Optional[completion_create_params.StreamOptions] | Omit = omit,
temperature: Optional[float] | Omit = omit,
@@ -881,6 +904,9 @@ async def create(
whether they appear in the text so far, increasing the model's likelihood to
talk about new topics.
+ reasoning_effort: Constrains effort on reasoning for reasoning models. Reducing reasoning effort
+ can result in faster responses and fewer tokens used on reasoning in a response.
+
stop: Up to 4 sequences where the API will stop generating further tokens. The
returned text will not contain the stop sequence.
@@ -941,6 +967,7 @@ async def create(
metadata: Optional[Dict[str, str]] | Omit = omit,
n: Optional[int] | Omit = omit,
presence_penalty: Optional[float] | Omit = omit,
+ reasoning_effort: Optional[Literal["none", "minimal", "low", "medium", "high", "xhigh"]] | Omit = omit,
stop: Union[Optional[str], SequenceNotStr[str], None] | Omit = omit,
stream: Optional[Literal[False]] | Literal[True] | Omit = omit,
stream_options: Optional[completion_create_params.StreamOptions] | Omit = omit,
@@ -981,6 +1008,7 @@ async def create(
"metadata": metadata,
"n": n,
"presence_penalty": presence_penalty,
+ "reasoning_effort": reasoning_effort,
"stop": stop,
"stream": stream,
"stream_options": stream_options,
diff --git a/src/gradient/resources/chat/completions.py b/src/gradient/resources/chat/completions.py
index d2ae1071..2052db35 100644
--- a/src/gradient/resources/chat/completions.py
+++ b/src/gradient/resources/chat/completions.py
@@ -64,6 +64,7 @@ def create(
metadata: Optional[Dict[str, str]] | Omit = omit,
n: Optional[int] | Omit = omit,
presence_penalty: Optional[float] | Omit = omit,
+ reasoning_effort: Optional[Literal["none", "minimal", "low", "medium", "high", "xhigh"]] | Omit = omit,
stop: Union[Optional[str], SequenceNotStr[str], None] | Omit = omit,
stream: Optional[Literal[False]] | Omit = omit,
stream_options: Optional[completion_create_params.StreamOptions] | Omit = omit,
@@ -129,6 +130,9 @@ def create(
whether they appear in the text so far, increasing the model's likelihood to
talk about new topics.
+ reasoning_effort: Constrains effort on reasoning for reasoning models. Reducing reasoning effort
+ can result in faster responses and fewer tokens used on reasoning in a response.
+
stop: Up to 4 sequences where the API will stop generating further tokens. The
returned text will not contain the stop sequence.
@@ -194,6 +198,7 @@ def create(
metadata: Optional[Dict[str, str]] | Omit = omit,
n: Optional[int] | Omit = omit,
presence_penalty: Optional[float] | Omit = omit,
+ reasoning_effort: Optional[Literal["none", "minimal", "low", "medium", "high", "xhigh"]] | Omit = omit,
stop: Union[Optional[str], SequenceNotStr[str], None] | Omit = omit,
stream_options: Optional[completion_create_params.StreamOptions] | Omit = omit,
temperature: Optional[float] | Omit = omit,
@@ -261,6 +266,9 @@ def create(
whether they appear in the text so far, increasing the model's likelihood to
talk about new topics.
+ reasoning_effort: Constrains effort on reasoning for reasoning models. Reducing reasoning effort
+ can result in faster responses and fewer tokens used on reasoning in a response.
+
stop: Up to 4 sequences where the API will stop generating further tokens. The
returned text will not contain the stop sequence.
@@ -322,6 +330,7 @@ def create(
metadata: Optional[Dict[str, str]] | Omit = omit,
n: Optional[int] | Omit = omit,
presence_penalty: Optional[float] | Omit = omit,
+ reasoning_effort: Optional[Literal["none", "minimal", "low", "medium", "high", "xhigh"]] | Omit = omit,
stop: Union[Optional[str], SequenceNotStr[str], None] | Omit = omit,
stream_options: Optional[completion_create_params.StreamOptions] | Omit = omit,
temperature: Optional[float] | Omit = omit,
@@ -389,6 +398,9 @@ def create(
whether they appear in the text so far, increasing the model's likelihood to
talk about new topics.
+ reasoning_effort: Constrains effort on reasoning for reasoning models. Reducing reasoning effort
+ can result in faster responses and fewer tokens used on reasoning in a response.
+
stop: Up to 4 sequences where the API will stop generating further tokens. The
returned text will not contain the stop sequence.
@@ -449,6 +461,7 @@ def create(
metadata: Optional[Dict[str, str]] | Omit = omit,
n: Optional[int] | Omit = omit,
presence_penalty: Optional[float] | Omit = omit,
+ reasoning_effort: Optional[Literal["none", "minimal", "low", "medium", "high", "xhigh"]] | Omit = omit,
stop: Union[Optional[str], SequenceNotStr[str], None] | Omit = omit,
stream: Optional[Literal[False]] | Literal[True] | Omit = omit,
stream_options: Optional[completion_create_params.StreamOptions] | Omit = omit,
@@ -492,6 +505,7 @@ def create(
"metadata": metadata,
"n": n,
"presence_penalty": presence_penalty,
+ "reasoning_effort": reasoning_effort,
"stop": stop,
"stream": stream,
"stream_options": stream_options,
@@ -558,6 +572,7 @@ async def create(
metadata: Optional[Dict[str, str]] | Omit = omit,
n: Optional[int] | Omit = omit,
presence_penalty: Optional[float] | Omit = omit,
+ reasoning_effort: Optional[Literal["none", "minimal", "low", "medium", "high", "xhigh"]] | Omit = omit,
stop: Union[Optional[str], SequenceNotStr[str], None] | Omit = omit,
stream: Optional[Literal[False]] | Omit = omit,
stream_options: Optional[completion_create_params.StreamOptions] | Omit = omit,
@@ -623,6 +638,9 @@ async def create(
whether they appear in the text so far, increasing the model's likelihood to
talk about new topics.
+ reasoning_effort: Constrains effort on reasoning for reasoning models. Reducing reasoning effort
+ can result in faster responses and fewer tokens used on reasoning in a response.
+
stop: Up to 4 sequences where the API will stop generating further tokens. The
returned text will not contain the stop sequence.
@@ -688,6 +706,7 @@ async def create(
metadata: Optional[Dict[str, str]] | Omit = omit,
n: Optional[int] | Omit = omit,
presence_penalty: Optional[float] | Omit = omit,
+ reasoning_effort: Optional[Literal["none", "minimal", "low", "medium", "high", "xhigh"]] | Omit = omit,
stop: Union[Optional[str], SequenceNotStr[str], None] | Omit = omit,
stream_options: Optional[completion_create_params.StreamOptions] | Omit = omit,
temperature: Optional[float] | Omit = omit,
@@ -755,6 +774,9 @@ async def create(
whether they appear in the text so far, increasing the model's likelihood to
talk about new topics.
+ reasoning_effort: Constrains effort on reasoning for reasoning models. Reducing reasoning effort
+ can result in faster responses and fewer tokens used on reasoning in a response.
+
stop: Up to 4 sequences where the API will stop generating further tokens. The
returned text will not contain the stop sequence.
@@ -816,6 +838,7 @@ async def create(
metadata: Optional[Dict[str, str]] | Omit = omit,
n: Optional[int] | Omit = omit,
presence_penalty: Optional[float] | Omit = omit,
+ reasoning_effort: Optional[Literal["none", "minimal", "low", "medium", "high", "xhigh"]] | Omit = omit,
stop: Union[Optional[str], SequenceNotStr[str], None] | Omit = omit,
stream_options: Optional[completion_create_params.StreamOptions] | Omit = omit,
temperature: Optional[float] | Omit = omit,
@@ -883,6 +906,9 @@ async def create(
whether they appear in the text so far, increasing the model's likelihood to
talk about new topics.
+ reasoning_effort: Constrains effort on reasoning for reasoning models. Reducing reasoning effort
+ can result in faster responses and fewer tokens used on reasoning in a response.
+
stop: Up to 4 sequences where the API will stop generating further tokens. The
returned text will not contain the stop sequence.
@@ -943,6 +969,7 @@ async def create(
metadata: Optional[Dict[str, str]] | Omit = omit,
n: Optional[int] | Omit = omit,
presence_penalty: Optional[float] | Omit = omit,
+ reasoning_effort: Optional[Literal["none", "minimal", "low", "medium", "high", "xhigh"]] | Omit = omit,
stop: Union[Optional[str], SequenceNotStr[str], None] | Omit = omit,
stream: Optional[Literal[False]] | Literal[True] | Omit = omit,
stream_options: Optional[completion_create_params.StreamOptions] | Omit = omit,
@@ -986,6 +1013,7 @@ async def create(
"metadata": metadata,
"n": n,
"presence_penalty": presence_penalty,
+ "reasoning_effort": reasoning_effort,
"stop": stop,
"stream": stream,
"stream_options": stream_options,
diff --git a/src/gradient/resources/nfs/nfs.py b/src/gradient/resources/nfs/nfs.py
index a4e47d9c..ec50bdf8 100644
--- a/src/gradient/resources/nfs/nfs.py
+++ b/src/gradient/resources/nfs/nfs.py
@@ -69,6 +69,7 @@ def create(
region: str,
size_gib: int,
vpc_ids: SequenceNotStr[str],
+ performance_tier: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -88,6 +89,8 @@ def create(
vpc_ids: List of VPC IDs that should be able to access the share.
+ performance_tier: The performance tier of the share.
+
extra_headers: Send extra headers
extra_query: Add additional query parameters to the request
@@ -104,6 +107,7 @@ def create(
"region": region,
"size_gib": size_gib,
"vpc_ids": vpc_ids,
+ "performance_tier": performance_tier,
},
nf_create_params.NfCreateParams,
),
@@ -117,7 +121,7 @@ def retrieve(
self,
nfs_id: str,
*,
- region: str,
+ region: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -160,7 +164,7 @@ def retrieve(
def list(
self,
*,
- region: str,
+ region: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -201,7 +205,7 @@ def delete(
self,
nfs_id: str,
*,
- region: str,
+ region: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -248,9 +252,9 @@ def initiate_action(
self,
nfs_id: str,
*,
- region: str,
type: Literal["resize", "snapshot"],
params: nf_initiate_action_params.NfsActionResizeParams | Omit = omit,
+ region: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -263,18 +267,19 @@ def initiate_action(
request to `/v2/nfs/{nfs_id}/actions`. In the JSON body to the request, set the
`type` attribute to on of the supported action types:
- | Action | Details |
- | ----------------------- | -------------------------------------------------------------------------------- |
- | `resize` | Resizes an NFS share. Set the size_gib attribute to a desired value in GiB |
- | `snapshot` | Takes a snapshot of an NFS share |
- | `attach` | Attaches an NFS share to a VPC. Set the vpc_id attribute to the desired VPC ID |
- | `detach` | Detaches an NFS share from a VPC. Set the vpc_id attribute to the desired VPC ID |
+ | Action | Details |
+ | -------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- |
+ | `resize` | Resizes an NFS share. Set the size_gib attribute to a desired value in GiB |
+ | `snapshot` | Takes a snapshot of an NFS share |
+ | `attach` | Attaches an NFS share to a VPC. Set the vpc_id attribute to the desired VPC ID |
+ | `detach` | Detaches an NFS share from a VPC. Set the vpc_id attribute to the desired VPC ID |
+ | `switch_performance_tier` | Switches the performance tier of an NFS share. Set the performance_tier attribute to the desired tier (e.g., standard, high) |
Args:
- region: The DigitalOcean region slug (e.g. atl1, nyc2) where the NFS snapshot resides.
-
type: The type of action to initiate for the NFS share (such as resize or snapshot).
+ region: The DigitalOcean region slug (e.g. atl1, nyc2) where the NFS snapshot resides.
+
extra_headers: Send extra headers
extra_query: Add additional query parameters to the request
@@ -290,9 +295,9 @@ def initiate_action(
self,
nfs_id: str,
*,
- region: str,
type: Literal["resize", "snapshot"],
params: nf_initiate_action_params.NfsActionSnapshotParams | Omit = omit,
+ region: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -305,18 +310,19 @@ def initiate_action(
request to `/v2/nfs/{nfs_id}/actions`. In the JSON body to the request, set the
`type` attribute to on of the supported action types:
- | Action | Details |
- | ----------------------- | -------------------------------------------------------------------------------- |
- | `resize` | Resizes an NFS share. Set the size_gib attribute to a desired value in GiB |
- | `snapshot` | Takes a snapshot of an NFS share |
- | `attach` | Attaches an NFS share to a VPC. Set the vpc_id attribute to the desired VPC ID |
- | `detach` | Detaches an NFS share from a VPC. Set the vpc_id attribute to the desired VPC ID |
+ | Action | Details |
+ | -------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- |
+ | `resize` | Resizes an NFS share. Set the size_gib attribute to a desired value in GiB |
+ | `snapshot` | Takes a snapshot of an NFS share |
+ | `attach` | Attaches an NFS share to a VPC. Set the vpc_id attribute to the desired VPC ID |
+ | `detach` | Detaches an NFS share from a VPC. Set the vpc_id attribute to the desired VPC ID |
+ | `switch_performance_tier` | Switches the performance tier of an NFS share. Set the performance_tier attribute to the desired tier (e.g., standard, high) |
Args:
- region: The DigitalOcean region slug (e.g. atl1, nyc2) where the NFS snapshot resides.
-
type: The type of action to initiate for the NFS share (such as resize or snapshot).
+ region: The DigitalOcean region slug (e.g. atl1, nyc2) where the NFS snapshot resides.
+
extra_headers: Send extra headers
extra_query: Add additional query parameters to the request
@@ -332,9 +338,9 @@ def initiate_action(
self,
nfs_id: str,
*,
- region: str,
type: Literal["resize", "snapshot"],
params: nf_initiate_action_params.NfsActionAttachParams | Omit = omit,
+ region: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -347,18 +353,19 @@ def initiate_action(
request to `/v2/nfs/{nfs_id}/actions`. In the JSON body to the request, set the
`type` attribute to on of the supported action types:
- | Action | Details |
- | ----------------------- | -------------------------------------------------------------------------------- |
- | `resize` | Resizes an NFS share. Set the size_gib attribute to a desired value in GiB |
- | `snapshot` | Takes a snapshot of an NFS share |
- | `attach` | Attaches an NFS share to a VPC. Set the vpc_id attribute to the desired VPC ID |
- | `detach` | Detaches an NFS share from a VPC. Set the vpc_id attribute to the desired VPC ID |
+ | Action | Details |
+ | -------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- |
+ | `resize` | Resizes an NFS share. Set the size_gib attribute to a desired value in GiB |
+ | `snapshot` | Takes a snapshot of an NFS share |
+ | `attach` | Attaches an NFS share to a VPC. Set the vpc_id attribute to the desired VPC ID |
+ | `detach` | Detaches an NFS share from a VPC. Set the vpc_id attribute to the desired VPC ID |
+ | `switch_performance_tier` | Switches the performance tier of an NFS share. Set the performance_tier attribute to the desired tier (e.g., standard, high) |
Args:
- region: The DigitalOcean region slug (e.g. atl1, nyc2) where the NFS snapshot resides.
-
type: The type of action to initiate for the NFS share (such as resize or snapshot).
+ region: The DigitalOcean region slug (e.g. atl1, nyc2) where the NFS snapshot resides.
+
extra_headers: Send extra headers
extra_query: Add additional query parameters to the request
@@ -374,9 +381,9 @@ def initiate_action(
self,
nfs_id: str,
*,
- region: str,
type: Literal["resize", "snapshot"],
params: nf_initiate_action_params.NfsActionDetachParams | Omit = omit,
+ region: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -389,18 +396,62 @@ def initiate_action(
request to `/v2/nfs/{nfs_id}/actions`. In the JSON body to the request, set the
`type` attribute to on of the supported action types:
- | Action | Details |
- | ----------------------- | -------------------------------------------------------------------------------- |
- | `resize` | Resizes an NFS share. Set the size_gib attribute to a desired value in GiB |
- | `snapshot` | Takes a snapshot of an NFS share |
- | `attach` | Attaches an NFS share to a VPC. Set the vpc_id attribute to the desired VPC ID |
- | `detach` | Detaches an NFS share from a VPC. Set the vpc_id attribute to the desired VPC ID |
+ | Action | Details |
+ | -------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- |
+ | `resize` | Resizes an NFS share. Set the size_gib attribute to a desired value in GiB |
+ | `snapshot` | Takes a snapshot of an NFS share |
+ | `attach` | Attaches an NFS share to a VPC. Set the vpc_id attribute to the desired VPC ID |
+ | `detach` | Detaches an NFS share from a VPC. Set the vpc_id attribute to the desired VPC ID |
+ | `switch_performance_tier` | Switches the performance tier of an NFS share. Set the performance_tier attribute to the desired tier (e.g., standard, high) |
Args:
+ type: The type of action to initiate for the NFS share (such as resize or snapshot).
+
region: The DigitalOcean region slug (e.g. atl1, nyc2) where the NFS snapshot resides.
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ ...
+
+ @overload
+ def initiate_action(
+ self,
+ nfs_id: str,
+ *,
+ type: Literal["resize", "snapshot"],
+ params: nf_initiate_action_params.NfsActionSwitchPerformanceTierParams | Omit = omit,
+ region: str | Omit = omit,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> NfInitiateActionResponse:
+ """
+ To execute an action (such as resize) on a specified NFS share, send a POST
+ request to `/v2/nfs/{nfs_id}/actions`. In the JSON body to the request, set the
+ `type` attribute to one of the supported action types:
+
+ | Action | Details |
+ | -------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- |
+ | `resize` | Resizes an NFS share. Set the size_gib attribute to a desired value in GiB |
+ | `snapshot` | Takes a snapshot of an NFS share |
+ | `attach` | Attaches an NFS share to a VPC. Set the vpc_id attribute to the desired VPC ID |
+ | `detach` | Detaches an NFS share from a VPC. Set the vpc_id attribute to the desired VPC ID |
+ | `switch_performance_tier` | Switches the performance tier of an NFS share. Set the performance_tier attribute to the desired tier (e.g., standard, high) |
+
+ Args:
type: The type of action to initiate for the NFS share (such as resize or snapshot).
+ region: The DigitalOcean region slug (e.g. atl1, nyc2) where the NFS snapshot resides.
+
extra_headers: Send extra headers
extra_query: Add additional query parameters to the request
@@ -411,18 +462,19 @@ def initiate_action(
"""
...
- @required_args(["region", "type"])
+ @required_args(["type"])
def initiate_action(
self,
nfs_id: str,
*,
- region: str,
type: Literal["resize", "snapshot"],
params: nf_initiate_action_params.NfsActionResizeParams
| nf_initiate_action_params.NfsActionSnapshotParams
| nf_initiate_action_params.NfsActionAttachParams
| nf_initiate_action_params.NfsActionDetachParams
+ | nf_initiate_action_params.NfsActionSwitchPerformanceTierParams
| Omit = omit,
+ region: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -438,9 +490,9 @@ def initiate_action(
else f"https://api.digitalocean.com/v2/nfs/{nfs_id}/actions",
body=maybe_transform(
{
- "region": region,
"type": type,
"params": params,
+ "region": region,
},
nf_initiate_action_params.NfInitiateActionParams,
),
@@ -486,6 +538,7 @@ async def create(
region: str,
size_gib: int,
vpc_ids: SequenceNotStr[str],
+ performance_tier: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -505,6 +558,8 @@ async def create(
vpc_ids: List of VPC IDs that should be able to access the share.
+ performance_tier: The performance tier of the share.
+
extra_headers: Send extra headers
extra_query: Add additional query parameters to the request
@@ -521,6 +576,7 @@ async def create(
"region": region,
"size_gib": size_gib,
"vpc_ids": vpc_ids,
+ "performance_tier": performance_tier,
},
nf_create_params.NfCreateParams,
),
@@ -534,7 +590,7 @@ async def retrieve(
self,
nfs_id: str,
*,
- region: str,
+ region: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -577,7 +633,7 @@ async def retrieve(
async def list(
self,
*,
- region: str,
+ region: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -618,7 +674,7 @@ async def delete(
self,
nfs_id: str,
*,
- region: str,
+ region: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -665,9 +721,9 @@ async def initiate_action(
self,
nfs_id: str,
*,
- region: str,
type: Literal["resize", "snapshot"],
params: nf_initiate_action_params.NfsActionResizeParams | Omit = omit,
+ region: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -680,18 +736,19 @@ async def initiate_action(
request to `/v2/nfs/{nfs_id}/actions`. In the JSON body to the request, set the
`type` attribute to on of the supported action types:
- | Action | Details |
- | ----------------------- | -------------------------------------------------------------------------------- |
- | `resize` | Resizes an NFS share. Set the size_gib attribute to a desired value in GiB |
- | `snapshot` | Takes a snapshot of an NFS share |
- | `attach` | Attaches an NFS share to a VPC. Set the vpc_id attribute to the desired VPC ID |
- | `detach` | Detaches an NFS share from a VPC. Set the vpc_id attribute to the desired VPC ID |
+ | Action | Details |
+ | -------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- |
+ | `resize` | Resizes an NFS share. Set the size_gib attribute to a desired value in GiB |
+ | `snapshot` | Takes a snapshot of an NFS share |
+ | `attach` | Attaches an NFS share to a VPC. Set the vpc_id attribute to the desired VPC ID |
+ | `detach` | Detaches an NFS share from a VPC. Set the vpc_id attribute to the desired VPC ID |
+ | `switch_performance_tier` | Switches the performance tier of an NFS share. Set the performance_tier attribute to the desired tier (e.g., standard, high) |
Args:
- region: The DigitalOcean region slug (e.g. atl1, nyc2) where the NFS snapshot resides.
-
type: The type of action to initiate for the NFS share (such as resize or snapshot).
+ region: The DigitalOcean region slug (e.g. atl1, nyc2) where the NFS snapshot resides.
+
extra_headers: Send extra headers
extra_query: Add additional query parameters to the request
@@ -707,9 +764,9 @@ async def initiate_action(
self,
nfs_id: str,
*,
- region: str,
type: Literal["resize", "snapshot"],
params: nf_initiate_action_params.NfsActionSnapshotParams | Omit = omit,
+ region: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -722,18 +779,19 @@ async def initiate_action(
request to `/v2/nfs/{nfs_id}/actions`. In the JSON body to the request, set the
`type` attribute to on of the supported action types:
- | Action | Details |
- | ----------------------- | -------------------------------------------------------------------------------- |
- | `resize` | Resizes an NFS share. Set the size_gib attribute to a desired value in GiB |
- | `snapshot` | Takes a snapshot of an NFS share |
- | `attach` | Attaches an NFS share to a VPC. Set the vpc_id attribute to the desired VPC ID |
- | `detach` | Detaches an NFS share from a VPC. Set the vpc_id attribute to the desired VPC ID |
+ | Action | Details |
+ | -------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- |
+ | `resize` | Resizes an NFS share. Set the size_gib attribute to a desired value in GiB |
+ | `snapshot` | Takes a snapshot of an NFS share |
+ | `attach` | Attaches an NFS share to a VPC. Set the vpc_id attribute to the desired VPC ID |
+ | `detach` | Detaches an NFS share from a VPC. Set the vpc_id attribute to the desired VPC ID |
+ | `switch_performance_tier` | Switches the performance tier of an NFS share. Set the performance_tier attribute to the desired tier (e.g., standard, high) |
Args:
- region: The DigitalOcean region slug (e.g. atl1, nyc2) where the NFS snapshot resides.
-
type: The type of action to initiate for the NFS share (such as resize or snapshot).
+ region: The DigitalOcean region slug (e.g. atl1, nyc2) where the NFS snapshot resides.
+
extra_headers: Send extra headers
extra_query: Add additional query parameters to the request
@@ -749,9 +807,9 @@ async def initiate_action(
self,
nfs_id: str,
*,
- region: str,
type: Literal["resize", "snapshot"],
params: nf_initiate_action_params.NfsActionAttachParams | Omit = omit,
+ region: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -764,18 +822,19 @@ async def initiate_action(
request to `/v2/nfs/{nfs_id}/actions`. In the JSON body to the request, set the
`type` attribute to on of the supported action types:
- | Action | Details |
- | ----------------------- | -------------------------------------------------------------------------------- |
- | `resize` | Resizes an NFS share. Set the size_gib attribute to a desired value in GiB |
- | `snapshot` | Takes a snapshot of an NFS share |
- | `attach` | Attaches an NFS share to a VPC. Set the vpc_id attribute to the desired VPC ID |
- | `detach` | Detaches an NFS share from a VPC. Set the vpc_id attribute to the desired VPC ID |
+ | Action | Details |
+ | -------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- |
+ | `resize` | Resizes an NFS share. Set the size_gib attribute to a desired value in GiB |
+ | `snapshot` | Takes a snapshot of an NFS share |
+ | `attach` | Attaches an NFS share to a VPC. Set the vpc_id attribute to the desired VPC ID |
+ | `detach` | Detaches an NFS share from a VPC. Set the vpc_id attribute to the desired VPC ID |
+ | `switch_performance_tier` | Switches the performance tier of an NFS share. Set the performance_tier attribute to the desired tier (e.g., standard, high) |
Args:
- region: The DigitalOcean region slug (e.g. atl1, nyc2) where the NFS snapshot resides.
-
type: The type of action to initiate for the NFS share (such as resize or snapshot).
+ region: The DigitalOcean region slug (e.g. atl1, nyc2) where the NFS snapshot resides.
+
extra_headers: Send extra headers
extra_query: Add additional query parameters to the request
@@ -791,9 +850,9 @@ async def initiate_action(
self,
nfs_id: str,
*,
- region: str,
type: Literal["resize", "snapshot"],
params: nf_initiate_action_params.NfsActionDetachParams | Omit = omit,
+ region: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -806,18 +865,62 @@ async def initiate_action(
request to `/v2/nfs/{nfs_id}/actions`. In the JSON body to the request, set the
`type` attribute to on of the supported action types:
- | Action | Details |
- | ----------------------- | -------------------------------------------------------------------------------- |
- | `resize` | Resizes an NFS share. Set the size_gib attribute to a desired value in GiB |
- | `snapshot` | Takes a snapshot of an NFS share |
- | `attach` | Attaches an NFS share to a VPC. Set the vpc_id attribute to the desired VPC ID |
- | `detach` | Detaches an NFS share from a VPC. Set the vpc_id attribute to the desired VPC ID |
+ | Action | Details |
+ | -------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- |
+ | `resize` | Resizes an NFS share. Set the size_gib attribute to a desired value in GiB |
+ | `snapshot` | Takes a snapshot of an NFS share |
+ | `attach` | Attaches an NFS share to a VPC. Set the vpc_id attribute to the desired VPC ID |
+ | `detach` | Detaches an NFS share from a VPC. Set the vpc_id attribute to the desired VPC ID |
+ | `switch_performance_tier` | Switches the performance tier of an NFS share. Set the performance_tier attribute to the desired tier (e.g., standard, high) |
Args:
+ type: The type of action to initiate for the NFS share (such as resize or snapshot).
+
region: The DigitalOcean region slug (e.g. atl1, nyc2) where the NFS snapshot resides.
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ ...
+
+ @overload
+ async def initiate_action(
+ self,
+ nfs_id: str,
+ *,
+ type: Literal["resize", "snapshot"],
+ params: nf_initiate_action_params.NfsActionSwitchPerformanceTierParams | Omit = omit,
+ region: str | Omit = omit,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> NfInitiateActionResponse:
+ """
+ To execute an action (such as resize) on a specified NFS share, send a POST
+ request to `/v2/nfs/{nfs_id}/actions`. In the JSON body to the request, set the
+ `type` attribute to one of the supported action types:
+
+ | Action | Details |
+ | -------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- |
+ | `resize` | Resizes an NFS share. Set the size_gib attribute to a desired value in GiB |
+ | `snapshot` | Takes a snapshot of an NFS share |
+ | `attach` | Attaches an NFS share to a VPC. Set the vpc_id attribute to the desired VPC ID |
+ | `detach` | Detaches an NFS share from a VPC. Set the vpc_id attribute to the desired VPC ID |
+ | `switch_performance_tier` | Switches the performance tier of an NFS share. Set the performance_tier attribute to the desired tier (e.g., standard, high) |
+
+ Args:
type: The type of action to initiate for the NFS share (such as resize or snapshot).
+ region: The DigitalOcean region slug (e.g. atl1, nyc2) where the NFS snapshot resides.
+
extra_headers: Send extra headers
extra_query: Add additional query parameters to the request
@@ -828,18 +931,19 @@ async def initiate_action(
"""
...
- @required_args(["region", "type"])
+ @required_args(["type"])
async def initiate_action(
self,
nfs_id: str,
*,
- region: str,
type: Literal["resize", "snapshot"],
params: nf_initiate_action_params.NfsActionResizeParams
| nf_initiate_action_params.NfsActionSnapshotParams
| nf_initiate_action_params.NfsActionAttachParams
| nf_initiate_action_params.NfsActionDetachParams
+ | nf_initiate_action_params.NfsActionSwitchPerformanceTierParams
| Omit = omit,
+ region: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -855,9 +959,9 @@ async def initiate_action(
else f"https://api.digitalocean.com/v2/nfs/{nfs_id}/actions",
body=await async_maybe_transform(
{
- "region": region,
"type": type,
"params": params,
+ "region": region,
},
nf_initiate_action_params.NfInitiateActionParams,
),
diff --git a/src/gradient/resources/nfs/snapshots.py b/src/gradient/resources/nfs/snapshots.py
index 4e694ce6..209e7da9 100644
--- a/src/gradient/resources/nfs/snapshots.py
+++ b/src/gradient/resources/nfs/snapshots.py
@@ -51,7 +51,7 @@ def retrieve(
self,
nfs_snapshot_id: str,
*,
- region: str,
+ region: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -95,7 +95,7 @@ def retrieve(
def list(
self,
*,
- region: str,
+ region: str | Omit = omit,
share_id: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
@@ -152,7 +152,7 @@ def delete(
self,
nfs_snapshot_id: str,
*,
- region: str,
+ region: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -224,7 +224,7 @@ async def retrieve(
self,
nfs_snapshot_id: str,
*,
- region: str,
+ region: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -268,7 +268,7 @@ async def retrieve(
async def list(
self,
*,
- region: str,
+ region: str | Omit = omit,
share_id: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
@@ -325,7 +325,7 @@ async def delete(
self,
nfs_snapshot_id: str,
*,
- region: str,
+ region: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
diff --git a/src/gradient/types/agents/chat/completion_create_params.py b/src/gradient/types/agents/chat/completion_create_params.py
index 797c6ea3..f01fa283 100644
--- a/src/gradient/types/agents/chat/completion_create_params.py
+++ b/src/gradient/types/agents/chat/completion_create_params.py
@@ -28,9 +28,17 @@
"MessageChatCompletionRequestUserMessageContent",
"MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartText",
"MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartTextCacheControl",
+ "MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartImageURL",
+ "MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartImageURLImageURL",
+ "MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartVideoURL",
+ "MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartVideoURLVideoURL",
"MessageChatCompletionRequestUserMessageContentArrayOfContentPart",
"MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartText",
"MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartTextCacheControl",
+ "MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartImageURL",
+ "MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartImageURLImageURL",
+ "MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartVideoURL",
+ "MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartVideoURLVideoURL",
"MessageChatCompletionRequestAssistantMessage",
"MessageChatCompletionRequestAssistantMessageContent",
"MessageChatCompletionRequestAssistantMessageContentChatCompletionRequestContentPartText",
@@ -128,6 +136,13 @@ class CompletionCreateParamsBase(TypedDict, total=False):
far, increasing the model's likelihood to talk about new topics.
"""
+ reasoning_effort: Optional[Literal["none", "minimal", "low", "medium", "high", "xhigh"]]
+ """Constrains effort on reasoning for reasoning models.
+
+ Reducing reasoning effort can result in faster responses and fewer tokens used
+ on reasoning in a response.
+ """
+
stop: Union[Optional[str], SequenceNotStr[str], None]
"""Up to 4 sequences where the API will stop generating further tokens.
@@ -364,6 +379,47 @@ class MessageChatCompletionRequestUserMessageContentChatCompletionRequestContent
"""Cache control settings for the content part."""
+class MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartImageURLImageURL(
+ TypedDict, total=False
+):
+ """Image URL settings."""
+
+ url: Required[str]
+ """A URL or data URL containing image content."""
+
+ detail: Literal["auto", "low", "high"]
+ """Optional detail level for image understanding."""
+
+
+class MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartImageURL(TypedDict, total=False):
+ """Content part with type and image URL."""
+
+ image_url: Required[MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartImageURLImageURL]
+ """Image URL settings."""
+
+ type: Required[Literal["image_url"]]
+ """The type of content part"""
+
+
+class MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartVideoURLVideoURL(
+ TypedDict, total=False
+):
+ """Video URL settings."""
+
+ url: Required[str]
+ """A URL or data URL containing video content."""
+
+
+class MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartVideoURL(TypedDict, total=False):
+ """Content part with type and video URL."""
+
+ type: Required[Literal["video_url"]]
+ """The type of content part"""
+
+ video_url: Required[MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartVideoURLVideoURL]
+ """Video URL settings."""
+
+
class MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartTextCacheControl(
TypedDict, total=False
):
@@ -393,13 +449,67 @@ class MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatComple
"""Cache control settings for the content part."""
+class MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartImageURLImageURL(
+ TypedDict, total=False
+):
+ """Image URL settings."""
+
+ url: Required[str]
+ """A URL or data URL containing image content."""
+
+ detail: Literal["auto", "low", "high"]
+ """Optional detail level for image understanding."""
+
+
+class MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartImageURL(
+ TypedDict, total=False
+):
+ """Content part with type and image URL."""
+
+ image_url: Required[
+ MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartImageURLImageURL
+ ]
+ """Image URL settings."""
+
+ type: Required[Literal["image_url"]]
+ """The type of content part"""
+
+
+class MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartVideoURLVideoURL(
+ TypedDict, total=False
+):
+ """Video URL settings."""
+
+ url: Required[str]
+ """A URL or data URL containing video content."""
+
+
+class MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartVideoURL(
+ TypedDict, total=False
+):
+ """Content part with type and video URL."""
+
+ type: Required[Literal["video_url"]]
+ """The type of content part"""
+
+ video_url: Required[
+ MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartVideoURLVideoURL
+ ]
+ """Video URL settings."""
+
+
MessageChatCompletionRequestUserMessageContentArrayOfContentPart: TypeAlias = Union[
- str, MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartText
+ str,
+ MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartText,
+ MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartImageURL,
+ MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartVideoURL,
]
MessageChatCompletionRequestUserMessageContent: TypeAlias = Union[
str,
MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartText,
+ MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartImageURL,
+ MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartVideoURL,
SequenceNotStr[MessageChatCompletionRequestUserMessageContentArrayOfContentPart],
]
diff --git a/src/gradient/types/chat/completion_create_params.py b/src/gradient/types/chat/completion_create_params.py
index bf5bd49d..925eea7e 100644
--- a/src/gradient/types/chat/completion_create_params.py
+++ b/src/gradient/types/chat/completion_create_params.py
@@ -28,9 +28,17 @@
"MessageChatCompletionRequestUserMessageContent",
"MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartText",
"MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartTextCacheControl",
+ "MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartImageURL",
+ "MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartImageURLImageURL",
+ "MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartVideoURL",
+ "MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartVideoURLVideoURL",
"MessageChatCompletionRequestUserMessageContentArrayOfContentPart",
"MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartText",
"MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartTextCacheControl",
+ "MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartImageURL",
+ "MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartImageURLImageURL",
+ "MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartVideoURL",
+ "MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartVideoURLVideoURL",
"MessageChatCompletionRequestAssistantMessage",
"MessageChatCompletionRequestAssistantMessageContent",
"MessageChatCompletionRequestAssistantMessageContentChatCompletionRequestContentPartText",
@@ -128,6 +136,13 @@ class CompletionCreateParamsBase(TypedDict, total=False):
far, increasing the model's likelihood to talk about new topics.
"""
+ reasoning_effort: Optional[Literal["none", "minimal", "low", "medium", "high", "xhigh"]]
+ """Constrains effort on reasoning for reasoning models.
+
+ Reducing reasoning effort can result in faster responses and fewer tokens used
+ on reasoning in a response.
+ """
+
stop: Union[Optional[str], SequenceNotStr[str], None]
"""Up to 4 sequences where the API will stop generating further tokens.
@@ -364,6 +379,47 @@ class MessageChatCompletionRequestUserMessageContentChatCompletionRequestContent
"""Cache control settings for the content part."""
+class MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartImageURLImageURL(
+ TypedDict, total=False
+):
+ """Image URL settings."""
+
+ url: Required[str]
+ """A URL or data URL containing image content."""
+
+ detail: Literal["auto", "low", "high"]
+ """Optional detail level for image understanding."""
+
+
+class MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartImageURL(TypedDict, total=False):
+ """Content part with type and image URL."""
+
+ image_url: Required[MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartImageURLImageURL]
+ """Image URL settings."""
+
+ type: Required[Literal["image_url"]]
+ """The type of content part"""
+
+
+class MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartVideoURLVideoURL(
+ TypedDict, total=False
+):
+ """Video URL settings."""
+
+ url: Required[str]
+ """A URL or data URL containing video content."""
+
+
+class MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartVideoURL(TypedDict, total=False):
+ """Content part with type and video URL."""
+
+ type: Required[Literal["video_url"]]
+ """The type of content part"""
+
+ video_url: Required[MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartVideoURLVideoURL]
+ """Video URL settings."""
+
+
class MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartTextCacheControl(
TypedDict, total=False
):
@@ -393,13 +449,67 @@ class MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatComple
"""Cache control settings for the content part."""
+class MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartImageURLImageURL(
+ TypedDict, total=False
+):
+ """Image URL settings."""
+
+ url: Required[str]
+ """A URL or data URL containing image content."""
+
+ detail: Literal["auto", "low", "high"]
+ """Optional detail level for image understanding."""
+
+
+class MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartImageURL(
+ TypedDict, total=False
+):
+ """Content part with type and image URL."""
+
+ image_url: Required[
+ MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartImageURLImageURL
+ ]
+ """Image URL settings."""
+
+ type: Required[Literal["image_url"]]
+ """The type of content part"""
+
+
+class MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartVideoURLVideoURL(
+ TypedDict, total=False
+):
+ """Video URL settings."""
+
+ url: Required[str]
+ """A URL or data URL containing video content."""
+
+
+class MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartVideoURL(
+ TypedDict, total=False
+):
+ """Content part with type and video URL."""
+
+ type: Required[Literal["video_url"]]
+ """The type of content part"""
+
+ video_url: Required[
+ MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartVideoURLVideoURL
+ ]
+ """Video URL settings."""
+
+
MessageChatCompletionRequestUserMessageContentArrayOfContentPart: TypeAlias = Union[
- str, MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartText
+ str,
+ MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartText,
+ MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartImageURL,
+ MessageChatCompletionRequestUserMessageContentArrayOfContentPartChatCompletionRequestContentPartVideoURL,
]
MessageChatCompletionRequestUserMessageContent: TypeAlias = Union[
str,
MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartText,
+ MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartImageURL,
+ MessageChatCompletionRequestUserMessageContentChatCompletionRequestContentPartVideoURL,
SequenceNotStr[MessageChatCompletionRequestUserMessageContentArrayOfContentPart],
]
diff --git a/src/gradient/types/nf_create_params.py b/src/gradient/types/nf_create_params.py
index 327beb2e..fab12a16 100644
--- a/src/gradient/types/nf_create_params.py
+++ b/src/gradient/types/nf_create_params.py
@@ -21,3 +21,6 @@ class NfCreateParams(TypedDict, total=False):
vpc_ids: Required[SequenceNotStr[str]]
"""List of VPC IDs that should be able to access the share."""
+
+ performance_tier: str
+ """The performance tier of the share."""
diff --git a/src/gradient/types/nf_delete_params.py b/src/gradient/types/nf_delete_params.py
index a11474e5..c507a370 100644
--- a/src/gradient/types/nf_delete_params.py
+++ b/src/gradient/types/nf_delete_params.py
@@ -2,11 +2,11 @@
from __future__ import annotations
-from typing_extensions import Required, TypedDict
+from typing_extensions import TypedDict
__all__ = ["NfDeleteParams"]
class NfDeleteParams(TypedDict, total=False):
- region: Required[str]
+ region: str
"""The DigitalOcean region slug (e.g., nyc2, atl1) where the NFS share resides."""
diff --git a/src/gradient/types/nf_initiate_action_params.py b/src/gradient/types/nf_initiate_action_params.py
index 4b297210..1080d816 100644
--- a/src/gradient/types/nf_initiate_action_params.py
+++ b/src/gradient/types/nf_initiate_action_params.py
@@ -15,18 +15,20 @@
"NfsActionAttachParams",
"NfsActionDetach",
"NfsActionDetachParams",
+ "NfsActionSwitchPerformanceTier",
+ "NfsActionSwitchPerformanceTierParams",
]
class NfsActionResize(TypedDict, total=False):
- region: Required[str]
- """The DigitalOcean region slug (e.g. atl1, nyc2) where the NFS snapshot resides."""
-
type: Required[Literal["resize", "snapshot"]]
"""The type of action to initiate for the NFS share (such as resize or snapshot)."""
params: NfsActionResizeParams
+ region: str
+ """The DigitalOcean region slug (e.g. atl1, nyc2) where the NFS snapshot resides."""
+
class NfsActionResizeParams(TypedDict, total=False):
size_gib: Required[int]
@@ -34,14 +36,14 @@ class NfsActionResizeParams(TypedDict, total=False):
class NfsActionSnapshot(TypedDict, total=False):
- region: Required[str]
- """The DigitalOcean region slug (e.g. atl1, nyc2) where the NFS snapshot resides."""
-
type: Required[Literal["resize", "snapshot"]]
"""The type of action to initiate for the NFS share (such as resize or snapshot)."""
params: NfsActionSnapshotParams
+ region: str
+ """The DigitalOcean region slug (e.g. atl1, nyc2) where the NFS snapshot resides."""
+
class NfsActionSnapshotParams(TypedDict, total=False):
name: Required[str]
@@ -49,14 +51,14 @@ class NfsActionSnapshotParams(TypedDict, total=False):
class NfsActionAttach(TypedDict, total=False):
- region: Required[str]
- """The DigitalOcean region slug (e.g. atl1, nyc2) where the NFS snapshot resides."""
-
type: Required[Literal["resize", "snapshot"]]
"""The type of action to initiate for the NFS share (such as resize or snapshot)."""
params: NfsActionAttachParams
+ region: str
+ """The DigitalOcean region slug (e.g. atl1, nyc2) where the NFS snapshot resides."""
+
class NfsActionAttachParams(TypedDict, total=False):
vpc_id: Required[str]
@@ -64,18 +66,38 @@ class NfsActionAttachParams(TypedDict, total=False):
class NfsActionDetach(TypedDict, total=False):
- region: Required[str]
- """The DigitalOcean region slug (e.g. atl1, nyc2) where the NFS snapshot resides."""
-
type: Required[Literal["resize", "snapshot"]]
"""The type of action to initiate for the NFS share (such as resize or snapshot)."""
params: NfsActionDetachParams
+ region: str
+ """The DigitalOcean region slug (e.g. atl1, nyc2) where the NFS snapshot resides."""
+
class NfsActionDetachParams(TypedDict, total=False):
vpc_id: Required[str]
"""The ID of the VPC from which the NFS share will be detached"""
-NfInitiateActionParams: TypeAlias = Union[NfsActionResize, NfsActionSnapshot, NfsActionAttach, NfsActionDetach]
+class NfsActionSwitchPerformanceTier(TypedDict, total=False):
+ type: Required[Literal["resize", "snapshot"]]
+ """The type of action to initiate for the NFS share (such as resize or snapshot)."""
+
+ params: NfsActionSwitchPerformanceTierParams
+
+ region: str
+ """The DigitalOcean region slug (e.g. atl1, nyc2) where the NFS snapshot resides."""
+
+
+class NfsActionSwitchPerformanceTierParams(TypedDict, total=False):
+ performance_tier: Required[str]
+ """
+ The performance tier to which the NFS share will be switched (e.g., standard,
+ high).
+ """
+
+
+NfInitiateActionParams: TypeAlias = Union[
+ NfsActionResize, NfsActionSnapshot, NfsActionAttach, NfsActionDetach, NfsActionSwitchPerformanceTier
+]
diff --git a/src/gradient/types/nf_list_params.py b/src/gradient/types/nf_list_params.py
index bc53c284..52b4d96d 100644
--- a/src/gradient/types/nf_list_params.py
+++ b/src/gradient/types/nf_list_params.py
@@ -2,11 +2,11 @@
from __future__ import annotations
-from typing_extensions import Required, TypedDict
+from typing_extensions import TypedDict
__all__ = ["NfListParams"]
class NfListParams(TypedDict, total=False):
- region: Required[str]
+ region: str
"""The DigitalOcean region slug (e.g., nyc2, atl1) where the NFS share resides."""
diff --git a/src/gradient/types/nf_retrieve_params.py b/src/gradient/types/nf_retrieve_params.py
index 292053d9..6d7ba724 100644
--- a/src/gradient/types/nf_retrieve_params.py
+++ b/src/gradient/types/nf_retrieve_params.py
@@ -2,11 +2,11 @@
from __future__ import annotations
-from typing_extensions import Required, TypedDict
+from typing_extensions import TypedDict
__all__ = ["NfRetrieveParams"]
class NfRetrieveParams(TypedDict, total=False):
- region: Required[str]
+ region: str
"""The DigitalOcean region slug (e.g., nyc2, atl1) where the NFS share resides."""
diff --git a/src/gradient/types/nfs/snapshot_delete_params.py b/src/gradient/types/nfs/snapshot_delete_params.py
index 1b26149e..844da45e 100644
--- a/src/gradient/types/nfs/snapshot_delete_params.py
+++ b/src/gradient/types/nfs/snapshot_delete_params.py
@@ -2,11 +2,11 @@
from __future__ import annotations
-from typing_extensions import Required, TypedDict
+from typing_extensions import TypedDict
__all__ = ["SnapshotDeleteParams"]
class SnapshotDeleteParams(TypedDict, total=False):
- region: Required[str]
+ region: str
"""The DigitalOcean region slug (e.g., nyc2, atl1) where the NFS share resides."""
diff --git a/src/gradient/types/nfs/snapshot_list_params.py b/src/gradient/types/nfs/snapshot_list_params.py
index 8c4c6946..64f9543b 100644
--- a/src/gradient/types/nfs/snapshot_list_params.py
+++ b/src/gradient/types/nfs/snapshot_list_params.py
@@ -2,13 +2,13 @@
from __future__ import annotations
-from typing_extensions import Required, TypedDict
+from typing_extensions import TypedDict
__all__ = ["SnapshotListParams"]
class SnapshotListParams(TypedDict, total=False):
- region: Required[str]
+ region: str
"""The DigitalOcean region slug (e.g., nyc2, atl1) where the NFS share resides."""
share_id: str
diff --git a/src/gradient/types/nfs/snapshot_retrieve_params.py b/src/gradient/types/nfs/snapshot_retrieve_params.py
index d1e1f8e8..2c3fcda5 100644
--- a/src/gradient/types/nfs/snapshot_retrieve_params.py
+++ b/src/gradient/types/nfs/snapshot_retrieve_params.py
@@ -2,11 +2,11 @@
from __future__ import annotations
-from typing_extensions import Required, TypedDict
+from typing_extensions import TypedDict
__all__ = ["SnapshotRetrieveParams"]
class SnapshotRetrieveParams(TypedDict, total=False):
- region: Required[str]
+ region: str
"""The DigitalOcean region slug (e.g., nyc2, atl1) where the NFS share resides."""
diff --git a/tests/api_resources/agents/chat/test_completions.py b/tests/api_resources/agents/chat/test_completions.py
index 30d797ff..7574766e 100644
--- a/tests/api_resources/agents/chat/test_completions.py
+++ b/tests/api_resources/agents/chat/test_completions.py
@@ -50,6 +50,7 @@ def test_method_create_with_all_params_overload_1(self, client: Gradient) -> Non
metadata={"foo": "string"},
n=1,
presence_penalty=-2,
+ reasoning_effort="none",
stop="\n",
stream=False,
stream_options={"include_usage": True},
@@ -144,6 +145,7 @@ def test_method_create_with_all_params_overload_2(self, client: Gradient) -> Non
metadata={"foo": "string"},
n=1,
presence_penalty=-2,
+ reasoning_effort="none",
stop="\n",
stream_options={"include_usage": True},
temperature=1,
@@ -245,6 +247,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn
metadata={"foo": "string"},
n=1,
presence_penalty=-2,
+ reasoning_effort="none",
stop="\n",
stream=False,
stream_options={"include_usage": True},
@@ -339,6 +342,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
metadata={"foo": "string"},
n=1,
presence_penalty=-2,
+ reasoning_effort="none",
stop="\n",
stream_options={"include_usage": True},
temperature=1,
diff --git a/tests/api_resources/chat/test_completions.py b/tests/api_resources/chat/test_completions.py
index 81125c20..54e98640 100644
--- a/tests/api_resources/chat/test_completions.py
+++ b/tests/api_resources/chat/test_completions.py
@@ -50,6 +50,7 @@ def test_method_create_with_all_params_overload_1(self, client: Gradient) -> Non
metadata={"foo": "string"},
n=1,
presence_penalty=-2,
+ reasoning_effort="none",
stop="\n",
stream=False,
stream_options={"include_usage": True},
@@ -144,6 +145,7 @@ def test_method_create_with_all_params_overload_2(self, client: Gradient) -> Non
metadata={"foo": "string"},
n=1,
presence_penalty=-2,
+ reasoning_effort="none",
stop="\n",
stream_options={"include_usage": True},
temperature=1,
@@ -242,6 +244,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn
metadata={"foo": "string"},
n=1,
presence_penalty=-2,
+ reasoning_effort="none",
stop="\n",
stream=False,
stream_options={"include_usage": True},
@@ -336,6 +339,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
metadata={"foo": "string"},
n=1,
presence_penalty=-2,
+ reasoning_effort="none",
stop="\n",
stream_options={"include_usage": True},
temperature=1,
diff --git a/tests/api_resources/nfs/test_snapshots.py b/tests/api_resources/nfs/test_snapshots.py
index 5b229811..5068f951 100644
--- a/tests/api_resources/nfs/test_snapshots.py
+++ b/tests/api_resources/nfs/test_snapshots.py
@@ -23,6 +23,14 @@ class TestSnapshots:
@pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
+ snapshot = client.nfs.snapshots.retrieve(
+ nfs_snapshot_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
+ )
+ assert_matches_type(SnapshotRetrieveResponse, snapshot, path=["response"])
+
+ @pytest.mark.skip(reason="Mock server tests are disabled")
+ @parametrize
+ def test_method_retrieve_with_all_params(self, client: Gradient) -> None:
snapshot = client.nfs.snapshots.retrieve(
nfs_snapshot_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
region="region",
@@ -34,7 +42,6 @@ def test_method_retrieve(self, client: Gradient) -> None:
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.nfs.snapshots.with_raw_response.retrieve(
nfs_snapshot_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="region",
)
assert response.is_closed is True
@@ -47,7 +54,6 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.nfs.snapshots.with_streaming_response.retrieve(
nfs_snapshot_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="region",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -63,15 +69,12 @@ def test_path_params_retrieve(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_snapshot_id` but received ''"):
client.nfs.snapshots.with_raw_response.retrieve(
nfs_snapshot_id="",
- region="region",
)
@pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
- snapshot = client.nfs.snapshots.list(
- region="region",
- )
+ snapshot = client.nfs.snapshots.list()
assert_matches_type(SnapshotListResponse, snapshot, path=["response"])
@pytest.mark.skip(reason="Mock server tests are disabled")
@@ -86,9 +89,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
@pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
- response = client.nfs.snapshots.with_raw_response.list(
- region="region",
- )
+ response = client.nfs.snapshots.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -98,9 +99,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
@pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
- with client.nfs.snapshots.with_streaming_response.list(
- region="region",
- ) as response:
+ with client.nfs.snapshots.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -112,6 +111,14 @@ def test_streaming_response_list(self, client: Gradient) -> None:
@pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete(self, client: Gradient) -> None:
+ snapshot = client.nfs.snapshots.delete(
+ nfs_snapshot_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
+ )
+ assert snapshot is None
+
+ @pytest.mark.skip(reason="Mock server tests are disabled")
+ @parametrize
+ def test_method_delete_with_all_params(self, client: Gradient) -> None:
snapshot = client.nfs.snapshots.delete(
nfs_snapshot_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
region="region",
@@ -123,7 +130,6 @@ def test_method_delete(self, client: Gradient) -> None:
def test_raw_response_delete(self, client: Gradient) -> None:
response = client.nfs.snapshots.with_raw_response.delete(
nfs_snapshot_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="region",
)
assert response.is_closed is True
@@ -136,7 +142,6 @@ def test_raw_response_delete(self, client: Gradient) -> None:
def test_streaming_response_delete(self, client: Gradient) -> None:
with client.nfs.snapshots.with_streaming_response.delete(
nfs_snapshot_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="region",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -152,7 +157,6 @@ def test_path_params_delete(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_snapshot_id` but received ''"):
client.nfs.snapshots.with_raw_response.delete(
nfs_snapshot_id="",
- region="region",
)
@@ -164,6 +168,14 @@ class TestAsyncSnapshots:
@pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
+ snapshot = await async_client.nfs.snapshots.retrieve(
+ nfs_snapshot_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
+ )
+ assert_matches_type(SnapshotRetrieveResponse, snapshot, path=["response"])
+
+ @pytest.mark.skip(reason="Mock server tests are disabled")
+ @parametrize
+ async def test_method_retrieve_with_all_params(self, async_client: AsyncGradient) -> None:
snapshot = await async_client.nfs.snapshots.retrieve(
nfs_snapshot_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
region="region",
@@ -175,7 +187,6 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.nfs.snapshots.with_raw_response.retrieve(
nfs_snapshot_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="region",
)
assert response.is_closed is True
@@ -188,7 +199,6 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.nfs.snapshots.with_streaming_response.retrieve(
nfs_snapshot_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="region",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -204,15 +214,12 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_snapshot_id` but received ''"):
await async_client.nfs.snapshots.with_raw_response.retrieve(
nfs_snapshot_id="",
- region="region",
)
@pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
- snapshot = await async_client.nfs.snapshots.list(
- region="region",
- )
+ snapshot = await async_client.nfs.snapshots.list()
assert_matches_type(SnapshotListResponse, snapshot, path=["response"])
@pytest.mark.skip(reason="Mock server tests are disabled")
@@ -227,9 +234,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
@pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
- response = await async_client.nfs.snapshots.with_raw_response.list(
- region="region",
- )
+ response = await async_client.nfs.snapshots.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -239,9 +244,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
@pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
- async with async_client.nfs.snapshots.with_streaming_response.list(
- region="region",
- ) as response:
+ async with async_client.nfs.snapshots.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -253,6 +256,14 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
@pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete(self, async_client: AsyncGradient) -> None:
+ snapshot = await async_client.nfs.snapshots.delete(
+ nfs_snapshot_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
+ )
+ assert snapshot is None
+
+ @pytest.mark.skip(reason="Mock server tests are disabled")
+ @parametrize
+ async def test_method_delete_with_all_params(self, async_client: AsyncGradient) -> None:
snapshot = await async_client.nfs.snapshots.delete(
nfs_snapshot_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
region="region",
@@ -264,7 +275,6 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None:
async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
response = await async_client.nfs.snapshots.with_raw_response.delete(
nfs_snapshot_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="region",
)
assert response.is_closed is True
@@ -277,7 +287,6 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None:
async with async_client.nfs.snapshots.with_streaming_response.delete(
nfs_snapshot_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="region",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -293,5 +302,4 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_snapshot_id` but received ''"):
await async_client.nfs.snapshots.with_raw_response.delete(
nfs_snapshot_id="",
- region="region",
)
diff --git a/tests/api_resources/test_nfs.py b/tests/api_resources/test_nfs.py
index e60033eb..9a09e99b 100644
--- a/tests/api_resources/test_nfs.py
+++ b/tests/api_resources/test_nfs.py
@@ -33,6 +33,18 @@ def test_method_create(self, client: Gradient) -> None:
)
assert_matches_type(NfCreateResponse, nf, path=["response"])
+ @pytest.mark.skip(reason="Mock server tests are disabled")
+ @parametrize
+ def test_method_create_with_all_params(self, client: Gradient) -> None:
+ nf = client.nfs.create(
+ name="sammy-share-drive",
+ region="atl1",
+ size_gib=1024,
+ vpc_ids=["796c6fe3-2a1d-4da2-9f3e-38239827dc91"],
+ performance_tier="standard",
+ )
+ assert_matches_type(NfCreateResponse, nf, path=["response"])
+
@pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_create(self, client: Gradient) -> None:
@@ -68,6 +80,14 @@ def test_streaming_response_create(self, client: Gradient) -> None:
@pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_retrieve(self, client: Gradient) -> None:
+ nf = client.nfs.retrieve(
+ nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
+ )
+ assert_matches_type(NfRetrieveResponse, nf, path=["response"])
+
+ @pytest.mark.skip(reason="Mock server tests are disabled")
+ @parametrize
+ def test_method_retrieve_with_all_params(self, client: Gradient) -> None:
nf = client.nfs.retrieve(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
region="region",
@@ -79,7 +99,6 @@ def test_method_retrieve(self, client: Gradient) -> None:
def test_raw_response_retrieve(self, client: Gradient) -> None:
response = client.nfs.with_raw_response.retrieve(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="region",
)
assert response.is_closed is True
@@ -92,7 +111,6 @@ def test_raw_response_retrieve(self, client: Gradient) -> None:
def test_streaming_response_retrieve(self, client: Gradient) -> None:
with client.nfs.with_streaming_response.retrieve(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="region",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -108,12 +126,17 @@ def test_path_params_retrieve(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_id` but received ''"):
client.nfs.with_raw_response.retrieve(
nfs_id="",
- region="region",
)
@pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_list(self, client: Gradient) -> None:
+ nf = client.nfs.list()
+ assert_matches_type(NfListResponse, nf, path=["response"])
+
+ @pytest.mark.skip(reason="Mock server tests are disabled")
+ @parametrize
+ def test_method_list_with_all_params(self, client: Gradient) -> None:
nf = client.nfs.list(
region="region",
)
@@ -122,9 +145,7 @@ def test_method_list(self, client: Gradient) -> None:
@pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_raw_response_list(self, client: Gradient) -> None:
- response = client.nfs.with_raw_response.list(
- region="region",
- )
+ response = client.nfs.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -134,9 +155,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
@pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_streaming_response_list(self, client: Gradient) -> None:
- with client.nfs.with_streaming_response.list(
- region="region",
- ) as response:
+ with client.nfs.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -148,6 +167,14 @@ def test_streaming_response_list(self, client: Gradient) -> None:
@pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
def test_method_delete(self, client: Gradient) -> None:
+ nf = client.nfs.delete(
+ nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
+ )
+ assert nf is None
+
+ @pytest.mark.skip(reason="Mock server tests are disabled")
+ @parametrize
+ def test_method_delete_with_all_params(self, client: Gradient) -> None:
nf = client.nfs.delete(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
region="region",
@@ -159,7 +186,6 @@ def test_method_delete(self, client: Gradient) -> None:
def test_raw_response_delete(self, client: Gradient) -> None:
response = client.nfs.with_raw_response.delete(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="region",
)
assert response.is_closed is True
@@ -172,7 +198,6 @@ def test_raw_response_delete(self, client: Gradient) -> None:
def test_streaming_response_delete(self, client: Gradient) -> None:
with client.nfs.with_streaming_response.delete(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="region",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -188,7 +213,6 @@ def test_path_params_delete(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_id` but received ''"):
client.nfs.with_raw_response.delete(
nfs_id="",
- region="region",
)
@pytest.mark.skip(reason="Mock server tests are disabled")
@@ -196,7 +220,6 @@ def test_path_params_delete(self, client: Gradient) -> None:
def test_method_initiate_action_overload_1(self, client: Gradient) -> None:
nf = client.nfs.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
@@ -206,9 +229,9 @@ def test_method_initiate_action_overload_1(self, client: Gradient) -> None:
def test_method_initiate_action_with_all_params_overload_1(self, client: Gradient) -> None:
nf = client.nfs.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
params={"size_gib": 2048},
+ region="atl1",
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
@@ -217,7 +240,6 @@ def test_method_initiate_action_with_all_params_overload_1(self, client: Gradien
def test_raw_response_initiate_action_overload_1(self, client: Gradient) -> None:
response = client.nfs.with_raw_response.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
)
@@ -231,7 +253,6 @@ def test_raw_response_initiate_action_overload_1(self, client: Gradient) -> None
def test_streaming_response_initiate_action_overload_1(self, client: Gradient) -> None:
with client.nfs.with_streaming_response.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
) as response:
assert not response.is_closed
@@ -248,7 +269,6 @@ def test_path_params_initiate_action_overload_1(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_id` but received ''"):
client.nfs.with_raw_response.initiate_action(
nfs_id="",
- region="atl1",
type="resize",
)
@@ -257,7 +277,6 @@ def test_path_params_initiate_action_overload_1(self, client: Gradient) -> None:
def test_method_initiate_action_overload_2(self, client: Gradient) -> None:
nf = client.nfs.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
@@ -267,9 +286,9 @@ def test_method_initiate_action_overload_2(self, client: Gradient) -> None:
def test_method_initiate_action_with_all_params_overload_2(self, client: Gradient) -> None:
nf = client.nfs.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
params={"name": "daily-backup"},
+ region="atl1",
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
@@ -278,7 +297,6 @@ def test_method_initiate_action_with_all_params_overload_2(self, client: Gradien
def test_raw_response_initiate_action_overload_2(self, client: Gradient) -> None:
response = client.nfs.with_raw_response.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
)
@@ -292,7 +310,6 @@ def test_raw_response_initiate_action_overload_2(self, client: Gradient) -> None
def test_streaming_response_initiate_action_overload_2(self, client: Gradient) -> None:
with client.nfs.with_streaming_response.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
) as response:
assert not response.is_closed
@@ -309,7 +326,6 @@ def test_path_params_initiate_action_overload_2(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_id` but received ''"):
client.nfs.with_raw_response.initiate_action(
nfs_id="",
- region="atl1",
type="resize",
)
@@ -318,7 +334,6 @@ def test_path_params_initiate_action_overload_2(self, client: Gradient) -> None:
def test_method_initiate_action_overload_3(self, client: Gradient) -> None:
nf = client.nfs.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
@@ -328,9 +343,9 @@ def test_method_initiate_action_overload_3(self, client: Gradient) -> None:
def test_method_initiate_action_with_all_params_overload_3(self, client: Gradient) -> None:
nf = client.nfs.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
params={"vpc_id": "vpc-id-123"},
+ region="atl1",
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
@@ -339,7 +354,6 @@ def test_method_initiate_action_with_all_params_overload_3(self, client: Gradien
def test_raw_response_initiate_action_overload_3(self, client: Gradient) -> None:
response = client.nfs.with_raw_response.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
)
@@ -353,7 +367,6 @@ def test_raw_response_initiate_action_overload_3(self, client: Gradient) -> None
def test_streaming_response_initiate_action_overload_3(self, client: Gradient) -> None:
with client.nfs.with_streaming_response.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
) as response:
assert not response.is_closed
@@ -370,7 +383,6 @@ def test_path_params_initiate_action_overload_3(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_id` but received ''"):
client.nfs.with_raw_response.initiate_action(
nfs_id="",
- region="atl1",
type="resize",
)
@@ -379,7 +391,6 @@ def test_path_params_initiate_action_overload_3(self, client: Gradient) -> None:
def test_method_initiate_action_overload_4(self, client: Gradient) -> None:
nf = client.nfs.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
@@ -389,9 +400,9 @@ def test_method_initiate_action_overload_4(self, client: Gradient) -> None:
def test_method_initiate_action_with_all_params_overload_4(self, client: Gradient) -> None:
nf = client.nfs.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
params={"vpc_id": "vpc-id-123"},
+ region="atl1",
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
@@ -400,7 +411,6 @@ def test_method_initiate_action_with_all_params_overload_4(self, client: Gradien
def test_raw_response_initiate_action_overload_4(self, client: Gradient) -> None:
response = client.nfs.with_raw_response.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
)
@@ -414,7 +424,6 @@ def test_raw_response_initiate_action_overload_4(self, client: Gradient) -> None
def test_streaming_response_initiate_action_overload_4(self, client: Gradient) -> None:
with client.nfs.with_streaming_response.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
) as response:
assert not response.is_closed
@@ -431,7 +440,63 @@ def test_path_params_initiate_action_overload_4(self, client: Gradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_id` but received ''"):
client.nfs.with_raw_response.initiate_action(
nfs_id="",
- region="atl1",
+ type="resize",
+ )
+
+ @pytest.mark.skip(reason="Mock server tests are disabled")
+ @parametrize
+ def test_method_initiate_action_overload_5(self, client: Gradient) -> None:
+ nf = client.nfs.initiate_action(
+ nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
+ type="resize",
+ )
+ assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
+
+ @pytest.mark.skip(reason="Mock server tests are disabled")
+ @parametrize
+ def test_method_initiate_action_with_all_params_overload_5(self, client: Gradient) -> None:
+ nf = client.nfs.initiate_action(
+ nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
+ type="resize",
+ params={"performance_tier": "standard"},
+ region="atl1",
+ )
+ assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
+
+ @pytest.mark.skip(reason="Mock server tests are disabled")
+ @parametrize
+ def test_raw_response_initiate_action_overload_5(self, client: Gradient) -> None:
+ response = client.nfs.with_raw_response.initiate_action(
+ nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
+ type="resize",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ nf = response.parse()
+ assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
+
+ @pytest.mark.skip(reason="Mock server tests are disabled")
+ @parametrize
+ def test_streaming_response_initiate_action_overload_5(self, client: Gradient) -> None:
+ with client.nfs.with_streaming_response.initiate_action(
+ nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
+ type="resize",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ nf = response.parse()
+ assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @pytest.mark.skip(reason="Mock server tests are disabled")
+ @parametrize
+ def test_path_params_initiate_action_overload_5(self, client: Gradient) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_id` but received ''"):
+ client.nfs.with_raw_response.initiate_action(
+ nfs_id="",
type="resize",
)
@@ -452,6 +517,18 @@ async def test_method_create(self, async_client: AsyncGradient) -> None:
)
assert_matches_type(NfCreateResponse, nf, path=["response"])
+ @pytest.mark.skip(reason="Mock server tests are disabled")
+ @parametrize
+ async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None:
+ nf = await async_client.nfs.create(
+ name="sammy-share-drive",
+ region="atl1",
+ size_gib=1024,
+ vpc_ids=["796c6fe3-2a1d-4da2-9f3e-38239827dc91"],
+ performance_tier="standard",
+ )
+ assert_matches_type(NfCreateResponse, nf, path=["response"])
+
@pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
@@ -487,6 +564,14 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N
@pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
+ nf = await async_client.nfs.retrieve(
+ nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
+ )
+ assert_matches_type(NfRetrieveResponse, nf, path=["response"])
+
+ @pytest.mark.skip(reason="Mock server tests are disabled")
+ @parametrize
+ async def test_method_retrieve_with_all_params(self, async_client: AsyncGradient) -> None:
nf = await async_client.nfs.retrieve(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
region="region",
@@ -498,7 +583,6 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
response = await async_client.nfs.with_raw_response.retrieve(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="region",
)
assert response.is_closed is True
@@ -511,7 +595,6 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
async with async_client.nfs.with_streaming_response.retrieve(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="region",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -527,12 +610,17 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_id` but received ''"):
await async_client.nfs.with_raw_response.retrieve(
nfs_id="",
- region="region",
)
@pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_list(self, async_client: AsyncGradient) -> None:
+ nf = await async_client.nfs.list()
+ assert_matches_type(NfListResponse, nf, path=["response"])
+
+ @pytest.mark.skip(reason="Mock server tests are disabled")
+ @parametrize
+ async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
nf = await async_client.nfs.list(
region="region",
)
@@ -541,9 +629,7 @@ async def test_method_list(self, async_client: AsyncGradient) -> None:
@pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
- response = await async_client.nfs.with_raw_response.list(
- region="region",
- )
+ response = await async_client.nfs.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -553,9 +639,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
@pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
- async with async_client.nfs.with_streaming_response.list(
- region="region",
- ) as response:
+ async with async_client.nfs.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -567,6 +651,14 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non
@pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
async def test_method_delete(self, async_client: AsyncGradient) -> None:
+ nf = await async_client.nfs.delete(
+ nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
+ )
+ assert nf is None
+
+ @pytest.mark.skip(reason="Mock server tests are disabled")
+ @parametrize
+ async def test_method_delete_with_all_params(self, async_client: AsyncGradient) -> None:
nf = await async_client.nfs.delete(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
region="region",
@@ -578,7 +670,6 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None:
async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
response = await async_client.nfs.with_raw_response.delete(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="region",
)
assert response.is_closed is True
@@ -591,7 +682,6 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None:
async with async_client.nfs.with_streaming_response.delete(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="region",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -607,7 +697,6 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_id` but received ''"):
await async_client.nfs.with_raw_response.delete(
nfs_id="",
- region="region",
)
@pytest.mark.skip(reason="Mock server tests are disabled")
@@ -615,7 +704,6 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
async def test_method_initiate_action_overload_1(self, async_client: AsyncGradient) -> None:
nf = await async_client.nfs.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
@@ -625,9 +713,9 @@ async def test_method_initiate_action_overload_1(self, async_client: AsyncGradie
async def test_method_initiate_action_with_all_params_overload_1(self, async_client: AsyncGradient) -> None:
nf = await async_client.nfs.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
params={"size_gib": 2048},
+ region="atl1",
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
@@ -636,7 +724,6 @@ async def test_method_initiate_action_with_all_params_overload_1(self, async_cli
async def test_raw_response_initiate_action_overload_1(self, async_client: AsyncGradient) -> None:
response = await async_client.nfs.with_raw_response.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
)
@@ -650,7 +737,6 @@ async def test_raw_response_initiate_action_overload_1(self, async_client: Async
async def test_streaming_response_initiate_action_overload_1(self, async_client: AsyncGradient) -> None:
async with async_client.nfs.with_streaming_response.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
) as response:
assert not response.is_closed
@@ -667,7 +753,6 @@ async def test_path_params_initiate_action_overload_1(self, async_client: AsyncG
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_id` but received ''"):
await async_client.nfs.with_raw_response.initiate_action(
nfs_id="",
- region="atl1",
type="resize",
)
@@ -676,7 +761,6 @@ async def test_path_params_initiate_action_overload_1(self, async_client: AsyncG
async def test_method_initiate_action_overload_2(self, async_client: AsyncGradient) -> None:
nf = await async_client.nfs.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
@@ -686,9 +770,9 @@ async def test_method_initiate_action_overload_2(self, async_client: AsyncGradie
async def test_method_initiate_action_with_all_params_overload_2(self, async_client: AsyncGradient) -> None:
nf = await async_client.nfs.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
params={"name": "daily-backup"},
+ region="atl1",
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
@@ -697,7 +781,6 @@ async def test_method_initiate_action_with_all_params_overload_2(self, async_cli
async def test_raw_response_initiate_action_overload_2(self, async_client: AsyncGradient) -> None:
response = await async_client.nfs.with_raw_response.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
)
@@ -711,7 +794,6 @@ async def test_raw_response_initiate_action_overload_2(self, async_client: Async
async def test_streaming_response_initiate_action_overload_2(self, async_client: AsyncGradient) -> None:
async with async_client.nfs.with_streaming_response.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
) as response:
assert not response.is_closed
@@ -728,7 +810,6 @@ async def test_path_params_initiate_action_overload_2(self, async_client: AsyncG
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_id` but received ''"):
await async_client.nfs.with_raw_response.initiate_action(
nfs_id="",
- region="atl1",
type="resize",
)
@@ -737,7 +818,6 @@ async def test_path_params_initiate_action_overload_2(self, async_client: AsyncG
async def test_method_initiate_action_overload_3(self, async_client: AsyncGradient) -> None:
nf = await async_client.nfs.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
@@ -747,9 +827,9 @@ async def test_method_initiate_action_overload_3(self, async_client: AsyncGradie
async def test_method_initiate_action_with_all_params_overload_3(self, async_client: AsyncGradient) -> None:
nf = await async_client.nfs.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
params={"vpc_id": "vpc-id-123"},
+ region="atl1",
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
@@ -758,7 +838,6 @@ async def test_method_initiate_action_with_all_params_overload_3(self, async_cli
async def test_raw_response_initiate_action_overload_3(self, async_client: AsyncGradient) -> None:
response = await async_client.nfs.with_raw_response.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
)
@@ -772,7 +851,6 @@ async def test_raw_response_initiate_action_overload_3(self, async_client: Async
async def test_streaming_response_initiate_action_overload_3(self, async_client: AsyncGradient) -> None:
async with async_client.nfs.with_streaming_response.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
) as response:
assert not response.is_closed
@@ -789,7 +867,6 @@ async def test_path_params_initiate_action_overload_3(self, async_client: AsyncG
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_id` but received ''"):
await async_client.nfs.with_raw_response.initiate_action(
nfs_id="",
- region="atl1",
type="resize",
)
@@ -798,7 +875,6 @@ async def test_path_params_initiate_action_overload_3(self, async_client: AsyncG
async def test_method_initiate_action_overload_4(self, async_client: AsyncGradient) -> None:
nf = await async_client.nfs.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
@@ -808,9 +884,9 @@ async def test_method_initiate_action_overload_4(self, async_client: AsyncGradie
async def test_method_initiate_action_with_all_params_overload_4(self, async_client: AsyncGradient) -> None:
nf = await async_client.nfs.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
params={"vpc_id": "vpc-id-123"},
+ region="atl1",
)
assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
@@ -819,7 +895,6 @@ async def test_method_initiate_action_with_all_params_overload_4(self, async_cli
async def test_raw_response_initiate_action_overload_4(self, async_client: AsyncGradient) -> None:
response = await async_client.nfs.with_raw_response.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
)
@@ -833,7 +908,6 @@ async def test_raw_response_initiate_action_overload_4(self, async_client: Async
async def test_streaming_response_initiate_action_overload_4(self, async_client: AsyncGradient) -> None:
async with async_client.nfs.with_streaming_response.initiate_action(
nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
- region="atl1",
type="resize",
) as response:
assert not response.is_closed
@@ -850,6 +924,62 @@ async def test_path_params_initiate_action_overload_4(self, async_client: AsyncG
with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_id` but received ''"):
await async_client.nfs.with_raw_response.initiate_action(
nfs_id="",
- region="atl1",
+ type="resize",
+ )
+
+ @pytest.mark.skip(reason="Mock server tests are disabled")
+ @parametrize
+ async def test_method_initiate_action_overload_5(self, async_client: AsyncGradient) -> None:
+ nf = await async_client.nfs.initiate_action(
+ nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
+ type="resize",
+ )
+ assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
+
+ @pytest.mark.skip(reason="Mock server tests are disabled")
+ @parametrize
+ async def test_method_initiate_action_with_all_params_overload_5(self, async_client: AsyncGradient) -> None:
+ nf = await async_client.nfs.initiate_action(
+ nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
+ type="resize",
+ params={"performance_tier": "standard"},
+ region="atl1",
+ )
+ assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
+
+ @pytest.mark.skip(reason="Mock server tests are disabled")
+ @parametrize
+ async def test_raw_response_initiate_action_overload_5(self, async_client: AsyncGradient) -> None:
+ response = await async_client.nfs.with_raw_response.initiate_action(
+ nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
+ type="resize",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ nf = await response.parse()
+ assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
+
+ @pytest.mark.skip(reason="Mock server tests are disabled")
+ @parametrize
+ async def test_streaming_response_initiate_action_overload_5(self, async_client: AsyncGradient) -> None:
+ async with async_client.nfs.with_streaming_response.initiate_action(
+ nfs_id="0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",
+ type="resize",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ nf = await response.parse()
+ assert_matches_type(NfInitiateActionResponse, nf, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @pytest.mark.skip(reason="Mock server tests are disabled")
+ @parametrize
+ async def test_path_params_initiate_action_overload_5(self, async_client: AsyncGradient) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `nfs_id` but received ''"):
+ await async_client.nfs.with_raw_response.initiate_action(
+ nfs_id="",
type="resize",
)
From 210f8ac300e1715243926ab2b9045edb218fe19e Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
<142633134+stainless-app[bot]@users.noreply.github.com>
Date: Fri, 13 Mar 2026 12:22:30 +0000
Subject: [PATCH 17/17] codegen metadata
---
.stats.yml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/.stats.yml b/.stats.yml
index fa21fe25..8fe2fd00 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
configured_endpoints: 193
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/digitalocean%2Fgradient-93df5cb3d61c1bbcab2a4bbf3f0775454bd185a400bc140c61dbf36e108dbf89.yml
-openapi_spec_hash: 0126adf782c5feac3d8e682e466c3cf7
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/digitalocean%2Fgradient-24bdeb83fbe576e4dd92498321be977e820fd755babee77eb8ff419a6229b22f.yml
+openapi_spec_hash: f1faf4e4e0fc37e518fe55ff7c01086d
config_hash: 3f968a57adb20643373c134efc9af01a