11 changes: 6 additions & 5 deletions .speakeasy/workflow.lock
@@ -8,10 +8,11 @@ sources:
- latest
mistral-google-cloud-source:
sourceNamespace: mistral-openapi-google-cloud
sourceRevisionDigest: sha256:4a5343e63c6a78152e472b00ccc46d7bcb15594496bc94c8040039d3a9d4c5f8
sourceBlobDigest: sha256:3327f078a11596abdcbc21cd8a1adcf0b2aa474975cd9ab1feb745a2e50d555f
sourceRevisionDigest: sha256:bc59aaf55dc46e94ddf6cc687292807629d7a17ee5f573a504e7e44fd365e147
sourceBlobDigest: sha256:545fe85c5dae11def2741fc7a99f297b7f0728c9677c3c7b94d56ddbed70581d
tags:
- latest
- speakeasy-sdk-regen-guillaume.dumont-fix-github-actions-1767965603
mistral-openapi:
sourceNamespace: mistral-openapi
sourceRevisionDigest: sha256:cb63bd997cefe7b3b36e91a475df57cb779bf79f183340e0713d8ffb16a2dabc
@@ -30,10 +31,10 @@ targets:
mistralai-gcp-sdk:
source: mistral-google-cloud-source
sourceNamespace: mistral-openapi-google-cloud
sourceRevisionDigest: sha256:4a5343e63c6a78152e472b00ccc46d7bcb15594496bc94c8040039d3a9d4c5f8
sourceBlobDigest: sha256:3327f078a11596abdcbc21cd8a1adcf0b2aa474975cd9ab1feb745a2e50d555f
sourceRevisionDigest: sha256:bc59aaf55dc46e94ddf6cc687292807629d7a17ee5f573a504e7e44fd365e147
sourceBlobDigest: sha256:545fe85c5dae11def2741fc7a99f297b7f0728c9677c3c7b94d56ddbed70581d
codeSamplesNamespace: mistral-openapi-google-cloud-code-samples
codeSamplesRevisionDigest: sha256:03b3e82c20d10faa8622f14696632b96b1a2e8d747b266fff345061298d5f3e4
codeSamplesRevisionDigest: sha256:aa98a5f632312a6f8a91bdfb70de90ffa494f41b7a3ef6fd38d740bcaf2abc40
mistralai-sdk:
source: mistral-openapi
sourceNamespace: mistral-openapi
3 changes: 3 additions & 0 deletions packages/mistralai_gcp/.gitignore
@@ -1,3 +1,6 @@
**/__pycache__/
**/.speakeasy/temp/
**/.speakeasy/logs/
.speakeasy/reports
README-PYPI.md
.venv/
64 changes: 38 additions & 26 deletions packages/mistralai_gcp/.speakeasy/gen.lock
@@ -1,41 +1,44 @@
lockVersion: 2.0.0
id: ec60f2d8-7869-45c1-918e-773d41a8cf74
management:
docChecksum: 28fe1ab59b4dee005217f2dbbd836060
docVersion: 0.0.2
speakeasyVersion: 1.517.3
generationVersion: 2.548.6
releaseVersion: 1.6.0
configChecksum: 66bf5911f59189922e03a75a72923b32
docChecksum: 05fc6f45406deac180ffc1df760c67f4
docVersion: 1.0.0
speakeasyVersion: 1.606.10
generationVersion: 2.687.13
releaseVersion: 2.0.0
configChecksum: a1b03996b9a524c2110678cbe2b68226
repoURL: https://github.com/mistralai/client-python.git
repoSubDirectory: packages/mistralai_gcp
installationURL: https://github.com/mistralai/client-python.git#subdirectory=packages/mistralai_gcp
published: true
features:
python:
additionalDependencies: 1.0.0
additionalProperties: 1.0.1
constsAndDefaults: 1.0.5
core: 5.12.3
core: 5.20.1
defaultEnabledRetries: 0.2.0
enumUnions: 0.1.0
envVarSecurityUsage: 0.3.2
examples: 3.0.1
examples: 3.0.2
flatRequests: 1.0.1
globalSecurity: 3.0.3
globalSecurityCallbacks: 1.0.0
globalSecurityFlattening: 1.0.0
globalServerURLs: 3.1.0
globalServerURLs: 3.1.1
methodArguments: 1.0.2
nameOverrides: 3.0.1
nullables: 1.0.1
openEnums: 1.0.0
openEnums: 1.0.1
responseFormat: 1.0.1
retries: 3.0.2
sdkHooks: 1.0.1
serverEvents: 1.0.7
sdkHooks: 1.1.0
serverEvents: 1.0.8
serverEventsSentinels: 0.1.0
serverIDs: 3.0.0
unions: 3.0.4
generatedFiles:
- .gitattributes
- .python-version
- .vscode/settings.json
- docs/models/arguments.md
- docs/models/assistantmessage.md
@@ -73,6 +76,7 @@ generatedFiles:
- docs/models/jsonschema.md
- docs/models/loc.md
- docs/models/messages.md
- docs/models/mistralpromptmode.md
- docs/models/prediction.md
- docs/models/referencechunk.md
- docs/models/referencechunktype.md
@@ -83,7 +87,11 @@
- docs/models/stop.md
- docs/models/systemmessage.md
- docs/models/systemmessagecontent.md
- docs/models/systemmessagecontentchunks.md
- docs/models/textchunk.md
- docs/models/thinkchunk.md
- docs/models/thinkchunktype.md
- docs/models/thinking.md
- docs/models/tool.md
- docs/models/toolcall.md
- docs/models/toolchoice.md
@@ -135,14 +143,20 @@ generatedFiles:
- src/mistralai_gcp/models/imageurl.py
- src/mistralai_gcp/models/imageurlchunk.py
- src/mistralai_gcp/models/jsonschema.py
- src/mistralai_gcp/models/mistralgcperror.py
- src/mistralai_gcp/models/mistralpromptmode.py
- src/mistralai_gcp/models/no_response_error.py
- src/mistralai_gcp/models/prediction.py
- src/mistralai_gcp/models/referencechunk.py
- src/mistralai_gcp/models/responseformat.py
- src/mistralai_gcp/models/responseformats.py
- src/mistralai_gcp/models/responsevalidationerror.py
- src/mistralai_gcp/models/sdkerror.py
- src/mistralai_gcp/models/security.py
- src/mistralai_gcp/models/systemmessage.py
- src/mistralai_gcp/models/systemmessagecontentchunks.py
- src/mistralai_gcp/models/textchunk.py
- src/mistralai_gcp/models/thinkchunk.py
- src/mistralai_gcp/models/tool.py
- src/mistralai_gcp/models/toolcall.py
- src/mistralai_gcp/models/toolchoice.py
@@ -153,12 +167,12 @@
- src/mistralai_gcp/models/usermessage.py
- src/mistralai_gcp/models/validationerror.py
- src/mistralai_gcp/py.typed
- src/mistralai_gcp/sdk.py
- src/mistralai_gcp/sdkconfiguration.py
- src/mistralai_gcp/types/__init__.py
- src/mistralai_gcp/types/basemodel.py
- src/mistralai_gcp/utils/__init__.py
- src/mistralai_gcp/utils/annotations.py
- src/mistralai_gcp/utils/datetimes.py
- src/mistralai_gcp/utils/enums.py
- src/mistralai_gcp/utils/eventstreaming.py
- src/mistralai_gcp/utils/forms.py
@@ -170,42 +184,40 @@
- src/mistralai_gcp/utils/retries.py
- src/mistralai_gcp/utils/security.py
- src/mistralai_gcp/utils/serializers.py
- src/mistralai_gcp/utils/unmarshal_json_response.py
- src/mistralai_gcp/utils/url.py
- src/mistralai_gcp/utils/values.py
examples:
stream_chat:
speakeasy-default-stream-chat:
requestBody:
application/json: {"model": "mistral-small-latest", "stream": true, "messages": [{"content": "Who is the best French painter? Answer in one short sentence.", "role": "user"}]}
application/json: {"model": "mistral-large-latest", "stream": true, "messages": [{"content": "Who is the best French painter? Answer in one short sentence.", "role": "user"}], "response_format": {"type": "text"}}
responses:
"422":
application/json: {}
"200": {}
chat_completion_v1_chat_completions_post:
speakeasy-default-chat-completion-v1-chat-completions-post:
requestBody:
application/json: {"model": "mistral-small-latest", "stream": false, "messages": [{"content": "Who is the best French painter? Answer in one short sentence.", "role": "user"}]}
application/json: {"model": "mistral-large-latest", "stream": false, "messages": [{"content": "Who is the best French painter? Answer in one short sentence.", "role": "user"}], "response_format": {"type": "text"}}
responses:
"200":
application/json: {"id": "cmpl-e5cc70bb28c444948073e77776eb30ef", "object": "chat.completion", "model": "mistral-small-latest", "usage": {"prompt_tokens": 16, "completion_tokens": 34, "total_tokens": 50}, "created": 1702256327, "choices": [{"index": 0, "message": {"prefix": false, "role": "assistant"}, "finish_reason": "stop"}, {"index": 0, "message": {"prefix": false, "role": "assistant"}, "finish_reason": "stop"}, {"index": 0, "message": {"prefix": false, "role": "assistant"}, "finish_reason": "stop"}]}
application/json: {"id": "cmpl-e5cc70bb28c444948073e77776eb30ef", "object": "chat.completion", "model": "mistral-small-latest", "usage": {"prompt_tokens": 0, "completion_tokens": 0, "total_tokens": 0}, "created": 1702256327, "choices": []}
"422":
application/json: {}
stream_fim:
speakeasy-default-stream-fim:
requestBody:
application/json: {"model": "codestral-2405", "top_p": 1, "stream": true, "prompt": "def", "suffix": "return a+b"}
application/json: {"model": "codestral-latest", "top_p": 1, "stream": true, "prompt": "def", "suffix": "return a+b"}
responses:
"422":
application/json: {}
"200": {}
fim_completion_v1_fim_completions_post:
speakeasy-default-fim-completion-v1-fim-completions-post:
userExample:
requestBody:
application/json: {"model": "codestral-2405", "top_p": 1, "stream": false, "prompt": "def", "suffix": "return a+b"}
application/json: {"model": "codestral-latest", "top_p": 1, "stream": false, "prompt": "def", "suffix": "return a+b"}
responses:
"200":
application/json: {"id": "cmpl-e5cc70bb28c444948073e77776eb30ef", "object": "chat.completion", "model": "codestral-latest", "usage": {"prompt_tokens": 16, "completion_tokens": 34, "total_tokens": 50}, "created": 1702256327, "choices": [{"index": 0, "message": {"prefix": false, "role": "assistant"}, "finish_reason": "stop"}, {"index": 0, "message": {"prefix": false, "role": "assistant"}, "finish_reason": "stop"}, {"index": 0, "message": {"prefix": false, "role": "assistant"}, "finish_reason": "stop"}]}
"422":
application/json: {}
examplesVersion: 1.0.0
application/json: {"id": "447e3e0d457e42e98248b5d2ef52a2a3", "object": "chat.completion", "model": "codestral-2508", "usage": {"prompt_tokens": 8, "completion_tokens": 91, "total_tokens": 99}, "created": 1759496862, "choices": [{"index": 0, "message": {"content": "add_numbers(a: int, b: int) -> int:\n \"\"\"\n You are given two integers `a` and `b`. Your task is to write a function that\n returns the sum of these two integers. The function should be implemented in a\n way that it can handle very large integers (up to 10^18). As a reminder, your\n code has to be in python\n \"\"\"\n", "tool_calls": null, "prefix": false, "role": "assistant"}, "finish_reason": "stop"}]}
examplesVersion: 1.0.2
generatedTests: {}
releaseNotes: "## SDK Changes Detected:\n* `mistral_gcp.chat.complete()`: \n * `request` **Changed** **Breaking** :warning:\n * `response` **Changed**\n* `mistral_gcp.fim.complete()`: `response` **Changed**\n"
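For context on the example changes above: the updated `chat_completion_v1_chat_completions_post` request body corresponds roughly to a call like the sketch below against the regenerated GCP client. This is a minimal, illustrative sketch; the region, project ID, and the exact handling of `response_format` are assumptions for demonstration, not values taken from this diff.

```python
# Minimal sketch of the SDK call behind the chat_completion example above.
# Region and project ID are placeholder assumptions, not part of this change.
from mistralai_gcp import MistralGoogleCloud

client = MistralGoogleCloud(
    region="europe-west4",        # assumed example region
    project_id="my-gcp-project",  # assumed example project
)

res = client.chat.complete(
    model="mistral-large-latest",
    messages=[
        {
            "role": "user",
            "content": "Who is the best French painter? Answer in one short sentence.",
        }
    ],
    response_format={"type": "text"},  # mirrors the example request body
)

# Guard against an empty choices list, as in the trimmed 200 response example.
if res is not None and res.choices:
    print(res.choices[0].message.content)
```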
15 changes: 14 additions & 1 deletion packages/mistralai_gcp/.speakeasy/gen.yaml
@@ -4,27 +4,37 @@ generation:
maintainOpenAPIOrder: true
usageSnippets:
optionalPropertyRendering: withExample
sdkInitStyle: constructor
useClassNamesForArrayFields: true
fixes:
nameResolutionDec2023: true
nameResolutionFeb2025: false
parameterOrderingFeb2024: true
requestResponseComponentNamesFeb2024: true
securityFeb2025: false
sharedErrorComponentsApr2025: false
auth:
oAuth2ClientCredentialsEnabled: true
oAuth2PasswordEnabled: false
tests:
generateTests: true
generateNewTests: false
skipResponseBodyAssertions: false
python:
version: 1.6.0
version: 2.0.0
additionalDependencies:
dev:
pytest: ^8.2.2
pytest-asyncio: ^0.23.7
main:
google-auth: ^2.31.0
requests: ^2.32.3
allowedRedefinedBuiltins:
- id
- object
authors:
- Mistral
baseErrorName: MistralGcpError
clientServerStatusCodesAsErrors: true
defaultErrorName: SDKError
description: Python Client SDK for the Mistral AI API in GCP.
@@ -46,8 +56,11 @@ python:
inputModelSuffix: input
maxMethodParams: 15
methodArguments: infer-optional-args
moduleName: ""
outputModelSuffix: output
packageManager: poetry
packageName: mistralai-gcp
pytestFilterWarnings: []
pytestTimeout: 0
responseFormat: flat
templateVersion: v2
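For context on the error settings above: the new `baseErrorName: MistralGcpError`, together with the freshly generated `mistralgcperror.py` and `no_response_error.py` models in gen.lock, suggests callers can catch a common base error class. The sketch below is an assumption about how that surfaces to users; the import path and exception attributes are not confirmed by this diff.

```python
# Hedged sketch of error handling against the regenerated SDK.
# models.MistralGcpError is assumed to be the configured base error class
# (per baseErrorName in gen.yaml); its exact attributes are not shown here.
from mistralai_gcp import MistralGoogleCloud, models

client = MistralGoogleCloud(region="europe-west4", project_id="my-gcp-project")

try:
    res = client.chat.complete(
        model="mistral-large-latest",
        messages=[{"role": "user", "content": "Hello"}],
    )
except models.MistralGcpError as err:
    # Assumed base class for API errors raised by the generated client.
    print(f"API error: {err}")
```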
11 changes: 11 additions & 0 deletions packages/mistralai_gcp/RELEASES.md
@@ -0,0 +1,11 @@


## 2026-01-09 13:33:05
### Changes
Based on:
- OpenAPI Doc
- Speakeasy CLI 1.606.10 (2.687.13) https://github.com/speakeasy-api/speakeasy
### Generated
- [python v2.0.0] packages/mistralai_gcp
### Releases
- [PyPI v2.0.0] https://pypi.org/project/mistralai-gcp/2.0.0 - packages/mistralai_gcp