Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 8 additions & 6 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,30 +1,32 @@
# Mistral Python Client

This client uses poetry as a depedency and virtual environment manager.
You can use the Mistral Python client to interact with the Mistral AI API.

you can install poetry with@
This client uses `poetry` as a dependency and virtual environment manager.

You can install poetry with:

```bash
pip install poetry
```

## Installing

poetry will set up a virtual environment and install dependencies with the following command:
`poetry` will set up a virtual environment and install dependencies with the following command:

```bash
poetry install
```

## Run examples

You can run the examples using `poetry run` or by entering the virtual environment using `poetry shell`.
You can run the examples in the `examples/` directory using `poetry run` or by entering the virtual environment using `poetry shell`.

### Using poetry run

```bash
cd examples
poetry run python async_chat.py
poetry run python chat_no_streaming.py
```

### Using poetry shell
Expand All @@ -33,5 +35,5 @@ poetry run python async_chat.py
cd examples
poetry shell

>> python async_chat.py
>> python chat_no_streaming.py
```
22 changes: 22 additions & 0 deletions examples/async_chat_no_streaming.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
import asyncio
import os

from mistral_client.async_client import MistralAsyncClient
from mistral_client.models.chat_completion import ChatMessage


async def main():
    """Send one non-streaming chat request to the Mistral API and print the reply."""
    # API key comes from the environment; raises KeyError if MISTRAL_API_KEY is unset.
    client = MistralAsyncClient(api_key=os.environ["MISTRAL_API_KEY"])

    response = await client.chat(
        model="mistral-tiny",
        messages=[ChatMessage(role="user", content="What is the best French cheese?")],
    )
    print(response)


if __name__ == "__main__":
    asyncio.run(main())
22 changes: 22 additions & 0 deletions examples/async_chat_with_streaming.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
import asyncio
import os

from mistral_client.async_client import MistralAsyncClient
from mistral_client.models.chat_completion import ChatMessage


async def main():
    """Stream a chat completion from the Mistral API, printing each chunk as it arrives."""
    # API key comes from the environment; raises KeyError if MISTRAL_API_KEY is unset.
    client = MistralAsyncClient(api_key=os.environ["MISTRAL_API_KEY"])

    stream = client.chat_stream(
        model="mistral-tiny",
        messages=[ChatMessage(role="user", content="What is the best French cheese?")],
    )
    async for piece in stream:
        print(piece)


if __name__ == "__main__":
    asyncio.run(main())
20 changes: 20 additions & 0 deletions examples/async_embeddings.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
import asyncio
import os

from mistral_client.async_client import MistralAsyncClient


async def main():
    """Request embeddings for a batch of ten identical prompts and print the response."""
    # API key comes from the environment; raises KeyError if MISTRAL_API_KEY is unset.
    client = MistralAsyncClient(api_key=os.environ["MISTRAL_API_KEY"])

    # Repeat the same prompt ten times to demonstrate batched embedding requests.
    batch = ["What is the best French cheese?"] * 10
    response = await client.embeddings(model="mistral-embed", input=batch)
    print(response)


if __name__ == "__main__":
    asyncio.run(main())
52 changes: 0 additions & 52 deletions examples/async_examples.py

This file was deleted.

21 changes: 21 additions & 0 deletions examples/chat_no_streaming.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
import os

from mistral_client.client import MistralClient
from mistral_client.models.chat_completion import ChatMessage


def main():
    """Send one non-streaming chat request to the Mistral API and print the reply."""
    # API key comes from the environment; raises KeyError if MISTRAL_API_KEY is unset.
    client = MistralClient(api_key=os.environ["MISTRAL_API_KEY"])

    response = client.chat(
        model="mistral-tiny",
        messages=[ChatMessage(role="user", content="What is the best French cheese?")],
    )
    print(response)


if __name__ == "__main__":
    main()
21 changes: 21 additions & 0 deletions examples/chat_with_streaming.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
import os

from mistral_client.client import MistralClient
from mistral_client.models.chat_completion import ChatMessage


def main():
    """Stream a chat completion from the Mistral API, printing each chunk as it arrives."""
    # API key comes from the environment; raises KeyError if MISTRAL_API_KEY is unset.
    client = MistralClient(api_key=os.environ["MISTRAL_API_KEY"])

    stream = client.chat_stream(
        model="mistral-tiny",
        messages=[ChatMessage(role="user", content="What is the best French cheese?")],
    )
    for piece in stream:
        print(piece)


if __name__ == "__main__":
    main()
20 changes: 20 additions & 0 deletions examples/embeddings.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
import os

from mistral_client.client import MistralClient


def main():
    """Request embeddings for a batch of ten identical prompts and print the response."""
    # API key comes from the environment; raises KeyError if MISTRAL_API_KEY is unset.
    client = MistralClient(api_key=os.environ["MISTRAL_API_KEY"])

    # Repeat the same prompt ten times to demonstrate batched embedding requests.
    batch = ["What is the best French cheese?"] * 10
    response = client.embeddings(model="mistral-embed", input=batch)

    print(response)


if __name__ == "__main__":
    main()
51 changes: 0 additions & 51 deletions examples/examples.py

This file was deleted.

16 changes: 16 additions & 0 deletions examples/list_models.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
import os

from mistral_client.client import MistralClient


def main():
    """List the models available to this API key and print the response."""
    # API key comes from the environment; raises KeyError if MISTRAL_API_KEY is unset.
    client = MistralClient(api_key=os.environ["MISTRAL_API_KEY"])

    print(client.list_models())


if __name__ == "__main__":
    main()