class OllamaContainer(image: str = 'ollama/ollama:0.1.44', ollama_home: str | PathLike | None = None, **kwargs)

Ollama Container

Parameter:

image - the ollama image to use (default: ollama/ollama:0.1.44)

Parameter:

ollama_home - the directory to mount for model data (default: None)

You may pass pathlib.Path.home() / ".ollama" to reuse models that were already pulled by an Ollama instance running on this host outside the container.

Examples

>>> from testcontainers.ollama import OllamaContainer
>>> with OllamaContainer() as ollama:
...     ollama.list_models()
[]
>>> from json import loads
>>> from pathlib import Path
>>> from requests import post
>>> from testcontainers.ollama import OllamaContainer
>>> def split_by_line(generator):
...     data = b''
...     for each_item in generator:
...         for line in each_item.splitlines(True):
...             data += line
...             if data.endswith((b'\r\r', b'\n\n', b'\r\n\r\n', b'\n')):
...                 yield from data.splitlines()
...                 data = b''
...     if data:
...         yield from data.splitlines()

>>> with OllamaContainer(ollama_home=Path.home() / ".ollama") as ollama:
...     if "llama3:latest" not in [e["name"] for e in ollama.list_models()]:
...         print("did not find 'llama3:latest', pulling")
...         ollama.pull_model("llama3:latest")
...     endpoint = ollama.get_endpoint()
...     for chunk in split_by_line(
...         post(url=f"{endpoint}/api/chat", stream=True, json={
...             "model": "llama3:latest",
...             "messages": [{
...                 "role": "user",
...                 "content": "what color is the sky? MAX ONE WORD"
...             }]
...         })
...      ):
...          print(loads(chunk)["message"]["content"], end="")
Blue.