Skip to content

EmbeddingWithOllama

Embedding class using Ollama model based on LangChain Embeddings class.

EmbeddingWithOllama

Bases: Embeddings

Embedding class using Ollama model based on LangChain Embeddings class.

Source code in aiagents4pharma/talk2knowledgegraphs/utils/embeddings/ollama.py
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
class EmbeddingWithOllama(Embeddings):
    """
    Embedding class using Ollama model based on LangChain Embeddings class.
    """
    def __init__(self, model_name: str):
        """
        Initialize the EmbeddingWithOllama class.

        Args:
            model_name: The name of the Ollama model to be used.
        """
        # Ensure the model is available locally (pulls it if missing) and the
        # Ollama server is reachable; may raise ValueError otherwise.
        self.__setup(model_name)

        # Set parameters
        self.model_name = model_name

        # Prepare the underlying LangChain embedding backend.
        self.model = OllamaEmbeddings(model=self.model_name)

    def __setup(self, model_name: str) -> None:
        """
        Check if the Ollama model is available and run the Ollama server if needed.

        Args:
            model_name: The name of the Ollama model to be used.

        Raises:
            ValueError: If the model had to be pulled, or if listing models
                failed and the Ollama server had to be (re)started.
        """
        try:
            # Installed model names look like "llama3:latest"; strip the tag
            # before comparing against the requested name.
            models_list = ollama.list()["models"]
            if model_name not in [m['model'].replace(":latest", "") for m in models_list]:
                ollama.pull(model_name)
                time.sleep(30)
                # NOTE(review): raising after a successful pull routes control
                # into the except branch below (server restart) — confirm that
                # this is the intended flow.
                raise ValueError(f"Pulled {model_name} model")
        except Exception as e:
            # Best-effort recovery: assume the server is down, start it in the
            # background, wait briefly, then surface the original error.
            with subprocess.Popen(
                "ollama serve", shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE
            ):
                time.sleep(10)
            raise ValueError(f"Error: {e} and restarted Ollama server.") from e

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        """
        Generate embeddings for a list of input texts using Ollama model.

        Args:
            texts: The list of texts to be embedded.

        Returns:
            The list of embedding vectors, one per input text.
        """

        # Delegate to the LangChain OllamaEmbeddings backend.
        embeddings = self.model.embed_documents(texts)

        return embeddings

    def embed_query(self, text: str) -> List[float]:
        """
        Generate embeddings for an input text using Ollama model.

        Args:
            text: A query to be embedded.

        Returns:
            The embeddings for the given query.
        """

        # Delegate to the LangChain OllamaEmbeddings backend.
        embeddings = self.model.embed_query(text)

        return embeddings

__init__(model_name)

Initialize the EmbeddingWithOllama class.

Parameters:

Name Type Description Default
model_name str

The name of the Ollama model to be used.

required
Source code in aiagents4pharma/talk2knowledgegraphs/utils/embeddings/ollama.py
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
def __init__(self, model_name: str):
    """
    Initialize the EmbeddingWithOllama class.

    Args:
        model_name: The name of the Ollama model to be used.
    """
    # Verify model/server availability first; this may raise if the
    # Ollama server needed to be restarted.
    self.__setup(model_name)

    # Record which model this instance wraps, then build the
    # LangChain embedding backend for it.
    self.model_name = model_name
    self.model = OllamaEmbeddings(model=model_name)

__setup(model_name)

Check if the Ollama model is available and run the Ollama server if needed.

Parameters:

Name Type Description Default
model_name str

The name of the Ollama model to be used.

required
Source code in aiagents4pharma/talk2knowledgegraphs/utils/embeddings/ollama.py
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
def __setup(self, model_name: str) -> None:
    """
    Check if the Ollama model is available and run the Ollama server if needed.

    Args:
        model_name: The name of the Ollama model to be used.

    Raises:
        ValueError: If the model had to be pulled, or if listing models
            failed and the Ollama server had to be (re)started.
    """
    try:
        # Installed model names look like "llama3:latest"; strip the tag
        # before comparing against the requested name.
        models_list = ollama.list()["models"]
        if model_name not in [m['model'].replace(":latest", "") for m in models_list]:
            ollama.pull(model_name)
            # Give the freshly pulled model time to settle before use.
            time.sleep(30)
            # NOTE(review): raising after a successful pull deliberately routes
            # control into the except branch below (server restart) — confirm
            # that this is the intended flow.
            raise ValueError(f"Pulled {model_name} model")
    except Exception as e:
        # Best-effort recovery: assume the server is down, start it in the
        # background, wait briefly, then surface the original error.
        with subprocess.Popen(
            "ollama serve", shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE
        ):
            time.sleep(10)
        raise ValueError(f"Error: {e} and restarted Ollama server.") from e

embed_documents(texts)

Generate embeddings for a list of input texts using the Ollama model.

Parameters:

Name Type Description Default
texts List[str]

The list of texts to be embedded.

required

Returns:

Type Description
List[float]

The list of embeddings for the given texts.

Source code in aiagents4pharma/talk2knowledgegraphs/utils/embeddings/ollama.py
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
def embed_documents(self, texts: List[str]) -> List[List[float]]:
    """
    Generate embeddings for a list of input texts using Ollama model.

    Fix: the return annotation was `List[float]`, but one embedding vector is
    produced per input text, so the result is a list of vectors
    (`List[List[float]]`) — matching LangChain's `Embeddings.embed_documents`.

    Args:
        texts: The list of texts to be embedded.

    Returns:
        The list of embedding vectors, one per input text.
    """

    # Delegate to the LangChain OllamaEmbeddings backend.
    embeddings = self.model.embed_documents(texts)

    return embeddings

embed_query(text)

Generate embeddings for an input text using Ollama model.

Parameters:

Name Type Description Default
text str

A query to be embedded.

required

Returns: The embeddings for the given query.

Source code in aiagents4pharma/talk2knowledgegraphs/utils/embeddings/ollama.py
68
69
70
71
72
73
74
75
76
77
78
79
80
81
def embed_query(self, text: str) -> List[float]:
    """
    Generate embeddings for an input text using Ollama model.

    Args:
        text: A query to be embedded.

    Returns:
        The embeddings for the given query.
    """
    # Forward the query to the wrapped LangChain embedding model and
    # hand its vector straight back to the caller.
    return self.model.embed_query(text)