|
| 1 | +""" |
| 2 | +DescriptionNode Module |
| 3 | +""" |
| 4 | +from typing import List, Optional |
| 5 | +from .base_node import BaseNode |
| 6 | + |
class DescriptionNode(BaseNode):
    """
    A node responsible for generating a description of a given document. This description is
    generated using a language model and is used for retrieving the right documents.

    It allows scraping of big documents without exceeding the token limit of the language model.

    Attributes:
        llm_model: An instance of a language model client, configured for generating answers.
        embedder_model: Optional embedder client used downstream for retrieval (may be None).
        verbose (bool): A flag indicating whether to show print statements during execution.
        cache_path: Path for caching, or False when caching is disabled.

    Args:
        input (str): Boolean expression defining the input keys needed from the state.
        output (List[str]): List of output keys to be updated in the state.
        node_config (dict): Additional configuration for the node.
        node_name (str): The unique identifier name for the node, defaulting to "Description".
    """

    def __init__(
        self,
        input: str,
        output: List[str],
        node_config: Optional[dict] = None,
        node_name: str = "Description",
    ):
        super().__init__(node_name, "node", input, output, 2, node_config)

        # Normalize once so every lookup below tolerates node_config=None
        # consistently (previously only the `verbose` lookup was None-safe,
        # while the `llm_model` and `cache_path` accesses raised TypeError).
        config = node_config if node_config is not None else {}

        self.llm_model = config.get("llm_model")
        self.embedder_model = config.get("embedder_model", None)
        self.verbose = config.get("verbose", False)
        self.cache_path = config.get("cache_path", False)

    def execute(self, state: dict) -> dict:
        # NOTE(review): placeholder — not yet implemented, so it returns None
        # despite the `-> dict` annotation. Callers should not rely on the
        # return value until a real implementation lands.
        pass
0 commit comments