
Commit 336bf70

initial creation of FetchNodeLevelK and DescriptionNode
1 parent 89de5b6 commit 336bf70

2 files changed: +81 −0 lines changed
Lines changed: 42 additions & 0 deletions
"""
DescriptionNode Module
"""
from typing import List, Optional
from .base_node import BaseNode


class DescriptionNode(BaseNode):
    """
    A node responsible for generating a description of a given document. The description
    is generated with a language model and is later used to retrieve the right documents,
    which allows large documents to be scraped without exceeding the token limit of the
    language model.

    Attributes:
        llm_model: An instance of a language model client, configured for generating answers.
        verbose (bool): A flag indicating whether to show print statements during execution.

    Args:
        input (str): Boolean expression defining the input keys needed from the state.
        output (List[str]): List of output keys to be updated in the state.
        node_config (dict): Additional configuration for the node.
        node_name (str): The unique identifier name for the node, defaulting to "Description".
    """

    def __init__(
        self,
        input: str,
        output: List[str],
        node_config: Optional[dict] = None,
        node_name: str = "Description",
    ):
        super().__init__(node_name, "node", input, output, 2, node_config)

        self.llm_model = node_config["llm_model"]
        self.embedder_model = node_config.get("embedder_model", None)
        self.verbose = (
            False if node_config is None else node_config.get("verbose", False)
        )
        self.cache_path = node_config.get("cache_path", False)

    def execute(self, state: dict) -> dict:
        pass
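
The constructor requires node_config to supply an llm_model (it is indexed directly), while embedder_model, verbose, and cache_path are optional. A minimal instantiation sketch follows, assuming a LangChain-style chat model and an import path that are not shown in this commit:

# Hypothetical usage sketch; the import path, ChatOpenAI model and state keys are
# assumptions — only the DescriptionNode constructor itself comes from this commit.
from langchain_openai import ChatOpenAI
from scrapegraphai.nodes import DescriptionNode  # assumed package location

llm = ChatOpenAI(model="gpt-4o-mini")

description_node = DescriptionNode(
    input="docs",                 # illustrative input key expression
    output=["docs"],              # state keys the node will update
    node_config={
        "llm_model": llm,         # required: __init__ reads node_config["llm_model"]
        "verbose": True,          # optional, defaults to False
        "cache_path": "./cache",  # optional, defaults to False
    },
)

# execute() is still a stub in this commit, so this returns None for now.
state = description_node.execute({"docs": []})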
Lines changed: 39 additions & 0 deletions
"""
FetchNodeLevelK Module
"""
from typing import List, Optional
from .base_node import BaseNode


class FetchNodeLevelK(BaseNode):
    """
    A node responsible for fetching all the pages that sit at a given depth (level k)
    of the hyperlink graph.

    Attributes:
        llm_model: An instance of a language model client, configured for generating answers.
        verbose (bool): A flag indicating whether to show print statements during execution.

    Args:
        input (str): Boolean expression defining the input keys needed from the state.
        output (List[str]): List of output keys to be updated in the state.
        node_config (dict): Additional configuration for the node.
        node_name (str): The unique identifier name for the node, defaulting to "FetchLevelK".
    """

    def __init__(
        self,
        input: str,
        output: List[str],
        node_config: Optional[dict] = None,
        node_name: str = "FetchLevelK",
    ):
        super().__init__(node_name, "node", input, output, 2, node_config)

        self.llm_model = node_config["llm_model"]
        self.embedder_model = node_config.get("embedder_model", None)
        self.verbose = (
            False if node_config is None else node_config.get("verbose", False)
        )
        self.cache_path = node_config.get("cache_path", False)

    def execute(self, state: dict) -> dict:
        pass
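
Since execute is still a stub, this commit does not show how level-k fetching will actually work. As a rough illustration of the idea only (not this node's implementation), a breadth-first crawl that collects every page within k hyperlink hops could look like the sketch below, assuming requests and BeautifulSoup as helpers:

# Conceptual sketch of level-k fetching; NOT the implementation of FetchNodeLevelK,
# whose execute() is still empty in this commit. requests/bs4 are assumed dependencies.
from typing import List
from urllib.parse import urljoin

import requests
from bs4 import BeautifulSoup


def fetch_level_k(start_url: str, k: int) -> List[str]:
    """Collect the HTML of every page reachable within k hyperlink hops of start_url."""
    frontier, seen, pages = [start_url], {start_url}, []
    for _ in range(k + 1):                      # level 0 is the start page itself
        next_frontier = []
        for url in frontier:
            html = requests.get(url, timeout=10).text
            pages.append(html)
            for anchor in BeautifulSoup(html, "html.parser").find_all("a", href=True):
                link = urljoin(url, anchor["href"])
                if link not in seen:
                    seen.add(link)
                    next_frontier.append(link)
        frontier = next_frontier                # descend one hyperlink level
    return pages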
