Commit
·
46dd24e
1
Parent(s):
d0267ca
First commit.
Browse files- LCToolFlow.py +52 -0
- LCToolFlow.yaml +23 -0
- README.md +27 -0
- __init__.py +1 -0
- pip_requirements.py +1 -0
- run.py +63 -0
LCToolFlow.py
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
from copy import deepcopy
|
| 3 |
+
from typing import Any, Dict
|
| 4 |
+
|
| 5 |
+
import hydra
|
| 6 |
+
from langchain.tools import BaseTool
|
| 7 |
+
|
| 8 |
+
from flows.base_flows import AtomicFlow
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class LCToolFlow(AtomicFlow):
    """An atomic Flow that wraps a LangChain tool (``BaseTool``).

    On each call to :meth:`run`, the wrapped tool is executed with the incoming
    input data and its result is returned under the ``observation`` key.

    Attributes:
        backend: The LangChain tool instance that is executed on each run.
    """

    # The flow config must at least specify which tool to instantiate.
    REQUIRED_KEYS_CONFIG = ["backend"]

    # KEYS_TO_IGNORE_WHEN_RESETTING_NAMESPACE = {"backend"}, TODO: this would
    # overwrite KEYS_TO_IGNORE_WHEN_RESETTING_NAMESPACE in base_flows.py

    # Tool calls typically hit live external services (e.g. a web search),
    # so their responses must not be cached.
    SUPPORTS_CACHING: bool = False

    backend: BaseTool  # the LangChain tool executed by run()

    def __init__(self, backend: BaseTool, **kwargs) -> None:
        """Initialize the flow.

        Args:
            backend: The LangChain tool to execute.
            **kwargs: Forwarded to ``AtomicFlow.__init__`` (e.g. ``flow_config``).
        """
        super().__init__(**kwargs)
        self.backend = backend

    @classmethod
    def _set_up_backend(cls, config: Dict[str, Any]) -> BaseTool:
        """Instantiate the LangChain tool described by a hydra config.

        Args:
            config: Hydra-style config with a ``_target_`` entry. A leading
                ``.`` in ``_target_`` is resolved relative to this class's
                parent module.

        Returns:
            The instantiated ``BaseTool``.
        """
        # BUGFIX: work on a copy — the original rewrote config["_target_"]
        # in place, mutating the caller's (possibly shared) config dict.
        config = deepcopy(config)

        if config["_target_"].startswith("."):
            # assumption: cls is associated with relative data_transformation_configs
            # for example, CF_Code and CF_Code.yaml should be in the same directory,
            # and all _target_ in CF_Code.yaml should be relative
            cls_parent_module = ".".join(cls.__module__.split(".")[:-1])
            config["_target_"] = cls_parent_module + config["_target_"]

        return hydra.utils.instantiate(config, _convert_="partial")

    @classmethod
    def instantiate_from_config(cls, config: Dict[str, Any]) -> LCToolFlow:
        """Build an ``LCToolFlow`` from a full flow config.

        Args:
            config: Flow config; must contain a ``backend`` sub-config
                (see ``REQUIRED_KEYS_CONFIG``).

        Returns:
            A new ``LCToolFlow`` instance.
        """
        flow_config = deepcopy(config)

        kwargs = {"flow_config": flow_config}

        # ~~~ Set up LangChain backend ~~~
        kwargs["backend"] = cls._set_up_backend(config["backend"])

        # ~~~ Instantiate flow ~~~
        return cls(**kwargs)

    def run(self, input_data: Dict[str, Any]) -> Dict[str, Any]:
        """Execute the wrapped tool on ``input_data``.

        Args:
            input_data: Passed verbatim as ``tool_input`` to the tool.

        Returns:
            ``{"observation": <tool output>}``.
        """
        observation = self.backend.run(tool_input=input_data)

        return {"observation": observation}
LCToolFlow.yaml
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Config for LCToolFlow: wraps a LangChain tool behind an AtomicFlow interface.
name: "search"
description: "useful when you need to look for the answer online, especially for recent events"

#######################################################
# Input keys
#######################################################

input_keys:
  - "query"


#######################################################
# Output keys
#######################################################

output_keys:
  - "observation"

keep_raw_response: false
clear_flow_namespace_on_run_end: false

# Hydra target for the LangChain tool instantiated by LCToolFlow._set_up_backend.
backend:
  _target_: langchain.tools.DuckDuckGoSearchRun
README.md
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
license: mit
|
| 3 |
+
---
|
| 4 |
+
# LCToolFlow — a Flow wrapping a LangChain tool
|
| 5 |
+
|
| 6 |
+
## Description
|
| 7 |
+
ToDo: Add description
|
| 8 |
+
|
| 9 |
+
< Flow description >
|
| 10 |
+
|
| 11 |
+
## Configuration parameters
|
| 12 |
+
|
| 13 |
+
< Name 1 > (< Type 1 >): < Description 1 >. Required parameter.
|
| 14 |
+
|
| 15 |
+
< Name 2 > (< Type 2 >): < Description 2 >. Default value is: < value 2 >
|
| 16 |
+
|
| 17 |
+
## Input interface
|
| 18 |
+
|
| 19 |
+
< Name 1 > (< Type 1 >): < Description 1 >.
|
| 20 |
+
|
| 21 |
+
(Note that the interface might depend on the state of the Flow.)
|
| 22 |
+
|
| 23 |
+
## Output interface
|
| 24 |
+
|
| 25 |
+
< Name 1 > (< Type 1 >): < Description 1 >.
|
| 26 |
+
|
| 27 |
+
(Note that the interface might depend on the state of the Flow.)
|
__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
from .LCToolFlow import LCToolFlow
|
pip_requirements.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
# ToDo
|
run.py
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""A simple script to run a Flow that can be used for development and debugging."""

import os

import hydra

import flows
from flows.flow_launchers import FlowLauncher, ApiInfo
from flows.utils.general_helpers import read_yaml_file

from flows import logging
from flows.flow_cache import CACHING_PARAMETERS, clear_cache

CACHING_PARAMETERS.do_caching = False  # Set to True to enable caching
# clear_cache() # Uncomment this line to clear the cache

logging.set_verbosity_debug()


if __name__ == "__main__":
    # ~~~ Set the API information ~~~
    # OpenAI backend
    # api_information = ApiInfo("openai", os.getenv("OPENAI_API_KEY"))
    # Azure backend
    api_information = ApiInfo("azure", os.getenv("AZURE_OPENAI_KEY"), os.getenv("AZURE_OPENAI_ENDPOINT"))

    # ~~~ Instantiate the Flow ~~~
    root_dir = "."
    # TODO(review): "FlowName.yaml" is a template placeholder — point it at the
    # actual flow config (and make sure that config has a top-level `flow` key,
    # which is required by the cfg['flow'] lookup below).
    cfg_path = os.path.join(root_dir, "FlowName.yaml")
    cfg = read_yaml_file(cfg_path)

    # BUGFIX: read_yaml_file returns a dict (it is indexed as cfg['flow'] below),
    # so the original `getattr(cfg, "input_interface", None)` ALWAYS returned
    # None — dict keys are not attributes — silently discarding any interface
    # declared in the YAML. Use dict.get() instead.
    flow_with_interfaces = {
        "flow": hydra.utils.instantiate(cfg['flow'], _recursive_=False, _convert_="partial"),
        "input_interface": (
            None
            if cfg.get("input_interface") is None
            else hydra.utils.instantiate(cfg['input_interface'], _recursive_=False)
        ),
        "output_interface": (
            None
            if cfg.get("output_interface") is None
            else hydra.utils.instantiate(cfg['output_interface'], _recursive_=False)
        ),
    }

    # ~~~ Get the data ~~~
    # This can be a list of samples
    data = {"id": 0}  # Add your data here

    # ~~~ Run inference ~~~
    path_to_output_file = None
    # path_to_output_file = "output.jsonl" # Uncomment this line to save the output to disk

    _, outputs = FlowLauncher.launch(
        flow_with_interfaces=flow_with_interfaces,
        data=data,
        path_to_output_file=path_to_output_file,
        api_information=api_information,
    )

    # ~~~ Print the output ~~~
    flow_output_data = outputs[0]
    print(flow_output_data)