| | import os |
| |
|
| | import hydra |
| |
|
| | import aiflows |
| | from aiflows.backends.api_info import ApiInfo |
| | from aiflows.utils.general_helpers import read_yaml_file, quick_load_api_keys |
| |
|
| | from aiflows import logging |
| | from aiflows.flow_cache import CACHING_PARAMETERS, clear_cache |
| |
|
| | from aiflows.utils import serving |
| | from aiflows.workers import run_dispatch_worker_thread |
| | from aiflows.messages import FlowMessage |
| | from aiflows.interfaces import KeyInterface |
| | from aiflows.utils.colink_utils import start_colink_server |
| | from aiflows.workers import run_dispatch_worker_thread |
| |
|
# Disable the aiflows result cache so every run actually re-executes the flow
# instead of returning a previously cached reply.
CACHING_PARAMETERS.do_caching = False

# Emit debug-level aiflows logs for maximum visibility while running the demo.
logging.set_verbosity_debug()

# Flow-verse dependency declaration for the ChatFlowModule flow.
# NOTE(review): `revision` is set to the current working directory, which
# presumably pins the module to the local checkout rather than a remote
# release — confirm against the flow_verse docs.
dependencies = [
    {"url": "aiflows/ChatFlowModule", "revision": os.getcwd()},
]
from aiflows import flow_verse

# Fetch/link the declared flow modules so `flow_modules.aiflows.ChatFlowModule`
# (referenced below) becomes importable.
flow_verse.sync_dependencies(dependencies)
| |
|
if __name__ == "__main__":

    # NOTE(review): FLOW_MODULES_PATH is never referenced later in this
    # script — confirm whether it is needed or can be dropped.
    FLOW_MODULES_PATH = "./"

    # Start a local CoLink server and get the client handle used by all
    # serving/worker calls below.
    cl = start_colink_server()

    # ~~~~~~~ Load the flow configuration from ./demo.yaml ~~~~~~~
    root_dir = "."
    cfg_path = os.path.join(root_dir, "demo.yaml")
    cfg = read_yaml_file(cfg_path)

    # ~~~~~~~ Build the API credentials for the OpenAI backend ~~~~~~~
    # NOTE(review): assumes OPENAI_API_KEY is set in the environment; if it
    # is not, api_key will be None — confirm the backend fails loudly then.
    api_information = [ApiInfo(backend_used="openai",
                               api_key = os.getenv("OPENAI_API_KEY"))]

    # Inject the credentials into every "api_infos" entry of the loaded
    # config (quick_load_api_keys mutates `cfg` in place).
    quick_load_api_keys(cfg, api_information, key="api_infos")

    # ~~~~~~~ Serve the chat flow under the "ChatAtomicFlow" endpoint ~~~~~~~
    serving.serve_flow(
        cl=cl,
        flow_class_name="flow_modules.aiflows.ChatFlowModule.ChatAtomicFlow",
        flow_endpoint="ChatAtomicFlow",
    )

    # Spawn a background worker thread that dispatches incoming messages to
    # the served flow instances.
    run_dispatch_worker_thread(cl)

    # Obtain a local proxy to an instance of the served flow, applying the
    # YAML config (with API keys injected above) as overrides.
    proxy_flow= serving.get_flow_instance(
        cl=cl,
        flow_endpoint="ChatAtomicFlow",
        user_id="local",
        config_overrides= cfg
    )

    # ~~~~~~~ Run a single question through the flow ~~~~~~~
    data = {"id": 0, "question": "What is the capital of France?"}

    # Wrap the raw payload in an input message addressed to the proxy flow.
    input_message = proxy_flow.package_input_message(data = data)

    # Send the message; we get back a future for the asynchronous reply.
    future = proxy_flow.get_reply_future(input_message)

    # Retrieve the reply's data payload (presumably blocks until the reply
    # arrives — confirm against the aiflows future API).
    reply_data = future.get_data()

    print("~~~~~~Reply~~~~~~")
    print(reply_data)
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| |
|