|
import os

import hydra

import aiflows
from aiflows import flow_verse
from aiflows import logging
from aiflows.backends.api_info import ApiInfo
from aiflows.flow_cache import CACHING_PARAMETERS, clear_cache
from aiflows.interfaces import KeyInterface
from aiflows.messages import FlowMessage
from aiflows.utils import serving
from aiflows.utils.colink_utils import start_colink_server
from aiflows.utils.general_helpers import read_yaml_file
from aiflows.workers import run_dispatch_worker_thread
|
|
|
# Disable the flow cache so every run of the demo executes from scratch
# instead of replaying cached results.
CACHING_PARAMETERS.do_caching = False

# Debug-level logging makes the message round-trip visible while running
# the demo.
logging.set_verbosity_debug()

# Flow-verse dependency spec: fetch HumanStandardInputFlowModule, using the
# current working directory as the revision to sync from.
dependencies = [
    {"url": "aiflows/HumanStandardInputFlowModule", "revision": os.getcwd()},
]

# Materialize the dependency under ./flow_modules so the flow class served
# below (flow_modules.aiflows...) is importable at runtime.
flow_verse.sync_dependencies(dependencies)
|
|
|
if __name__ == "__main__":
    # Start a local CoLink server; `cl` is the client handle used by the
    # serving utilities and the dispatch worker below.
    cl = start_colink_server()

    # Load the demo flow configuration (overrides applied to the served flow).
    cfg_path = os.path.join(".", "demo.yaml")
    cfg = read_yaml_file(cfg_path)

    # Register the flow class under an endpoint so instances of it can be
    # requested through CoLink.
    serving.serve_flow(
        cl=cl,
        flow_class_name="flow_modules.aiflows.HumanStandardInputFlowModule.HumanStandardInputFlow",
        flow_endpoint="HumanStandardInputFlow",
    )

    # Background worker that dispatches incoming messages to served flows.
    run_dispatch_worker_thread(cl)

    # Obtain a local proxy to a fresh instance of the served flow, with the
    # YAML config applied as overrides.
    proxy_flow = serving.get_flow_instance(
        cl=cl,
        flow_endpoint="HumanStandardInputFlow",
        user_id="local",
        config_overrides=cfg,
    )

    # Payload sent to the flow; the flow prompts the user on standard input.
    data = {"id": 0, "statement": "This flow works well"}
    input_message = FlowMessage(
        data=data,
    )

    # Send the message and block until the flow replies.
    future = proxy_flow.get_reply_future(input_message)
    reply_data = future.get_data()

    print("~~~~~~Reply~~~~~~")
    print(reply_data)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|