backend

# main.py

import argparse
import os
import configparser
from WorkFlow import run_workflow_from_file
from langchain_community.llms import Ollama
from langchain.chat_models import ChatOpenAI
from Tee import Tee


def main():
    parser = argparse.ArgumentParser(description="Run a workflow from a JSON graph definition.")
    parser.add_argument('--graph', required=True, help="Path to the JSON file defining the graph.")
    parser.add_argument('--keys', help="Path to the credentials file (INI format with an [OpenAI] section).")
    parser.add_argument('--tee', help="File to write the output log to.")
    parser.add_argument('--llm', help="Name of the Ollama model to use when no credentials file is given.")

    args = parser.parse_args()

    # Optionally duplicate stdout to a log file.
    if args.tee:
        tee = Tee(args.tee)

    # Pick the LLM backend: OpenAI if a credentials file is supplied, otherwise a local Ollama model.
    if args.keys:
        config = configparser.ConfigParser()
        config.read(args.keys)
        os.environ["OPENAI_API_KEY"] = config['OpenAI']['api_key']
        llm = ChatOpenAI(temperature=0.7, model_name="gpt-4o")
    elif args.llm:
        # Placeholder key: Ollama runs locally, but some components still expect the variable to be set.
        os.environ["OPENAI_API_KEY"] = "sk-proj-not-use-it"
        llm = Ollama(model=args.llm)
    else:
        parser.error("Provide either --keys (OpenAI credentials) or --llm (Ollama model name).")

    run_workflow_from_file(args.graph, llm)

    if args.tee:
        tee.close()


if __name__ == "__main__":
    main()
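The --keys option expects an INI-style file read with configparser. The section and key names below come from the code above; the file name keys.ini and the key value are only placeholders:

    ; keys.ini (example layout, not a real key)
    [OpenAI]
    api_key = sk-your-key-here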
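For reference, two invocation sketches. The graph file, log path, and Ollama model name are placeholders, not files that ship with the repo:

    # OpenAI backend (gpt-4o) with credentials from keys.ini, logging output to run.log
    python main.py --graph examples/graph.json --keys keys.ini --tee run.log

    # Local Ollama backend instead of OpenAI
    python main.py --graph examples/graph.json --llm llama3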