gui.py
|
# -*- coding: utf-8 -*-
"""
@Time : 2024/1/15 11:45
@Auth : Juexiao Zhou
@File : gui.py
@IDE  : PyCharm
@Page : www.joshuachou.ink
"""
|
import argparse
import os
from src.agent import Agent
import gradio as gr
import time
import yaml
import subprocess
import sys
|
|
def parse_args():
    parser = argparse.ArgumentParser(description="AutoBA-GUI v0.1")
    parser.add_argument('--port', type=int, default=5904)
    args = parser.parse_args()
    return args
|
|
def gradio_reset():
    return None, None, None
|
|
def get_an_example():
    _input_file_path_description = "./experiments/case1.1/data/SRR1234567_R1.fastq.gz: Paired-end Illumina WGS reads, forward\n" \
                                   "./experiments/case1.1/data/SRR1234567_R2.fastq.gz: Paired-end Illumina WGS reads, reverse\n" \
                                   "./experiments/case1.1/data/TruSeq3-PE.fa: Adapter sequences for trimming\n" \
                                   "./experiments/case1.1/data/Reference.fasta: Reference genome for the organism under study in FASTA format"
    _input_output_path = "./experiments/case1.1/output"
    _input_goal = "To perform genome assembly using paired-end WGS data"
    return _input_file_path_description, _input_output_path, _input_goal
|
|
def print_to_textbox(*args, **kwargs):
    # Collect print-style output into the global HISTORY buffer,
    # dropping the oldest entry once it exceeds 100 lines.
    global HISTORY
    text = " ".join(map(str, args)) + "\n"
    if len(HISTORY) > 100:
        HISTORY = HISTORY[1:]
    HISTORY.append(text)
|
|
def run(input_file_path_description, input_output_path, input_goal, model_engine, openai_api, execute):
    AIAgent = Agent(initial_data_list=input_file_path_description.split('\n'),
                    output_dir=input_output_path,
                    initial_goal_description=input_goal,
                    model_engine=model_engine,
                    openai_api=openai_api,
                    execute=execute,
                    blacklist='STAR,java,perl,annovar',
                    gui_mode=False)
    AIAgent.run()
    return 'Job Finished!'
|
|
if __name__ == '__main__':
    args = parse_args()
    # model_folder = args.model_root
    # model_files = [os.path.join(model_folder, f) for f in os.listdir(model_folder) if os.path.isfile(os.path.join(model_folder, f))]
    FORCE_STOP = False
    HISTORY = []
|
    with gr.Blocks() as demo:
        gr.Markdown("""<h1 align="center">AutoBA-GUI v0.1</h1>""")

        gr.Markdown("""<h3>An AI Agent for Fully Automated Multi-omic Analyses</h3>""")

        with gr.Row():
            with gr.Column():
                input_file_path_description = gr.TextArea(
                    label="File path and description",
                    placeholder="Enter the absolute file path and file description in the following format, e.g.:\n"
                                "/data/SRR1234567_R1.fastq.gz: Paired-end Illumina WGS reads, forward\n"
                                "/data/SRR1234567_R2.fastq.gz: Paired-end Illumina WGS reads, reverse\n"
                                "/data/TruSeq3-PE.fa: Adapter sequences for trimming\n"
                                "/data/Reference.fasta: Reference genome for the organism under study in FASTA format",
                    max_lines=999999,
                    container=True,
                )

                input_output_path = gr.TextArea(
                    label="Output path",
                    placeholder="Enter the absolute output path, e.g.:\n"
                                "/output",
                    max_lines=999999,
                    container=True,
                )

                input_goal = gr.TextArea(
                    label="Goal",
                    placeholder="Describe your analysis goal, e.g.:\n"
                                "To perform genome assembly using paired-end WGS data",
                    max_lines=999999,
                    container=True,
                )
|
                local_model_engines = ['codellama-7bi',
                                       'codellama-13bi',
                                       'codellama-34bi',
                                       'llama2-7bc',
                                       'llama2-13bc',
                                       'llama2-70bc']

                gpt_model_engines = ['gpt-3.5-turbo',
                                     'gpt-4',
                                     'gpt-3.5-turbo-1106',
                                     'gpt-4-0613',
                                     'gpt-4-32k-0613',
                                     'gpt-4-1106-preview']

                model_engines = local_model_engines + gpt_model_engines
|
                with gr.Row():
                    model_engine = gr.Dropdown(
                        label="Select model engine",
                        choices=model_engines,
                        value='gpt-4-1106-preview',
                        max_choices=1,
                        container=True,
                        interactive=True
                    )

                    openai_api = gr.Textbox(
                        label="OpenAI API",
                        placeholder="sk-xxxxx. Leave it empty when using a local model engine",
                        max_lines=1,
                        container=True,
                        interactive=True
                    )

                    execute = gr.Checkbox(
                        label="Execute Code",
                        value=False,
                        visible=True
                    )
|
                with gr.Row():
                    example_button = gr.Button(
                        value="Get an example",
                        interactive=True,
                        variant="primary"
                    )

                    upload_button = gr.Button(
                        value="Run",
                        interactive=True,
                        variant="primary"
                    )

                    # clear = gr.Button("Restart")
|
        with gr.Row():
            with gr.Column():
                gr.Markdown('### Check command line for realtime outputs')
                output_log = gr.TextArea(
                    label="Check command line for realtime outputs",
                    max_lines=5,
                    container=True,
                    lines=5,
                )

        gr.Markdown("""This site was created by King Abdullah University of Science and Technology (KAUST).""")

        click_event = upload_button.click(run,
                                          [input_file_path_description, input_output_path, input_goal,
                                           model_engine, openai_api, execute],
                                          [output_log])

        example_button.click(get_an_example,
                             [],
                             [input_file_path_description, input_output_path, input_goal])

    demo.launch(share=True, server_port=args.port, server_name='0.0.0.0')
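
# Example invocation (a sketch, assuming this file is saved as gui.py and run directly;
# --port is defined in parse_args above and defaults to 5904):
#   python gui.py --port 5904
# The interface is then served on 0.0.0.0 at the chosen port, with a public Gradio
# share link enabled by share=True.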