Update app.py
app.py CHANGED
@@ -9,6 +9,34 @@ os.system('pip install dgl==1.0.2+cu116 -f https://data.dgl.ai/wheels/cu116/repo
 os.environ["DGLBACKEND"] = "pytorch"
 print('Modules installed')
 
+# args initialization code added here --------------------------------
+# Default args settings
+if not os.path.exists('./tmp'):
+    os.makedirs('./tmp')
+
+if not os.path.exists('./tmp/args.json'):
+    default_args = {
+        'checkpoint': None,
+        'dump_trb': False,
+        'dump_args': True,
+        'save_best_plddt': True,
+        'T': 25,
+        'strand_bias': 0.0,
+        'loop_bias': 0.0,
+        'helix_bias': 0.0,
+        'd_t1d': 24,
+        'potentials': None,
+        'potential_scale': None,
+        'aa_composition': None
+    }
+    with open('./tmp/args.json', 'w') as f:
+        json.dump(default_args, f)
+
+# Load args
+with open('./tmp/args.json', 'r') as f:
+    args = json.load(f)
+
+
 # Import required libraries
 from datasets import load_dataset
 import plotly.graph_objects as go
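The added block above calls os.path.exists, os.makedirs, json.dump, and json.load, but the hunk does not show the matching imports. A minimal sketch of what the top of app.py presumably already contains (an assumption; those import lines sit outside the changed region and are not visible in this diff):

import os    # assumed present: needed for os.path.exists / os.makedirs in the added block
import json  # assumed present: needed for json.dump / json.load in the added block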
@@ -583,20 +611,23 @@ def combined_generation(name, strength, flexibility, speed, defense, size, abili
         None
     )
 
-# Create the Gradio interface
 with gr.Blocks(theme='ParityError/Interstellar') as demo:
     with gr.Row():
-        # Left column: chatbot and control panel
         with gr.Column(scale=1):
             # Chatbot interface
             gr.Markdown("# 🤖 AI Protein Design Assistant")
             chatbot = gr.Chatbot(height=600)
-
-
-
-
-
+            # Modified input fields added here --------------------------------
+            with gr.Row():
+                msg = gr.Textbox(
+                    label="Enter your message",
+                    placeholder="e.g., Generate a protein that could treat COVID-19",
+                    lines=2,
+                    scale=4
+                )
+                submit_btn = gr.Button("Send", variant="primary", scale=1)
             clear = gr.Button("Clear conversation")
+
 
             with gr.Accordion("Chat settings", open=False):
                 system_message = gr.Textbox(
@@ -886,7 +917,6 @@ with gr.Blocks(theme='ParityError/Interstellar') as demo:
     msg.submit(process_chat, [msg, chatbot], [chatbot])
    clear.click(lambda: None, None, chatbot, queue=False)
 
-    # UI control events
     seq_opt.change(
         fn=toggle_seq_input,
         inputs=[seq_opt],
@@ -894,6 +924,8 @@ with gr.Blocks(theme='ParityError/Interstellar') as demo:
         queue=False
     )
 
+
+
     sec_str_opt.change(
         fn=toggle_secondary_structure,
         inputs=[sec_str_opt],
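The two hunks above attach change listeners that route seq_opt and sec_str_opt through toggle_seq_input and toggle_secondary_structure, which are defined elsewhere in app.py and appear to show or hide the corresponding inputs. A minimal sketch of what such a Gradio callback typically looks like (an illustration under that assumption; the option label "custom" is hypothetical):

def toggle_seq_input(choice):
    # Hypothetical: reveal the manual sequence textbox only when the
    # user selects the custom-input option; gr.update toggles visibility.
    return gr.update(visible=(choice == "custom"))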
@@ -949,12 +981,6 @@ with gr.Blocks(theme='ParityError/Interstellar') as demo:
         ]
     )
 
-    # Connect events
-    # Chatbot events
-    msg.submit(respond,
-               [msg, chatbot, system_message, max_tokens, temperature, top_p],
-               [chatbot])
-    clear.click(lambda: None, None, chatbot, queue=False)
 
     # Update results based on the chatbot response
     msg.submit(
@@ -964,6 +990,14 @@ with gr.Blocks(theme='ParityError/Interstellar') as demo:
     )
 
 
+    submit_btn.click(respond,
+                     [msg, chatbot, system_message, max_tokens, temperature, top_p],
+                     [chatbot])
+    msg.submit(respond,
+               [msg, chatbot, system_message, max_tokens, temperature, top_p],
+               [chatbot])
+    clear.click(lambda: None, None, chatbot, queue=False)
+
 # Launch
 demo.queue()
 demo.launch(debug=True)
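The final hunk wires both the new Send button and the textbox's submit event to respond, passing the message, chat history, system prompt, and sampling settings, and writing the result back into the chatbot. respond itself is defined elsewhere in app.py and is not part of this diff; a minimal sketch of a handler with a compatible signature (an assumption, not the Space's actual implementation):

def respond(message, history, system_message, max_tokens, temperature, top_p):
    # Hypothetical stand-in: append the user turn with a placeholder reply.
    # A real handler would query the model with system_message and the
    # sampling parameters (max_tokens, temperature, top_p).
    history = (history or []) + [(message, "(model reply)")]
    return history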