hzhwcmhf committed
Commit
44b71e3
1 Parent(s): 1323448

fix dependency

Files changed (2)
  1. Dockerfile +1 -1
  2. app.py +1 -5
Dockerfile CHANGED
@@ -8,7 +8,7 @@ WORKDIR /home/user/app
 
 RUN sed -i 's http://deb.debian.org http://cdn-aws.deb.debian.org g' /etc/apt/sources.list && sed -i 's http://archive.ubuntu.com http://us-east-1.ec2.archive.ubuntu.com g' /etc/apt/sources.list && sed -i '/security/d' /etc/apt/sources.list && apt-get update && apt-get install -y git git-lfs ffmpeg libsm6 libxext6 cmake libgl1-mesa-glx && rm -rf /var/lib/apt/lists/* && git lfs install
 
-RUN pip install --no-cache-dir Cython "gradio==3.28.3" "torch==1.10.1" jieba subword-nmt sacremoses
+RUN pip install --no-cache-dir Cython "gradio==3.28.3" "torch==1.10.1" jieba subword-nmt sacremoses transformers
 
 RUN git clone --recurse-submodules https://github.com/thu-coai/DA-Transformer.git && cd DA-Transformer && pip install -e . && cd dag_search && python3 setup.py build_ext --inplace && pip install -e . && cd ../..
 
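The dependency fix itself is the trailing transformers package on the pip install line; everything else in the Dockerfile is unchanged. As a hedged aside, a small throwaway check one could run inside the built image to confirm the pinned stack imports together might look like the sketch below (the script name and the package-to-module mapping are assumptions for illustration; only the package names come from the RUN line above):

    # smoke_test.py -- hypothetical import check for the packages pinned above
    import importlib

    # pip package name -> importable module name (they differ for subword-nmt)
    packages = {
        "Cython": "Cython",
        "gradio": "gradio",
        "torch": "torch",
        "jieba": "jieba",
        "subword-nmt": "subword_nmt",
        "sacremoses": "sacremoses",
        "transformers": "transformers",
    }

    for pkg, module in packages.items():
        mod = importlib.import_module(module)
        print(pkg, getattr(mod, "__version__", "unknown"))
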
app.py CHANGED
@@ -15,8 +15,6 @@ import matplotlib.pyplot as plt
 import fairseq
 
 logger = logging.getLogger(__name__)
-logger.info("start init")
-
 
 fairseq_path = os.path.dirname(os.path.dirname(fairseq.__file__))
 
@@ -202,6 +200,7 @@ def generate_detail(model, model_input):
 
 def load_model(model_name):
     assert model_name in available_models
+    logger.info(f"start loading {model_name}")
     model = available_models[model_name]['class'].from_pretrained(**available_models[model_name]['args'])
     return model
 
@@ -513,7 +512,6 @@ def build_tab_detail():
     return load
 
 def build_demo():
-    logger.info(f"build enter")
     with gr.Blocks(title="DA-Transformer Demo", theme=gr.themes.Base(), css=css) as demo:
         gr.Markdown(notice_markdown)
 
@@ -543,8 +541,6 @@ if __name__ == "__main__":
 
     workers = concurrent.futures.ThreadPoolExecutor(max_workers=1)
     demo = build_demo()
-    logger.info(f"build over")
-
     demo.queue(concurrency_count=args.concurrency_count, status_update_rate=10,
                api_open=False).launch(server_name=args.host, server_port=args.port,
                                       share=args.share, max_threads=5)
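
A note on the logger.info call added in load_model above: a logger obtained via logging.getLogger(__name__) drops INFO messages unless the root logger has been configured, and that configuration is outside this diff. A minimal sketch, assuming app.py sets up no handlers elsewhere (the model name string is a placeholder):

    import logging

    # Without a configured handler, logger.info(...) falls through to Python's
    # "last resort" handler, which only emits WARNING and above.
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s %(name)s %(levelname)s: %(message)s",
    )

    logger = logging.getLogger(__name__)
    logger.info("start loading example-model")  # now visible on stderr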
 
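For context on the last hunk, demo.queue(...).launch(...) is the gradio 3.x Blocks pattern (gradio==3.28.3 is pinned in the Dockerfile). A stripped-down sketch of the same pattern follows; the greet handler and the literal server values are placeholders, not values taken from app.py:

    import gradio as gr

    def greet(name):
        # placeholder handler; the real Space runs DA-Transformer inference here
        return f"Hello, {name}!"

    with gr.Blocks(title="DA-Transformer Demo", theme=gr.themes.Base()) as demo:
        inp = gr.Textbox(label="Input")
        out = gr.Textbox(label="Output")
        inp.submit(greet, inputs=inp, outputs=out)

    # gradio 3.x: queue() buffers concurrent requests; launch() starts the server
    demo.queue(concurrency_count=1, status_update_rate=10, api_open=False).launch(
        server_name="0.0.0.0", server_port=7860, share=False, max_threads=5
    )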