chtlp committed on
Commit
4a55a99
1 Parent(s): b8f7a8b

revise readme and docs

Browse files
Files changed (2) hide show
  1. README.md +1 -1
  2. app.py +10 -6
README.md CHANGED
@@ -1,5 +1,5 @@
1
  ---
2
- title: Evolutionary Scale Prediction Of Atomic Level Protein Structure
3
  emoji: 🌍
4
  colorFrom: yellow
5
  colorTo: purple
 
1
  ---
2
+ title: Protein Structure Modeling
3
  emoji: 🌍
4
  colorFrom: yellow
5
  colorTo: purple
app.py CHANGED
@@ -275,7 +275,7 @@ messages = [
275
 
276
  We have 120k proteins features stored in our database.
277
 
278
- The app uses the [MyScale](MyScale Database) to store and query protein sequence
279
  using vector search.
280
  """
281
  ]
@@ -308,10 +308,14 @@ if 'xq' not in st.session_state:
308
  st.title("Evolutionary Scale Modeling")
309
  start = [st.empty(), st.empty(), st.empty(), st.empty(), st.empty(), st.empty(), st.empty()]
310
  start[0].info(msg)
311
- option = st.selectbox('Application options', ('self-contact prediction', 'search the database', 'activity prediction','PDB viewer'))
 
 
 
 
312
 
313
  st.session_state.db_name_ref = 'default.esm_protein'
314
- if option == 'self-contact prediction':
315
  sequence = st.text_input('protein sequence', '')
316
  if st.button('Cas9 Enzyme'):
317
  sequence = 'GSGHMDKKYSIGLAIGTNSVGWAVITDEYKVPSKKFKVLGNTDRHSIKKNLIGALLFDSGETAEATRLKRTARRRYTRRKNRILYLQEIFSNEMAKV'
@@ -327,7 +331,7 @@ if 'xq' not in st.session_state:
327
  This methodology is based on ICLR 2021 paper, Transformer protein language models are unsupervised structure learners.
328
  (Rao et al. 2020) The MSA Transformer (ESM-MSA-1) takes a multiple sequence alignment (MSA) as input, and uses the tied row self-attention maps in the same way.""")
329
  st.session_state['xq'] = model
330
- elif option == 'search the database':
331
  sequence = st.text_input('protein sequence', '')
332
  st.write('Try an example:')
333
  if st.button('Cas9 Enzyme'):
@@ -353,7 +357,7 @@ if 'xq' not in st.session_state:
353
 
354
  start[2] = st.pyplot(visualize_3D_Coordinates(result_temp_coords).figure)
355
  st.session_state['xq'] = model
356
- elif option == 'activity prediction':
357
  st.text('we predict the biological activity of mutations of a protein, using fixed embeddings from ESM.')
358
  sequence = st.text_input('protein sequence', '')
359
  st.write('Try an example:')
@@ -362,7 +366,7 @@ if 'xq' not in st.session_state:
362
  elif st.button('PETase'):
363
  sequence = 'MGSSHHHHHHSSGLVPRGSHMRGPNPTAASLEASAGPFTVRSFTVSRPSGYGAGTVYYPTNAGGTVGAIAIVPGYTARQSSIKWWGPRLASHGFVVITIDTNSTLDQPSSRSSQQMAALRQVASLNGTSSSPIYGKVDTARMGVMGWSMGGGGSLISAANNPSLKAAAPQAPWDSSTNFSSVTVPTLIFACENDSIAPVNSSALPIYDSMSRNAKQFLEINGGSHSCANSGNSNQALIGKKGVAWMKRFMDNDTRYSTFACENPNSTRVSDFRTANCSLEDPAANKARKEAELAAATAEQ'
364
 
365
- elif option == 'PDB viewer':
366
  id_PDB = st.text_input('enter PDB ID', '')
367
  residues_marker = st.text_input('residues class', '')
368
  if residues_marker:
 
275
 
276
  We have 120k proteins features stored in our database.
277
 
278
+ The app uses MyScale to store and query protein sequence
279
  using vector search.
280
  """
281
  ]
 
308
  st.title("Evolutionary Scale Modeling")
309
  start = [st.empty(), st.empty(), st.empty(), st.empty(), st.empty(), st.empty(), st.empty()]
310
  start[0].info(msg)
311
+ function_list = ('self-contact prediction',
312
+ 'search the database for similar proteins',
313
+ 'activity prediction with similar proteins',
314
+ 'PDB viewer')
315
+ option = st.selectbox('Application options', function_list)
316
 
317
  st.session_state.db_name_ref = 'default.esm_protein'
318
+ if option == function_list[0]:
319
  sequence = st.text_input('protein sequence', '')
320
  if st.button('Cas9 Enzyme'):
321
  sequence = 'GSGHMDKKYSIGLAIGTNSVGWAVITDEYKVPSKKFKVLGNTDRHSIKKNLIGALLFDSGETAEATRLKRTARRRYTRRKNRILYLQEIFSNEMAKV'
 
331
  This methodology is based on ICLR 2021 paper, Transformer protein language models are unsupervised structure learners.
332
  (Rao et al. 2020) The MSA Transformer (ESM-MSA-1) takes a multiple sequence alignment (MSA) as input, and uses the tied row self-attention maps in the same way.""")
333
  st.session_state['xq'] = model
334
+ elif option == function_list[1]:
335
  sequence = st.text_input('protein sequence', '')
336
  st.write('Try an example:')
337
  if st.button('Cas9 Enzyme'):
 
357
 
358
  start[2] = st.pyplot(visualize_3D_Coordinates(result_temp_coords).figure)
359
  st.session_state['xq'] = model
360
+ elif option == function_list[2]:
361
  st.text('we predict the biological activity of mutations of a protein, using fixed embeddings from ESM.')
362
  sequence = st.text_input('protein sequence', '')
363
  st.write('Try an example:')
 
366
  elif st.button('PETase'):
367
  sequence = 'MGSSHHHHHHSSGLVPRGSHMRGPNPTAASLEASAGPFTVRSFTVSRPSGYGAGTVYYPTNAGGTVGAIAIVPGYTARQSSIKWWGPRLASHGFVVITIDTNSTLDQPSSRSSQQMAALRQVASLNGTSSSPIYGKVDTARMGVMGWSMGGGGSLISAANNPSLKAAAPQAPWDSSTNFSSVTVPTLIFACENDSIAPVNSSALPIYDSMSRNAKQFLEINGGSHSCANSGNSNQALIGKKGVAWMKRFMDNDTRYSTFACENPNSTRVSDFRTANCSLEDPAANKARKEAELAAATAEQ'
368
 
369
+ elif option == function_list[3]:
370
  id_PDB = st.text_input('enter PDB ID', '')
371
  residues_marker = st.text_input('residues class', '')
372
  if residues_marker: