Kc-12 committed
Commit 11d869e
Parent: 6a91a8f

Upload 2 files

Files changed (2)
  1. app.py +3 -4
  2. better_transformer.py +1 -2
app.py CHANGED
@@ -38,12 +38,12 @@ def main():
     """
     )
     ## Default values for advanced settings
-    user_seed = 27 # Remove if we're not rigging the "random" demo
+    user_seed = None # Set to a value if we want to rig the "random" demo
     generation_method = "top-k"
     specified_k = 5
     specified_nucleus = 0.5
     specified_temperature = 0.9
-    max_tokens = 400
+    max_tokens = 500

     if st.checkbox("Show Advanced Settings"):
         user_seed = st.number_input("Randomness Seed:", value = None, step = 1, placeholder="Use to replicate response", min_value = 1)
@@ -58,11 +58,10 @@ def main():
         if generation_method == "temperature":
             specified_temperature = st.number_input("Value for temperature:", value = 0.9, step = 0.05, min_value = 0.0, max_value = 1.0)

-        max_tokens = st.slider('Max Tokens Generated:', 100, 500, 400)
+        max_tokens = st.slider('Max Tokens Generated:', 100, 800, 500)



-    st.markdown(f'random seed: {user_seed}')


     # model_version = st.radio("Which model would you like to use?", ["smoll", "beeg"])
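In effect, the defaults now leave generation genuinely random unless a seed is provided: user_seed starts as None, and st.number_input also returns None until the user types a value. Below is a minimal sketch of that defaults-plus-checkbox pattern; the surrounding main() scaffolding is assumed for illustration and is not the full app.

import streamlit as st

def main():
    ## Default values for advanced settings
    user_seed = None            # no fixed seed unless the user supplies one
    generation_method = "top-k"
    specified_k = 5
    specified_nucleus = 0.5
    specified_temperature = 0.9
    max_tokens = 500

    if st.checkbox("Show Advanced Settings"):
        # number_input returns None until a value is entered, so the seed
        # stays unset by default and the "random" demo is no longer rigged
        user_seed = st.number_input("Randomness Seed:", value=None, step=1,
                                    placeholder="Use to replicate response", min_value=1)
        max_tokens = st.slider('Max Tokens Generated:', 100, 800, 500)

    # downstream generation code (not shown here) would read these settings

if __name__ == "__main__":
    main()

Net change in this file: the default seed is no longer hard-coded to 27, the default token cap rises from 400 to 500 (slider maximum from 500 to 800), and the debug line printing the random seed is removed.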
better_transformer.py CHANGED
@@ -282,7 +282,7 @@ def set_seed(seed = 42):
     np.random.seed(seed)
     os.environ['PYTHONHASHSEED'] = str(seed)
     torch.manual_seed(seed)
-    torch.cuda.manual_seed(seed)
+    #torch.cuda.manual_seed(seed)
     # torch.cuda.manual_seed_all(seed) # if multi-GPU
     torch.backends.cudnn.deterministic=True # only applies to CUDA convolution operations
     torch.backends.cudnn.benchmark = False
@@ -359,7 +359,6 @@ def generate(model, tokenizer, device, method=None, k=None,
     if deterministic is not None:
         set_seed(deterministic)

-    st.markdown(f"Deterministic: {deterministic}")

     if cond != "":

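For reference, a self-contained sketch of the seeding path after this change (the generate() snippet at the end is copied from the hunk above; running on a CPU-only host is an assumption, not stated in the commit). Disabling torch.cuda.manual_seed is harmless either way, since torch.manual_seed seeds the RNGs for all devices on current PyTorch.

import os
import numpy as np
import torch

def set_seed(seed=42):
    np.random.seed(seed)
    os.environ['PYTHONHASHSEED'] = str(seed)
    torch.manual_seed(seed)                     # seeds CPU (and CUDA, on recent PyTorch) RNGs
    # torch.cuda.manual_seed(seed)              # disabled in this commit; CPU-only host assumed
    # torch.cuda.manual_seed_all(seed)          # if multi-GPU
    torch.backends.cudnn.deterministic = True   # only applies to CUDA convolution operations
    torch.backends.cudnn.benchmark = False

# Inside generate(), seeding happens only when a seed is actually passed:
#     if deterministic is not None:
#         set_seed(deterministic)

Together with the app.py change, nothing is seeded unless the user enters a value under the advanced settings, and the two Streamlit debug lines (the random seed and "Deterministic" readouts) are gone.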