teaevo committed
Commit e38f7f3
1 Parent(s): aa595f0

Update app.py

Files changed (1): app.py (+6, -5)
app.py CHANGED

@@ -51,6 +51,7 @@ conn.close()
 num_records = 30
 num_columns = 20
 
+'''
 data = {
 f"column_{i}": np.random.randint(0, 100, num_records) for i in range(num_columns)
 }
@@ -58,17 +59,17 @@ data = {
 # Randomize the year and city columns
 years = list(range(2000, 2023))  # Range of years
 cities = ["New York", "Los Angeles", "Chicago", "Houston", "Miami"]  # List of cities
-
+'''
 #data["year"] = [random.choice(years) for _ in range(num_records)]
 #data["city"] = [random.choice(cities) for _ in range(num_records)]
 
-table = pd.DataFrame(data)
+#table = pd.DataFrame(data)
 
 data = {
 "year": [1896, 1900, 1904, 2004, 2008, 2012],
 "city": ["athens", "paris", "st. louis", "athens", "beijing", "london"]
 }
-#table = pd.DataFrame.from_dict(data)
+table = pd.DataFrame.from_dict(data)
 
 
 # Load the chatbot model
@@ -84,8 +85,8 @@ sql_model_name = "microsoft/tapex-large-finetuned-wtq"
 sql_tokenizer = TapexTokenizer.from_pretrained(sql_model_name)
 sql_model = BartForConditionalGeneration.from_pretrained(sql_model_name)
 
-max_token_limit = sql_tokenizer.max_model_input_sizes[sql_model_name]
-print(f"SQL Maximum token limit for {sql_model_name}: {max_token_limit}")
+#max_token_limit = sql_tokenizer.max_model_input_sizes[sql_model_name]
+#print(f"SQL Maximum token limit for {sql_model_name}: {max_token_limit}")
 
 #sql_response = None
 conversation_history = []
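
For context, the new `table = pd.DataFrame.from_dict(data)` line builds the fixed Olympics table that matches the example table used with microsoft/tapex-large-finetuned-wtq, and the surrounding code loads the TAPEX tokenizer and BART model for it. A minimal sketch of how that table would be queried with the standard transformers TAPEX API (the question string here is illustrative, not part of the commit):

from transformers import TapexTokenizer, BartForConditionalGeneration
import pandas as pd

sql_model_name = "microsoft/tapex-large-finetuned-wtq"
sql_tokenizer = TapexTokenizer.from_pretrained(sql_model_name)
sql_model = BartForConditionalGeneration.from_pretrained(sql_model_name)

data = {
    "year": [1896, 1900, 1904, 2004, 2008, 2012],
    "city": ["athens", "paris", "st. louis", "athens", "beijing", "london"],
}
table = pd.DataFrame.from_dict(data)

# Illustrative question: the tokenizer linearizes the table together with the
# query, and the BART decoder generates the answer as text.
query = "In which year did beijing host the Olympic Games?"
encoding = sql_tokenizer(table=table, query=query, return_tensors="pt")
outputs = sql_model.generate(**encoding)
print(sql_tokenizer.batch_decode(outputs, skip_special_tokens=True))
# should decode to the year Beijing hosted the Games (2008)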
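The commit only comments out the max_token_limit lookup rather than replacing it. One plausible reason (an assumption, not stated in the commit) is that tokenizer.max_model_input_sizes is keyed by canonical checkpoint names and may not contain this model id, so the lookup raises a KeyError. A hedged alternative that reads the limit from an attribute every transformers tokenizer exposes:

# Assumption: model_max_length carries the usable input limit for this tokenizer;
# getattr keeps the print from failing if the attribute is ever missing.
max_token_limit = getattr(sql_tokenizer, "model_max_length", None)
print(f"SQL maximum token limit for {sql_model_name}: {max_token_limit}")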