abdullahmubeen10 committed
Commit dcdb825 · verified · 1 Parent(s): 928ec81

Upload 177 files

This view is limited to 50 files because it contains too many changes.
Files changed (50)
  1. .gitattributes +1 -0
  2. .streamlit/config.toml +3 -0
  3. Demo.py +163 -0
  4. Dockerfile +70 -0
  5. images/T5_model_diagram.jpg +3 -0
  6. inputs/Coreference Resolution - mrpc/t5_base/Example1.txt +2 -0
  7. inputs/Coreference Resolution - mrpc/t5_base/Example2.txt +2 -0
  8. inputs/Coreference Resolution - mrpc/t5_base/Example3.txt +2 -0
  9. inputs/Coreference Resolution - mrpc/t5_base/Example4.txt +2 -0
  10. inputs/Coreference Resolution - mrpc/t5_base/Example5.txt +2 -0
  11. inputs/Coreference Resolution - mrpc/t5_small/Example1.txt +2 -0
  12. inputs/Coreference Resolution - mrpc/t5_small/Example2.txt +2 -0
  13. inputs/Coreference Resolution - mrpc/t5_small/Example3.txt +2 -0
  14. inputs/Coreference Resolution - mrpc/t5_small/Example4.txt +2 -0
  15. inputs/Coreference Resolution - mrpc/t5_small/Example5.txt +2 -0
  16. inputs/Coreference Resolution - qqp/t5_base/Example1.txt +2 -0
  17. inputs/Coreference Resolution - qqp/t5_base/Example2.txt +2 -0
  18. inputs/Coreference Resolution - qqp/t5_base/Example3.txt +2 -0
  19. inputs/Coreference Resolution - qqp/t5_base/Example4.txt +2 -0
  20. inputs/Coreference Resolution - qqp/t5_base/Example5.txt +2 -0
  21. inputs/Coreference Resolution - qqp/t5_small/Example1.txt +2 -0
  22. inputs/Coreference Resolution - qqp/t5_small/Example2.txt +2 -0
  23. inputs/Coreference Resolution - qqp/t5_small/Example3.txt +2 -0
  24. inputs/Coreference Resolution - qqp/t5_small/Example4.txt +2 -0
  25. inputs/Coreference Resolution - qqp/t5_small/Example5.txt +2 -0
  26. inputs/Natural Language Inference - cb/t5_base/Example1.txt +2 -0
  27. inputs/Natural Language Inference - cb/t5_base/Example2.txt +2 -0
  28. inputs/Natural Language Inference - cb/t5_base/Example3.txt +2 -0
  29. inputs/Natural Language Inference - cb/t5_base/Example4.txt +2 -0
  30. inputs/Natural Language Inference - cb/t5_base/Example5.txt +2 -0
  31. inputs/Natural Language Inference - cb/t5_small/Example1.txt +2 -0
  32. inputs/Natural Language Inference - cb/t5_small/Example2.txt +2 -0
  33. inputs/Natural Language Inference - cb/t5_small/Example3.txt +2 -0
  34. inputs/Natural Language Inference - cb/t5_small/Example4.txt +2 -0
  35. inputs/Natural Language Inference - cb/t5_small/Example5.txt +2 -0
  36. inputs/Natural Language Inference - mnli/t5_base/Example1.txt +2 -0
  37. inputs/Natural Language Inference - mnli/t5_base/Example2.txt +2 -0
  38. inputs/Natural Language Inference - mnli/t5_base/Example3.txt +2 -0
  39. inputs/Natural Language Inference - mnli/t5_base/Example4.txt +2 -0
  40. inputs/Natural Language Inference - mnli/t5_base/Example5.txt +2 -0
  41. inputs/Natural Language Inference - mnli/t5_small/Example1.txt +2 -0
  42. inputs/Natural Language Inference - mnli/t5_small/Example2.txt +2 -0
  43. inputs/Natural Language Inference - mnli/t5_small/Example3.txt +2 -0
  44. inputs/Natural Language Inference - mnli/t5_small/Example4.txt +2 -0
  45. inputs/Natural Language Inference - mnli/t5_small/Example5.txt +2 -0
  46. inputs/Natural Language Inference - qnli/t5_base/Example1.txt +2 -0
  47. inputs/Natural Language Inference - qnli/t5_base/Example2.txt +2 -0
  48. inputs/Natural Language Inference - qnli/t5_base/Example3.txt +2 -0
  49. inputs/Natural Language Inference - qnli/t5_base/Example4.txt +2 -0
  50. inputs/Natural Language Inference - qnli/t5_base/Example5.txt +2 -0
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ images/T5_model_diagram.jpg filter=lfs diff=lfs merge=lfs -text
.streamlit/config.toml ADDED
@@ -0,0 +1,3 @@
+ [theme]
+ base="light"
+ primaryColor="#29B4E8"
Demo.py ADDED
@@ -0,0 +1,163 @@
+ import streamlit as st
+ import sparknlp
+ import os
+ import pandas as pd
+
+ from sparknlp.base import *
+ from sparknlp.annotator import *
+ from pyspark.ml import Pipeline
+ from sparknlp.pretrained import PretrainedPipeline
+
+ # Page Configuration
+ st.set_page_config(
+     layout="wide",
+     initial_sidebar_state="auto"
+ )
+
+ # Custom CSS for Styling
+ st.markdown("""
+     <style>
+         .main-title {
+             font-size: 36px;
+             color: #4A90E2;
+             font-weight: bold;
+             text-align: center;
+         }
+         .section-content {
+             background-color: #f9f9f9;
+             padding: 10px;
+             border-radius: 10px;
+             margin-top: 10px;
+         }
+         .section-content p, .section-content ul {
+             color: #666666;
+         }
+     </style>
+ """, unsafe_allow_html=True)
+
+ # Initialize Spark Session
+ @st.cache_resource
+ def start_spark_session():
+     return sparknlp.start()
+
+ # Create NLP Pipeline
+ @st.cache_resource
+ def build_nlp_pipeline(model_name, task):
+     document_assembler = DocumentAssembler()\
+         .setInputCol("text")\
+         .setOutputCol("document")
+
+     t5_transformer = T5Transformer() \
+         .pretrained(model_name, 'en') \
+         .setTask(task)\
+         .setInputCols(["document"]) \
+         .setOutputCol("output")
+
+     pipeline = Pipeline().setStages([document_assembler, t5_transformer])
+     return pipeline
+
+ # Apply Pipeline to Text Data
+ def process_text(pipeline, text):
+     df = spark.createDataFrame([[text]]).toDF("text")
+     result = pipeline.fit(df).transform(df)
+     return result.select('output.result').collect()
+
+ # Model and Task Information
+ model_info = [
+     {
+         "model_name": "t5_small",
+         "title": "Multi-Task NLP Model",
+         "description": "The T5 model performs 18 different NLP tasks, including summarization, question answering, and grammatical-correctness detection."
+     },
+     {
+         "model_name": "t5_base",
+         "title": "Multi-Task NLP Model",
+         "description": "A larger variant of the T5 model, capable of performing a variety of NLP tasks with improved accuracy."
+     },
+     {
+         "model_name": "google_t5_small_ssm_nq",
+         "title": "Question Answering Model",
+         "description": "This model is fine-tuned for answering questions based on the Natural Questions dataset, leveraging pre-training on large text corpora."
+     }
+ ]
+
+ task_descriptions = {
+     'Sentence Classification - cola': "Classify if a sentence is grammatically correct.",
+     'Natural Language Inference - rte': "The RTE task is defined as recognizing, given two text fragments, whether the meaning of one text can be inferred (entailed) from the other or not.",
+     'Natural Language Inference - mnli': "Classify, for a hypothesis and premise, whether they entail or contradict each other, or neither (3 classes).",
+     'Natural Language Inference - qnli': "Classify whether the answer to a question can be deduced from an answer candidate.",
+     'Natural Language Inference - cb': "Classify, for a premise and a hypothesis, whether they contradict each other or not (binary).",
+     'Coreference Resolution - mrpc': "Classify whether a pair of sentences is a re-phrasing of each other (semantically equivalent).",
+     'Coreference Resolution - qqp': "Classify whether a pair of questions is a re-phrasing of each other (semantically equivalent).",
+     'Sentiment Analysis - sst2': "Classify the sentiment of a sentence as positive or negative.",
+     'Sentiment Analysis - stsb': "Measure how similar two sentences are on a scale from 0 to 5.",
+     'Question Answering - copa': "Classify, for a question, a premise, and two choices, which choice is correct (binary).",
+     'Question Answering - multirc': "Classify, for a question, a paragraph of text, and an answer candidate, whether the answer is correct (binary).",
+     'Question Answering - squad': "Answer a question for a given context.",
+     'Word Sense Disambiguation - wic': "Classify, for a pair of sentences and an ambiguous word, whether the word has the same meaning in both sentences.",
+     'Text - summarization': "Summarize text into a shorter representation.",
+     'Translation - wmt1': "Translate text from one language to another. Example: Translate English to German.",
+     'Translation - wmt2': "Translate text from one language to another. Example: Translate English to French.",
+     'Translation - wmt3': "Translate text from one language to another. Example: Translate English to Romanian."
+ }
+
+ # Sidebar: Task and Model Selection
+ selected_task = st.sidebar.selectbox("Choose an NLP Task", list(task_descriptions.keys()))
+ task_for_pipeline = f"{selected_task.split(' - ')[-1]}:"
+
+ available_models = ['google_t5_small_ssm_nq'] if "Question Answering" in selected_task else ['t5_base', 't5_small']
+ selected_model = st.sidebar.selectbox("Choose a Model", available_models)
+
+ # Get Model Info
+ model_details = next((info for info in model_info if info['model_name'] == selected_model), None)
+ app_title = model_details['title'] if model_details else "Unknown Model"
+ app_description = model_details['description'] if model_details else "No description available."
+
+ # Display Model Info
+ st.markdown(f'<div class="main-title">{app_title}</div>', unsafe_allow_html=True)
+ st.markdown(f'<div class="section-content"><p>{app_description}</p></div>', unsafe_allow_html=True)
+ st.subheader(task_descriptions[selected_task])
+
+ # Load Example Texts
+ example_folder = f"inputs/{selected_task}/{selected_model}"
+ example_texts = [
+     line.strip()
+     for file in os.listdir(example_folder)
+     if file.endswith('.txt')
+     for line in open(os.path.join(example_folder, file), 'r', encoding='utf-8')
+ ]
+
+ # User Input: Select or Enter Text
+ selected_example = st.selectbox("Select an Example", example_texts)
+ custom_input = st.text_input("Or enter your own text:")
+
+ text_to_process = custom_input if custom_input else selected_example
+
+ # Display Selected Text
+ st.subheader('Selected Text')
+ st.markdown(f'<div class="section-content">{text_to_process}</div>', unsafe_allow_html=True)
+
+ # Sidebar: Reference Notebook
+ st.sidebar.markdown('Reference notebook:')
+ st.sidebar.markdown("""
+     <a href="https://github.com/JohnSnowLabs/spark-nlp-workshop/blob/master/tutorials/streamlit_notebooks/T5TRANSFORMER.ipynb">
+         <img src="https://colab.research.google.com/assets/colab-badge.svg" style="zoom: 1.3" alt="Open In Colab"/>
+     </a>
+ """, unsafe_allow_html=True)
+
+ # Special Cases for Translation Tasks
+ task_for_pipeline = {
+     'wmt1:': 'translate English to German:',
+     'wmt2:': 'translate English to French:',
+     'wmt3:': 'translate English to Romanian:'
+ }.get(task_for_pipeline, task_for_pipeline)
+
+ # Initialize Spark, Build Pipeline, and Process Text
+ spark = start_spark_session()
+ nlp_pipeline = build_nlp_pipeline(selected_model, task_for_pipeline)
+ processed_output = process_text(nlp_pipeline, text_to_process)
+
+ # Display Processed Output
+ st.subheader("Processed Output")
+ output_text = "".join(processed_output[0][0])
+ st.markdown(f'<div class="section-content">{output_text}</div>', unsafe_allow_html=True)
Dockerfile ADDED
@@ -0,0 +1,70 @@
+ # Download base image ubuntu 18.04
+ FROM ubuntu:18.04
+
+ # Set environment variables
+ ENV NB_USER jovyan
+ ENV NB_UID 1000
+ ENV HOME /home/${NB_USER}
+
+ # Install required packages
+ RUN apt-get update && apt-get install -y \
+     tar \
+     wget \
+     bash \
+     rsync \
+     gcc \
+     libfreetype6-dev \
+     libhdf5-serial-dev \
+     libpng-dev \
+     libzmq3-dev \
+     python3 \
+     python3-dev \
+     python3-pip \
+     unzip \
+     pkg-config \
+     software-properties-common \
+     graphviz \
+     openjdk-8-jdk \
+     ant \
+     ca-certificates-java \
+     && apt-get clean \
+     && update-ca-certificates -f;
+
+ # Install Python 3.8 and pip
+ RUN add-apt-repository ppa:deadsnakes/ppa \
+     && apt-get update \
+     && apt-get install -y python3.8 python3-pip \
+     && apt-get clean;
+
+ # Set up JAVA_HOME
+ ENV JAVA_HOME /usr/lib/jvm/java-8-openjdk-amd64/
+ RUN mkdir -p ${HOME} \
+     && echo "export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64/" >> ${HOME}/.bashrc \
+     && chown -R ${NB_UID}:${NB_UID} ${HOME}
+
+ # Create a new user named "jovyan" with user ID 1000
+ RUN useradd -m -u ${NB_UID} ${NB_USER}
+
+ # Switch to the "jovyan" user
+ USER ${NB_USER}
+
+ # Set home and path variables for the user
+ ENV HOME=/home/${NB_USER} \
+     PATH=/home/${NB_USER}/.local/bin:$PATH
+
+ # Set the working directory to the user's home directory
+ WORKDIR ${HOME}
+
+ # Upgrade pip and install Python dependencies
+ RUN python3.8 -m pip install --upgrade pip
+ COPY requirements.txt /tmp/requirements.txt
+ RUN python3.8 -m pip install -r /tmp/requirements.txt
+
+ # Copy the application code into the container at /home/jovyan
+ COPY --chown=${NB_USER}:${NB_USER} . ${HOME}
+
+ # Expose port for Streamlit
+ EXPOSE 7860
+
+ # Define the entry point for the container
+ ENTRYPOINT ["streamlit", "run", "Demo.py", "--server.port=7860", "--server.address=0.0.0.0"]
images/T5_model_diagram.jpg ADDED

Git LFS Details

  • SHA256: ecdc448c0c71610fa26d4063fd82edb1b6e879d3cb0e17fd2e8d29565a1ccbc4
  • Pointer size: 132 Bytes
  • Size of remote file: 3.15 MB
inputs/Coreference Resolution - mrpc/t5_base/Example1.txt ADDED
@@ -0,0 +1,2 @@
+ We acted because we saw the existing evidence in a new light, through the prism of our experienc...
+ We acted because we saw the existing evidence in a new light, through the prism of our experience on 11 September Rumsfeld said. sentence2: Rather, the US acted because the administration saw existing evidence in a new light, through the prism of our experience on September 11.
inputs/Coreference Resolution - mrpc/t5_base/Example2.txt ADDED
@@ -0,0 +1,2 @@
+ I like to eat peanut butter for breakfast.
+ I like to eat peanut butter for breakfast. sentence2: I like to play football.
inputs/Coreference Resolution - mrpc/t5_base/Example3.txt ADDED
@@ -0,0 +1,2 @@
+ Charles O. Prince, 53, was named as Mr. Weill’s successor.
+ Charles O. Prince, 53, was named as Mr. Weill’s successor. sentence2: Mr. Weill’s longtime confidant, Charles O. Prince, 53, was named as his successor.
inputs/Coreference Resolution - mrpc/t5_base/Example4.txt ADDED
@@ -0,0 +1,2 @@
+ The euro rose above US$1.18, the highest price since its January ...
+ The euro rose above US$1.18, the highest price since its January 1999 launch. sentence2: The euro rose above $1.18 the high-est level since its launch in January 1999.
inputs/Coreference Resolution - mrpc/t5_base/Example5.txt ADDED
@@ -0,0 +1,2 @@
+ However, without a carefully con-trolled study, there was ...
+ However, without a carefully con-trolled study, there was little clear proof that the operation ac-tually improves people’s lives. sentence2: But without a carefully controlled study, there was little clear proof that the operation improves people’s lives.
inputs/Coreference Resolution - mrpc/t5_small/Example1.txt ADDED
@@ -0,0 +1,2 @@
+ We acted because we saw the existing evidence in a new light, through the prism of our experienc...
+ We acted because we saw the existing evidence in a new light, through the prism of our experience on 11 September" Rumsfeld said. sentence2: Rather, the US acted because the administration saw "existing evidence in a new light, through the prism of our experience on September 11".
inputs/Coreference Resolution - mrpc/t5_small/Example2.txt ADDED
@@ -0,0 +1,2 @@
+ I like to eat peanutbutter for breakfast.
+ I like to eat peanutbutter for breakfast. sentence2: I like to play football.
inputs/Coreference Resolution - mrpc/t5_small/Example3.txt ADDED
@@ -0,0 +1,2 @@
+ Charles O. Prince, 53, was named as Mr. Weill’s successor.
+ Charles O. Prince, 53, was named as Mr. Weill’s successor. sentence2: Mr. Weill’s longtime confidant, Charles O. Prince, 53, was named as his successor.
inputs/Coreference Resolution - mrpc/t5_small/Example4.txt ADDED
@@ -0,0 +1,2 @@
+ The euro rose above US$1.18, the highest price since its January ...
+ The euro rose above US$1.18, the highest price since its January 1999 launch. sentence2: The euro rose above $1.18 the high-est level since its launch in January 1999.
inputs/Coreference Resolution - mrpc/t5_small/Example5.txt ADDED
@@ -0,0 +1,2 @@
+ However, without a carefully con-trolled study, there was ...
+ However, without a carefully con-trolled study, there was little clear proof that the operation ac-tually improves people’s lives. sentence2: But without a carefully controlled study, there was little clear proof that the operation improves people’s lives.
inputs/Coreference Resolution - qqp/t5_base/Example1.txt ADDED
@@ -0,0 +1,2 @@
+ question1: What attributes would have made you highly desirable in ancient Rome?
+ question1: What attributes would have made you highly desirable in ancient Rome? question2: How I GET OPPERTINUTY TO JOIN IT COMPANY AS A FRESHER?'
inputs/Coreference Resolution - qqp/t5_base/Example2.txt ADDED
@@ -0,0 +1,2 @@
+ question1: What was it like in Ancient rome?
+ question1: What was it like in Ancient rome? question2: What was Ancient rome like?
inputs/Coreference Resolution - qqp/t5_base/Example3.txt ADDED
@@ -0,0 +1,2 @@
+ question1: How can I install Windows software or games?
+ question1: How can I install Windows software or games? question2: I cannot install Windows. How to install it?
inputs/Coreference Resolution - qqp/t5_base/Example4.txt ADDED
@@ -0,0 +1,2 @@
+ question1: Could you live without the internet?
+ question1: Could you live without the internet? question2: Internet is not available for a few days. Could you manage without it?
inputs/Coreference Resolution - qqp/t5_base/Example5.txt ADDED
@@ -0,0 +1,2 @@
+ question1: What is the best thing that happened to you during the past week?
+ question1: What is the best thing that happened to you during the past week? question2: What is the best thing that happened to you during the past year?
inputs/Coreference Resolution - qqp/t5_small/Example1.txt ADDED
@@ -0,0 +1,2 @@
+ question1: What attributes would have made you highly desirable in ancient Rome?
+ question1: What attributes would have made you highly desirable in ancient Rome? question2: How I GET OPPERTINUTY TO JOIN IT COMPANY AS A FRESHER?'
inputs/Coreference Resolution - qqp/t5_small/Example2.txt ADDED
@@ -0,0 +1,2 @@
+ question1: What was it like in Ancient rome?
+ question1: What was it like in Ancient rome? question2: What was Ancient rome like?
inputs/Coreference Resolution - qqp/t5_small/Example3.txt ADDED
@@ -0,0 +1,2 @@
+ question1: How can I install Windows software or games?
+ question1: How can I install Windows software or games? question2: I cannot install Windows. How to install it?
inputs/Coreference Resolution - qqp/t5_small/Example4.txt ADDED
@@ -0,0 +1,2 @@
+ question1: Could you live without the internet?
+ question1: Could you live without the internet? question2: Internet is not available for a few days. Could you manage without it?
inputs/Coreference Resolution - qqp/t5_small/Example5.txt ADDED
@@ -0,0 +1,2 @@
+ question1: What is the best thing that happened to you during the past week?
+ question1: What is the best thing that happened to you during the past week? question2: What is the best thing that happened to you during the past year?
inputs/Natural Language Inference - cb/t5_base/Example1.txt ADDED
@@ -0,0 +1,2 @@
+ Recent report say Johnny makes he alot of money, he earned 10 million USD each year for the last ...
+ Recent report say Johnny makes he alot of money, he earned 10 million USD each year for the last 5 years. premise: Johnny is a poor man.
inputs/Natural Language Inference - cb/t5_base/Example2.txt ADDED
@@ -0,0 +1,2 @@
+ It rained in England the last 4 weeks. premise: It was snowing in New Yor...
+ It rained in England the last 4 weeks. premise: It was snowing in New York last week.
inputs/Natural Language Inference - cb/t5_base/Example3.txt ADDED
@@ -0,0 +1,2 @@
+ Man in a black suit, white shirt and black bow tie playing an instrument with ...
+ Man in a black suit, white shirt and black bow tie playing an instrument with the rest of his symphony surrounding him. premise:Nobody has a suit
inputs/Natural Language Inference - cb/t5_base/Example4.txt ADDED
@@ -0,0 +1,2 @@
+ At 8:34, the Boston Center controller received a third, transmission from ...
+ At 8:34, the Boston Center controller received a third, transmission from American 11. premise:The Boston Center controller got a third transmission from American 11.
inputs/Natural Language Inference - cb/t5_base/Example5.txt ADDED
@@ -0,0 +1,2 @@
+ Cats with long hair shed all over the house so you should not get a long-....
+ Cats with long hair shed all over the house so you should not get a long-haired cat premise: Long hair cats are good
inputs/Natural Language Inference - cb/t5_small/Example1.txt ADDED
@@ -0,0 +1,2 @@
+ Recent report say Johnny makes he alot of money, he earned 10 million USD each year for the last ...
+ Recent report say Johnny makes he alot of money, he earned 10 million USD each year for the last 5 years. premise: Johnny is a poor man.
inputs/Natural Language Inference - cb/t5_small/Example2.txt ADDED
@@ -0,0 +1,2 @@
+ It rained in England the last 4 weeks. premise: It was snowing in New Yor...
+ It rained in England the last 4 weeks. premise: It was snowing in New York last week.
inputs/Natural Language Inference - cb/t5_small/Example3.txt ADDED
@@ -0,0 +1,2 @@
+ Man in a black suit, white shirt and black bow tie playing an instrument with ...
+ Man in a black suit, white shirt and black bow tie playing an instrument with the rest of his symphony surrounding him. premise:Nobody has a suit
inputs/Natural Language Inference - cb/t5_small/Example4.txt ADDED
@@ -0,0 +1,2 @@
+ At 8:34, the Boston Center controller received a third, transmission from ...
+ At 8:34, the Boston Center controller received a third, transmission from American 11. premise:The Boston Center controller got a third transmission from American 11.
inputs/Natural Language Inference - cb/t5_small/Example5.txt ADDED
@@ -0,0 +1,2 @@
+ Cats with long hair shed all over the house so you should not get a long-....
+ Cats with long hair shed all over the house so you should not get a long-haired cat premise: Long hair cats are good
inputs/Natural Language Inference - mnli/t5_base/Example1.txt ADDED
@@ -0,0 +1,2 @@
+ Recent report say Johnny makes he alot of money, he earned 10 million USD each year for the last 5 years.
+ Recent report say Johnny makes he alot of money, he earned 10 million USD each year for the last 5 years. premise: Johnny is a poor man.
inputs/Natural Language Inference - mnli/t5_base/Example2.txt ADDED
@@ -0,0 +1,2 @@
+ It rained in England the last 4 weeks.
+ It rained in England the last 4 weeks. premise: It was snowing in New York last week.
inputs/Natural Language Inference - mnli/t5_base/Example3.txt ADDED
@@ -0,0 +1,2 @@
+ Man in a black suit, white shirt and black bow tie playing an instrument with ...
+ Man in a black suit, white shirt and black bow tie playing an instrument with the rest of his symphony surrounding him. premise:Nobody has a suit
inputs/Natural Language Inference - mnli/t5_base/Example4.txt ADDED
@@ -0,0 +1,2 @@
+ At 8:34, the Boston Center controller received a third, transmission from ...
+ At 8:34, the Boston Center controller received a third, transmission from American 11. premise:The Boston Center controller got a third transmission from American 11.
inputs/Natural Language Inference - mnli/t5_base/Example5.txt ADDED
@@ -0,0 +1,2 @@
+ Cats with long hair shed all over the house so you should not get a long-....
+ Cats with long hair shed all over the house so you should not get a long-haired cat premise: Long hair cats are good
inputs/Natural Language Inference - mnli/t5_small/Example1.txt ADDED
@@ -0,0 +1,2 @@
+ Recent report say Johnny makes he alot of money, he earned 10 million USD each year for the last ...
+ Recent report say Johnny makes he alot of money, he earned 10 million USD each year for the last 5 years. premise: Johnny is a poor man.
inputs/Natural Language Inference - mnli/t5_small/Example2.txt ADDED
@@ -0,0 +1,2 @@
+ It rained in England the last 4 weeks. premise: It was snowing in New Yor...
+ It rained in England the last 4 weeks. premise: It was snowing in New York last week.
inputs/Natural Language Inference - mnli/t5_small/Example3.txt ADDED
@@ -0,0 +1,2 @@
+ Man in a black suit, white shirt and black bow tie playing an instrument with ...
+ Man in a black suit, white shirt and black bow tie playing an instrument with the rest of his symphony surrounding him. premise:Nobody has a suit
inputs/Natural Language Inference - mnli/t5_small/Example4.txt ADDED
@@ -0,0 +1,2 @@
+ At 8:34, the Boston Center controller received a third, transmission from ...
+ At 8:34, the Boston Center controller received a third, transmission from American 11. premise:The Boston Center controller got a third transmission from American 11.
inputs/Natural Language Inference - mnli/t5_small/Example5.txt ADDED
@@ -0,0 +1,2 @@
+ Cats with long hair shed all over the house so you should not get a long-....
+ Cats with long hair shed all over the house so you should not get a long-haired cat premise: Long hair cats are good
inputs/Natural Language Inference - qnli/t5_base/Example1.txt ADDED
@@ -0,0 +1,2 @@
+ question: Where did Jebe die? sentence: Ghenkis Khan recalled Subtai back...
+ question: Where did Jebe die? sentence: Ghenkis Khan recalled Subtai back to Mongolia soon afterward, and Jebe died on the road back to Samarkand
inputs/Natural Language Inference - qnli/t5_base/Example2.txt ADDED
@@ -0,0 +1,2 @@
+ question: What does Steve like to eat? sentence: Steve watches TV all day.
+ question: What does Steve like to eat? sentence: Steve watches TV all day.
inputs/Natural Language Inference - qnli/t5_base/Example3.txt ADDED
@@ -0,0 +1,2 @@
+ question: What space station supported three manned missions in 1973–1974?
+ question: What space station supported three manned missions in 1973–1974? sentence: Apollo/Saturn vehicles were also used for an Apollo Applications Program, which consisted of Skylab, a space station that supported three manned missions in 1973–74, and the Apollo–Soyuz Test Project, a joint Earth orbit mission with the Soviet Union in 1975.
inputs/Natural Language Inference - qnli/t5_base/Example4.txt ADDED
@@ -0,0 +1,2 @@
+ question: When did Beyonce start becoming popular?
+ question: When did Beyonce start becoming popular? sentence: Beyoncé was born and raised in Houston, Texas, she performed in various singing and dancing competitions as a child, and rose to fame in the late 1990s as lead singer of R&B girl-group Destiny's Child.
inputs/Natural Language Inference - qnli/t5_base/Example5.txt ADDED
@@ -0,0 +1,2 @@
+ question: Which NFL team represented the AFC at Super Bowl 50?
+ question: Which NFL team represented the AFC at Super Bowl 50? sentence: Super Bowl 50 was an American football game to determine the champion of the National Football League (NFL) for the 2015 season. The American Football Conference (AFC) champion Denver Broncos defeated the National Football Conference (NFC) champion Carolina Panthers 24–10 to earn their third Super Bowl title.