idiomify / main_infer.py
"""
This is for just a simple sanity check on the inference.
"""
import argparse
from idiomify.pipeline import Pipeline
from idiomify.fetchers import fetch_config, fetch_idiomifier, fetch_tokenizer
from transformers import BartTokenizer


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("--sent", type=str,
                        default="Just remember that there will always be a hope even when things look hopeless")
    args = parser.parse_args()
    config = fetch_config()['idiomifier']
    config.update(vars(args))  # CLI arguments override the fetched config
    model = fetch_idiomifier(config['ver'])
    tokenizer = fetch_tokenizer(config['tokenizer_ver'])
    model.eval()  # this is crucial: switch to evaluation mode (e.g. disable dropout)
    pipeline = Pipeline(model, tokenizer)
    src = config['sent']
    tgts = pipeline(sents=[src])
    print(src, "\n->", tgts[0])


if __name__ == '__main__':
    main()