#!/bin/bash

# First argument: path to the schema file, passed through to augment_neg.py
SCHEMA_PATH=$1

function gen_data {
    local split=$1

    # Load the requested split of the dataset and dump it as JSON records,
    # emit one record per line, extract keywords, augment with negative
    # examples against the schema, and write the compressed result.
    pipenv run python -c "print(__import__('datasets').load_dataset('dataunitylab/json-schema-store')['$split'].to_pandas().to_json(orient='records'))" | \
        jq -c '.[]' | \
        pipenv run python extract_keywords.py | \
        pipenv run python augment_neg.py -s "$SCHEMA_PATH" | \
        gzip -c > "$split.jsonl.gz"
}

gen_data "train"
gen_data "validation"