"""
https://pythonspeed.com/articles/json-memory-streaming/
https://stackoverflow.com/questions/10382253/reading-rather-large-json-files
"""
import ijson


def load_large_json(xfile_path, encoding='utf8', limit=0):
    """Lazily yield top-level array items from a large JSON file.

    The file is streamed with ijson, so memory use stays flat regardless
    of file size. The JSON document is expected to be an array at the top
    level (ijson prefix ``'item'`` selects each array element).

    Parameters
    ----------
    xfile_path : str
        Path to the JSON file.
    encoding : str
        Unused. The file is opened in binary mode and ijson decodes the
        bytes itself; the parameter is kept only for backward
        compatibility with existing callers.
    limit : int
        Maximum number of records to yield; 0 (the default) means no limit.

    Yields
    ------
    The parsed top-level array elements, one at a time.
    """
    with open(xfile_path, 'rb') as f:
        for cnt, xrecord in enumerate(ijson.items(f, 'item')):
            if limit and cnt >= limit:
                break
            yield xrecord


if __name__ == '__main__':

    def main():
        # Smoke-test: print the first few records of a local dataset file.
        xfile_path = r'D:\_dell7590_root\local\LNP_datasets\med\med_dialog\MedDialog_processed\MedDialog_processed\train_data.json'

        records = load_large_json(xfile_path, limit=5)
        for idx, record in enumerate(records):
            print(idx, record)

    main()
