# -*- coding: utf-8 -*-
"""palabraNoelani.py.195

Automatically generated by Colab.

Original file is located at
    https://colab.research.google.com/drive/1uFgy02KIfS_uTu6vJc8P7kcEQ_jtSNGw
"""
import os
import sys

import requests
from tqdm import tqdm

# Require exactly one CLI argument: the GPT-2 model name (e.g. "124M").
if len(sys.argv) != 2:
    print('Enter model parameter')
    sys.exit(1)
model = sys.argv[1]

# Create models/<model> locally if needed.  Normalize to forward slashes
# because the same path string is embedded verbatim in the blob-store URL
# used for the downloads below.
subdir = os.path.join('models', model)
if not os.path.exists(subdir):
    os.makedirs(subdir)
subdir = subdir.replace('\\', '/')
# Stream each artifact of a GPT-2 checkpoint from the public Azure blob
# store into models/<model>/, showing a per-file progress bar.
for filename in ['checkpoint', 'encoder.json', 'hparams.json',
                 'model.ckpt.data-00000-of-00001', 'model.ckpt.index',
                 'model.ckpt.meta', 'vocab.bpe']:
    # stream=True avoids loading the whole (potentially multi-GB) file
    # into memory; chunks are written to disk as they arrive.
    r = requests.get("https://openaipublic.blob.core.windows.net/gpt-2/"
                     + subdir + "/" + filename, stream=True)
    with open(os.path.join(subdir, filename), 'wb') as f:
        # Fix: was `int(r,headers[...])` (comma typo) — read the header
        # from the response object so tqdm can show real progress.
        file_size = int(r.headers["content-length"])
        chunk_size = 1000
        # Fix: original `with tqdm(ncols=100)` lacked `as pbar:` (and the
        # colon) even though `pbar` is used below; also wire in the
        # previously-unused file_size as the bar's total.
        with tqdm(ncols=100, total=file_size, unit_scale=True) as pbar:
            for chunk in r.iter_content(chunk_size=chunk_size):
                f.write(chunk)
                pbar.update(chunk_size)