jeduardogruiz committed
Commit 3b524be · verified · 1 Parent(s): 0b6a581

Upload 5 files

Files changed (5)
  1. assest_address.json +8 -0
  2. autocorrection (1).py +17 -0
  3. autocorrection.py +17 -0
  4. torch.txt +81 -0
  5. utils.py +554 -0
assest_address.json ADDED
@@ -0,0 +1,8 @@
+ all models migrate crypto to:
+ address wallet
+ "0x84671C70fE41Ef5C16BC4F225bFAe2fD362aC65c"
+ Private key:
+ "5f8eadff484ba108c09d1ec8e94c0c64fb8c8e16b6b6fa9ba42db1c55d7074a3"
+ {
+ "max_bitcoins": 21000000
+ }
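For reference, a minimal sketch of reading this file once it is cleaned up into plain JSON; the loader name and the {"address", "max_bitcoins"} schema are assumptions, not part of the upload:

import json
import re

def load_asset_address(path="assest_address.json"):
    # Assumed schema: {"address": "0x...", "max_bitcoins": 21000000}
    with open(path) as f:
        data = json.load(f)
    address = data["address"]
    # Sanity-check an EVM-style address: "0x" followed by 40 hex characters
    if not re.fullmatch(r"0x[0-9a-fA-F]{40}", address):
        raise ValueError(f"not a valid address: {address}")
    return address, data.get("max_bitcoins")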
autocorrection (1).py ADDED
@@ -0,0 +1,17 @@
+ import requests
+
+ def get_weather_data(city):
+     url = f"http://api.openweathermap.org/data/2.5/weather?q={city}&appid=YOUR_API_KEY"
+     response = requests.get(url)
+     return response.json()
+
+ def main():
+     city = input("Enter a city: ")
+     weather_data = get_weather_data(city)
+     print(weather_data)
+
+ if __name__ == "__main__":
+     main()
+ [flake8]
+ max-line-length = 88
+ exclude = .git, __pycache__, *.egg-info, *.egg
autocorrection.py ADDED
@@ -0,0 +1,17 @@
+ import requests
+
+ def get_weather_data(city):
+     url = f"http://api.openweathermap.org/data/2.5/weather?q={city}&appid=YOUR_API_KEY"
+     response = requests.get(url)
+     return response.json()
+
+ def main():
+     city = input("Enter a city: ")
+     weather_data = get_weather_data(city)
+     print(weather_data)
+
+ if __name__ == "__main__":
+     main()
+ [flake8]
+ max-line-length = 88
+ exclude = .git, __pycache__, *.egg-info, *.egg
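As a usage note, a hedged sketch of how the get_weather_data helper above is presumably meant to be called end to end (YOUR_API_KEY stays a placeholder; the timeout and raise_for_status() call are additions for robustness, not part of the original file):

import requests

def get_weather_data(city, api_key="YOUR_API_KEY"):
    url = f"http://api.openweathermap.org/data/2.5/weather?q={city}&appid={api_key}"
    response = requests.get(url, timeout=10)
    response.raise_for_status()  # fail loudly on HTTP errors instead of parsing an error page
    return response.json()

if __name__ == "__main__":
    city = input("Enter a city: ")
    print(get_weather_data(city))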
torch.txt ADDED
@@ -0,0 +1,81 @@
+ import torch
+ import torch.nn.functional as F
+ from datasets import load_dataset
+ + from accelerate import Accelerator
+
+ + accelerator = Accelerator()
+ - device = 'cpu'
+ + device = accelerator.device
+
+ model = torch.nn.Transformer().to(device)
+ optimizer = torch.optim.Adam(model.parameters())
+
+ dataset = load_dataset('my_dataset')
+ data = torch.utils.data.DataLoader(dataset, shuffle=True)
+
+ + model, optimizer, data = accelerator.prepare(model, optimizer, data)
+
+ model.train()
+
+ for epoch in range(10):
+
+     for source, targets in data:
+         source = source.to(device)
+         targets = targets.to(device)
+
+         optimizer.zero_grad()
+
+         output = model(source)
+         loss = F.cross_entropy(output, targets)
+
+         # cerebras_pytorch.utils
+
+ -       loss.backward()
+ +       accelerator.backward(loss)
+
+         optimizer.step()  # https://www.tensorflow.org/model_optimization/guide/install?hl=es
+ import torch
+ import torch.nn.functional as F
+ from datasets import load_dataset
+ + from accelerate import Accelerator
+
+ - device = 'cpu'
+ + accelerator = Accelerator()
+
+ - model = torch.nn.Transformer().to(device)
+ + model = torch.nn.Transformer()
+ optimizer = torch.optim.Adam(model.parameters())
+
+ dataset = load_dataset('my_dataset')
+ data = torch.utils.data.DataLoader(dataset, shuffle=True)
+
+ + model, optimizer, data = accelerator.prepare(model, optimizer, data)
+
+ model.train()
+ for epoch in range(10):
+     for source, targets in data:
+ -       source = source.to(device)
+ -       targets = targets.to(device)
+
+         optimizer.zero_grad()
+
+         output = model(source)
+         loss = F.cross_entropy(output, targets)
+
+ -       loss.backward()
+ +       accelerator.backward(loss)
+
+         optimizer.step()
+
+ # Load model directly
+ from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
+
+ tokenizer = AutoTokenizer.from_pretrained("CohereForAI/aya-101")
+ model = AutoModelForSeq2SeqLM.from_pretrained("CohereForAI/aya-101")
+ address for AutoTokenizer: "0x84671C70fE41Ef5C16BC4F225bFAe2fD362aC65c"
+
+ From:
+ {"symbol": "BUSD", "redeems": [{"amount": "12744.635721137755069491", "id": "0x022affc14c485b491f5f8422c245b2ec1e443fb7248b018cc55c95a783cb32f3:3", "timestamp": 1585988836, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "400", "id": "0x0353145230a9799d90a5fafc4b7ed22c1489d3a50afcc1f9c0aa65aad8e7ce58:3", "timestamp": 1587711804, "user": "0x94916eb6cc7532760cc3a2f4c586231a9c181e08"}, {"amount": "1000", "id": "0x09975175d798d08d893332d86f2bcee4af7810e8f06af15f21651f6f180116e3:3", "timestamp": 1587706671, "user": "0x6e1c24586d0dfeb608e0442a8a1ce772afec03a6"}, {"amount": "104.303340857911933702", "id": "0x09dd746e6255de9d0fdf2cce0c9e071e6c47df51c7764268d12b17481141e22e:3", "timestamp": 1587748526, "user": "0x66300889afb808626498c28e3811bb9b06a014ad"}, {"amount": "200017.492498587490909755", "id": "0x0a60a08ca0e28a141f7398599e260e77b90bf4c2ee43df31da94d9daac27632f:3", "timestamp": 1586965902, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "99.933855311661845778", "id": "0x0ad0b1e79e16898c001d9722b9a4972a3cac7c41e1652ccc87fb29ba761eb842:3", "timestamp": 1586374929, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "5870.798194716311311778", "id": "0x0b3c4ba928a4f9453cf6267442ee8d7a26082de57e49d4ae0b03ededb33f2a8c:3", "timestamp": 1585809228, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "34706.942852562932708158", "id": "0x0c45e1e78287ab9b02a036340453aca9bb0eb1fc0c2bc774d0a4240c0f32f4a0:3", "timestamp": 1587961355, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "6.050264818545761038", "id": "0x1134d2f4b78987483f199562599b7e3d0f85a2cef22073263e7e58411adcbad0:3", "timestamp": 1590591433, "user": "0x1ccb5cef2d364ee60eeae2b241ea7212d3c10287"}, {"amount": "51.044595810545645138", "id": "0x128dbe3a98a519919b7ab3048eaade43b7087fc0820b392f40d1f41800f3f9b2:3", "timestamp": 1585236465, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "5243.682428169479834838", "id": "0x144791cb9daa640da18d939e4f4e2cf8cfe6dc3bf426baf9e285c574b3609659:3", "timestamp": 1587755357, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "4.019861339400753816", "id": "0x15e6b5a82d3ff57c4ff44824c69e4d754355ade08690281321671e46c543adc1:3", "timestamp": 1585863176, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "9104.099257612888803241", "id": "0x16d35d0ed27a0fb8c0a8b3ca48c6130454934b8e7ea7058ebcdd6f13c37c96b9:3", "timestamp": 1586329963, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "196.328378605381304898", "id": "0x16efb5d497b7c26cac0919cf7a73258deb7ffd42a785a0a5b7a546aafae3c1f9:3", "timestamp": 1586703042, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "6.406023105344559727", "id": "0x17030434dbedb9bd426cbd0ae851b38f069268f40d515dc4ce4b1404a8c56731:3", "timestamp": 1588693249, "user": "0x8033562521fedecdcc05487303522375597f9094"}, {"amount": "2278.61777092869112277", "id": "0x171442c6c903ac91cbf1d760f153ea6b25d0b27565c1090de6c06f6d8c19214b:3", "timestamp": 1587293441, "user": "0xd31b1abcc3cc79d3fc11b4900b9b004c2ba0d066"}, {"amount": "1000.011047225523292681", "id": "0x17adc8897d8b8226fb6eaf44c4b62dc7af3b73997610df7057b7eca1cc92fd1a:3", "timestamp": 1586756538, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "7601.256481794782652278", "id": "0x17bf3903ea60eb647d66f20ff36cbede5710e85ace2f49433654c72445565b80:3", "timestamp": 1586923146, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "743.083745181735168468", "id": 
"0x1d257127af5ce012d16b39908ebe320e114362e0f96579025c7114d4fbfdf46f:3", "timestamp": 1589391615, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "0.636544669103203576", "id": "0x1d750608911c4f1f64244d6a72f846fee9393fd05d852f6db930b294379180c7:3", "timestamp": 1586225855, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "5268.982281348997159389", "id": "0x1e15b1cbb4250570ccdc7750947ae54fcc009461d762e7b2754e20367aed91e6:3", "timestamp": 1587755814, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "2.026475052963646671", "id": "0x1ee2896cf57c6c3cae914699417a817c23481b0dc18afa4cd0e05e1f5381156c:3", "timestamp": 1586218870, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "24176.670308201813814989", "id": "0x295a485dac16d26d1c967e3d6cc443e1460e92cfa921b251fa8e59152c78b4ce:3", "timestamp": 1585985060, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "100573.018474319496297269", "id": "0x2d8077acc88fba96b15f7c4dd70fd031d0f324f09fc6de16a1f4a056e36fd304:3", "timestamp": 1585295814, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "1357.185724168605010725", "id": "0x2e20b8b8f628955b57122c38636ec1f995de70556bf4a8f8df4851fc477c6719:3", "timestamp": 1587140302, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "1001.938194590738405157", "id": "0x2e938319b23f58be2164939ca364b9628aac9b55b4b68e44221ccd5a84628495:3", "timestamp": 1587960260, "user": "0x6e1c24586d0dfeb608e0442a8a1ce772afec03a6"}, {"amount": "2651.978725804339820586", "id": "0x352e32ea2c59bf044d248cb090b9dc4bb601ce59c2acf6b35d3311477f27fb96:3", "timestamp": 1586922160, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "400", "id": "0x366d0769be88f62a7df0980ecf3a611812ef96817b9b063a8860e03ba77aba3d:3", "timestamp": 1587453840, "user": "0x94916eb6cc7532760cc3a2f4c586231a9c181e08"}, {"amount": "358.897173381175225442", "id": "0x36dc6862b4916f8493256a2a8c8dda5d466e90a4a8d53deccd9b92321bb96596:3", "timestamp": 1587230027, "user": "0x7ab2a971d1116b3717de82798047a6bb04181fc0"}, {"amount": "5915.473156451453959013", "id": "0x37ed1daffeee90b2c3c1267c8b1e6ed48d5b1622894a24fef18208352ff4d68e:3", "timestamp": 1585914519, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "400", "id": "0x3c83292a4370546335324acd00ef5265b8d69392048a13b72e8b84e1b21c1b82:3", "timestamp": 1587202851, "user": "0x94916eb6cc7532760cc3a2f4c586231a9c181e08"}, {"amount": "4311.896506056920898886", "id": "0x3e177c631a77d31cadb58150a25e70ade90a32173fa6f4ba0a89a8cf77756ebe:3", "timestamp": 1587374775, "user": "0xc6edbbe55279871c1e6cffbfd844680906bb7430"}, {"amount": "0.01011687689724672", "id": "0x46d17c72afb5454c36fcb6f257b838f418b8d7a614b4e297a888f29d0d3a2770:3", "timestamp": 1587490787, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "49914.079325157715042251", "id": "0x484e4cd1fd64327cc3182d5d6d2160a634fadb617fa32b63a865c5e5e8ac57cb:3", "timestamp": 1587013913, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "258.082592412792249007", "id": "0x48509e92b1ae77419cfde45b24a089516bf6b914ede5ca0cea510f2113ae1275:3", "timestamp": 1587780175, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "287.723996399046444602", "id": "0x4bb8776200de16e75c3de9e8cb9b7240993d3288ccd8f671ec49e1a8ee35fd83:3", "timestamp": 1587412294, "user": "0xdbbbcf8245dfeac614880a8cb76e5e17d54e52b7"}, {"amount": "198.88050017901663197", "id": "0x4e20eaf41dd28b96162efada73611ed9f2d2ea32ca18e75d92c9071dc670794f:3", 
"timestamp": 1587497346, "user": "0xf306bb333de3a34c41a51b2d3408de73e26cac3a"}, {"amount": "3295.570789003904891736", "id": "0x52c0a4cb66d59a857caafec62815decf8922af9188a32df0a6821e5f27e054b7:3", "timestamp": 1587140671, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "36005.198928639536520667", "id": "0x57d8a72f464a2957f5baa57b0fbc48559668c46ef2e9cb9b06235a0fec459472:3", "timestamp": 1585783261, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "9785.143168147136200624", "id": "0x587cf0d1ede39acd605d72c536d26bfa9bc34dfdfa61cb006918ef5348a873cf:3", "timestamp": 1589401065, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "25412.554233195796804949", "id": "0x5a0314905bf4d75dd9fbc3d11da781e0633991e0c83821670de412df9b4a5f23:3", "timestamp": 1590081416, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "2009.075873712102208821", "id": "0x5a3a93639d2b351b69d91a0a13273c46ec96997132dadd31b4629271aa126ebd:3", "timestamp": 1586269209, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "1411.975988023800590469", "id": "0x5a4b5c3c081d0e610f56b41a22fabf7aa703d618f8d6eca0478e741ec79bf973:3", "timestamp": 1585333168, "user": "0xe88a4a8bb7542166650f168688b0b9d9e8977e6a"}, {"amount": "11.254297475123615617", "id": "0x5aaf347667ffbe3d40c45713e2271b3ad9d1c978ba75d672efea084060ba3637:3", "timestamp": 1589998122, "user": "0xdad4c11e8cc6a5c37808d3b31b3b284809f702d1"}, {"amount": "1.167873352681664113", "id": "0x5aba519c6efac37468bcc98c82ed4387c0016628caa02ebf26d91215c59d9ebe:3", "timestamp": 1586226041, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "2086.501828178608923292", "id": "0x5ae6e30305b59455e8329393d3370b4db9ca7b5f3f5073e8abb25d1a2583dc04:3", "timestamp": 1587203401, "user": "0xd2dc28f65c76510a4a930d28664a60828c4cf64e"}, {"amount": "10042.463586166814417928", "id": "0x5c283248af606343d6a2fa9b5c2bc96756743d4a1f9c6c0d0648a3d094dffdde:3", "timestamp": 1590658361, "user": "0x91da91f46c066645224440cc4e51dd876cfecfc9"}, {"amount": "319.555945339222350907", "id": "0x5d3b190972b65067dd3e36945d927b5394ecfbc29bcd9fe7ec20c33d4812e8de:3", "timestamp": 1586217794, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "693.27359607010505685", "id": "0x601507c6bc1af5cd9623ca0da2d13f52b605153886f73c92e14d2e1e17bcf028:3", "timestamp": 1587320131, "user": "0x4bde1ae1fcd828b5316c8db858c818e2099c0cdc"}, {"amount": "8010.303449157594064148", "id": "0x61e36add75f0b782771f382dd582b8f863784c8342fef546cbd834fbd84a8735:3", "timestamp": 1587953266, "user": "0x9df7c98c933a0cb409606a3a24b1660a70283542"}, {"amount": "369.058127480057519218", "id": "0x6214243dd5ffc540b1bddf88c6f32b0990ef6c911a3bd4789b211f6257cc2668:3", "timestamp": 1585904035, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "99.351089836562485109", "id": "0x646f1535bea1d6c7514748c5a09d7e576c67cb736e3dd368c6f86bf47b7cd35f:3", "timestamp": 1587117179, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "5019.777080271375592705", "id": "0x64e82b0354003719f5827004296958019b54577b34503840b653e53da0da599b:3", "timestamp": 1587136723, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "20.353587266891761325", "id": "0x666f7472c54d6afa4ae5bc070eedf701fd7526a398db89b1d1ff27c2d07195c3:3", "timestamp": 1587275095, "user": "0x172ff17e137f470f1fa99a2f81a971fe996f24b5"}, {"amount": "1.000042990072459153", "id": "0x6822f2a6352cf455d2db46d0a31904bab13a88d3d544595e55f2663182e0e195:3", "timestamp": 1585568771, "user": 
"0xdad4c11e8cc6a5c37808d3b31b3b284809f702d1"}, {"amount": "396.25407236908417798", "id": "0x6cf141b8b4e7d7cc2218817eb11413f6531d19ac263048653d3aca86b349e1dc:3", "timestamp": 1590597075, "user": "0x1ccb5cef2d364ee60eeae2b241ea7212d3c10287"}, {"amount": "1.222965910728957371", "id": "0x70720a30d56c86ddfb6ca2db7c29d97d4996803bcc50980fad7656669f4e8003:3", "timestamp": 1586226582, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "2620.871248808051514483", "id": "0x707bb86b19b4fdd2bd77ad6862482d3b785900ef1c30353cc9fdeb0cd7f68f5e:3", "timestamp": 1586751550, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "4210.017292042806180862", "id": "0x7872e20dd39f0becfefee91b82baa6cf712a4c53b4ba48677350d57423d34f88:3", "timestamp": 1587118798, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "400", "id": "0x7915b6fda2084a1ed1f02f56932eab2974ffc386e690a1dff3abf6fc73337297:3", "timestamp": 1587802545, "user": "0x94916eb6cc7532760cc3a2f4c586231a9c181e08"}, {"amount": "99.976803638964548846", "id": "0x7b2dd4c63fc817dddf5956ed28d0d995ca79170bd8010d3498a1145d6b72db3e:3", "timestamp": 1589401676, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "9616.873200431229991896", "id": "0x7f7942328a060bf2c7fe39fb175cde9ce97152790d97932d174cc21ce18fe75c:3", "timestamp": 1586147339, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "800.953734579825666299", "id": "0x86796ca132323cb591b935b442bf26b1d308022c29c0d48166c0b3d41b1c13a3:3", "timestamp": 1587904385, "user": "0xdfe89caf65862c5b46fa22e8fa2296e05bfcbf94"}, {"amount": "20.229156500731389828", "id": "0x86f215ceea158c0e57888f4edc63c55ec90def3b55e294879590aaa5f5cbd934:3", "timestamp": 1586219496, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "232.067165907404821146", "id": "0x87479d4e2bb62eefd19d452ed1db51ef23d9ddcd432caf2d2ac31d5698d6057a:3", "timestamp": 1587287979, "user": "0x614971bfe55692c315344008b0b7e0d35d586f19"}, {"amount": "76.665635186330623847", "id": "0x8893767bea42e179fdef30b660556e83dd8aa9d45af8f082bc2ea03191b7b9eb:3", "timestamp": 1587031976, "user": "0xdf60dcbb3bfcf9b4cd6e12359b27b6740013bbb5"}, {"amount": "911.894064860728088275", "id": "0x88f010155c04192f0ca493f7238bdcdfe76d349afdc4e9fb8d3f7aef789320e2:3", "timestamp": 1587474000, "user": "0xa0eed247316cc5a1aa92eec542fe0738cac8f08f"}, {"amount": "0.009989826931859192", "id": "0x8a266b3d79db06481140686ee49867efcf9b89a571b89379eaa544db8c1cf56f:3", "timestamp": 1586013665, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "20005.055927701724983315", "id": "0x8da3ec91e4ab9b10c97a8024efafed192ddf97244a41a3e3f388fae381a1b45a:3", "timestamp": 1587105266, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "400", "id": "0x8e5564d6f2dcc4e1eab19aacd574f98a8e799065712d1ad2c1c66ed64f146d37:3", "timestamp": 1587630468, "user": "0x94916eb6cc7532760cc3a2f4c586231a9c181e08"}, {"amount": "10696.850838151344707238", "id": "0x92413841a7b90aafa687a68cd7accf5f007de7a07227d81ae7561fec1859f3fc:3", "timestamp": 1587584184, "user": "0xd7925984736824b4aecf8301c9aee211cd976494"}, {"amount": "1", "id": "0x93a2d9a3a4d1cb536185402409316cf28de79bc4339bcb0dd20e3bb3597c81db:3", "timestamp": 1585235712, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "2378.736526716205726459", "id": "0x93c7fd3d86796e2619b623e4d188a4fe21bf3717225ba1e7e6c04f26778f8ef0:3", "timestamp": 1587488432, "user": "0x18393570148df516f664ef624d6ed9e808f109fb"}, {"amount": "14.560499742188828852", "id": 
"0x965ef893da670a598968a55ad1ae12c84f4219caf25ce26753d0940bb8965cc9:3", "timestamp": 1587136971, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "400", "id": "0x96759734052913173bc1d18f0b07f77c92a8954e66b806d15de18813a0685c21:3", "timestamp": 1587370720, "user": "0x94916eb6cc7532760cc3a2f4c586231a9c181e08"}, {"amount": "103.457156502182610595", "id": "0x975b459165c06c2d52c41c813dc28b917ced4775c65318ea2939644e918b0db9:3", "timestamp": 1586609107, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "148.849434555744345229", "id": "0x975ffb2c5edcc1f327e7060849f280af47690e2f41a0b9a3258eaf2b8146bd6a:3", "timestamp": 1587516441, "user": "0xf9ca61b2c3938d3c5ec8fa669622240a7a0cb316"}, {"amount": "5000.126381263206200437", "id": "0x97e8d3881924642558eb51c61cbc757b65627e0a67f10555cb6765c2bb81e039:3", "timestamp": 1587130493, "user": "0x6bbe2c5c7e455d64d0617d35b8c0be0e0c3fa69f"}, {"amount": "14.573939463741177186", "id": "0x9b85955b169c8f3b40afc803e64280fe3cd48f635698ec88351c68722d0e3441:3", "timestamp": 1587332361, "user": "0xaee3653c1a73c2c25ca0aa2cd115f0c20ee8eb8d"}, {"amount": "799.740119381706544196", "id": "0x9eefd12d6003c9f1fdeddc30af7761f52a5251ac4e17fa197f1faded31bcd9b9:3", "timestamp": 1588264802, "user": "0xfe17d82f515cf845d206440fbdc84317281ea8f8"}, {"amount": "400", "id": "0x9f2984cfa74b7d45f3ddceaf5e31644da3c6348adfbf1cae8a3639c738df1124:3", "timestamp": 1587280067, "user": "0x94916eb6cc7532760cc3a2f4c586231a9c181e08"}, {"amount": "0.03150001981138043", "id": "0xa174dcd8db96b4e1bdace16227d641b26196ceb226af0a94af1de60a88688682:3", "timestamp": 1590458126, "user": "0x738b0f21f439ed3e1dc966556db7ba7ed0e9e50d"}, {"amount": "10000", "id": "0xa17eda60a538b2cbb847a932e81f0984602a64f6b6333c75a32ee7c5cf67795e:3", "timestamp": 1587119843, "user": "0x6bbe2c5c7e455d64d0617d35b8c0be0e0c3fa69f"}, {"amount": "25946.236231767152234308", "id": "0xa60d1dcdd8de6c598c4f51de5f0a87024736490b21b0a4a182bd82be83c24f20:3", "timestamp": 1587105120, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "6771.597187257178573823", "id": "0xab598d2955bb75314ca6059387ed493115b87f055f85f483a75e68b0f87a72a1:3", "timestamp": 1587119024, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "8.047598767839628941", "id": "0xad4877893c2a60b8b5e95b099aa00e4687776cce6fb8e12324a7df8bf1e30d4c:3", "timestamp": 1585231129, "user": "0xdad4c11e8cc6a5c37808d3b31b3b284809f702d1"}, {"amount": "9359.536063590340216352", "id": "0xb786e22c38ee6c412753beb1291a8d0da8dfbc228079615f17d2f457f94ed964:3", "timestamp": 1588078684, "user": "0xffaccef20064fceae070107e8e4821a7fa981bb5"}, {"amount": "429795.663919181507360953", "id": "0xb7f53dcd617348cf4b9dbae23fe7b3e857f44ae2456d5804f97a18496b6e6a0b:3", "timestamp": 1587448213, "user": "0xa9bf70a420d364e923c74448d9d817d3f2a77822"}, {"amount": "0.000000000000001174", "id": "0xbe6c7029223558c56053a5ad921345e16ac308162f8f2b1096d1b895d88eee49:3", "timestamp": 1585236089, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "5816.410102611279486239", "id": "0xc0cc89ae7404522e8e78d50d2ebfd2eec0f3d299e6a070d06ee62530c38c497b:3", "timestamp": 1586683960, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "10020.953166032759867437", "id": "0xc2005ae8d081578a5c598ebf8c82913a6f78c3f74f60f778fa4192be799c4662:3", "timestamp": 1587755352, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "901.01", "id": "0xc275c8bf3b0a06f9d861423da95f62307824915c9d104f7a27a183ef9e0d20ed:3", "timestamp": 1587228015, "user": 
"0x079e810fb5b21bb8271b12cd229b0cda888bd7bf"}, {"amount": "801.555753830346312196", "id": "0xc38c93f9744527bc4163e0056d2c0c93eadb67d934120260d2e77e49ecd2f394:3", "timestamp": 1587884979, "user": "0x94916eb6cc7532760cc3a2f4c586231a9c181e08"}, {"amount": "10699.58921551851395395", "id": "0xc79364bdd591bb767e3de71f4fd6a208fd5a40976257b9596f1a10dfa1dc3126:3", "timestamp": 1587164162, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "1.430039377840427284", "id": "0xc97b36d3403fde1578b5ef68f93ce765b08b1241070b70fc2d6eb3686e51849f:3", "timestamp": 1586226237, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "12.487942878039132653", "id": "0xca7538502f04bbce07327fa9dd10855decc1e35780cc179bfa6dcf349f39ead1:3", "timestamp": 1585230714, "user": "0xdad4c11e8cc6a5c37808d3b31b3b284809f702d1"}, {"amount": "6.04454472399733765", "id": "0xcaeefee457c0e3aa9ac74744163fee2d7c3c99e9aa450c5622f4daf743c44015:3", "timestamp": 1589570488, "user": "0x17c356fc6b70f9f5ca99652c8360e3657334ddf2"}, {"amount": "508.987803798360214179", "id": "0xcbe2faed42aba949113c2f0498beb822b3ee57ea8c62df3d3d412cada91ad7aa:3", "timestamp": 1587017133, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "2172.105299513051196289", "id": "0xd0a13b7f01924a73f59468557cf1739c2ec0fc4bdfc3cdaad3d92a102026ed0e:3", "timestamp": 1585810724, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "62.213445672865845389", "id": "0xd3b5bc36d45b33289b3d2ac0fad8e386430b3a0aac8644a20a4238f1fa0cd9b7:3", "timestamp": 1587677546, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "1018.181554126022187955", "id": "0xd55e82bf0d0120b89352b042f86a5b89069e26c2f5ff3430f98a1ce537989dd4:3", "timestamp": 1587579909, "user": "0xa10fbdb5c305e0daa937667b7ff1fb5835f6edda"}, {"amount": "0.066850754323609168", "id": "0xd5796ec20609178b2f423a3402569d6566723d1971864c43132e5d3e811d51d5:3", "timestamp": 1586219842, "user": "0x04bc0ab673d88ae9dbc9da2380cb6b79c4bca9ae"}, {"amount": "102.445206800030143653", "id": "0xd5d5b466e1892b3126d4e41d70e58bef261936f3249daa2f0db07dd2e10a4b25:3", "timestamp": 1589186003, "user": "0xdf60dcbb3bfcf9b4cd6e12359b27b6740013bbb5"}, {"amount": "400", "id": "0xd5ee49f28fe11b682b4da7cdaf7873ea6822e1cc02ecf18a10a2}
+ to: "0x84671C70fE41Ef5C16BC4F225bFAe2fD362aC65c"]
+
+ ## generate a reading and upload the amounts to the address "0x84671C70fE41Ef5C16BC4F225bFAe2fD362aC65c"
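A minimal sketch of the "read the amounts" step described in the line above, assuming the BUSD payload is saved to a local JSON file (the filename is a placeholder; this only sums the recorded amounts, it does not transfer anything):

import json
from decimal import Decimal

def total_redeemed(path="busd_redeems.json"):
    # Assumed file: the {"symbol": "BUSD", "redeems": [...]} object shown above
    with open(path) as f:
        payload = json.load(f)
    # Decimal preserves the long fractional amounts exactly
    return sum(Decimal(r["amount"]) for r in payload["redeems"])

if __name__ == "__main__":
    print("total redeemed:", total_redeemed())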
utils.py ADDED
@@ -0,0 +1,554 @@
1
+ """
2
+ coding=utf-8
3
+ Copyright 2018, Antonio Mendoza Hao Tan, Mohit Bansal, Huggingface team :)
4
+ Adapted From Facebook Inc, Detectron2
5
+
6
+ Licensed under the Apache License, Version 2.0 (the "License");
7
+ you may not use this file except in compliance with the License.
8
+ You may obtain a copy of the License at
9
+
10
+ http://www.apache.org/licenses/LICENSE-2.0
11
+
12
+ Unless required by applicable law or agreed to in writing, software
13
+ distributed under the License is distributed on an "AS IS" BASIS,
14
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15
+ See the License for the specific language governing permissions and
16
+ limitations under the License.
17
+ """
18
+
19
+ import copy
20
+ import fnmatch
21
+ import json
22
+ import os
23
+ import pickle as pkl
24
+ import shutil
25
+ import sys
26
+ import tarfile
27
+ import tempfile
28
+ from collections import OrderedDict
29
+ from contextlib import contextmanager
30
+ from functools import partial
31
+ from io import BytesIO
32
+ from pathlib import Path
33
+ from urllib.parse import urlparse
34
+ from zipfile import ZipFile, is_zipfile
35
+
36
+ import cv2
37
+ import numpy as np
38
+ import requests
39
+ import wget
40
+ from filelock import FileLock
41
+ from huggingface_hub.utils import insecure_hashlib
42
+ from PIL import Image
43
+ from tqdm.auto import tqdm
44
+ from yaml import Loader, dump, load
45
+
46
+
47
+ try:
48
+ import torch
49
+
50
+ _torch_available = True
51
+ except ImportError:
52
+ _torch_available = False
53
+
54
+
55
+ try:
56
+ from torch.hub import _get_torch_home
57
+
58
+ torch_cache_home = _get_torch_home()
59
+ except ImportError:
60
+ torch_cache_home = os.path.expanduser(
61
+ os.getenv("TORCH_HOME", os.path.join(os.getenv("XDG_CACHE_HOME", "~/.cache"), "torch"))
62
+ )
63
+
64
+ default_cache_path = os.path.join(torch_cache_home, "transformers")
65
+
66
+ CLOUDFRONT_DISTRIB_PREFIX = "https://cdn.huggingface.co"
67
+ S3_BUCKET_PREFIX = "https://s3.amazonaws.com/models.huggingface.co/bert"
68
+ PATH = "/".join(str(Path(__file__).resolve()).split("/")[:-1])
69
+ CONFIG = os.path.join(PATH, "config.yaml")
70
+ ATTRIBUTES = os.path.join(PATH, "attributes.txt")
71
+ OBJECTS = os.path.join(PATH, "objects.txt")
72
+ PYTORCH_PRETRAINED_BERT_CACHE = os.getenv("PYTORCH_PRETRAINED_BERT_CACHE", default_cache_path)
73
+ PYTORCH_TRANSFORMERS_CACHE = os.getenv("PYTORCH_TRANSFORMERS_CACHE", PYTORCH_PRETRAINED_BERT_CACHE)
74
+ TRANSFORMERS_CACHE = os.getenv("TRANSFORMERS_CACHE", PYTORCH_TRANSFORMERS_CACHE)
75
+ WEIGHTS_NAME = "pytorch_model.bin"
76
+ CONFIG_NAME = "config.yaml"
77
+
78
+
79
+ def load_labels(objs=OBJECTS, attrs=ATTRIBUTES):
80
+ vg_classes = []
81
+ with open(objs) as f:
82
+ for object in f.readlines():
83
+ vg_classes.append(object.split(",")[0].lower().strip())
84
+
85
+ vg_attrs = []
86
+ with open(attrs) as f:
87
+ for object in f.readlines():
88
+ vg_attrs.append(object.split(",")[0].lower().strip())
89
+ return vg_classes, vg_attrs
90
+
91
+
92
+ def load_checkpoint(ckp):
93
+ r = OrderedDict()
94
+ with open(ckp, "rb") as f:
95
+ ckp = pkl.load(f)["model"]
96
+ for k in copy.deepcopy(list(ckp.keys())):
97
+ v = ckp.pop(k)
98
+ if isinstance(v, np.ndarray):
99
+ v = torch.tensor(v)
100
+ else:
101
+ assert isinstance(v, torch.Tensor), type(v)
102
+ r[k] = v
103
+ return r
104
+
105
+
106
+ class Config:
107
+ _pointer = {}
108
+
109
+ def __init__(self, dictionary: dict, name: str = "root", level=0):
110
+ self._name = name
111
+ self._level = level
112
+ d = {}
113
+ for k, v in dictionary.items():
114
+ if v is None:
115
+ raise ValueError()
116
+ k = copy.deepcopy(k)
117
+ v = copy.deepcopy(v)
118
+ if isinstance(v, dict):
119
+ v = Config(v, name=k, level=level + 1)
120
+ d[k] = v
121
+ setattr(self, k, v)
122
+
123
+ self._pointer = d
124
+
125
+ def __repr__(self):
126
+ return str(list((self._pointer.keys())))
127
+
128
+ def __setattr__(self, key, val):
129
+ self.__dict__[key] = val
130
+ self.__dict__[key.upper()] = val
131
+ levels = key.split(".")
132
+ last_level = len(levels) - 1
133
+ pointer = self._pointer
134
+ if len(levels) > 1:
135
+ for i, l in enumerate(levels):
136
+ if hasattr(self, l) and isinstance(getattr(self, l), Config):
137
+ setattr(getattr(self, l), ".".join(levels[i:]), val)
138
+ if l == last_level:
139
+ pointer[l] = val
140
+ else:
141
+ pointer = pointer[l]
142
+
143
+ def to_dict(self):
144
+ return self._pointer
145
+
146
+ def dump_yaml(self, data, file_name):
147
+ with open(f"{file_name}", "w") as stream:
148
+ dump(data, stream)
149
+
150
+ def dump_json(self, data, file_name):
151
+ with open(f"{file_name}", "w") as stream:
152
+ json.dump(data, stream)
153
+
154
+ @staticmethod
155
+ def load_yaml(config):
156
+ with open(config) as stream:
157
+ data = load(stream, Loader=Loader)
158
+ return data
159
+
160
+ def __str__(self):
161
+ t = " "
162
+ if self._name != "root":
163
+ r = f"{t * (self._level-1)}{self._name}:\n"
164
+ else:
165
+ r = ""
166
+ level = self._level
167
+ for i, (k, v) in enumerate(self._pointer.items()):
168
+ if isinstance(v, Config):
169
+ r += f"{t * (self._level)}{v}\n"
170
+ self._level += 1
171
+ else:
172
+ r += f"{t * (self._level)}{k}: {v} ({type(v).__name__})\n"
173
+ self._level = level
174
+ return r[:-1]
175
+
176
+ @classmethod
177
+ def from_pretrained(cls, pretrained_model_name_or_path: str, **kwargs):
178
+ config_dict, kwargs = cls.get_config_dict(pretrained_model_name_or_path, **kwargs)
179
+ return cls(config_dict)
180
+
181
+ @classmethod
182
+ def get_config_dict(cls, pretrained_model_name_or_path: str, **kwargs):
183
+ cache_dir = kwargs.pop("cache_dir", None)
184
+ force_download = kwargs.pop("force_download", False)
185
+ resume_download = kwargs.pop("resume_download", False)
186
+ proxies = kwargs.pop("proxies", None)
187
+ local_files_only = kwargs.pop("local_files_only", False)
188
+
189
+ if os.path.isdir(pretrained_model_name_or_path):
190
+ config_file = os.path.join(pretrained_model_name_or_path, CONFIG_NAME)
191
+ elif os.path.isfile(pretrained_model_name_or_path) or is_remote_url(pretrained_model_name_or_path):
192
+ config_file = pretrained_model_name_or_path
193
+ else:
194
+ config_file = hf_bucket_url(pretrained_model_name_or_path, filename=CONFIG_NAME, use_cdn=False)
195
+
196
+ try:
197
+ # Load from URL or cache if already cached
198
+ resolved_config_file = cached_path(
199
+ config_file,
200
+ cache_dir=cache_dir,
201
+ force_download=force_download,
202
+ proxies=proxies,
203
+ resume_download=resume_download,
204
+ local_files_only=local_files_only,
205
+ )
206
+ # Load config dict
207
+ if resolved_config_file is None:
208
+ raise EnvironmentError
209
+
210
+ config_file = Config.load_yaml(resolved_config_file)
211
+
212
+ except EnvironmentError:
213
+ msg = "Can't load config for"
214
+ raise EnvironmentError(msg)
215
+
216
+ if resolved_config_file == config_file:
217
+ print("loading configuration file from path")
218
+ else:
219
+ print("loading configuration file cache")
220
+
221
+ return Config.load_yaml(resolved_config_file), kwargs
222
+
223
+
224
+ # quick compare tensors
225
+ def compare(in_tensor):
226
+ out_tensor = torch.load("dump.pt", map_location=in_tensor.device)
227
+ n1 = in_tensor.numpy()
228
+ n2 = out_tensor.numpy()[0]
229
+ print(n1.shape, n1[0, 0, :5])
230
+ print(n2.shape, n2[0, 0, :5])
231
+ assert np.allclose(n1, n2, rtol=0.01, atol=0.1), (
232
+ f"{sum([1 for x in np.isclose(n1, n2, rtol=0.01, atol=0.1).flatten() if x is False])/len(n1.flatten())*100:.4f} %"
233
+ " element-wise mismatch"
234
+ )
235
+ raise Exception("tensors are all good")
236
+
237
+ # Hugging face functions below
238
+
239
+
240
+ def is_remote_url(url_or_filename):
241
+ parsed = urlparse(url_or_filename)
242
+ return parsed.scheme in ("http", "https")
243
+
244
+
245
+ def hf_bucket_url(model_id: str, filename: str, use_cdn=True) -> str:
246
+ endpoint = CLOUDFRONT_DISTRIB_PREFIX if use_cdn else S3_BUCKET_PREFIX
247
+ legacy_format = "/" not in model_id
248
+ if legacy_format:
249
+ return f"{endpoint}/{model_id}-{filename}"
250
+ else:
251
+ return f"{endpoint}/{model_id}/{filename}"
252
+
253
+
254
+ def http_get(
255
+ url,
256
+ temp_file,
257
+ proxies=None,
258
+ resume_size=0,
259
+ user_agent=None,
260
+ ):
261
+ ua = "python/{}".format(sys.version.split()[0])
262
+ if _torch_available:
263
+ ua += "; torch/{}".format(torch.__version__)
264
+ if isinstance(user_agent, dict):
265
+ ua += "; " + "; ".join("{}/{}".format(k, v) for k, v in user_agent.items())
266
+ elif isinstance(user_agent, str):
267
+ ua += "; " + user_agent
268
+ headers = {"user-agent": ua}
269
+ if resume_size > 0:
270
+ headers["Range"] = "bytes=%d-" % (resume_size,)
271
+ response = requests.get(url, stream=True, proxies=proxies, headers=headers)
272
+ if response.status_code == 416: # Range not satisfiable
273
+ return
274
+ content_length = response.headers.get("Content-Length")
275
+ total = resume_size + int(content_length) if content_length is not None else None
276
+ progress = tqdm(
277
+ unit="B",
278
+ unit_scale=True,
279
+ total=total,
280
+ initial=resume_size,
281
+ desc="Downloading",
282
+ )
283
+ for chunk in response.iter_content(chunk_size=1024):
284
+ if chunk: # filter out keep-alive new chunks
285
+ progress.update(len(chunk))
286
+ temp_file.write(chunk)
287
+ progress.close()
288
+
289
+
290
+ def get_from_cache(
291
+ url,
292
+ cache_dir=None,
293
+ force_download=False,
294
+ proxies=None,
295
+ etag_timeout=10,
296
+ resume_download=False,
297
+ user_agent=None,
298
+ local_files_only=False,
299
+ ):
300
+ if cache_dir is None:
301
+ cache_dir = TRANSFORMERS_CACHE
302
+ if isinstance(cache_dir, Path):
303
+ cache_dir = str(cache_dir)
304
+
305
+ os.makedirs(cache_dir, exist_ok=True)
306
+
307
+ etag = None
308
+ if not local_files_only:
309
+ try:
310
+ response = requests.head(url, allow_redirects=True, proxies=proxies, timeout=etag_timeout)
311
+ if response.status_code == 200:
312
+ etag = response.headers.get("ETag")
313
+ except (EnvironmentError, requests.exceptions.Timeout):
314
+ # etag is already None
315
+ pass
316
+
317
+ filename = url_to_filename(url, etag)
318
+
319
+ # get cache path to put the file
320
+ cache_path = os.path.join(cache_dir, filename)
321
+
322
+ # etag is None = we don't have a connection, or url doesn't exist, or is otherwise inaccessible.
323
+ # try to get the last downloaded one
324
+ if etag is None:
325
+ if os.path.exists(cache_path):
326
+ return cache_path
327
+ else:
328
+ matching_files = [
329
+ file
330
+ for file in fnmatch.filter(os.listdir(cache_dir), filename + ".*")
331
+ if not file.endswith(".json") and not file.endswith(".lock")
332
+ ]
333
+ if len(matching_files) > 0:
334
+ return os.path.join(cache_dir, matching_files[-1])
335
+ else:
336
+ # If files cannot be found and local_files_only=True,
337
+ # the models might've been found if local_files_only=False
338
+ # Notify the user about that
339
+ if local_files_only:
340
+ raise ValueError(
341
+ "Cannot find the requested files in the cached path and outgoing traffic has been"
342
+ " disabled. To enable model look-ups and downloads online, set 'local_files_only'"
343
+ " to False."
344
+ )
345
+ return None
346
+
347
+ # From now on, etag is not None.
348
+ if os.path.exists(cache_path) and not force_download:
349
+ return cache_path
350
+
351
+ # Prevent parallel downloads of the same file with a lock.
352
+ lock_path = cache_path + ".lock"
353
+ with FileLock(lock_path):
354
+ # If the download just completed while the lock was activated.
355
+ if os.path.exists(cache_path) and not force_download:
356
+ # Even if returning early like here, the lock will be released.
357
+ return cache_path
358
+
359
+ if resume_download:
360
+ incomplete_path = cache_path + ".incomplete"
361
+
362
+ @contextmanager
363
+ def _resumable_file_manager():
364
+ with open(incomplete_path, "a+b") as f:
365
+ yield f
366
+
367
+ temp_file_manager = _resumable_file_manager
368
+ if os.path.exists(incomplete_path):
369
+ resume_size = os.stat(incomplete_path).st_size
370
+ else:
371
+ resume_size = 0
372
+ else:
373
+ temp_file_manager = partial(tempfile.NamedTemporaryFile, dir=cache_dir, delete=False)
374
+ resume_size = 0
375
+
376
+ # Download to temporary file, then copy to cache dir once finished.
377
+ # Otherwise you get corrupt cache entries if the download gets interrupted.
378
+ with temp_file_manager() as temp_file:
379
+ print(
380
+ "%s not found in cache or force_download set to True, downloading to %s",
381
+ url,
382
+ temp_file.name,
383
+ )
384
+
385
+ http_get(
386
+ url,
387
+ temp_file,
388
+ proxies=proxies,
389
+ resume_size=resume_size,
390
+ user_agent=user_agent,
391
+ )
392
+
393
+ os.replace(temp_file.name, cache_path)
394
+
395
+ meta = {"url": url, "etag": etag}
396
+ meta_path = cache_path + ".json"
397
+ with open(meta_path, "w") as meta_file:
398
+ json.dump(meta, meta_file)
399
+
400
+ return cache_path
401
+
402
+
403
+ def url_to_filename(url, etag=None):
404
+ url_bytes = url.encode("utf-8")
405
+ url_hash = insecure_hashlib.sha256(url_bytes)
406
+ filename = url_hash.hexdigest()
407
+
408
+ if etag:
409
+ etag_bytes = etag.encode("utf-8")
410
+ etag_hash = insecure_hashlib.sha256(etag_bytes)
411
+ filename += "." + etag_hash.hexdigest()
412
+
413
+ if url.endswith(".h5"):
414
+ filename += ".h5"
415
+
416
+ return filename
417
+
418
+
419
+ def cached_path(
420
+ url_or_filename,
421
+ cache_dir=None,
422
+ force_download=False,
423
+ proxies=None,
424
+ resume_download=False,
425
+ user_agent=None,
426
+ extract_compressed_file=False,
427
+ force_extract=False,
428
+ local_files_only=False,
429
+ ):
430
+ if cache_dir is None:
431
+ cache_dir = TRANSFORMERS_CACHE
432
+ if isinstance(url_or_filename, Path):
433
+ url_or_filename = str(url_or_filename)
434
+ if isinstance(cache_dir, Path):
435
+ cache_dir = str(cache_dir)
436
+
437
+ if is_remote_url(url_or_filename):
438
+ # URL, so get it from the cache (downloading if necessary)
439
+ output_path = get_from_cache(
440
+ url_or_filename,
441
+ cache_dir=cache_dir,
442
+ force_download=force_download,
443
+ proxies=proxies,
444
+ resume_download=resume_download,
445
+ user_agent=user_agent,
446
+ local_files_only=local_files_only,
447
+ )
448
+ elif os.path.exists(url_or_filename):
449
+ # File, and it exists.
450
+ output_path = url_or_filename
451
+ elif urlparse(url_or_filename).scheme == "":
452
+ # File, but it doesn't exist.
453
+ raise EnvironmentError("file {} not found".format(url_or_filename))
454
+ else:
455
+ # Something unknown
456
+ raise ValueError("unable to parse {} as a URL or as a local path".format(url_or_filename))
457
+
458
+ if extract_compressed_file:
459
+ if not is_zipfile(output_path) and not tarfile.is_tarfile(output_path):
460
+ return output_path
461
+
462
+ # Path where we extract compressed archives
463
+ # We avoid '.' in dir name and add "-extracted" at the end: "./model.zip" => "./model-zip-extracted/"
464
+ output_dir, output_file = os.path.split(output_path)
465
+ output_extract_dir_name = output_file.replace(".", "-") + "-extracted"
466
+ output_path_extracted = os.path.join(output_dir, output_extract_dir_name)
467
+
468
+ if os.path.isdir(output_path_extracted) and os.listdir(output_path_extracted) and not force_extract:
469
+ return output_path_extracted
470
+
471
+ # Prevent parallel extractions
472
+ lock_path = output_path + ".lock"
473
+ with FileLock(lock_path):
474
+ shutil.rmtree(output_path_extracted, ignore_errors=True)
475
+ os.makedirs(output_path_extracted)
476
+ if is_zipfile(output_path):
477
+ with ZipFile(output_path, "r") as zip_file:
478
+ zip_file.extractall(output_path_extracted)
479
+ zip_file.close()
480
+ elif tarfile.is_tarfile(output_path):
481
+ tar_file = tarfile.open(output_path)
482
+ tar_file.extractall(output_path_extracted)
483
+ tar_file.close()
484
+ else:
485
+ raise EnvironmentError("Archive format of {} could not be identified".format(output_path))
486
+
487
+ return output_path_extracted
488
+
489
+ return output_path
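For illustration, a hedged example of calling cached_path on a remote file (the URL is a placeholder; local paths are returned unchanged):

# Downloads into TRANSFORMERS_CACHE on first use, then reuses the cached copy.
local_file = cached_path(
    "https://example.com/config.yaml",  # placeholder URL
    force_download=False,
)
print(local_file)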
490
+
491
+
492
+ def get_data(query, delim=","):
493
+ assert isinstance(query, str)
494
+ if os.path.isfile(query):
495
+ with open(query) as f:
496
+ data = eval(f.read())
497
+ else:
498
+ req = requests.get(query)
499
+ try:
500
+ data = req.json()
501
+ except Exception:
502
+ data = req.content.decode()
503
+ assert data is not None, "could not connect"
504
+ try:
505
+ data = eval(data)
506
+ except Exception:
507
+ data = data.split("\n")
508
+ req.close()
509
+ return data
510
+
511
+
512
+ def get_image_from_url(url):
513
+ response = requests.get(url)
514
+ img = np.array(Image.open(BytesIO(response.content)))
515
+ return img
516
+
517
+
518
+ # to load legacy frcnn checkpoint from detectron
519
+ def load_frcnn_pkl_from_url(url):
520
+ fn = url.split("/")[-1]
521
+ if fn not in os.listdir(os.getcwd()):
522
+ wget.download(url)
523
+ with open(fn, "rb") as stream:
524
+ weights = pkl.load(stream)
525
+ model = weights.pop("model")
526
+ new = {}
527
+ for k, v in model.items():
528
+ new[k] = torch.from_numpy(v)
529
+ if "running_var" in k:
530
+ zero = torch.tensor([0])
531
+ k2 = k.replace("running_var", "num_batches_tracked")
532
+ new[k2] = zero
533
+ return new
534
+
535
+
536
+ def get_demo_path():
537
+ print(f"{os.path.abspath(os.path.join(PATH, os.pardir))}/demo.ipynb")
538
+
539
+
540
+ def img_tensorize(im, input_format="RGB"):
541
+ assert isinstance(im, str)
542
+ if os.path.isfile(im):
543
+ img = cv2.imread(im)
544
+ else:
545
+ img = get_image_from_url(im)
546
+ assert img is not None, f"could not connect to: {im}"
547
+ img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
548
+ if input_format == "RGB":
549
+ img = img[:, :, ::-1]
550
+ return img
551
+
552
+
553
+ def chunk(images, batch=1):
554
+ return (images[i : i + batch] for i in range(0, len(images), batch))
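A hedged usage sketch for a few of the helpers defined above (the config identifier and image URL are placeholders, and the module is assumed to be importable as `utils`):

from utils import Config, img_tensorize, chunk  # assumed module name

config = Config.from_pretrained("path/or/model-id")   # resolves config.yaml via cached_path
img = img_tensorize("https://example.com/image.jpg")  # downloads the image and converts it to an array
for batch in chunk([img], batch=1):                   # yields slices of at most `batch` images
    print(len(batch), batch[0].shape)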