Geraldine committed on
Commit
16a686b
1 Parent(s): 010036b

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +8 -5
README.md CHANGED
@@ -16,14 +16,14 @@ The parameters passed in the url request are :
16
  - fq=abstract_s:[%22%22%20TO%20*]
17
  - fq=domain_s:*shs*
18
  - fq=publicationDateY_i:[2013%20TO%202023]
19
- - fl=doiId_s,uri_s,title_s,subTitle_s,authFullName_s,producedDate_s,journalTitle_s,journalPublisher_s,abstract_s,fr_keyword_s,openAccess_bool,submitType_s
20
 
21
  The embeddings corpus hal_embeddings.pkl stores the embeddings of the "combined" column values converted in vectors with the sentence-transformers/all-MiniLM-L6-v2 embeddings model.
22
 
23
  ## Metadata extraction
24
 
25
  ```
26
- url = ""https://api.archives-ouvertes.fr/search/UNIV-COTEDAZUR/?q=docType_s:ART&fq=abstract_s:[%22%22%20TO%20*]&fq=domain_s:*shs*&fq=publicationDateY_i:[2013%20TO%202023]&fl=doiId_s,uri_s,title_s,subTitle_s,authFullName_s,producedDate_s,journalTitle_s,journalPublisher_s,abstract_s,fr_keyword_s,openAccess_bool,submitType_s"
27
 
28
  # Get the total number of records
29
  url_for_total_count = f"{url}&wt=json&rows=0"
@@ -31,7 +31,7 @@ response = requests.request("GET", url_for_total_count).text
31
  data = json.loads(response)
32
  total_count = data["response"]["numFound"]
33
  print(total_count)
34
- # return 3601
35
 
36
  # Loop over the records and get metadata
37
  step = 500
@@ -43,10 +43,13 @@ for i in range(1, int(total_count), int(step)):
43
  df = pd.concat(appended_data)
44
 
45
  # dedup
46
- df = appended_data.drop_duplicates(subset=['uri_s'])
 
 
 
47
 
48
  df.shape
49
- # returns 2405
50
 
51
  # New column of concatenated textual data
52
  df["combined"] = df.title_s + ". " + df.subTitle_s + ". " +df.abstract_s
 
16
  - fq=abstract_s:[%22%22%20TO%20*]
17
  - fq=domain_s:*shs*
18
  - fq=publicationDateY_i:[2013%20TO%202023]
19
+ - fl=halId_s,doiId_s,uri_s,title_s,subTitle_s,authFullName_s,producedDate_s,journalTitle_s,journalPublisher_s,abstract_s,fr_keyword_s,openAccess_bool,submitType_s
20
 
21
  The embeddings corpus hal_embeddings.pkl stores the embeddings of the "combined" column values converted in vectors with the sentence-transformers/all-MiniLM-L6-v2 embeddings model.
22
 
23
  ## Metadata extraction
24
 
25
  ```
26
+ url = "https://api.archives-ouvertes.fr/search/UNIV-COTEDAZUR/?q=docType_s:ART&fq=abstract_s:[%22%22%20TO%20*]&fq=domain_s:*shs*&fq=publicationDateY_i:[2013%20TO%202023]&fl=halId_s,doiId_s,uri_s,title_s,subTitle_s,authFullName_s,producedDate_s,journalTitle_s,journalPublisher_s,abstract_s,fr_keyword_s,openAccess_bool,submitType_s"
27
 
28
  # Get the total number of records
29
  url_for_total_count = f"{url}&wt=json&rows=0"
 
31
  data = json.loads(response)
32
  total_count = data["response"]["numFound"]
33
  print(total_count)
34
+ # return 3613
35
 
36
  # Loop over the records and get metadata
37
  step = 500
 
43
  df = pd.concat(appended_data)
44
 
45
  # dedup
46
+ df = appended_data.drop_duplicates(subset=['halId_s'])
47
+
48
+ # clean date
49
+ df["producedDate_s"] = df["producedDate_s"].apply(lambda x: str(x)[0:4])
50
 
51
  df.shape
52
+ # returns 2652
53
 
54
  # New column of concatenated textual data
55
  df["combined"] = df.title_s + ". " + df.subTitle_s + ". " +df.abstract_s