grobidHome: /opt/grobid/grobid-home
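# CORS settings for browser clients (e.g., the bundled demo UI). The wildcard
# origin below allows requests from any host; tighten it if the service is
# exposed publicly.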
corsAllowedOrigins: "*"
corsAllowedMethods: "OPTIONS,GET,PUT,POST,DELETE,HEAD"
corsAllowedHeaders: "X-Requested-With,Content-Type,Accept,Origin"
# Limit the maximum number of requests
maxParallelRequests: 0
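# NOTE: a value of 0 presumably leaves the request cap at the service default
# rather than enforcing a hard limit.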
cleanlpModelPath: "/opt/grobid/grobid-quantities/resources/cleanlp/models"
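# Each model below (quantities, units, values) can be run with either the
# Wapiti CRF engine or the DeLFT deep-learning engine; `engine` selects the
# active one, and the settings of the inactive engine are kept for reference.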
models:
  - name: "quantities"
    # engine: "wapiti"
    engine: "delft"
    wapiti:
      # Wapiti training parameters (used only at training time)
      epsilon: 0.00001
      window: 30
      nbMaxIterations: 2000
    delft:
      # deep learning parameters
      # architecture: "BidLSTM_CRF"
      architecture: "BERT_CRF"
      useELMo: false
      embeddings_name: "glove-840B"
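# Assumption: with a transformer architecture such as BERT_CRF, DeLFT likely
# ignores `embeddings_name`/`useELMo`; those settings apply to the RNN-based
# architectures (e.g., BidLSTM_CRF).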
  - name: "units"
    engine: "wapiti"
    # engine: "delft"
    wapiti:
      # Wapiti training parameters (used only at training time)
      epsilon: 0.00001
      window: 30
      nbMaxIterations: 2000
    delft:
      # deep learning parameters
      architecture: "BidLSTM_CRF_FEATURES"
      # architecture: "scibert"
      useELMo: false
      embeddings_name: "glove-840B"
  - name: "values"
    engine: "wapiti"
    # engine: "delft"
    wapiti:
      # Wapiti training parameters (used only at training time)
      epsilon: 0.00001
      window: 30
      nbMaxIterations: 2000
    delft:
      # deep learning parameters
      architecture: "BidLSTM_CRF"
      # architecture: "scibert"
      useELMo: false
      embeddings_name: "glove-840B"
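# grobid-quantities runs on Dropwizard; the `views` block configures its
# Mustache view renderer (used by the demo web page), with template caching
# disabled here.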
views:
  .mustache:
    cache: false
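# Dropwizard server settings: the main REST API is served on port 8060
# (applicationConnectors) and the admin interface (healthchecks, metrics) on
# port 8061 (adminConnectors).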
server:
  type: custom
  idleTimeout: 120 seconds
  applicationConnectors:
    - type: http
      port: 8060
  adminConnectors:
    - type: http
      port: 8061
  registerDefaultExceptionMappers: false
  maxThreads: 2048
  maxQueuedRequests: 2048
  acceptQueueSize: 2048
logging:
  level: INFO
  appenders:
    - type: console
      threshold: INFO
      timeZone: UTC
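# Example call once the service is up (assuming the standard grobid-quantities
# REST endpoint; the path and form field may differ by version):
#   curl -X POST -F "text=The cable is 10 meters long." \
#        http://localhost:8060/service/processQuantityText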