| text (string, length 7–324k) | id (string, length 14–166) | metadata (dict) | __index_level_0__ (int64, 0–463) |
|---|---|---|---|
# (Tensorflow) MobileNet v3
**MobileNetV3** is a convolutional neural network that is designed for mobile phone CPUs. The network design includes the use of a [hard swish activation](https://paperswithcode.com/method/hard-swish) and [squeeze-and-excitation](https://paperswithcode.com/method/squeeze-and-excitation-bloc... | pytorch-image-models/docs/models/.templates/models/tf-mobilenet-v3.md/0 | {
"file_path": "pytorch-image-models/docs/models/.templates/models/tf-mobilenet-v3.md",
"repo_id": "pytorch-image-models",
"token_count": 3951
} | 164 |
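The hard-swish activation referenced in this description is simple enough to sketch directly. A minimal PyTorch version, assuming the usual definition h-swish(x) = x * relu6(x + 3) / 6 rather than the exact timm implementation:

```python
import torch
import torch.nn.functional as F


def hard_swish(x: torch.Tensor, inplace: bool = False) -> torch.Tensor:
    # h-swish(x) = x * relu6(x + 3) / 6 -- a cheap, piecewise-linear stand-in for swish
    inner = F.relu6(x + 3.0) / 6.0
    return x.mul_(inner) if inplace else x * inner
```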
# Data
[[autodoc]] timm.data.create_dataset
[[autodoc]] timm.data.create_loader
[[autodoc]] timm.data.create_transform
[[autodoc]] timm.data.resolve_data_config | pytorch-image-models/hfdocs/source/reference/data.mdx/0 | {
"file_path": "pytorch-image-models/hfdocs/source/reference/data.mdx",
"repo_id": "pytorch-image-models",
"token_count": 67
} | 165 |
import os
from typing import Optional
from .reader_image_folder import ReaderImageFolder
from .reader_image_in_tar import ReaderImageInTar
def create_reader(
name: str,
root: Optional[str] = None,
split: str = 'train',
**kwargs,
):
kwargs = {k: v for k, v in kwargs.items() if v is... | pytorch-image-models/timm/data/readers/reader_factory.py/0 | {
"file_path": "pytorch-image-models/timm/data/readers/reader_factory.py",
"repo_id": "pytorch-image-models",
"token_count": 694
} | 166 |
""" Activations (memory-efficient w/ custom autograd)
A collection of activation functions and modules with a common interface so that they can
easily be swapped. All have an `inplace` arg even if not used.
These activations are not compatible with jit scripting or ONNX export of the model; please use either
the JIT or bas... | pytorch-image-models/timm/layers/activations_me.py/0 | {
"file_path": "pytorch-image-models/timm/layers/activations_me.py",
"repo_id": "pytorch-image-models",
"token_count": 2598
} | 167 |
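The "memory-efficient w/ custom autograd" pattern named in this header saves only the activation input and recomputes what it needs in backward. A hedged sketch of the idea (a hypothetical `SwishMe`, not the actual timm code):

```python
import torch
from torch import nn


class SwishAutoFn(torch.autograd.Function):
    """Swish via a custom autograd Function: only the input tensor is saved for backward."""

    @staticmethod
    def forward(ctx, x):
        ctx.save_for_backward(x)
        return x * torch.sigmoid(x)

    @staticmethod
    def backward(ctx, grad_output):
        x, = ctx.saved_tensors
        sig = torch.sigmoid(x)
        # d/dx [x * sigmoid(x)] = sigmoid(x) * (1 + x * (1 - sigmoid(x)))
        return grad_output * sig * (1 + x * (1 - sig))


class SwishMe(nn.Module):
    def __init__(self, inplace: bool = False):
        super().__init__()  # `inplace` kept only for interface parity, unused

    def forward(self, x):
        return SwishAutoFn.apply(x)
```

As the header warns, custom Functions like this are the reason these modules do not work with jit scripting or ONNX export.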
""" NormAct (Normalizaiton + Activation Layer) Factory
Create norm + act combo modules that attempt to be backwards compatible with separate norm + act
isntances in models. Where these are used it will be possible to swap separate BN + act layers with
combined modules like IABN or EvoNorms.
Hacked together by / Copyr... | pytorch-image-models/timm/layers/create_norm_act.py/0 | {
"file_path": "pytorch-image-models/timm/layers/create_norm_act.py",
"repo_id": "pytorch-image-models",
"token_count": 1594
} | 168 |
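A norm + act combo of the kind this factory produces can be sketched as a BatchNorm2d subclass that applies its activation in the same forward call. This is a simplified, assumed interface, not the actual timm factory output:

```python
from torch import nn


class BatchNormAct2d(nn.BatchNorm2d):
    """BatchNorm2d with a fused activation, drop-in swappable for a separate BN + act pair."""

    def __init__(self, num_features, act_layer=nn.ReLU, apply_act=True, **kwargs):
        super().__init__(num_features, **kwargs)
        self.act = act_layer(inplace=True) if apply_act else nn.Identity()

    def forward(self, x):
        return self.act(super().forward(x))
```

Keeping norm and act in one module is what makes it possible to later swap the pair for combined blocks such as IABN or EvoNorms.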
""" Linear layer (alternate definition)
"""
import torch
import torch.nn.functional as F
from torch import nn as nn
class Linear(nn.Linear):
r"""Applies a linear transformation to the incoming data: :math:`y = xA^T + b`
Wraps torch.nn.Linear to support AMP + torchscript usage by manually casting
weight &... | pytorch-image-models/timm/layers/linear.py/0 | {
"file_path": "pytorch-image-models/timm/layers/linear.py",
"repo_id": "pytorch-image-models",
"token_count": 282
} | 169 |
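The manual casting this wrapper performs can be sketched as matching the weight/bias dtype to the (possibly autocast-lowered) input dtype; a hedged version, not the exact timm code:

```python
import torch
import torch.nn.functional as F
from torch import nn


class Linear(nn.Linear):
    """nn.Linear that casts weight/bias to the input dtype, e.g. for AMP + torchscript use."""

    def forward(self, input: torch.Tensor) -> torch.Tensor:
        bias = self.bias.to(dtype=input.dtype) if self.bias is not None else None
        return F.linear(input, self.weight.to(dtype=input.dtype), bias)
```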
""" Depthwise Separable Conv Modules
Basic DWS convs. Other variations of DWS exist with batch norm or activations between the
DW and PW convs such as the Depthwise modules in MobileNetV2 / EfficientNet and Xception.
Hacked together by / Copyright 2020 Ross Wightman
"""
from torch import nn as nn
from .create_conv2d... | pytorch-image-models/timm/layers/separable_conv.py/0 | {
"file_path": "pytorch-image-models/timm/layers/separable_conv.py",
"repo_id": "pytorch-image-models",
"token_count": 1138
} | 170 |
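A basic DWS block as described (a depthwise grouped conv followed by a pointwise 1x1 conv, with no norm/act in between) can be sketched as:

```python
from torch import nn


class SeparableConv2d(nn.Module):
    """Depthwise conv (groups == in_chs) followed by a pointwise 1x1 projection."""

    def __init__(self, in_chs, out_chs, kernel_size=3, stride=1):
        super().__init__()
        self.conv_dw = nn.Conv2d(
            in_chs, in_chs, kernel_size, stride=stride,
            padding=kernel_size // 2, groups=in_chs, bias=False)
        self.conv_pw = nn.Conv2d(in_chs, out_chs, kernel_size=1, bias=False)

    def forward(self, x):
        return self.conv_pw(self.conv_dw(x))
```

Class and argument names here are illustrative only; they do not mirror the full timm module signature.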
import dataclasses
import logging
import os
from copy import deepcopy
from typing import Optional, Dict, Callable, Any, Tuple
from torch import nn as nn
from torch.hub import load_state_dict_from_url
from timm.models._features import FeatureListNet, FeatureHookNet
from timm.models._features_fx import FeatureGraphNet
... | pytorch-image-models/timm/models/_builder.py/0 | {
"file_path": "pytorch-image-models/timm/models/_builder.py",
"repo_id": "pytorch-image-models",
"token_count": 7677
} | 171 |
""" Model Registry
Hacked together by / Copyright 2020 Ross Wightman
"""
import fnmatch
import re
import sys
import warnings
from collections import defaultdict, deque
from copy import deepcopy
from dataclasses import replace
from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Sequence, Union, Tuple... | pytorch-image-models/timm/models/_registry.py/0 | {
"file_path": "pytorch-image-models/timm/models/_registry.py",
"repo_id": "pytorch-image-models",
"token_count": 5428
} | 172 |
""" EdgeNeXt
Paper: `EdgeNeXt: Efficiently Amalgamated CNN-Transformer Architecture for Mobile Vision Applications`
- https://arxiv.org/abs/2206.10589
Original code and weights from https://github.com/mmaaz60/EdgeNeXt
Modifications and additions for timm by / Copyright 2022, Ross Wightman
"""
import math
from colle... | pytorch-image-models/timm/models/edgenext.py/0 | {
"file_path": "pytorch-image-models/timm/models/edgenext.py",
"repo_id": "pytorch-image-models",
"token_count": 11040
} | 173 |
""" PP-HGNet (V1 & V2)
Reference:
https://github.com/PaddlePaddle/PaddleClas/blob/develop/docs/zh_CN/models/ImageNet1k/PP-HGNetV2.md
The Paddle implementation of PP-HGNet (https://github.com/PaddlePaddle/PaddleClas/blob/r... | transformers-unused-placeholder
PP-HGNet: https://github.com/PaddlePaddle/PaddleClas/blob/r... | pytorch-image-models/timm/models/hgnet.py/0 | {
"file_path": "pytorch-image-models/timm/models/hgnet.py",
"repo_id": "pytorch-image-models",
"token_count": 13284
} | 174 |
""" Nested Transformer (NesT) in PyTorch
A PyTorch implementation of Aggregating Nested Transformers as described in:
'Aggregating Nested Transformers'
- https://arxiv.org/abs/2105.12723
The official Jax code is released and available at https://github.com/google-research/nested-transformer. The weights
have been con... | pytorch-image-models/timm/models/nest.py/0 | {
"file_path": "pytorch-image-models/timm/models/nest.py",
"repo_id": "pytorch-image-models",
"token_count": 10075
} | 175 |
"""
SEResNet implementation from Cadene's pretrained models
https://github.com/Cadene/pretrained-models.pytorch/blob/master/pretrainedmodels/models/senet.py
Additional credit to https://github.com/creafz
Original model: https://github.com/hujie-frank/SENet
ResNet code gently borrowed from
https://github.com/pytorch/v... | pytorch-image-models/timm/models/senet.py/0 | {
"file_path": "pytorch-image-models/timm/models/senet.py",
"repo_id": "pytorch-image-models",
"token_count": 8344
} | 176 |
""" Vision OutLOoker (VOLO) implementation
Paper: `VOLO: Vision Outlooker for Visual Recognition` - https://arxiv.org/abs/2106.13112
Code adapted from official impl at https://github.com/sail-sg/volo, original copyright in comment below
Modifications and additions for timm by / Copyright 2022, Ross Wightman
"""
# Co... | pytorch-image-models/timm/models/volo.py/0 | {
"file_path": "pytorch-image-models/timm/models/volo.py",
"repo_id": "pytorch-image-models",
"token_count": 15802
} | 177 |
""" PyTorch MADGRAD optimizer
MADGRAD: https://arxiv.org/abs/2101.11075
Code from: https://github.com/facebookresearch/madgrad
"""
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import ma... | pytorch-image-models/timm/optim/madgrad.py/0 | {
"file_path": "pytorch-image-models/timm/optim/madgrad.py",
"repo_id": "pytorch-image-models",
"token_count": 3505
} | 178 |
""" Step Scheduler
Basic step LR schedule with warmup, noise.
Hacked together by / Copyright 2020 Ross Wightman
"""
import math
import torch
from .scheduler import Scheduler
class StepLRScheduler(Scheduler):
"""
"""
def __init__(
self,
optimizer: torch.optim.Optimizer,
... | pytorch-image-models/timm/scheduler/step_lr.py/0 | {
"file_path": "pytorch-image-models/timm/scheduler/step_lr.py",
"repo_id": "pytorch-image-models",
"token_count": 939
} | 179 |
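What a step schedule with warmup computes can be written down independently of the `Scheduler` base class. A hedged, self-contained version of the per-step rule (linear warmup, then a fixed decay factor every `decay_t` steps) might be:

```python
def step_lr(t: int, base_lr: float, decay_t: int, decay_rate: float = 0.5,
            warmup_t: int = 0, warmup_lr_init: float = 1e-6) -> float:
    """LR at step/epoch t: linear warmup to base_lr, then stepwise decay."""
    if warmup_t and t < warmup_t:
        # interpolate linearly from warmup_lr_init up to base_lr over warmup_t steps
        return warmup_lr_init + t * (base_lr - warmup_lr_init) / warmup_t
    return base_lr * decay_rate ** (t // decay_t)
```

The function is only an illustration of the rule; the real scheduler also supports the LR noise mentioned in the module docstring.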
import random
import numpy as np
import torch
def random_seed(seed=42, rank=0):
torch.manual_seed(seed + rank)
np.random.seed(seed + rank)
random.seed(seed + rank)
| pytorch-image-models/timm/utils/random.py/0 | {
"file_path": "pytorch-image-models/timm/utils/random.py",
"repo_id": "pytorch-image-models",
"token_count": 68
} | 180 |
use std::time::{Duration, Instant};
use text_generation_client::{
Batch, CachedBatch, ClientError, NextTokenChooserParameters, Request, ShardedClient,
StoppingCriteriaParameters,
};
use tokenizers::{Tokenizer, TruncationDirection};
use tokio::sync::{broadcast, mpsc};
const LOREM_IPSUM: &str = "Lorem ipsum dolo... | text-generation-inference/benchmark/src/generation.rs/0 | {
"file_path": "text-generation-inference/benchmark/src/generation.rs",
"repo_id": "text-generation-inference",
"token_count": 3201
} | 181 |
import json
import requests
from aiohttp import ClientSession, ClientTimeout
from pydantic import ValidationError
from typing import Dict, Optional, List, AsyncIterator, Iterator, Union
from text_generation.types import (
StreamResponse,
Response,
Request,
Parameters,
Grammar,
ChatRequest,
... | text-generation-inference/clients/python/text_generation/client.py/0 | {
"file_path": "text-generation-inference/clients/python/text_generation/client.py",
"repo_id": "text-generation-inference",
"token_count": 14202
} | 182 |
# Quantization
TGI offers GPTQ and bits-and-bytes quantization to quantize large language models.
## Quantization with GPTQ
GPTQ is a post-training quantization method to make the model smaller. It quantizes the layers by finding a compressed version of each weight that yields a minimum mean squared error like ... | text-generation-inference/docs/source/conceptual/quantization.md/0 | {
"file_path": "text-generation-inference/docs/source/conceptual/quantization.md",
"repo_id": "text-generation-inference",
"token_count": 1109
} | 183 |
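The sentence above is cut off before the objective it refers to. As a hedged reconstruction, the standard layer-wise GPTQ objective (from the GPTQ paper, not necessarily the exact formula in the TGI docs) is:

```latex
\hat{W_\ell}^{*} \;=\; \operatorname*{arg\,min}_{\hat{W_\ell}} \, \lVert W_\ell X - \hat{W_\ell} X \rVert_2^2
```

where `W_l` is the original weight of layer `l`, `X` the layer inputs, and the hatted `W_l` its quantized counterpart.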
{
"details": {
"best_of_sequences": null,
"finish_reason": "length",
"generated_tokens": 10,
"prefill": [
{
"id": 1,
"logprob": null,
"text": "<s>"
},
{
"id": 4321,
"logprob": -8.6875,
"text": "Test"
},
{
"id": 2009,... | text-generation-inference/integration-tests/models/__snapshots__/test_flash_llama/test_flash_llama.json/0 | {
"file_path": "text-generation-inference/integration-tests/models/__snapshots__/test_flash_llama/test_flash_llama.json",
"repo_id": "text-generation-inference",
"token_count": 1050
} | 184 |
{
"details": {
"best_of_sequences": null,
"finish_reason": "length",
"generated_tokens": 10,
"prefill": [
{
"id": 14402,
"logprob": null,
"text": "Test"
},
{
"id": 2581,
"logprob": -11.6171875,
"text": " request"
}
],
"see... | text-generation-inference/integration-tests/models/__snapshots__/test_flash_phi/test_flash_phi.json/0 | {
"file_path": "text-generation-inference/integration-tests/models/__snapshots__/test_flash_phi/test_flash_phi.json",
"repo_id": "text-generation-inference",
"token_count": 1003
} | 185 |
[
{
"details": {
"best_of_sequences": null,
"finish_reason": "length",
"generated_tokens": 10,
"prefill": [
{
"id": 589,
"logprob": null,
"text": "def"
},
{
"id": 3226,
"logprob": -8.5859375,
"text": " ge"
... | text-generation-inference/integration-tests/models/__snapshots__/test_flash_starcoder_gptq/test_flash_starcoder_gptq_load.json/0 | {
"file_path": "text-generation-inference/integration-tests/models/__snapshots__/test_flash_starcoder_gptq/test_flash_starcoder_gptq_load.json",
"repo_id": "text-generation-inference",
"token_count": 7553
} | 186 |
import pytest
@pytest.fixture(scope="module")
def flash_medusa_handle(launcher):
with launcher(
"FasterDecoding/medusa-vicuna-7b-v1.3", num_shard=2, revision="refs/pr/1"
) as handle:
yield handle
@pytest.fixture(scope="module")
async def flash_medusa(flash_medusa_handle):
await flash_med... | text-generation-inference/integration-tests/models/test_flash_medusa.py/0 | {
"file_path": "text-generation-inference/integration-tests/models/test_flash_medusa.py",
"repo_id": "text-generation-inference",
"token_count": 749
} | 187 |
import pytest
@pytest.fixture(scope="module")
def neox_sharded_handle(launcher):
with launcher(
"OpenAssistant/oasst-sft-1-pythia-12b", num_shard=2, use_flash_attention=False
) as handle:
yield handle
@pytest.fixture(scope="module")
async def neox_sharded(neox_sharded_handle):
await neox... | text-generation-inference/integration-tests/models/test_neox_sharded.py/0 | {
"file_path": "text-generation-inference/integration-tests/models/test_neox_sharded.py",
"repo_id": "text-generation-inference",
"token_count": 507
} | 188 |
[package]
name = "text-generation-router"
description = "Text Generation Webserver"
build = "build.rs"
version.workspace = true
edition.workspace = true
authors.workspace = true
homepage.workspace = true
[lib]
path = "src/lib.rs"
[[bin]]
name = "text-generation-router"
path = "src/main.rs"
[dependencies]
async-strea... | text-generation-inference/router/Cargo.toml/0 | {
"file_path": "text-generation-inference/router/Cargo.toml",
"repo_id": "text-generation-inference",
"token_count": 767
} | 189 |
/// HTTP Server logic
use crate::health::Health;
use crate::infer::{InferError, InferResponse, InferStreamResponse};
use crate::validation::ValidationError;
use crate::{
BestOfSequence, Details, ErrorResponse, FinishReason, GenerateParameters, GenerateRequest,
GenerateResponse, GrammarType, HubModelInfo, HubTok... | text-generation-inference/router/src/server.rs/0 | {
"file_path": "text-generation-inference/router/src/server.rs",
"repo_id": "text-generation-inference",
"token_count": 27597
} | 190 |
#ifndef _util_h
#define _util_h
#define DBGS(__x) printf("%s\n", __x)
#define DBGI(__x) printf("%s: %i\n", #__x, __x)
#define DBGI2(__x, __y) printf("%s, %s: %i, %i\n", #__x, #__y, __x, __y)
#define DBGI3(__x, __y, __z) printf("%s, %s, %s: %i, %i, %i\n", #__x, #__y, #__z, __x, __y, __z)
#define DBGF(__x) printf("%s: %... | text-generation-inference/server/exllamav2_kernels/exllamav2_kernels/cpp/util.h/0 | {
"file_path": "text-generation-inference/server/exllamav2_kernels/exllamav2_kernels/cpp/util.h",
"repo_id": "text-generation-inference",
"token_count": 296
} | 191 |
#ifndef _util_cuh
#define _util_cuh
#include <cuda_runtime.h>
#include <cuda_fp16.h>
#include <cstdint>
#include <cstdio>
#include <ATen/cuda/CUDAContext.h>
#define DIVIDE(x, size) (((x) + (size) - 1) / (size))
#define DBGS(__x) printf("%s\n", __x)
#define DBGI(__x) printf("%s: %i\n", #__x, __x)
#define DBGI2(__x, _... | text-generation-inference/server/exllamav2_kernels/exllamav2_kernels/cuda/util.cuh/0 | {
"file_path": "text-generation-inference/server/exllamav2_kernels/exllamav2_kernels/cuda/util.cuh",
"repo_id": "text-generation-inference",
"token_count": 1115
} | 192 |
import torch
from text_generation_server.utils.layers import (
TensorParallelEmbedding,
)
class ProcessGroup:
def __init__(self, rank: int, world_size: int):
self._rank = rank
self.world_size = world_size
def size(self) -> int:
return self.world_size
def rank(self) -> int:
... | text-generation-inference/server/tests/utils/test_layers.py/0 | {
"file_path": "text-generation-inference/server/tests/utils/test_layers.py",
"repo_id": "text-generation-inference",
"token_count": 1111
} | 193 |
# coding=utf-8
# Copyright 2022 EleutherAI and the HuggingFace Inc. team. All rights reserved.
#
# This code is based on EleutherAI's GPT-NeoX library and the GPT-NeoX
# and OPT implementations in this library. It has been modified from its
# original forms to accommodate minor architectural differences compared
# to G... | text-generation-inference/server/text_generation_server/models/custom_modeling/flash_mixtral_modeling.py/0 | {
"file_path": "text-generation-inference/server/text_generation_server/models/custom_modeling/flash_mixtral_modeling.py",
"repo_id": "text-generation-inference",
"token_count": 14049
} | 194 |
# coding=utf-8
# Copyright 2022 The Fairseq Authors and The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/... | text-generation-inference/server/text_generation_server/models/custom_modeling/opt_modeling.py/0 | {
"file_path": "text-generation-inference/server/text_generation_server/models/custom_modeling/opt_modeling.py",
"repo_id": "text-generation-inference",
"token_count": 15538
} | 195 |
import torch
import torch.distributed
from typing import Optional
from transformers import (
AutoTokenizer,
AutoConfig,
)
from text_generation_server.models import CausalLM
from text_generation_server.models.custom_modeling.neox_modeling import (
GPTNeoxForCausalLM,
)
from text_generation_server.utils imp... | text-generation-inference/server/text_generation_server/models/gpt_neox.py/0 | {
"file_path": "text-generation-inference/server/text_generation_server/models/gpt_neox.py",
"repo_id": "text-generation-inference",
"token_count": 1269
} | 196 |
from text_generation_server.utils.convert import convert_file, convert_files
from text_generation_server.utils.dist import initialize_torch_distributed
from text_generation_server.utils.weights import Weights
from text_generation_server.utils.peft import download_and_unload_peft
from text_generation_server.utils.hub im... | text-generation-inference/server/text_generation_server/utils/__init__.py/0 | {
"file_path": "text-generation-inference/server/text_generation_server/utils/__init__.py",
"repo_id": "text-generation-inference",
"token_count": 417
} | 197 |
import torch
# vllm imports
from vllm import cache_ops
from vllm import attention_ops
_PARTITION_SIZE = 512
def reshape_and_cache(
key: torch.Tensor,
value: torch.Tensor,
key_cache: torch.Tensor,
value_cache: torch.Tensor,
slots: torch.Tensor,
):
cache_ops.reshape_and_cache(key, value, key_c... | text-generation-inference/server/text_generation_server/utils/paged_attention.py/0 | {
"file_path": "text-generation-inference/server/text_generation_server/utils/paged_attention.py",
"repo_id": "text-generation-inference",
"token_count": 1485
} | 198 |
import {
PaddingDirection,
WordPiece,
punctuationPreTokenizer,
sequencePreTokenizer,
whitespacePreTokenizer,
Encoding,
EncodeOptions,
Tokenizer,
} from '../../'
import { InputSequence } from '../../types'
const MOCKS_DIR = __dirname + '/__mocks__'
describe('Can modify pretokenizers on the fly', () => ... | tokenizers/bindings/node/lib/bindings/encoding.test.ts/0 | {
"file_path": "tokenizers/bindings/node/lib/bindings/encoding.test.ts",
"repo_id": "tokenizers",
"token_count": 3021
} | 199 |
{
"name": "tokenizers-freebsd-x64",
"version": "0.13.4-rc1",
"os": [
"freebsd"
],
"cpu": [
"x64"
],
"main": "tokenizers.freebsd-x64.node",
"files": [
"tokenizers.freebsd-x64.node"
],
"description": "Tokenizers platform specific bindings",
"keywords": [
"napi-rs",
"NAPI",
"N... | tokenizers/bindings/node/npm/freebsd-x64/package.json/0 | {
"file_path": "tokenizers/bindings/node/npm/freebsd-x64/package.json",
"repo_id": "tokenizers",
"token_count": 272
} | 200 |
{
"name": "tokenizers-win32-x64-msvc",
"version": "0.13.4-rc1",
"os": [
"win32"
],
"cpu": [
"x64"
],
"main": "tokenizers.win32-x64-msvc.node",
"files": [
"tokenizers.win32-x64-msvc.node"
],
"description": "Tokenizers platform specific bindings",
"keywords": [
"napi-rs",
"NAPI",... | tokenizers/bindings/node/npm/win32-x64-msvc/package.json/0 | {
"file_path": "tokenizers/bindings/node/npm/win32-x64-msvc/package.json",
"repo_id": "tokenizers",
"token_count": 277
} | 201 |
use napi::bindgen_prelude::*;
use napi_derive::napi;
use tokenizers as tk;
use tokenizers::Encoding;
use crate::encoding::JsEncoding;
#[napi]
pub fn slice(s: String, begin_index: Option<i32>, end_index: Option<i32>) -> Result<String> {
let len = s.chars().count();
let get_index = |x: i32| -> usize {
if x >= ... | tokenizers/bindings/node/src/utils.rs/0 | {
"file_path": "tokenizers/bindings/node/src/utils.rs",
"repo_id": "tokenizers",
"token_count": 503
} | 202 |
import datasets
from tokenizers import Tokenizer, models, normalizers, pre_tokenizers
# Build a tokenizer
bpe_tokenizer = Tokenizer(models.BPE())
bpe_tokenizer.pre_tokenizer = pre_tokenizers.Whitespace()
bpe_tokenizer.normalizer = normalizers.Lowercase()
# Initialize a dataset
dataset = datasets.load_dataset("wikit... | tokenizers/bindings/python/examples/train_with_datasets.py/0 | {
"file_path": "tokenizers/bindings/python/examples/train_with_datasets.py",
"repo_id": "tokenizers",
"token_count": 207
} | 203 |
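The snippet is truncated before the training step; a hedged continuation, assuming the standard `trainers.BpeTrainer` / `Tokenizer.train_from_iterator` API and using placeholder text instead of the elided dataset call:

```python
from tokenizers import Tokenizer, models, normalizers, pre_tokenizers, trainers

# Rebuild the tokenizer exactly as in the example above
bpe_tokenizer = Tokenizer(models.BPE())
bpe_tokenizer.pre_tokenizer = pre_tokenizers.Whitespace()
bpe_tokenizer.normalizer = normalizers.Lowercase()

# Any iterator of raw strings works; a real run would iterate over the loaded dataset
corpus = ["hello world", "training tokenizers from an in-memory iterator"]
trainer = trainers.BpeTrainer(vocab_size=8000, special_tokens=["<unk>"])
bpe_tokenizer.train_from_iterator(corpus, trainer=trainer)

print(bpe_tokenizer.encode("hello tokenizers").tokens)
```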
# Generated content DO NOT EDIT
class Normalizer:
"""
Base class for all normalizers
This class is not supposed to be instantiated directly. Instead, any implementation of a
Normalizer will return an instance of this class when instantiated.
"""
def normalize(self, normalized):
"""
... | tokenizers/bindings/python/py_src/tokenizers/normalizers/__init__.pyi/0 | {
"file_path": "tokenizers/bindings/python/py_src/tokenizers/normalizers/__init__.pyi",
"repo_id": "tokenizers",
"token_count": 8053
} | 204 |
use std::sync::{Arc, RwLock};
use crate::utils::PyChar;
use crate::utils::PyPattern;
use pyo3::exceptions;
use pyo3::prelude::*;
use pyo3::types::*;
use serde::de::Error;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use tk::decoders::bpe::BPEDecoder;
use tk::decoders::byte_fallback::ByteFallback;
use... | tokenizers/bindings/python/src/decoders.rs/0 | {
"file_path": "tokenizers/bindings/python/src/decoders.rs",
"repo_id": "tokenizers",
"token_count": 9016
} | 205 |
import argparse
import inspect
import os
from pathlib import Path
INDENT = " " * 4
GENERATED_COMMENT = "# Generated content DO NOT EDIT\n"
def do_indent(text: str, indent: str):
return text.replace("\n", f"\n{indent}")
def function(obj, indent, text_signature=None):
if text_signature is None:
text... | tokenizers/bindings/python/stub.py/0 | {
"file_path": "tokenizers/bindings/python/stub.py",
"repo_id": "tokenizers",
"token_count": 2395
} | 206 |
# Models
<tokenizerslangcontent>
<python>
## BPE
[[autodoc]] tokenizers.models.BPE
## Model
[[autodoc]] tokenizers.models.Model
## Unigram
[[autodoc]] tokenizers.models.Unigram
## WordLevel
[[autodoc]] tokenizers.models.WordLevel
## WordPiece
[[autodoc]] tokenizers.models.WordPiece
</python>
<rust>
The Rust A... | tokenizers/docs/source-doc-builder/api/models.mdx/0 | {
"file_path": "tokenizers/docs/source-doc-builder/api/models.mdx",
"repo_id": "tokenizers",
"token_count": 179
} | 207 |
Installation with npm
----------------------------------------------------------------------------------------------------
You can simply install 🤗 Tokenizers with npm using::
npm install tokenizers
| tokenizers/docs/source/installation/node.inc/0 | {
"file_path": "tokenizers/docs/source/installation/node.inc",
"repo_id": "tokenizers",
"token_count": 31
} | 208 |
#[macro_use]
extern crate criterion;
use criterion::Criterion;
use std::collections::HashMap;
use std::fs::read_to_string;
use std::time::{Duration, Instant};
use tokenizers::models::unigram::Unigram;
use tokenizers::models::unigram::UnigramTrainer;
pub fn bench_train(c: &mut Criterion) {
let trainer = UnigramTra... | tokenizers/tokenizers/benches/unigram_benchmark.rs/0 | {
"file_path": "tokenizers/tokenizers/benches/unigram_benchmark.rs",
"repo_id": "tokenizers",
"token_count": 1174
} | 209 |
import * as wasm from "unstable_wasm";
console.log(wasm.tokenize("ab"));
console.log(wasm.tokenize("abc"));
| tokenizers/tokenizers/examples/unstable_wasm/www/index.js/0 | {
"file_path": "tokenizers/tokenizers/examples/unstable_wasm/www/index.js",
"repo_id": "tokenizers",
"token_count": 43
} | 210 |
use super::{super::OrderedVocabIter, convert_merges_to_hashmap, BpeBuilder, Pair, BPE};
use serde::{
de::{Error, MapAccess, Visitor},
ser::SerializeStruct,
Deserialize, Deserializer, Serialize, Serializer,
};
use std::collections::HashMap;
impl Serialize for BPE {
fn serialize<S>(&self, serializer: S) ... | tokenizers/tokenizers/src/models/bpe/serialization.rs/0 | {
"file_path": "tokenizers/tokenizers/src/models/bpe/serialization.rs",
"repo_id": "tokenizers",
"token_count": 2739
} | 211 |
use crate::tokenizer::{NormalizedString, Normalizer, Result};
use serde::{Deserialize, Serialize};
use unicode_categories::UnicodeCategories;
/// Checks whether a character is whitespace
fn is_whitespace(c: char) -> bool {
// These are technically control characters but we count them as whitespace
match c {
... | tokenizers/tokenizers/src/normalizers/bert.rs/0 | {
"file_path": "tokenizers/tokenizers/src/normalizers/bert.rs",
"repo_id": "tokenizers",
"token_count": 1856
} | 212 |
use crate::utils::SysRegex;
use serde::{Deserialize, Deserializer, Serialize};
use crate::tokenizer::{
pattern::Invert, PreTokenizedString, PreTokenizer, Result, SplitDelimiterBehavior,
};
/// Represents the different patterns that `Split` can use
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq)]
pub... | tokenizers/tokenizers/src/pre_tokenizers/split.rs/0 | {
"file_path": "tokenizers/tokenizers/src/pre_tokenizers/split.rs",
"repo_id": "tokenizers",
"token_count": 4038
} | 213 |
use std::marker::PhantomData;
use serde::{
self,
de::{Error, MapAccess, Visitor},
ser::SerializeStruct,
Deserialize, Deserializer, Serialize, Serializer,
};
use super::{added_vocabulary::AddedTokenWithId, TokenizerImpl};
use crate::{Decoder, Model, Normalizer, PostProcessor, PreTokenizer, TokenizerBui... | tokenizers/tokenizers/src/tokenizer/serialization.rs/0 | {
"file_path": "tokenizers/tokenizers/src/tokenizer/serialization.rs",
"repo_id": "tokenizers",
"token_count": 3618
} | 214 |
mod common;
use common::*;
use tokenizers::decoders::byte_level::ByteLevel;
use tokenizers::decoders::DecoderWrapper;
use tokenizers::models::bpe::BPE;
use tokenizers::models::wordlevel::WordLevel;
use tokenizers::models::wordpiece::WordPiece;
use tokenizers::models::ModelWrapper;
use tokenizers::normalizers::bert::Be... | tokenizers/tokenizers/tests/serialization.rs/0 | {
"file_path": "tokenizers/tokenizers/tests/serialization.rs",
"repo_id": "tokenizers",
"token_count": 3683
} | 215 |
.PHONY: deps_table_update modified_only_fixup extra_style_checks quality style fixup fix-copies test test-examples
# make sure to test the local checkout in scripts and not the pre-installed one (don't use quotes!)
export PYTHONPATH = src
check_dirs := examples tests src utils
exclude_folders := examples/research_pr... | transformers/Makefile/0 | {
"file_path": "transformers/Makefile",
"repo_id": "transformers",
"token_count": 1325
} | 216 |
# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicabl... | transformers/conftest.py/0 | {
"file_path": "transformers/conftest.py",
"repo_id": "transformers",
"token_count": 1693
} | 217 |
<!---
Copyright 2020 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or ... | transformers/docs/README.md/0 | {
"file_path": "transformers/docs/README.md",
"repo_id": "transformers",
"token_count": 4835
} | 218 |
<!--Copyright 2022 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/de/pipeline_tutorial.md/0 | {
"file_path": "transformers/docs/source/de/pipeline_tutorial.md",
"repo_id": "transformers",
"token_count": 3003
} | 219 |
<!--Copyright 2022 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/en/autoclass_tutorial.md/0 | {
"file_path": "transformers/docs/source/en/autoclass_tutorial.md",
"repo_id": "transformers",
"token_count": 2553
} | 220 |
<!--Copyright 2023 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/en/main_classes/backbones.md/0 | {
"file_path": "transformers/docs/source/en/main_classes/backbones.md",
"repo_id": "transformers",
"token_count": 689
} | 221 |
<!--Copyright 2022 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/en/main_classes/text_generation.md/0 | {
"file_path": "transformers/docs/source/en/main_classes/text_generation.md",
"repo_id": "transformers",
"token_count": 596
} | 222 |
<!--Copyright 2020 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/en/model_doc/bert.md/0 | {
"file_path": "transformers/docs/source/en/model_doc/bert.md",
"repo_id": "transformers",
"token_count": 4645
} | 223 |
<!--Copyright 2021 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/en/model_doc/canine.md/0 | {
"file_path": "transformers/docs/source/en/model_doc/canine.md",
"repo_id": "transformers",
"token_count": 1723
} | 224 |
<!--Copyright 2022 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/en/model_doc/data2vec.md/0 | {
"file_path": "transformers/docs/source/en/model_doc/data2vec.md",
"repo_id": "transformers",
"token_count": 2027
} | 225 |
<!--Copyright 2020 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/en/model_doc/dpr.md/0 | {
"file_path": "transformers/docs/source/en/model_doc/dpr.md",
"repo_id": "transformers",
"token_count": 1170
} | 226 |
<!--Copyright 2021 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/en/model_doc/fnet.md/0 | {
"file_path": "transformers/docs/source/en/model_doc/fnet.md",
"repo_id": "transformers",
"token_count": 1150
} | 227 |
<!--Copyright 2022 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/en/model_doc/levit.md/0 | {
"file_path": "transformers/docs/source/en/model_doc/levit.md",
"repo_id": "transformers",
"token_count": 1801
} | 228 |
<!--Copyright 2020 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/en/model_doc/mpnet.md/0 | {
"file_path": "transformers/docs/source/en/model_doc/mpnet.md",
"repo_id": "transformers",
"token_count": 1255
} | 229 |
<!--Copyright 2024 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/en/model_doc/pvt_v2.md/0 | {
"file_path": "transformers/docs/source/en/model_doc/pvt_v2.md",
"repo_id": "transformers",
"token_count": 2543
} | 230 |
<!--Copyright 2022 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/en/model_doc/swin.md/0 | {
"file_path": "transformers/docs/source/en/model_doc/swin.md",
"repo_id": "transformers",
"token_count": 1394
} | 231 |
<!--Copyright 2022 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/en/model_doc/vit_mae.md/0 | {
"file_path": "transformers/docs/source/en/model_doc/vit_mae.md",
"repo_id": "transformers",
"token_count": 1492
} | 232 |
<!--Copyright 2020 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/en/model_doc/xlm-roberta.md/0 | {
"file_path": "transformers/docs/source/en/model_doc/xlm-roberta.md",
"repo_id": "transformers",
"token_count": 3907
} | 233 |
<!---
Copyright 2022 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or ... | transformers/docs/source/en/perf_hardware.md/0 | {
"file_path": "transformers/docs/source/en/perf_hardware.md",
"repo_id": "transformers",
"token_count": 2317
} | 234 |
<!--Copyright 2023 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/en/preprocessing.md/0 | {
"file_path": "transformers/docs/source/en/preprocessing.md",
"repo_id": "transformers",
"token_count": 8689
} | 235 |
<!--Copyright 2023 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/en/tasks/zero_shot_image_classification.md/0 | {
"file_path": "transformers/docs/source/en/tasks/zero_shot_image_classification.md",
"repo_id": "transformers",
"token_count": 1757
} | 236 |
<!--Copyright 2022 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/es/autoclass_tutorial.md/0 | {
"file_path": "transformers/docs/source/es/autoclass_tutorial.md",
"repo_id": "transformers",
"token_count": 2066
} | 237 |
<!--Copyright 2020 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/es/perplexity.md/0 | {
"file_path": "transformers/docs/source/es/perplexity.md",
"repo_id": "transformers",
"token_count": 3119
} | 238 |
<!--Copyright 2022 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/it/accelerate.md/0 | {
"file_path": "transformers/docs/source/it/accelerate.md",
"repo_id": "transformers",
"token_count": 1891
} | 239 |
<!--Copyright 2022 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/it/perf_infer_cpu.md/0 | {
"file_path": "transformers/docs/source/it/perf_infer_cpu.md",
"repo_id": "transformers",
"token_count": 1497
} | 240 |
<!--Copyright 2022 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/ja/accelerate.md/0 | {
"file_path": "transformers/docs/source/ja/accelerate.md",
"repo_id": "transformers",
"token_count": 2185
} | 241 |
<!--Copyright 2023 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/ja/hpo_train.md/0 | {
"file_path": "transformers/docs/source/ja/hpo_train.md",
"repo_id": "transformers",
"token_count": 2841
} | 242 |
<!--Copyright 2020 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/ja/model_doc/albert.md/0 | {
"file_path": "transformers/docs/source/ja/model_doc/albert.md",
"repo_id": "transformers",
"token_count": 2960
} | 243 |
<!--Copyright 2021 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/ja/model_doc/bigbird_pegasus.md/0 | {
"file_path": "transformers/docs/source/ja/model_doc/bigbird_pegasus.md",
"repo_id": "transformers",
"token_count": 2264
} | 244 |
<!--Copyright 2021 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/ja/model_doc/clip.md/0 | {
"file_path": "transformers/docs/source/ja/model_doc/clip.md",
"repo_id": "transformers",
"token_count": 4545
} | 245 |
<!--Copyright 2022 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/ja/model_doc/decision_transformer.md/0 | {
"file_path": "transformers/docs/source/ja/model_doc/decision_transformer.md",
"repo_id": "transformers",
"token_count": 1073
} | 246 |
<!--Copyright 2023 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/ja/perf_infer_gpu_many.md/0 | {
"file_path": "transformers/docs/source/ja/perf_infer_gpu_many.md",
"repo_id": "transformers",
"token_count": 2561
} | 247 |
<!---
Copyright 2023 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or ... | transformers/docs/source/ja/pr_checks.md/0 | {
"file_path": "transformers/docs/source/ja/pr_checks.md",
"repo_id": "transformers",
"token_count": 5982
} | 248 |
<!--Copyright 2023 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/ja/tasks/monocular_depth_estimation.md/0 | {
"file_path": "transformers/docs/source/ja/tasks/monocular_depth_estimation.md",
"repo_id": "transformers",
"token_count": 2274
} | 249 |
<!--Copyright 2023 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/ja/testing.md/0 | {
"file_path": "transformers/docs/source/ja/testing.md",
"repo_id": "transformers",
"token_count": 22732
} | 250 |
<!--Copyright 2020 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/ko/bertology.md/0 | {
"file_path": "transformers/docs/source/ko/bertology.md",
"repo_id": "transformers",
"token_count": 1557
} | 251 |
<!---
Copyright 2021 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or ... | transformers/docs/source/ko/performance.md/0 | {
"file_path": "transformers/docs/source/ko/performance.md",
"repo_id": "transformers",
"token_count": 3692
} | 252 |
<!--Copyright 2022 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/ko/tasks/image_classification.md/0 | {
"file_path": "transformers/docs/source/ko/tasks/image_classification.md",
"repo_id": "transformers",
"token_count": 11866
} | 253 |
<!--Copyright 2023 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/ko/tasks_explained.md/0 | {
"file_path": "transformers/docs/source/ko/tasks_explained.md",
"repo_id": "transformers",
"token_count": 25797
} | 254 |
<!--Copyright 2020 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed... | transformers/docs/source/pt/custom_models.md/0 | {
"file_path": "transformers/docs/source/pt/custom_models.md",
"repo_id": "transformers",
"token_count": 5915
} | 255 |
<!---
Copyright 2020 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or a... | transformers/examples/README.md/0 | {
"file_path": "transformers/examples/README.md",
"repo_id": "transformers",
"token_count": 3302
} | 256 |
#!/usr/bin/env python
# coding=utf-8
# Copyright 2021 The HuggingFace Team All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-... | transformers/examples/flax/question-answering/run_qa.py/0 | {
"file_path": "transformers/examples/flax/question-answering/run_qa.py",
"repo_id": "transformers",
"token_count": 20250
} | 257 |
#### Fine-tuning BERT on SQuAD1.0 with relative position embeddings
The following examples show how to fine-tune BERT models with different relative position embeddings. The BERT model
`google-bert/bert-base-uncased` was pretrained with default absolute position embeddings. We provide the following pretrained
models... | transformers/examples/legacy/question-answering/README.md/0 | {
"file_path": "transformers/examples/legacy/question-answering/README.md",
"repo_id": "transformers",
"token_count": 1768
} | 258 |
# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicabl... | transformers/examples/legacy/seq2seq/sentence_splitter.py/0 | {
"file_path": "transformers/examples/legacy/seq2seq/sentence_splitter.py",
"repo_id": "transformers",
"token_count": 403
} | 259 |
# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicabl... | transformers/examples/legacy/seq2seq/train_mbart_cc25_enro.sh/0 | {
"file_path": "transformers/examples/legacy/seq2seq/train_mbart_cc25_enro.sh",
"repo_id": "transformers",
"token_count": 501
} | 260 |
#!/usr/bin/env python
# coding=utf-8
# Copyright 2018 Google AI, Google Brain and Carnegie Mellon University Authors and the HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in co... | transformers/examples/pytorch/text-generation/run_generation.py/0 | {
"file_path": "transformers/examples/pytorch/text-generation/run_generation.py",
"repo_id": "transformers",
"token_count": 6877
} | 261 |
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a cop... | transformers/examples/research_projects/adversarial/run_hans.py/0 | {
"file_path": "transformers/examples/research_projects/adversarial/run_hans.py",
"repo_id": "transformers",
"token_count": 3302
} | 262 |
# coding=utf-8
# Copyright 2019 HuggingFace Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or ag... | transformers/examples/research_projects/bertabs/test_utils_summarization.py/0 | {
"file_path": "transformers/examples/research_projects/bertabs/test_utils_summarization.py",
"repo_id": "transformers",
"token_count": 1749
} | 263 |