from .formatting import styled_warning
# ================================
# = ABOUT =
# ================================
INTRODUCTION_TITLE = """<h1 align="center">🏟️ Long Code Arena</h1>"""
INTRODUCTION_TEXT = """🏟️ **Long Code Arena** is a suite of benchmarks for code-related tasks with large contexts, up to a whole code repository.
It currently spans six different tasks and contains six datasets:
* 🤗 [Library-based code generation](https://huggingface.co/datasets/JetBrains-Research/lca-library-based-code-generation)
* 🤗 [CI builds repair](https://huggingface.co/datasets/JetBrains-Research/lca-ci-builds-repair)
* 🤗 [Project-level code completion](https://huggingface.co/datasets/JetBrains-Research/lca-project-level-code-completion)
* 🤗 [Commit message generation](https://huggingface.co/datasets/JetBrains-Research/lca-commit-message-generation)
* 🤗 [Bug localization](https://huggingface.co/datasets/JetBrains-Research/lca-bug-localization)
* 🤗 [Module summarization](https://huggingface.co/datasets/JetBrains-Research/lca-module-summarization)

We are excited to invite you to participate in our benchmarks! To submit your results, please send the following materials to our 📩 email (lca@jetbrains.com):
* **Results**: Include a summary of your benchmark outcomes.
* **Reproduction Package**: To ensure the integrity and reproducibility of your results, please include the code for context collection (if any), prediction generation, and evaluation. You can follow [our baselines](https://github.com/JetBrains-Research/lca-baselines) as a reference.
* **Metadata**: Model information, organization name, license of your model, context size, and any other information you find relevant.

We look forward to reviewing your innovative solutions!
"""
# ================================
# = LEADERBOARD =
# ================================
LEADERBOARD_TITLE = '<h2 align="center">🏅 Leaderboard</h2>'
LEADERBOARD_TEXT = """The raw results from the leaderboard are available in 🤗 [JetBrains-Research/lca-results](https://huggingface.co/datasets/JetBrains-Research/lca-results)."""
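
# Illustrative only (not used by this Space): a hedged sketch of fetching the raw
# leaderboard results mentioned above. `huggingface_hub.snapshot_download` is a
# real API; the helper name and local directory are assumptions, and the file
# layout inside the dataset repo is not described here.
def _download_raw_results(local_dir: str = "lca-results"):
    from huggingface_hub import snapshot_download

    # Downloads a snapshot of the JetBrains-Research/lca-results dataset repo
    # and returns the local path of the snapshot.
    return snapshot_download(
        repo_id="JetBrains-Research/lca-results",
        repo_type="dataset",
        local_dir=local_dir,
    )
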
# ================================
# = SUBMISSION =
# ================================
SUBMISSION_TITLE = '<h2 align="center">📩 Make a Submission</h2>'
SUBMISSION_TEXT_INTRO = """Use the form below to submit new results to 🏟️ Long Code Arena. If any problems arise, don't hesitate to contact us by email `TODO` or open a discussion 💛"""
SUBMISSION_TEXT_TASK = """1. Select a task you want to submit results for."""
SUBMISSION_TEXT_METADATA = """2. Fill in some metadata about your submission."""
SUBMISSION_TEXT_FILES = """3. Attach one or more files with your model's predictions.
* If several files are attached, they will be treated as separate runs of the submitted model (e.g., with different seeds), and the metrics will be averaged across runs. For the baselines provided by the 🏟️ Long Code Arena Team, the results are averaged across three runs.
"""
SUBMISSION_TEXT_SUBMIT = """All set! A new PR to 🤗 [JetBrains-Research/lca-results](https://huggingface.co/datasets/JetBrains-Research/lca-results) should be opened when you press the "Submit" button. The 🏟️ Long Code Arena Team will review it shortly, and the results will appear on the leaderboard.

⏳ **Note:** It might take some time (up to 40 minutes) for the PR to be created, since creating it involves computing metrics for your submission."""
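
# Illustrative only (the actual submission logic lives elsewhere in this Space):
# a hedged sketch of how a submission PR to the results dataset could be opened
# programmatically. `HfApi.upload_file(..., create_pr=True)` is a real
# `huggingface_hub` API that opens a pull request instead of committing to the
# main branch; the helper name and commit message below are assumptions.
def _open_submission_pr(local_path: str, path_in_repo: str):
    from huggingface_hub import HfApi

    return HfApi().upload_file(
        path_or_fileobj=local_path,
        path_in_repo=path_in_repo,
        repo_id="JetBrains-Research/lca-results",
        repo_type="dataset",
        create_pr=True,
        commit_message="New Long Code Arena submission",
    )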