{
  "cells": [
    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "## Evaluation when you have fully labelled data\n",
        "\n",
        "In this example, our data contains a fully-populated ground-truth column called `cluster` that enables us to perform accuracy analysis of the final model.\n"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "<a target=\"_blank\" href=\"https://colab.research.google.com/github/moj-analytical-services/splink/blob/master/docs/demos/examples/duckdb/accuracy_analysis_from_labels_column.ipynb\">\n",
        "  <img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/>\n",
        "</a>\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 34,
      "metadata": {
        "execution": {
          "iopub.execute_input": "2024-06-07T09:09:16.264709Z",
          "iopub.status.busy": "2024-06-07T09:09:16.264397Z",
          "iopub.status.idle": "2024-06-07T09:09:16.269613Z",
          "shell.execute_reply": "2024-06-07T09:09:16.268968Z"
        },
        "tags": [
          "hide_input"
        ]
      },
      "outputs": [],
      "source": [
        "# Uncomment and run this cell if you're running in Google Colab.\n",
        "# %pip install splink"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 35,
      "metadata": {
        "execution": {
          "iopub.execute_input": "2024-06-07T09:09:16.273849Z",
          "iopub.status.busy": "2024-06-07T09:09:16.273306Z",
          "iopub.status.idle": "2024-06-07T09:09:17.467426Z",
          "shell.execute_reply": "2024-06-07T09:09:17.466787Z"
        }
      },
      "outputs": [
        {
          "data": {
            "text/html": [
              "<div>\n",
              "<style scoped>\n",
              "    .dataframe tbody tr th:only-of-type {\n",
              "        vertical-align: middle;\n",
              "    }\n",
              "\n",
              "    .dataframe tbody tr th {\n",
              "        vertical-align: top;\n",
              "    }\n",
              "\n",
              "    .dataframe thead th {\n",
              "        text-align: right;\n",
              "    }\n",
              "</style>\n",
              "<table border=\"1\" class=\"dataframe\">\n",
              "  <thead>\n",
              "    <tr style=\"text-align: right;\">\n",
              "      <th></th>\n",
              "      <th>unique_id</th>\n",
              "      <th>first_name</th>\n",
              "      <th>surname</th>\n",
              "      <th>dob</th>\n",
              "      <th>city</th>\n",
              "      <th>email</th>\n",
              "      <th>cluster</th>\n",
              "    </tr>\n",
              "  </thead>\n",
              "  <tbody>\n",
              "    <tr>\n",
              "      <th>0</th>\n",
              "      <td>0</td>\n",
              "      <td>Robert</td>\n",
              "      <td>Alan</td>\n",
              "      <td>1971-06-24</td>\n",
              "      <td>NaN</td>\n",
              "      <td>robert255@smith.net</td>\n",
              "      <td>0</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1</th>\n",
              "      <td>1</td>\n",
              "      <td>Robert</td>\n",
              "      <td>Allen</td>\n",
              "      <td>1971-05-24</td>\n",
              "      <td>NaN</td>\n",
              "      <td>roberta25@smith.net</td>\n",
              "      <td>0</td>\n",
              "    </tr>\n",
              "  </tbody>\n",
              "</table>\n",
              "</div>"
            ],
            "text/plain": [
              "   unique_id first_name surname         dob city                email  cluster\n",
              "0          0     Robert    Alan  1971-06-24  NaN  robert255@smith.net        0\n",
              "1          1     Robert   Allen  1971-05-24  NaN  roberta25@smith.net        0"
            ]
          },
          "execution_count": 35,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "from splink import splink_datasets\n",
        "\n",
        "df = splink_datasets.fake_1000\n",
        "df.head(2)"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 36,
      "metadata": {
        "execution": {
          "iopub.execute_input": "2024-06-07T09:09:17.501913Z",
          "iopub.status.busy": "2024-06-07T09:09:17.501641Z",
          "iopub.status.idle": "2024-06-07T09:09:17.581434Z",
          "shell.execute_reply": "2024-06-07T09:09:17.580667Z"
        }
      },
      "outputs": [],
      "source": [
        "from splink import SettingsCreator, Linker, block_on, DuckDBAPI\n",
        "\n",
        "import splink.comparison_library as cl\n",
        "\n",
        "settings = SettingsCreator(\n",
        "    link_type=\"dedupe_only\",\n",
        "    blocking_rules_to_generate_predictions=[\n",
        "        block_on(\"first_name\"),\n",
        "        block_on(\"surname\"),\n",
        "        block_on(\"dob\"),\n",
        "        block_on(\"email\"),\n",
        "    ],\n",
        "    comparisons=[\n",
        "        cl.ForenameSurnameComparison(\"first_name\", \"surname\"),\n",
        "        cl.DateOfBirthComparison(\n",
        "            \"dob\",\n",
        "            input_is_string=True,\n",
        "        ),\n",
        "        cl.ExactMatch(\"city\").configure(term_frequency_adjustments=True),\n",
        "        cl.EmailComparison(\"email\"),\n",
        "    ],\n",
        "    retain_intermediate_calculation_columns=True,\n",
        ")"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 37,
      "metadata": {
        "execution": {
          "iopub.execute_input": "2024-06-07T09:09:17.585114Z",
          "iopub.status.busy": "2024-06-07T09:09:17.584837Z",
          "iopub.status.idle": "2024-06-07T09:09:17.847471Z",
          "shell.execute_reply": "2024-06-07T09:09:17.846845Z"
        }
      },
      "outputs": [
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "Probability two random records match is estimated to be  0.00333.\n",
            "This means that amongst all possible pairwise record comparisons, one in 300.13 are expected to match.  With 499,500 total possible comparisons, we expect a total of around 1,664.29 matching pairs\n"
          ]
        }
      ],
      "source": [
        "db_api = DuckDBAPI()\n",
        "linker = Linker(df, settings, db_api=db_api)\n",
        "deterministic_rules = [\n",
        "    \"l.first_name = r.first_name and levenshtein(r.dob, l.dob) <= 1\",\n",
        "    \"l.surname = r.surname and levenshtein(r.dob, l.dob) <= 1\",\n",
        "    \"l.first_name = r.first_name and levenshtein(r.surname, l.surname) <= 2\",\n",
        "    \"l.email = r.email\",\n",
        "]\n",
        "\n",
        "linker.training.estimate_probability_two_random_records_match(\n",
        "    deterministic_rules, recall=0.7\n",
        ")"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 38,
      "metadata": {
        "execution": {
          "iopub.execute_input": "2024-06-07T09:09:17.850459Z",
          "iopub.status.busy": "2024-06-07T09:09:17.850216Z",
          "iopub.status.idle": "2024-06-07T09:09:18.931010Z",
          "shell.execute_reply": "2024-06-07T09:09:18.930397Z"
        }
      },
      "outputs": [
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "You are using the default value for `max_pairs`, which may be too small and thus lead to inaccurate estimates for your model's u-parameters. Consider increasing to 1e8 or 1e9, which will result in more accurate estimates, but with a longer run time.\n",
            "----- Estimating u probabilities using random sampling -----\n",
            "\n",
            "Estimated u probabilities using random sampling\n",
            "\n",
            "Your model is not yet fully trained. Missing estimates for:\n",
            "    - first_name_surname (no m values are trained).\n",
            "    - dob (no m values are trained).\n",
            "    - city (no m values are trained).\n",
            "    - email (no m values are trained).\n"
          ]
        }
      ],
      "source": [
        "linker.training.estimate_u_using_random_sampling(max_pairs=1e6, seed=5)"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 39,
      "metadata": {
        "execution": {
          "iopub.execute_input": "2024-06-07T09:09:18.934824Z",
          "iopub.status.busy": "2024-06-07T09:09:18.934551Z",
          "iopub.status.idle": "2024-06-07T09:09:20.495494Z",
          "shell.execute_reply": "2024-06-07T09:09:20.494833Z"
        }
      },
      "outputs": [
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\n",
            "----- Starting EM training session -----\n",
            "\n",
            "Estimating the m probabilities of the model by blocking on:\n",
            "l.\"dob\" = r.\"dob\"\n",
            "\n",
            "Parameter estimates will be made for the following comparison(s):\n",
            "    - first_name_surname\n",
            "    - city\n",
            "    - email\n",
            "\n",
            "Parameter estimates cannot be made for the following comparison(s) since they are used in the blocking rules: \n",
            "    - dob\n",
            "\n",
            "WARNING:\n",
            "Level Jaro-Winkler >0.88 on username on comparison email not observed in dataset, unable to train m value\n",
            "\n",
            "Iteration 1: Largest change in params was -0.751 in the m_probability of first_name_surname, level `(Exact match on first_name) AND (Exact match on surname)`\n",
            "Iteration 2: Largest change in params was 0.196 in probability_two_random_records_match\n",
            "Iteration 3: Largest change in params was 0.0536 in probability_two_random_records_match\n",
            "Iteration 4: Largest change in params was 0.0189 in probability_two_random_records_match\n",
            "Iteration 5: Largest change in params was 0.00731 in probability_two_random_records_match\n",
            "Iteration 6: Largest change in params was 0.0029 in probability_two_random_records_match\n",
            "Iteration 7: Largest change in params was 0.00116 in probability_two_random_records_match\n",
            "Iteration 8: Largest change in params was 0.000469 in probability_two_random_records_match\n",
            "Iteration 9: Largest change in params was 0.000189 in probability_two_random_records_match\n",
            "Iteration 10: Largest change in params was 7.62e-05 in probability_two_random_records_match\n",
            "\n",
            "EM converged after 10 iterations\n",
            "m probability not trained for email - Jaro-Winkler >0.88 on username (comparison vector value: 1). This usually means the comparison level was never observed in the training data.\n",
            "\n",
            "Your model is not yet fully trained. Missing estimates for:\n",
            "    - dob (no m values are trained).\n",
            "    - email (some m values are not trained).\n",
            "\n",
            "----- Starting EM training session -----\n",
            "\n",
            "Estimating the m probabilities of the model by blocking on:\n",
            "l.\"email\" = r.\"email\"\n",
            "\n",
            "Parameter estimates will be made for the following comparison(s):\n",
            "    - first_name_surname\n",
            "    - dob\n",
            "    - city\n",
            "\n",
            "Parameter estimates cannot be made for the following comparison(s) since they are used in the blocking rules: \n",
            "    - email\n",
            "\n",
            "Iteration 1: Largest change in params was -0.438 in the m_probability of dob, level `Exact match on dob`\n",
            "Iteration 2: Largest change in params was 0.122 in probability_two_random_records_match\n",
            "Iteration 3: Largest change in params was 0.0286 in probability_two_random_records_match\n",
            "Iteration 4: Largest change in params was 0.01 in probability_two_random_records_match\n",
            "Iteration 5: Largest change in params was 0.00448 in probability_two_random_records_match\n",
            "Iteration 6: Largest change in params was 0.00237 in probability_two_random_records_match\n",
            "Iteration 7: Largest change in params was 0.0014 in probability_two_random_records_match\n",
            "Iteration 8: Largest change in params was 0.000893 in probability_two_random_records_match\n",
            "Iteration 9: Largest change in params was 0.000597 in probability_two_random_records_match\n",
            "Iteration 10: Largest change in params was 0.000413 in probability_two_random_records_match\n",
            "Iteration 11: Largest change in params was 0.000292 in probability_two_random_records_match\n",
            "Iteration 12: Largest change in params was 0.000211 in probability_two_random_records_match\n",
            "Iteration 13: Largest change in params was 0.000154 in probability_two_random_records_match\n",
            "Iteration 14: Largest change in params was 0.000113 in probability_two_random_records_match\n",
            "Iteration 15: Largest change in params was 8.4e-05 in probability_two_random_records_match\n",
            "\n",
            "EM converged after 15 iterations\n",
            "\n",
            "Your model is not yet fully trained. Missing estimates for:\n",
            "    - email (some m values are not trained).\n",
            "\n",
            "----- Starting EM training session -----\n",
            "\n",
            "Estimating the m probabilities of the model by blocking on:\n",
            "(l.\"first_name\" = r.\"first_name\") AND (l.\"surname\" = r.\"surname\")\n",
            "\n",
            "Parameter estimates will be made for the following comparison(s):\n",
            "    - dob\n",
            "    - city\n",
            "    - email\n",
            "\n",
            "Parameter estimates cannot be made for the following comparison(s) since they are used in the blocking rules: \n",
            "    - first_name_surname\n",
            "\n",
            "WARNING:\n",
            "Level Jaro-Winkler >0.88 on username on comparison email not observed in dataset, unable to train m value\n",
            "\n",
            "Iteration 1: Largest change in params was 0.473 in probability_two_random_records_match\n",
            "Iteration 2: Largest change in params was 0.0452 in probability_two_random_records_match\n",
            "Iteration 3: Largest change in params was 0.00766 in probability_two_random_records_match\n",
            "Iteration 4: Largest change in params was 0.00135 in probability_two_random_records_match\n",
            "Iteration 5: Largest change in params was 0.00025 in probability_two_random_records_match\n",
            "Iteration 6: Largest change in params was 0.000468 in the m_probability of email, level `All other comparisons`\n",
            "Iteration 7: Largest change in params was 0.00776 in the m_probability of email, level `All other comparisons`\n",
            "Iteration 8: Largest change in params was 0.00992 in the m_probability of email, level `All other comparisons`\n",
            "Iteration 9: Largest change in params was 0.00277 in probability_two_random_records_match\n",
            "Iteration 10: Largest change in params was 0.000972 in probability_two_random_records_match\n",
            "Iteration 11: Largest change in params was 0.000337 in probability_two_random_records_match\n",
            "Iteration 12: Largest change in params was 0.000118 in probability_two_random_records_match\n",
            "Iteration 13: Largest change in params was 4.14e-05 in probability_two_random_records_match\n",
            "\n",
            "EM converged after 13 iterations\n",
            "m probability not trained for email - Jaro-Winkler >0.88 on username (comparison vector value: 1). This usually means the comparison level was never observed in the training data.\n",
            "\n",
            "Your model is not yet fully trained. Missing estimates for:\n",
            "    - email (some m values are not trained).\n"
          ]
        }
      ],
      "source": [
        "session_dob = linker.training.estimate_parameters_using_expectation_maximisation(\n",
        "    block_on(\"dob\"), estimate_without_term_frequencies=True\n",
        ")\n",
        "session_email = linker.training.estimate_parameters_using_expectation_maximisation(\n",
        "    block_on(\"email\"), estimate_without_term_frequencies=True\n",
        ")\n",
        "session_first_name_surname = linker.training.estimate_parameters_using_expectation_maximisation(\n",
        "    block_on(\"first_name\", \"surname\"), estimate_without_term_frequencies=True\n",
        ")"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 40,
      "metadata": {
        "execution": {
          "iopub.execute_input": "2024-06-07T09:09:20.498372Z",
          "iopub.status.busy": "2024-06-07T09:09:20.498155Z",
          "iopub.status.idle": "2024-06-07T09:09:20.768827Z",
          "shell.execute_reply": "2024-06-07T09:09:20.768326Z"
        }
      },
      "outputs": [
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\n",
            " -- WARNING --\n",
            "You have called predict(), but there are some parameter estimates which have neither been estimated or specified in your settings dictionary.  To produce predictions the following untrained trained parameters will use default values.\n",
            "Comparison: 'email':\n",
            "    m values not fully trained\n"
          ]
        },
        {
          "data": {
            "text/html": [
              "<div>\n",
              "<style scoped>\n",
              "    .dataframe tbody tr th:only-of-type {\n",
              "        vertical-align: middle;\n",
              "    }\n",
              "\n",
              "    .dataframe tbody tr th {\n",
              "        vertical-align: top;\n",
              "    }\n",
              "\n",
              "    .dataframe thead th {\n",
              "        text-align: right;\n",
              "    }\n",
              "</style>\n",
              "<table border=\"1\" class=\"dataframe\">\n",
              "  <thead>\n",
              "    <tr style=\"text-align: right;\">\n",
              "      <th></th>\n",
              "      <th>truth_threshold</th>\n",
              "      <th>match_probability</th>\n",
              "      <th>total_clerical_labels</th>\n",
              "      <th>p</th>\n",
              "      <th>n</th>\n",
              "      <th>tp</th>\n",
              "      <th>tn</th>\n",
              "      <th>fp</th>\n",
              "      <th>fn</th>\n",
              "      <th>P_rate</th>\n",
              "      <th>...</th>\n",
              "      <th>precision</th>\n",
              "      <th>recall</th>\n",
              "      <th>specificity</th>\n",
              "      <th>npv</th>\n",
              "      <th>accuracy</th>\n",
              "      <th>f1</th>\n",
              "      <th>f2</th>\n",
              "      <th>f0_5</th>\n",
              "      <th>p4</th>\n",
              "      <th>phi</th>\n",
              "    </tr>\n",
              "  </thead>\n",
              "  <tbody>\n",
              "    <tr>\n",
              "      <th>0</th>\n",
              "      <td>-17.8</td>\n",
              "      <td>0.000004</td>\n",
              "      <td>499500.0</td>\n",
              "      <td>2031.0</td>\n",
              "      <td>497469.0</td>\n",
              "      <td>1650.0</td>\n",
              "      <td>495130.0</td>\n",
              "      <td>2339.0</td>\n",
              "      <td>381.0</td>\n",
              "      <td>0.004066</td>\n",
              "      <td>...</td>\n",
              "      <td>0.413638</td>\n",
              "      <td>0.812408</td>\n",
              "      <td>0.995298</td>\n",
              "      <td>0.999231</td>\n",
              "      <td>0.994555</td>\n",
              "      <td>0.548173</td>\n",
              "      <td>0.681086</td>\n",
              "      <td>0.458665</td>\n",
              "      <td>0.707466</td>\n",
              "      <td>0.577474</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1</th>\n",
              "      <td>-17.7</td>\n",
              "      <td>0.000005</td>\n",
              "      <td>499500.0</td>\n",
              "      <td>2031.0</td>\n",
              "      <td>497469.0</td>\n",
              "      <td>1650.0</td>\n",
              "      <td>495225.0</td>\n",
              "      <td>2244.0</td>\n",
              "      <td>381.0</td>\n",
              "      <td>0.004066</td>\n",
              "      <td>...</td>\n",
              "      <td>0.423729</td>\n",
              "      <td>0.812408</td>\n",
              "      <td>0.995489</td>\n",
              "      <td>0.999231</td>\n",
              "      <td>0.994745</td>\n",
              "      <td>0.556962</td>\n",
              "      <td>0.686470</td>\n",
              "      <td>0.468564</td>\n",
              "      <td>0.714769</td>\n",
              "      <td>0.584558</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>2</th>\n",
              "      <td>-17.1</td>\n",
              "      <td>0.000007</td>\n",
              "      <td>499500.0</td>\n",
              "      <td>2031.0</td>\n",
              "      <td>497469.0</td>\n",
              "      <td>1650.0</td>\n",
              "      <td>495311.0</td>\n",
              "      <td>2158.0</td>\n",
              "      <td>381.0</td>\n",
              "      <td>0.004066</td>\n",
              "      <td>...</td>\n",
              "      <td>0.433298</td>\n",
              "      <td>0.812408</td>\n",
              "      <td>0.995662</td>\n",
              "      <td>0.999231</td>\n",
              "      <td>0.994917</td>\n",
              "      <td>0.565165</td>\n",
              "      <td>0.691418</td>\n",
              "      <td>0.477901</td>\n",
              "      <td>0.721512</td>\n",
              "      <td>0.591197</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>3</th>\n",
              "      <td>-17.0</td>\n",
              "      <td>0.000008</td>\n",
              "      <td>499500.0</td>\n",
              "      <td>2031.0</td>\n",
              "      <td>497469.0</td>\n",
              "      <td>1650.0</td>\n",
              "      <td>495354.0</td>\n",
              "      <td>2115.0</td>\n",
              "      <td>381.0</td>\n",
              "      <td>0.004066</td>\n",
              "      <td>...</td>\n",
              "      <td>0.438247</td>\n",
              "      <td>0.812408</td>\n",
              "      <td>0.995748</td>\n",
              "      <td>0.999231</td>\n",
              "      <td>0.995003</td>\n",
              "      <td>0.569358</td>\n",
              "      <td>0.693919</td>\n",
              "      <td>0.482710</td>\n",
              "      <td>0.724931</td>\n",
              "      <td>0.594601</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>4</th>\n",
              "      <td>-16.9</td>\n",
              "      <td>0.000008</td>\n",
              "      <td>499500.0</td>\n",
              "      <td>2031.0</td>\n",
              "      <td>497469.0</td>\n",
              "      <td>1650.0</td>\n",
              "      <td>495386.0</td>\n",
              "      <td>2083.0</td>\n",
              "      <td>381.0</td>\n",
              "      <td>0.004066</td>\n",
              "      <td>...</td>\n",
              "      <td>0.442004</td>\n",
              "      <td>0.812408</td>\n",
              "      <td>0.995813</td>\n",
              "      <td>0.999231</td>\n",
              "      <td>0.995067</td>\n",
              "      <td>0.572519</td>\n",
              "      <td>0.695792</td>\n",
              "      <td>0.486353</td>\n",
              "      <td>0.727497</td>\n",
              "      <td>0.597173</td>\n",
              "    </tr>\n",
              "  </tbody>\n",
              "</table>\n",
              "<p>5 rows × 25 columns</p>\n",
              "</div>"
            ],
            "text/plain": [
              "   truth_threshold  match_probability  total_clerical_labels       p  \\\n",
              "0            -17.8           0.000004               499500.0  2031.0   \n",
              "1            -17.7           0.000005               499500.0  2031.0   \n",
              "2            -17.1           0.000007               499500.0  2031.0   \n",
              "3            -17.0           0.000008               499500.0  2031.0   \n",
              "4            -16.9           0.000008               499500.0  2031.0   \n",
              "\n",
              "          n      tp        tn      fp     fn    P_rate  ...  precision  \\\n",
              "0  497469.0  1650.0  495130.0  2339.0  381.0  0.004066  ...   0.413638   \n",
              "1  497469.0  1650.0  495225.0  2244.0  381.0  0.004066  ...   0.423729   \n",
              "2  497469.0  1650.0  495311.0  2158.0  381.0  0.004066  ...   0.433298   \n",
              "3  497469.0  1650.0  495354.0  2115.0  381.0  0.004066  ...   0.438247   \n",
              "4  497469.0  1650.0  495386.0  2083.0  381.0  0.004066  ...   0.442004   \n",
              "\n",
              "     recall  specificity       npv  accuracy        f1        f2      f0_5  \\\n",
              "0  0.812408     0.995298  0.999231  0.994555  0.548173  0.681086  0.458665   \n",
              "1  0.812408     0.995489  0.999231  0.994745  0.556962  0.686470  0.468564   \n",
              "2  0.812408     0.995662  0.999231  0.994917  0.565165  0.691418  0.477901   \n",
              "3  0.812408     0.995748  0.999231  0.995003  0.569358  0.693919  0.482710   \n",
              "4  0.812408     0.995813  0.999231  0.995067  0.572519  0.695792  0.486353   \n",
              "\n",
              "         p4       phi  \n",
              "0  0.707466  0.577474  \n",
              "1  0.714769  0.584558  \n",
              "2  0.721512  0.591197  \n",
              "3  0.724931  0.594601  \n",
              "4  0.727497  0.597173  \n",
              "\n",
              "[5 rows x 25 columns]"
            ]
          },
          "execution_count": 40,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "linker.evaluation.accuracy_analysis_from_labels_column(\n",
        "    \"cluster\", output_type=\"table\"\n",
        ").as_pandas_dataframe(limit=5)"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 41,
      "metadata": {
        "execution": {
          "iopub.execute_input": "2024-06-07T09:09:20.771736Z",
          "iopub.status.busy": "2024-06-07T09:09:20.771453Z",
          "iopub.status.idle": "2024-06-07T09:09:21.322647Z",
          "shell.execute_reply": "2024-06-07T09:09:21.322088Z"
        }
      },
      "outputs": [
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\n",
            " -- WARNING --\n",
            "You have called predict(), but there are some parameter estimates which have neither been estimated or specified in your settings dictionary.  To produce predictions the following untrained trained parameters will use default values.\n",
            "Comparison: 'email':\n",
            "    m values not fully trained\n"
          ]
        },
        {
          "data": {
            "text/html": [
              "\n",
              "<style>\n",
              "  #altair-viz-b7fe9c46ac9e4afab3b93118275c69a4.vega-embed {\n",
              "    width: 100%;\n",
              "    display: flex;\n",
              "  }\n",
              "\n",
              "  #altair-viz-b7fe9c46ac9e4afab3b93118275c69a4.vega-embed details,\n",
              "  #altair-viz-b7fe9c46ac9e4afab3b93118275c69a4.vega-embed details summary {\n",
              "    position: relative;\n",
              "  }\n",
              "</style>\n",
              "<div id=\"altair-viz-b7fe9c46ac9e4afab3b93118275c69a4\"></div>\n",
              "<script type=\"text/javascript\">\n",
              "  var VEGA_DEBUG = (typeof VEGA_DEBUG == \"undefined\") ? {} : VEGA_DEBUG;\n",
              "  (function(spec, embedOpt){\n",
              "    let outputDiv = document.currentScript.previousElementSibling;\n",
              "    if (outputDiv.id !== \"altair-viz-b7fe9c46ac9e4afab3b93118275c69a4\") {\n",
              "      outputDiv = document.getElementById(\"altair-viz-b7fe9c46ac9e4afab3b93118275c69a4\");\n",
              "    }\n",
              "    const paths = {\n",
              "      \"vega\": \"https://cdn.jsdelivr.net/npm/vega@5?noext\",\n",
              "      \"vega-lib\": \"https://cdn.jsdelivr.net/npm/vega-lib?noext\",\n",
              "      \"vega-lite\": \"https://cdn.jsdelivr.net/npm/vega-lite@5.17.0?noext\",\n",
              "      \"vega-embed\": \"https://cdn.jsdelivr.net/npm/vega-embed@6?noext\",\n",
              "    };\n",
              "\n",
              "    function maybeLoadScript(lib, version) {\n",
              "      var key = `${lib.replace(\"-\", \"\")}_version`;\n",
              "      return (VEGA_DEBUG[key] == version) ?\n",
              "        Promise.resolve(paths[lib]) :\n",
              "        new Promise(function(resolve, reject) {\n",
              "          var s = document.createElement('script');\n",
              "          document.getElementsByTagName(\"head\")[0].appendChild(s);\n",
              "          s.async = true;\n",
              "          s.onload = () => {\n",
              "            VEGA_DEBUG[key] = version;\n",
              "            return resolve(paths[lib]);\n",
              "          };\n",
              "          s.onerror = () => reject(`Error loading script: ${paths[lib]}`);\n",
              "          s.src = paths[lib];\n",
              "        });\n",
              "    }\n",
              "\n",
              "    function showError(err) {\n",
              "      outputDiv.innerHTML = `<div class=\"error\" style=\"color:red;\">${err}</div>`;\n",
              "      throw err;\n",
              "    }\n",
              "\n",
              "    function displayChart(vegaEmbed) {\n",
              "      vegaEmbed(outputDiv, spec, embedOpt)\n",
              "        .catch(err => showError(`Javascript Error: ${err.message}<br>This usually means there's a typo in your chart specification. See the javascript console for the full traceback.`));\n",
              "    }\n",
              "\n",
              "    if(typeof define === \"function\" && define.amd) {\n",
              "      requirejs.config({paths});\n",
              "      require([\"vega-embed\"], displayChart, err => showError(`Error loading script: ${err.message}`));\n",
              "    } else {\n",
              "      maybeLoadScript(\"vega\", \"5\")\n",
              "        .then(() => maybeLoadScript(\"vega-lite\", \"5.17.0\"))\n",
              "        .then(() => maybeLoadScript(\"vega-embed\", \"6\"))\n",
              "        .catch(showError)\n",
              "        .then(() => displayChart(vegaEmbed));\n",
              "    }\n",
              "  })({\"config\": {\"view\": {\"continuousWidth\": 300, \"continuousHeight\": 300}}, \"data\": {\"name\": \"data-bccad4a7dbaace30ebe55cec591aa918\"}, \"mark\": {\"type\": \"line\", \"clip\": true, \"point\": true}, \"encoding\": {\"tooltip\": [{\"field\": \"truth_threshold\", \"format\": \".4f\", \"type\": \"quantitative\"}, {\"field\": \"match_probability\", \"format\": \".4%\", \"type\": \"quantitative\"}, {\"field\": \"fp_rate\", \"format\": \".4f\", \"title\": \"FP_rate\", \"type\": \"quantitative\"}, {\"field\": \"tp_rate\", \"format\": \".4f\", \"title\": \"TP_rate\", \"type\": \"quantitative\"}, {\"field\": \"tp\", \"format\": \",.0f\", \"title\": \"TP\", \"type\": \"quantitative\"}, {\"field\": \"tn\", \"format\": \",.0f\", \"title\": \"TN\", \"type\": \"quantitative\"}, {\"field\": \"fp\", \"format\": \",.0f\", \"title\": \"FP\", \"type\": \"quantitative\"}, {\"field\": \"fn\", \"format\": \",.0f\", \"title\": \"FN\", \"type\": \"quantitative\"}, {\"field\": \"precision\", \"format\": \".4f\", \"type\": \"quantitative\"}, {\"field\": \"recall\", \"format\": \".4f\", \"type\": \"quantitative\"}, {\"field\": \"f1\", \"format\": \".4f\", \"title\": \"F1\", \"type\": \"quantitative\"}], \"x\": {\"field\": \"fp_rate\", \"sort\": [\"truth_threshold\"], \"title\": \"False Positive Rate amongst clerically reviewed records\", \"type\": \"quantitative\"}, \"y\": {\"field\": \"tp_rate\", \"sort\": [\"truth_threshold\"], \"title\": \"True Positive Rate amongst clerically reviewed records\", \"type\": \"quantitative\"}}, \"height\": 400, \"params\": [{\"name\": \"mouse_zoom\", \"select\": {\"type\": \"interval\", \"encodings\": [\"x\"]}, \"bind\": \"scales\"}], \"title\": \"Receiver operating characteristic curve\", \"width\": 400, \"$schema\": \"https://vega.github.io/schema/vega-lite/v5.9.3.json\", \"datasets\": {\"data-bccad4a7dbaace30ebe55cec591aa918\": [{\"truth_threshold\": -17.80000026524067, \"match_probability\": 4.381916466936514e-06, 
\"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 495130.0, \"fp\": 2339.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9952981994857971, \"fp_rate\": 0.0047018005142028954, \"fn_rate\": 0.18759231905465287, \"precision\": 0.41363750313361747, \"recall\": 0.8124076809453471, \"specificity\": 0.9952981994857971, \"npv\": 0.9992310967869533, \"accuracy\": 0.9945545545545545, \"f1\": 0.5481727574750831, \"f2\": 0.681086436060431, \"f0_5\": 0.45866459109356755, \"p4\": 0.7074664508208482, \"phi\": 0.5774741518035404}, {\"truth_threshold\": -17.700000263750553, \"match_probability\": 4.696420312114957e-06, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 495225.0, \"fp\": 2244.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9954891661590973, \"fp_rate\": 0.004510833840902649, \"fn_rate\": 0.18759231905465287, \"precision\": 0.423728813559322, \"recall\": 0.8124076809453471, \"specificity\": 0.9954891661590973, \"npv\": 0.9992312441737994, \"accuracy\": 0.9947447447447447, \"f1\": 0.5569620253164557, \"f2\": 0.6864702945581628, \"f0_5\": 0.4685636394615778, \"p4\": 0.7147694968503172, \"phi\": 0.5845580356933797}, {\"truth_threshold\": -17.100000254809856, \"match_probability\": 7.118424873502875e-06, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 495311.0, \"fp\": 2158.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9956620412528218, \"fp_rate\": 0.004337958747178216, \"fn_rate\": 0.18759231905465287, \"precision\": 0.4332983193277311, \"recall\": 0.8124076809453471, \"specificity\": 0.9956620412528218, \"npv\": 0.9992313775489619, \"accuracy\": 0.994916916916917, \"f1\": 0.5651652680253468, 
\"f2\": 0.6914180355346966, \"f0_5\": 0.47790071250651683, \"p4\": 0.7215119201499676, \"phi\": 0.5911972192065922}, {\"truth_threshold\": -17.00000025331974, \"match_probability\": 7.629334984424643e-06, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 495354.0, \"fp\": 2115.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.995748478799684, \"fp_rate\": 0.004251521200315999, \"fn_rate\": 0.18759231905465287, \"precision\": 0.43824701195219123, \"recall\": 0.8124076809453471, \"specificity\": 0.995748478799684, \"npv\": 0.9992314442191896, \"accuracy\": 0.995003003003003, \"f1\": 0.5693581780538303, \"f2\": 0.6939187484229119, \"f0_5\": 0.4827101983500088, \"p4\": 0.7249310558395574, \"phi\": 0.5946014708278546}, {\"truth_threshold\": -16.900000251829624, \"match_probability\": 8.176914304005986e-06, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 495386.0, \"fp\": 2083.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9958128044159535, \"fp_rate\": 0.0041871955840464435, \"fn_rate\": 0.18759231905465287, \"precision\": 0.44200375033485134, \"recall\": 0.8124076809453471, \"specificity\": 0.9958128044159535, \"npv\": 0.9992314938267371, \"accuracy\": 0.995067067067067, \"f1\": 0.5725190839694656, \"f2\": 0.6957915155604284, \"f0_5\": 0.4863526498850439, \"p4\": 0.7274966332137337, \"phi\": 0.5971728084857775}, {\"truth_threshold\": -16.800000250339508, \"match_probability\": 8.76379462217525e-06, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 495431.0, \"fp\": 2038.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9959032623138326, \"fp_rate\": 0.0040967376861673795, \"fn_rate\": 
0.18759231905465287, \"precision\": 0.44739696312364424, \"recall\": 0.8124076809453471, \"specificity\": 0.9959032623138326, \"npv\": 0.9992315635765169, \"accuracy\": 0.9951571571571571, \"f1\": 0.5770239552369295, \"f2\": 0.6984422621063325, \"f0_5\": 0.49156884943097184, \"p4\": 0.7311353521049642, \"phi\": 0.6008450097183674}, {\"truth_threshold\": -16.700000248849392, \"match_probability\": 9.392796608724036e-06, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 495477.0, \"fp\": 1992.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9959957303872201, \"fp_rate\": 0.0040042696127798915, \"fn_rate\": 0.18759231905465287, \"precision\": 0.45304777594728174, \"recall\": 0.8124076809453471, \"specificity\": 0.9959957303872201, \"npv\": 0.9992316348632068, \"accuracy\": 0.9952492492492493, \"f1\": 0.5817028027498677, \"f2\": 0.7011728709841918, \"f0_5\": 0.4970178926441352, \"p4\": 0.734892744301747, \"phi\": 0.6046686849628337}, {\"truth_threshold\": -16.600000247359276, \"match_probability\": 1.0066943367963594e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 495552.0, \"fp\": 1917.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9961464935503519, \"fp_rate\": 0.0038535064496481187, \"fn_rate\": 0.18759231905465287, \"precision\": 0.4625735912531539, \"recall\": 0.8124076809453471, \"specificity\": 0.9961464935503519, \"npv\": 0.9992317510631477, \"accuracy\": 0.9953993993993994, \"f1\": 0.5894962486602358, \"f2\": 0.7056710289966641, \"f0_5\": 0.5061660224553653, \"p4\": 0.7411024363708335, \"phi\": 0.6110602567130846}, {\"truth_threshold\": -16.50000024586916, \"match_probability\": 1.0789474965962542e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 495569.0, 
\"fp\": 1900.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9961806665339951, \"fp_rate\": 0.003819333466004917, \"fn_rate\": 0.18759231905465287, \"precision\": 0.4647887323943662, \"recall\": 0.8124076809453471, \"specificity\": 0.9961806665339951, \"npv\": 0.999231777396915, \"accuracy\": 0.9954334334334334, \"f1\": 0.5912918831750582, \"f2\": 0.7066986465650162, \"f0_5\": 0.5082866120386914, \"p4\": 0.7425245828506902, \"phi\": 0.6125370006853904}, {\"truth_threshold\": -16.400000244379044, \"match_probability\": 1.1563864000129272e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 495639.0, \"fp\": 1830.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9963213788195847, \"fp_rate\": 0.003678621180415262, \"fn_rate\": 0.18759231905465287, \"precision\": 0.47413793103448276, \"recall\": 0.8124076809453471, \"specificity\": 0.9963213788195847, \"npv\": 0.999231885811056, \"accuracy\": 0.9955735735735736, \"f1\": 0.5988023952095808, \"f2\": 0.7109617373319544, \"f0_5\": 0.5172089524167763, \"p4\": 0.7484384428713122, \"phi\": 0.6187309002293246}, {\"truth_threshold\": -16.300000242888927, \"match_probability\": 1.239383228590334e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 495647.0, \"fp\": 1822.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9963374602236521, \"fp_rate\": 0.003662539776347873, \"fn_rate\": 0.18759231905465287, \"precision\": 0.47523041474654376, \"recall\": 0.8124076809453471, \"specificity\": 0.9963374602236521, \"npv\": 0.9992318981992953, \"accuracy\": 0.9955895895895895, \"f1\": 0.5996729056878066, \"f2\": 0.7114522249051397, \"f0_5\": 0.5182486337081474, \"p4\": 0.7491203158782602, \"phi\": 
0.6194506358562824}, {\"truth_threshold\": -16.20000024139881, \"match_probability\": 1.328336874067903e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 495716.0, \"fp\": 1753.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9964761623337334, \"fp_rate\": 0.003523837666266642, \"fn_rate\": 0.18759231905465287, \"precision\": 0.484866294446077, \"recall\": 0.8124076809453471, \"specificity\": 0.9964761623337334, \"npv\": 0.9992320050312741, \"accuracy\": 0.9957277277277278, \"f1\": 0.6072874493927125, \"f2\": 0.7157109395332697, \"f0_5\": 0.5273924439046219, \"p4\": 0.7550534475391266, \"phi\": 0.6257629687078863}, {\"truth_threshold\": -16.100000239908695, \"match_probability\": 1.4236748550826774e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 495728.0, \"fp\": 1741.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9965002844398344, \"fp_rate\": 0.003499715560165558, \"fn_rate\": 0.18759231905465287, \"precision\": 0.4865821291654379, \"recall\": 0.8124076809453471, \"specificity\": 0.9965002844398344, \"npv\": 0.9992320236077152, \"accuracy\": 0.9957517517517518, \"f1\": 0.6086315012910365, \"f2\": 0.7164567954841511, \"f0_5\": 0.5290157101635139, \"p4\": 0.7560949028519492, \"phi\": 0.6268803218119524}, {\"truth_threshold\": -16.00000023841858, \"match_probability\": 1.5258553713831415e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 495741.0, \"fp\": 1728.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9965264167214439, \"fp_rate\": 0.0034735832785560508, \"fn_rate\": 0.18759231905465287, \"precision\": 0.48845470692717585, \"recall\": 0.8124076809453471, \"specificity\": 
0.9965264167214439, \"npv\": 0.999232043731179, \"accuracy\": 0.9957777777777778, \"f1\": 0.6100942872989462, \"f2\": 0.7172665623369848, \"f0_5\": 0.5307855626326964, \"p4\": 0.7572263928542975, \"phi\": 0.6280974777754371}, {\"truth_threshold\": -15.900000236928463, \"match_probability\": 1.6353695054159956e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 495789.0, \"fp\": 1680.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9966229051458483, \"fp_rate\": 0.003377094854151716, \"fn_rate\": 0.18759231905465287, \"precision\": 0.4954954954954955, \"recall\": 0.8124076809453471, \"specificity\": 0.9966229051458483, \"npv\": 0.9992321180240643, \"accuracy\": 0.9958738738738738, \"f1\": 0.6155567991046447, \"f2\": 0.7202723939235202, \"f0_5\": 0.5374242720343951, \"p4\": 0.7614336999409107, \"phi\": 0.6326529626274011}, {\"truth_threshold\": -15.800000235438347, \"match_probability\": 1.7527435818536736e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 495806.0, \"fp\": 1663.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9966570781294914, \"fp_rate\": 0.0033429218705085142, \"fn_rate\": 0.18759231905465287, \"precision\": 0.49803803199517055, \"recall\": 0.8124076809453471, \"specificity\": 0.9966570781294914, \"npv\": 0.999232144332681, \"accuracy\": 0.995907907907908, \"f1\": 0.6175149700598802, \"f2\": 0.7213430095304713, \"f0_5\": 0.5398154812536805, \"p4\": 0.7629350207842558, \"phi\": 0.6342899773808474}, {\"truth_threshold\": -15.70000023394823, \"match_probability\": 1.8785416963874395e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 495829.0, \"fp\": 1640.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 
0.8124076809453471, \"tn_rate\": 0.9967033121661852, \"fp_rate\": 0.0032966878338147702, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5015197568389058, \"recall\": 0.8124076809453471, \"specificity\": 0.9967033121661852, \"npv\": 0.9992321799238226, \"accuracy\": 0.9959539539539539, \"f1\": 0.6201841759067844, \"f2\": 0.722796565621167, \"f0_5\": 0.5430847212165097, \"p4\": 0.7649756675687114, \"phi\": 0.6365248595747179}, {\"truth_threshold\": -15.600000232458115, \"match_probability\": 2.0133684259220603e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 495860.0, \"fp\": 1609.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9967656276069463, \"fp_rate\": 0.0032343723930536375, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5062902730899049, \"recall\": 0.8124076809453471, \"specificity\": 0.9967656276069463, \"npv\": 0.9992322278892716, \"accuracy\": 0.996016016016016, \"f1\": 0.6238185255198487, \"f2\": 0.7247650004392515, \"f0_5\": 0.5475542576491671, \"p4\": 0.7677434382344854, \"phi\": 0.6395743230187952}, {\"truth_threshold\": -15.500000230967999, \"match_probability\": 2.1578717331772276e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 495889.0, \"fp\": 1580.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9968239226966906, \"fp_rate\": 0.003176077303309352, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5108359133126935, \"recall\": 0.8124076809453471, \"specificity\": 0.9968239226966906, \"npv\": 0.9992322727547505, \"accuracy\": 0.9960740740740741, \"f1\": 0.6272571754419312, \"f2\": 0.7266161705125946, \"f0_5\": 0.5518025550130427, \"p4\": 0.7703508352719283, \"phi\": 0.642466572031684}, {\"truth_threshold\": -15.400000229477882, \"match_probability\": 2.312746079632102e-05, 
\"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 495895.0, \"fp\": 1574.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9968359837497411, \"fp_rate\": 0.00316401625025881, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5117866004962779, \"recall\": 0.8124076809453471, \"specificity\": 0.9968359837497411, \"npv\": 0.9992322820366086, \"accuracy\": 0.9960860860860861, \"f1\": 0.6279733587059942, \"f2\": 0.7270003524850194, \"f0_5\": 0.552689756816507, \"p4\": 0.7708925093222379, \"phi\": 0.6430698198712967}, {\"truth_threshold\": -15.300000227987766, \"match_probability\": 2.478735761747151e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 495944.0, \"fp\": 1525.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.996934482349654, \"fp_rate\": 0.0030655176503460516, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5196850393700787, \"recall\": 0.8124076809453471, \"specificity\": 0.996934482349654, \"npv\": 0.9992323578300508, \"accuracy\": 0.9961841841841842, \"f1\": 0.6338839800230504, \"f2\": 0.7301531108947694, \"f0_5\": 0.5600434457945829, \"p4\": 0.7753448568153782, \"phi\": 0.6480599731671169}, {\"truth_threshold\": -15.20000022649765, \"match_probability\": 2.6566384864664307e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 495978.0, \"fp\": 1491.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9970028283169403, \"fp_rate\": 0.0029971716830596478, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5253104106972302, \"recall\": 0.8124076809453471, \"specificity\": 0.9970028283169403, \"npv\": 0.9992324104126248, \"accuracy\": 0.9962522522522522, \"f1\": 0.6380510440835266, 
\"f2\": 0.7323568575233023, \"f0_5\": 0.5652620760534429, \"p4\": 0.7784645844600595, \"phi\": 0.6515907242774444}, {\"truth_threshold\": -15.100000225007534, \"match_probability\": 2.8473092031487608e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496003.0, \"fp\": 1466.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.997053082704651, \"fp_rate\": 0.002946917295349057, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5295250320924262, \"recall\": 0.8124076809453471, \"specificity\": 0.997053082704651, \"npv\": 0.9992324490716864, \"accuracy\": 0.9963023023023023, \"f1\": 0.641150184573538, \"f2\": 0.7339857651245552, \"f0_5\": 0.5691617799241118, \"p4\": 0.7807745651409332, \"phi\": 0.65422353556621}, {\"truth_threshold\": -14.900000222027302, \"match_probability\": 3.270685556819147e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496025.0, \"fp\": 1444.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9970973065658363, \"fp_rate\": 0.0029026934341637367, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5332902391725921, \"recall\": 0.8124076809453471, \"specificity\": 0.9970973065658363, \"npv\": 0.9992324830884397, \"accuracy\": 0.9963463463463463, \"f1\": 0.6439024390243903, \"f2\": 0.7354252094847566, \"f0_5\": 0.5726383008259873, \"p4\": 0.7828187173219892, \"phi\": 0.6565666756951178}, {\"truth_threshold\": -14.800000220537186, \"match_probability\": 3.505425758788192e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496064.0, \"fp\": 1405.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9971757034106647, \"fp_rate\": 0.0028242965893352148, \"fn_rate\": 
0.18759231905465287, \"precision\": 0.5400981996726678, \"recall\": 0.8124076809453471, \"specificity\": 0.9971757034106647, \"npv\": 0.9992325433834564, \"accuracy\": 0.9964244244244245, \"f1\": 0.6488399528116398, \"f2\": 0.7379908757491725, \"f0_5\": 0.578906743386429, \"p4\": 0.7864688685019895, \"phi\": 0.6607822699275607}, {\"truth_threshold\": -14.70000021904707, \"match_probability\": 3.757012854526189e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496065.0, \"fp\": 1404.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9971777135861732, \"fp_rate\": 0.002822286413826791, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5402750491159135, \"recall\": 0.8124076809453471, \"specificity\": 0.9971777135861732, \"npv\": 0.9992325449293579, \"accuracy\": 0.9964264264264264, \"f1\": 0.6489675516224189, \"f2\": 0.7380568974771873, \"f0_5\": 0.5790692777426827, \"p4\": 0.7865629097515164, \"phi\": 0.6608914195069355}, {\"truth_threshold\": -14.500000216066837, \"match_probability\": 4.315650384728788e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496071.0, \"fp\": 1398.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9971897746392238, \"fp_rate\": 0.0028102253607762495, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5413385826771654, \"recall\": 0.8124076809453471, \"specificity\": 0.9971897746392238, \"npv\": 0.9992325542046361, \"accuracy\": 0.9964384384384385, \"f1\": 0.6497341996455995, \"f2\": 0.738453276047261, \"f0_5\": 0.580046403712297, \"p4\": 0.7871276298466465, \"phi\": 0.6615474412179109}, {\"truth_threshold\": -14.400000214576721, \"match_probability\": 4.625385233621647e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496086.0, \"fp\": 
1383.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9972199272718502, \"fp_rate\": 0.0027800727281498947, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5440158259149357, \"recall\": 0.8124076809453471, \"specificity\": 0.9972199272718502, \"npv\": 0.9992325773918508, \"accuracy\": 0.9964684684684685, \"f1\": 0.6516587677725119, \"f2\": 0.7394460876579726, \"f0_5\": 0.5825037068417708, \"p4\": 0.7885429839164078, \"phi\": 0.663195978152073}, {\"truth_threshold\": -14.300000213086605, \"match_probability\": 4.957348695121048e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496099.0, \"fp\": 1370.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9972460595534596, \"fp_rate\": 0.0027539404465403874, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5463576158940397, \"recall\": 0.8124076809453471, \"specificity\": 0.9972460595534596, \"npv\": 0.9992325974863036, \"accuracy\": 0.9964944944944945, \"f1\": 0.6533359730746386, \"f2\": 0.7403086862885858, \"f0_5\": 0.5846502728367939, \"p4\": 0.7897737475233195, \"phi\": 0.6646346041570037}, {\"truth_threshold\": -14.200000211596489, \"match_probability\": 5.313135876996633e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496109.0, \"fp\": 1360.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9972661613085438, \"fp_rate\": 0.002733838691456151, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5481727574750831, \"recall\": 0.8124076809453471, \"specificity\": 0.9972661613085438, \"npv\": 0.9992326129428589, \"accuracy\": 0.9965145145145146, \"f1\": 0.6546320174568538, \"f2\": 0.7409735943955452, \"f0_5\": 0.5863122734702579, \"p4\": 0.7907231061133986, \"phi\": 
0.665747556913812}, {\"truth_threshold\": -14.100000210106373, \"match_probability\": 5.694456326333118e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496119.0, \"fp\": 1350.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9972862630636281, \"fp_rate\": 0.0027137369363719145, \"fn_rate\": 0.18759231905465287, \"precision\": 0.55, \"recall\": 0.8124076809453471, \"specificity\": 0.9972862630636281, \"npv\": 0.9992326283987916, \"accuracy\": 0.9965345345345346, \"f1\": 0.655933214072749, \"f2\": 0.7416396979503775, \"f0_5\": 0.5879837502672653, \"p4\": 0.7916747497021438, \"phi\": 0.6668660533170312}, {\"truth_threshold\": -14.000000208616257, \"match_probability\": 6.103142236234761e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496168.0, \"fp\": 1301.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9973847616635408, \"fp_rate\": 0.0026152383364591563, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5591324974584887, \"recall\": 0.8124076809453471, \"specificity\": 0.9973847616635408, \"npv\": 0.9992327041238629, \"accuracy\": 0.9966326326326327, \"f1\": 0.6623845845042152, \"f2\": 0.744920993227991, \"f0_5\": 0.5963136971449223, \"p4\": 0.7963711088402224, \"phi\": 0.6724283757079088}, {\"truth_threshold\": -13.90000020712614, \"match_probability\": 6.541157240512605e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496172.0, \"fp\": 1297.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9973928023655746, \"fp_rate\": 0.0026071976344254617, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5598914149983033, \"recall\": 0.8124076809453471, \"specificity\": 
0.9973928023655746, \"npv\": 0.9992327103048416, \"accuracy\": 0.9966406406406406, \"f1\": 0.6629168340699076, \"f2\": 0.7451901363923765, \"f0_5\": 0.597004124755771, \"p4\": 0.7967569460627082, \"phi\": 0.6728885397309743}, {\"truth_threshold\": -13.700000204145908, \"match_probability\": 7.51374349434771e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496175.0, \"fp\": 1294.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9973988328920999, \"fp_rate\": 0.0026011671079001907, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5604619565217391, \"recall\": 0.8124076809453471, \"specificity\": 0.9973988328920999, \"npv\": 0.9992327149405102, \"accuracy\": 0.9966466466466466, \"f1\": 0.6633165829145728, \"f2\": 0.7453921214311529, \"f0_5\": 0.5975229955819512, \"p4\": 0.7970465694085548, \"phi\": 0.6732342762443393}, {\"truth_threshold\": -13.600000202655792, \"match_probability\": 8.052987461117984e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496182.0, \"fp\": 1287.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9974129041206587, \"fp_rate\": 0.0025870958793412255, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5617977528089888, \"recall\": 0.8124076809453471, \"specificity\": 0.9974129041206587, \"npv\": 0.9992327257568526, \"accuracy\": 0.9966606606606606, \"f1\": 0.6642512077294686, \"f2\": 0.7458638459452129, \"f0_5\": 0.5987372087959939, \"p4\": 0.797723176701015, \"phi\": 0.6740430478775454}, {\"truth_threshold\": -13.500000201165676, \"match_probability\": 8.630928377906233e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496183.0, \"fp\": 1286.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 
0.8124076809453471, \"tn_rate\": 0.9974149142961672, \"fp_rate\": 0.0025850857038328015, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5619891008174387, \"recall\": 0.8124076809453471, \"specificity\": 0.9974149142961672, \"npv\": 0.9992327273020195, \"accuracy\": 0.9966626626626627, \"f1\": 0.6643849406080129, \"f2\": 0.7459312839059674, \"f0_5\": 0.5989110707803993, \"p4\": 0.7978199286660785, \"phi\": 0.674158822054482}, {\"truth_threshold\": -13.40000019967556, \"match_probability\": 9.25034269879762e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496206.0, \"fp\": 1263.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9974611483328609, \"fp_rate\": 0.002538851667139058, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5664263645726055, \"recall\": 0.8124076809453471, \"specificity\": 0.9974611483328609, \"npv\": 0.99923276283914, \"accuracy\": 0.9967087087087088, \"f1\": 0.6674757281553398, \"f2\": 0.7474857298178853, \"f0_5\": 0.6029379522034641, \"p4\": 0.8000517191080434, \"phi\": 0.6768380125905187}, {\"truth_threshold\": -13.300000198185444, \"match_probability\": 9.914206010875549e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496228.0, \"fp\": 1241.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9975053721940462, \"fp_rate\": 0.002494627805953738, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5707367692839848, \"recall\": 0.8124076809453471, \"specificity\": 0.9975053721940462, \"npv\": 0.9992327968280881, \"accuracy\": 0.9967527527527528, \"f1\": 0.6704591629418936, \"f2\": 0.7489786654561961, \"f0_5\": 0.6068407502758367, \"p4\": 0.8021981896092447, \"phi\": 0.6794304905480503}, {\"truth_threshold\": -13.200000196695328, \"match_probability\": 0.00010625707305470121, 
\"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496238.0, \"fp\": 1231.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9975254739491305, \"fp_rate\": 0.002474526050869501, \"fn_rate\": 0.18759231905465287, \"precision\": 0.572717806317251, \"recall\": 0.8124076809453471, \"specificity\": 0.9975254739491305, \"npv\": 0.9992328122766144, \"accuracy\": 0.9967727727727728, \"f1\": 0.6718241042345277, \"f2\": 0.7496592457973649, \"f0_5\": 0.6086315012910365, \"p4\": 0.8031776699267482, \"phi\": 0.6806186663334892}, {\"truth_threshold\": -13.100000195205212, \"match_probability\": 0.00011388264270550263, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496258.0, \"fp\": 1211.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9975656774592989, \"fp_rate\": 0.0024343225407010287, \"fn_rate\": 0.18759231905465287, \"precision\": 0.576721426074799, \"recall\": 0.8124076809453471, \"specificity\": 0.9975656774592989, \"npv\": 0.9992328431718008, \"accuracy\": 0.9968128128128129, \"f1\": 0.6745707277187244, \"f2\": 0.7510241238051889, \"f0_5\": 0.6122448979591837, \"p4\": 0.8051438233772689, \"phi\": 0.6830136263275881}, {\"truth_threshold\": -13.000000193715096, \"match_probability\": 0.00012205539677081966, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496277.0, \"fp\": 1192.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9976038707939591, \"fp_rate\": 0.0023961292060409793, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5805770584095707, \"recall\": 0.8124076809453471, \"specificity\": 0.9976038707939591, \"npv\": 0.9992328725199232, \"accuracy\": 0.9968508508508509, \"f1\": 
0.6772009029345373, \"f2\": 0.7523253693233631, \"f0_5\": 0.6157175908649899, \"p4\": 0.8070206056804747, \"phi\": 0.6853121492181934}, {\"truth_threshold\": -12.90000019222498, \"match_probability\": 0.00013081458937332365, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496323.0, \"fp\": 1146.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9976963388673465, \"fp_rate\": 0.002303661132653492, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5901287553648069, \"recall\": 0.8124076809453471, \"specificity\": 0.9976963388673465, \"npv\": 0.9992329435639737, \"accuracy\": 0.996942942942943, \"f1\": 0.6836544437538844, \"f2\": 0.7554945054945055, \"f0_5\": 0.6242905788876277, \"p4\": 0.8116008322256546, \"phi\": 0.690973430332776}, {\"truth_threshold\": -12.800000190734863, \"match_probability\": 0.00014020228918616167, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496331.0, \"fp\": 1138.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9977124202714139, \"fp_rate\": 0.002287579728586103, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5918220946915351, \"recall\": 0.8124076809453471, \"specificity\": 0.9977124202714139, \"npv\": 0.9992329559181176, \"accuracy\": 0.9969589589589589, \"f1\": 0.6847893753890849, \"f2\": 0.7560483870967742, \"f0_5\": 0.6258059622240765, \"p4\": 0.8124027054184834, \"phi\": 0.6919722374666267}, {\"truth_threshold\": -12.700000189244747, \"match_probability\": 0.00015026358101882152, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496337.0, \"fp\": 1132.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9977244813244645, \"fp_rate\": 0.002275518675535561, 
\"fn_rate\": 0.18759231905465287, \"precision\": 0.5930984902947519, \"recall\": 0.8124076809453471, \"specificity\": 0.9977244813244645, \"npv\": 0.9992329651834643, \"accuracy\": 0.996970970970971, \"f1\": 0.6856430500727198, \"f2\": 0.7564643315606089, \"f0_5\": 0.6269473364237405, \"p4\": 0.8130051508717604, \"phi\": 0.6927241606924014}, {\"truth_threshold\": -12.600000187754631, \"match_probability\": 0.0001610467818084837, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496342.0, \"fp\": 1127.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9977345322020066, \"fp_rate\": 0.0022654677979934428, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5941663665826431, \"recall\": 0.8124076809453471, \"specificity\": 0.9977345322020066, \"npv\": 0.9992329729044155, \"accuracy\": 0.996980980980981, \"f1\": 0.6863560732113144, \"f2\": 0.756811301715439, \"f0_5\": 0.6279016667935154, \"p4\": 0.8135078716684839, \"phi\": 0.6933526189264908}, {\"truth_threshold\": -12.500000186264515, \"match_probability\": 0.00017260367204143044, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496347.0, \"fp\": 1122.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9977445830795487, \"fp_rate\": 0.0022554169204513246, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5952380952380952, \"recall\": 0.8124076809453471, \"specificity\": 0.9977445830795487, \"npv\": 0.9992329806252114, \"accuracy\": 0.996990990990991, \"f1\": 0.6870705808869456, \"f2\": 0.7571585903083701, \"f0_5\": 0.6288589069288818, \"p4\": 0.8140112145298072, \"phi\": 0.693982772126206}, {\"truth_threshold\": -12.400000184774399, \"match_probability\": 0.00018498974370122882, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 
496367.0, \"fp\": 1102.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9977847865897171, \"fp_rate\": 0.0022152134102828517, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5995639534883721, \"recall\": 0.8124076809453471, \"specificity\": 0.9977847865897171, \"npv\": 0.9992330115068405, \"accuracy\": 0.9970310310310311, \"f1\": 0.6899435500731759, \"f2\": 0.7585509378447959, \"f0_5\": 0.6327172329166347, \"p4\": 0.8160308297706109, \"phi\": 0.6965204883002213}, {\"truth_threshold\": -12.300000183284283, \"match_probability\": 0.00019826446591752426, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496381.0, \"fp\": 1088.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.997812929046835, \"fp_rate\": 0.002187070953164921, \"fn_rate\": 0.18759231905465287, \"precision\": 0.6026296566837107, \"recall\": 0.8124076809453471, \"specificity\": 0.997812929046835, \"npv\": 0.9992330331225013, \"accuracy\": 0.997059059059059, \"f1\": 0.6919689662403019, \"f2\": 0.7595286319278217, \"f0_5\": 0.6354463529230532, \"p4\": 0.8174505333389831, \"phi\": 0.6983133649489494}, {\"truth_threshold\": -12.200000181794167, \"match_probability\": 0.00021249156957169895, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496411.0, \"fp\": 1058.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9978732343120877, \"fp_rate\": 0.0021267656879122116, \"fn_rate\": 0.18759231905465287, \"precision\": 0.6093057607090103, \"recall\": 0.8124076809453471, \"specificity\": 0.9978732343120877, \"npv\": 0.9992330794376721, \"accuracy\": 0.9971191191191191, \"f1\": 0.6963494408102975, \"f2\": 0.7616322008862629, \"f0_5\": 0.641374484956853, \"p4\": 0.8205094509939885, 
\"phi\": 0.702201828795505}, {\"truth_threshold\": -12.10000018030405, \"match_probability\": 0.0002277393522037113, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496417.0, \"fp\": 1052.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9978852953651384, \"fp_rate\": 0.00211470463486167, \"fn_rate\": 0.18759231905465287, \"precision\": 0.6106587712805329, \"recall\": 0.8124076809453471, \"specificity\": 0.9978852953651384, \"npv\": 0.999233088700035, \"accuracy\": 0.9971311311311312, \"f1\": 0.6972321994506655, \"f2\": 0.7620543136892666, \"f0_5\": 0.6425734091440143, \"p4\": 0.8211239836030536, \"phi\": 0.7029872619409204}, {\"truth_threshold\": -12.000000178813934, \"match_probability\": 0.00024408100465850272, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496442.0, \"fp\": 1027.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9979355497528489, \"fp_rate\": 0.002064450247151079, \"fn_rate\": 0.18759231905465287, \"precision\": 0.616361598804632, \"recall\": 0.8124076809453471, \"specificity\": 0.9979355497528489, \"npv\": 0.9992331272908058, \"accuracy\": 0.9971811811811812, \"f1\": 0.7009345794392523, \"f2\": 0.7638181649847237, \"f0_5\": 0.6476175523981474, \"p4\": 0.8236944750682825, \"phi\": 0.7062881983616519}, {\"truth_threshold\": -11.900000177323818, \"match_probability\": 0.0002615949610108224, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496459.0, \"fp\": 1010.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9979697227364921, \"fp_rate\": 0.002030277263507877, \"fn_rate\": 0.18759231905465287, \"precision\": 0.6203007518796992, \"recall\": 0.8124076809453471, 
\"specificity\": 0.9979697227364921, \"npv\": 0.9992331535303116, \"accuracy\": 0.9972152152152152, \"f1\": 0.70347473886165, \"f2\": 0.7650222551928784, \"f0_5\": 0.6510930471154605, \"p4\": 0.8254516210525357, \"phi\": 0.7085592972779587}, {\"truth_threshold\": -11.800000175833702, \"match_probability\": 0.0002803652734145845, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496482.0, \"fp\": 987.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9980159567731859, \"fp_rate\": 0.001984043226814133, \"fn_rate\": 0.18759231905465287, \"precision\": 0.6257110352673493, \"recall\": 0.8124076809453471, \"specificity\": 0.9980159567731859, \"npv\": 0.9992331890279614, \"accuracy\": 0.9972612612612612, \"f1\": 0.7069408740359897, \"f2\": 0.766657373850014, \"f0_5\": 0.6558549964226091, \"p4\": 0.8278409028277725, \"phi\": 0.711666756020395}, {\"truth_threshold\": -11.700000174343586, \"match_probability\": 0.0003004820136373637, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496492.0, \"fp\": 977.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9980360585282702, \"fp_rate\": 0.0019639414717298968, \"fn_rate\": 0.18759231905465287, \"precision\": 0.6280928816140083, \"recall\": 0.8124076809453471, \"specificity\": 0.9980360585282702, \"npv\": 0.9992332044606972, \"accuracy\": 0.9972812812812812, \"f1\": 0.7084585659081151, \"f2\": 0.7673704771649149, \"f0_5\": 0.6579472047212697, \"p4\": 0.8288840404075595, \"phi\": 0.7130305036025687}, {\"truth_threshold\": -11.60000017285347, \"match_probability\": 0.0003220417031628006, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496506.0, \"fp\": 963.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, 
\"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9980642009853881, \"fp_rate\": 0.0019357990146119657, \"fn_rate\": 0.18759231905465287, \"precision\": 0.6314580941446614, \"recall\": 0.8124076809453471, \"specificity\": 0.9980642009853881, \"npv\": 0.9992332260654837, \"accuracy\": 0.9973093093093093, \"f1\": 0.710594315245478, \"f2\": 0.7683710533668623, \"f0_5\": 0.6608988223984619, \"p4\": 0.8303488570615297, \"phi\": 0.7149528524372053}, {\"truth_threshold\": -11.400000169873238, \"match_probability\": 0.0003699110614699968, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1649.0, \"tn\": 496508.0, \"fp\": 961.0, \"fn\": 382.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8119153126538651, \"tn_rate\": 0.9980682213364048, \"fp_rate\": 0.0019317786635951184, \"fn_rate\": 0.1880846873461349, \"precision\": 0.6318007662835249, \"recall\": 0.8119153126538651, \"specificity\": 0.9980682213364048, \"npv\": 0.9992312181770613, \"accuracy\": 0.9973113113113113, \"f1\": 0.7106227106227107, \"f2\": 0.7681199925470468, \"f0_5\": 0.6611338304867292, \"p4\": 0.8303685921194651, \"phi\": 0.7149304279678399}, {\"truth_threshold\": -11.300000168383121, \"match_probability\": 0.00039645033391533577, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1649.0, \"tn\": 496514.0, \"fp\": 955.0, \"fn\": 382.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8119153126538651, \"tn_rate\": 0.9980802823894555, \"fp_rate\": 0.0019197176105445767, \"fn_rate\": 0.1880846873461349, \"precision\": 0.6332565284178188, \"recall\": 0.8119153126538651, \"specificity\": 0.9980802823894555, \"npv\": 0.9992312274600721, \"accuracy\": 0.9973233233233233, \"f1\": 0.7115426105717367, \"f2\": 0.7685495898583147, \"f0_5\": 0.6624086125170724, \"p4\": 0.8309983673070499, \"phi\": 0.7157599431233398}, {\"truth_threshold\": -11.200000166893005, \"match_probability\": 
0.00042489285738089063, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1649.0, \"tn\": 496547.0, \"fp\": 922.0, \"fn\": 382.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8119153126538651, \"tn_rate\": 0.9981466181812334, \"fp_rate\": 0.0018533818187665965, \"fn_rate\": 0.1880846873461349, \"precision\": 0.6413846752236484, \"recall\": 0.8119153126538651, \"specificity\": 0.9981466181812334, \"npv\": 0.9992312785126245, \"accuracy\": 0.9973893893893894, \"f1\": 0.7166449369839201, \"f2\": 0.7709209911173446, \"f0_5\": 0.6695087291920422, \"p4\": 0.8344792770642273, \"phi\": 0.7203739281840877}, {\"truth_threshold\": -11.000000163912773, \"match_probability\": 0.00048804289235713973, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1649.0, \"tn\": 496585.0, \"fp\": 884.0, \"fn\": 382.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8119153126538651, \"tn_rate\": 0.9982230048505535, \"fp_rate\": 0.0017769951494464981, \"fn_rate\": 0.1880846873461349, \"precision\": 0.6510067114093959, \"recall\": 0.8119153126538651, \"specificity\": 0.9982230048505535, \"npv\": 0.9992313372920133, \"accuracy\": 0.9974654654654654, \"f1\": 0.7226117440841368, \"f2\": 0.7736698883363048, \"f0_5\": 0.6778755241305598, \"p4\": 0.8385238948508511, \"phi\": 0.7257980151201269}, {\"truth_threshold\": -10.900000162422657, \"match_probability\": 0.0005230530993675534, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1649.0, \"tn\": 496594.0, \"fp\": 875.0, \"fn\": 382.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8119153126538651, \"tn_rate\": 0.9982410964301293, \"fp_rate\": 0.0017589035698706854, \"fn_rate\": 0.1880846873461349, \"precision\": 0.6533280507131537, \"recall\": 0.8119153126538651, \"specificity\": 0.9982410964301293, \"npv\": 0.9992313512121309, \"accuracy\": 0.9974834834834835, 
\"f1\": 0.72403951701427, \"f2\": 0.7743238166791886, \"f0_5\": 0.6798878535499299, \"p4\": 0.8394875795924727, \"phi\": 0.7271005307230821}, {\"truth_threshold\": -10.800000160932541, \"match_probability\": 0.0005605733873065377, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1649.0, \"tn\": 496612.0, \"fp\": 857.0, \"fn\": 382.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8119153126538651, \"tn_rate\": 0.9982772795892809, \"fp_rate\": 0.00172272041071906, \"fn_rate\": 0.1880846873461349, \"precision\": 0.6580207501995211, \"recall\": 0.8119153126538651, \"specificity\": 0.9982772795892809, \"npv\": 0.9992313790508537, \"accuracy\": 0.9975195195195196, \"f1\": 0.7269120564249504, \"f2\": 0.7756349952963312, \"f0_5\": 0.683948569058482, \"p4\": 0.8414216092003565, \"phi\": 0.7297265284712408}, {\"truth_threshold\": -10.700000159442425, \"match_probability\": 0.0006007835088396779, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1649.0, \"tn\": 496616.0, \"fp\": 853.0, \"fn\": 382.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8119153126538651, \"tn_rate\": 0.9982853202913147, \"fp_rate\": 0.0017146797086853654, \"fn_rate\": 0.1880846873461349, \"precision\": 0.6590727418065547, \"recall\": 0.8119153126538651, \"specificity\": 0.9982853202913147, \"npv\": 0.9992313852369628, \"accuracy\": 0.9975275275275275, \"f1\": 0.727553496580631, \"f2\": 0.7759269715791455, \"f0_5\": 0.6848575463078329, \"p4\": 0.8418526042931959, \"phi\": 0.7303139190793791}, {\"truth_threshold\": -10.600000157952309, \"match_probability\": 0.0006438760580315065, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1649.0, \"tn\": 496623.0, \"fp\": 846.0, \"fn\": 382.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8119153126538651, \"tn_rate\": 0.9982993915198736, \"fp_rate\": 0.0017006084801263997, 
\"fn_rate\": 0.1880846873461349, \"precision\": 0.6609218436873747, \"recall\": 0.8119153126538651, \"specificity\": 0.9982993915198736, \"npv\": 0.9992313960624138, \"accuracy\": 0.9975415415415415, \"f1\": 0.728678745028723, \"f2\": 0.7764384593652887, \"f0_5\": 0.6864540837565565, \"p4\": 0.8426079090367605, \"phi\": 0.7313452412186806}, {\"truth_threshold\": -10.500000156462193, \"match_probability\": 0.0006900573831033208, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1649.0, \"tn\": 496636.0, \"fp\": 833.0, \"fn\": 382.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8119153126538651, \"tn_rate\": 0.9983255238014831, \"fp_rate\": 0.0016744761985168924, \"fn_rate\": 0.1880846873461349, \"precision\": 0.6643835616438356, \"recall\": 0.8119153126538651, \"specificity\": 0.9983255238014831, \"npv\": 0.9992314161660141, \"accuracy\": 0.9975675675675676, \"f1\": 0.7307777531575449, \"f2\": 0.7773901565151801, \"f0_5\": 0.6894389162973493, \"p4\": 0.8440142193773499, \"phi\": 0.7332720877716197}, {\"truth_threshold\": -10.400000154972076, \"match_probability\": 0.0007395485633816526, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1649.0, \"tn\": 496669.0, \"fp\": 800.0, \"fn\": 382.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8119153126538651, \"tn_rate\": 0.998391859593261, \"fp_rate\": 0.0016081404067389124, \"fn_rate\": 0.1880846873461349, \"precision\": 0.6733360555328706, \"recall\": 0.8119153126538651, \"specificity\": 0.998391859593261, \"npv\": 0.9992314671935073, \"accuracy\": 0.9976336336336337, \"f1\": 0.7361607142857143, \"f2\": 0.7798165137614679, \"f0_5\": 0.6971336771793354, \"p4\": 0.8476052551686003, \"phi\": 0.7382318656666521}, {\"truth_threshold\": -10.30000015348196, \"match_probability\": 0.0007925864548491303, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1649.0, \"tn\": 496673.0, 
\"fp\": 796.0, \"fn\": 382.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8119153126538651, \"tn_rate\": 0.9983999002952948, \"fp_rate\": 0.0016000997047052178, \"fn_rate\": 0.1880846873461349, \"precision\": 0.6744376278118609, \"recall\": 0.8119153126538651, \"specificity\": 0.9983999002952948, \"npv\": 0.9992314733781976, \"accuracy\": 0.9976416416416416, \"f1\": 0.7368185880250223, \"f2\": 0.7801116472703189, \"f0_5\": 0.6980780628227923, \"p4\": 0.8480426096573597, \"phi\": 0.7388398481322535}, {\"truth_threshold\": -10.200000151991844, \"match_probability\": 0.0008494248089972806, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1649.0, \"tn\": 496699.0, \"fp\": 770.0, \"fn\": 382.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8119153126538651, \"tn_rate\": 0.9984521648585138, \"fp_rate\": 0.0015478351414862032, \"fn_rate\": 0.1880846873461349, \"precision\": 0.6816866473749483, \"recall\": 0.8119153126538651, \"specificity\": 0.9984521648585138, \"npv\": 0.9992315135762582, \"accuracy\": 0.9976936936936937, \"f1\": 0.741123595505618, \"f2\": 0.7820354737740681, \"f0_5\": 0.7042794909028787, \"p4\": 0.8508964516877607, \"phi\": 0.7428283331176843}, {\"truth_threshold\": -10.100000150501728, \"match_probability\": 0.0009103354699850551, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1649.0, \"tn\": 496744.0, \"fp\": 725.0, \"fn\": 382.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8119153126538651, \"tn_rate\": 0.9985426227563928, \"fp_rate\": 0.0014573772436071394, \"fn_rate\": 0.1880846873461349, \"precision\": 0.6946082561078348, \"recall\": 0.8119153126538651, \"specificity\": 0.9985426227563928, \"npv\": 0.999231583139888, \"accuracy\": 0.9977837837837837, \"f1\": 0.7486946651532349, \"f2\": 0.7853876928938845, \"f0_5\": 0.715277175327492, \"p4\": 0.8558814478534577, \"phi\": 
0.7498853269814624}, {\"truth_threshold\": -10.000000149011612, \"match_probability\": 0.0009756096554280922, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1649.0, \"tn\": 496756.0, \"fp\": 713.0, \"fn\": 382.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8119153126538651, \"tn_rate\": 0.998566744862494, \"fp_rate\": 0.0014332551375060556, \"fn_rate\": 0.1880846873461349, \"precision\": 0.6981371718882303, \"recall\": 0.8119153126538651, \"specificity\": 0.998566744862494, \"npv\": 0.9992316016880625, \"accuracy\": 0.9978078078078078, \"f1\": 0.7507398133394036, \"f2\": 0.7862864772077055, \"f0_5\": 0.7182681418242007, \"p4\": 0.8572206597924804, \"phi\": 0.7518010896878068}, {\"truth_threshold\": -9.900000147521496, \"match_probability\": 0.0010455593264824352, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1648.0, \"tn\": 496801.0, \"fp\": 668.0, \"fn\": 383.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.811422944362383, \"tn_rate\": 0.998657202760373, \"fp_rate\": 0.0013427972396269918, \"fn_rate\": 0.18857705563761692, \"precision\": 0.7115716753022453, \"recall\": 0.811422944362383, \"specificity\": 0.998657202760373, \"npv\": 0.999229661453305, \"accuracy\": 0.9978958958958959, \"f1\": 0.7582240625718887, \"f2\": 0.789272030651341, \"f0_5\": 0.7295263390880921, \"p4\": 0.862095301480666, \"phi\": 0.7588188528588281}, {\"truth_threshold\": -9.80000014603138, \"match_probability\": 0.0011205186532430977, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1648.0, \"tn\": 496805.0, \"fp\": 664.0, \"fn\": 383.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.811422944362383, \"tn_rate\": 0.9986652434624067, \"fp_rate\": 0.0013347565375932972, \"fn_rate\": 0.18857705563761692, \"precision\": 0.71280276816609, \"recall\": 0.811422944362383, \"specificity\": 
0.9986652434624067, \"npv\": 0.9992296676508685, \"accuracy\": 0.9979039039039039, \"f1\": 0.7589224038682938, \"f2\": 0.7895745496358758, \"f0_5\": 0.7305612199663091, \"p4\": 0.8625480141454344, \"phi\": 0.7594794701658701}, {\"truth_threshold\": -9.700000144541264, \"match_probability\": 0.001200845581852835, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1648.0, \"tn\": 496862.0, \"fp\": 607.0, \"fn\": 383.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.811422944362383, \"tn_rate\": 0.9987798234663868, \"fp_rate\": 0.0012201765336131497, \"fn_rate\": 0.18857705563761692, \"precision\": 0.730820399113082, \"recall\": 0.811422944362383, \"specificity\": 0.9987798234663868, \"npv\": 0.9992297559553138, \"accuracy\": 0.998018018018018, \"f1\": 0.7690153989734018, \"f2\": 0.79391078138549, \"f0_5\": 0.7456338792869424, \"p4\": 0.8690512188802841, \"phi\": 0.7690829970102296}, {\"truth_threshold\": -9.600000143051147, \"match_probability\": 0.001286923510110021, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1648.0, \"tn\": 496870.0, \"fp\": 599.0, \"fn\": 383.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.811422944362383, \"tn_rate\": 0.9987959048704542, \"fp_rate\": 0.0012040951295457607, \"fn_rate\": 0.18857705563761692, \"precision\": 0.7334223408989764, \"recall\": 0.811422944362383, \"specificity\": 0.9987959048704542, \"npv\": 0.9992297683473, \"accuracy\": 0.998034034034034, \"f1\": 0.7704534829359514, \"f2\": 0.7945231896634847, \"f0_5\": 0.7477992558308376, \"p4\": 0.8699718046721923, \"phi\": 0.7704599592720761}, {\"truth_threshold\": -9.500000141561031, \"match_probability\": 0.0013791630787767571, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1648.0, \"tn\": 496883.0, \"fp\": 586.0, \"fn\": 383.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.811422944362383, 
\"tn_rate\": 0.9988220371520637, \"fp_rate\": 0.0011779628479362534, \"fn_rate\": 0.18857705563761692, \"precision\": 0.7376902417188899, \"recall\": 0.811422944362383, \"specificity\": 0.9988220371520637, \"npv\": 0.9992297884834274, \"accuracy\": 0.9980600600600601, \"f1\": 0.7728018757327081, \"f2\": 0.7955203707279398, \"f0_5\": 0.7513449439226771, \"p4\": 0.8714719232830772, \"phi\": 0.7727132432208614}, {\"truth_threshold\": -9.400000140070915, \"match_probability\": 0.001478004086219237, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1648.0, \"tn\": 496914.0, \"fp\": 555.0, \"fn\": 383.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.811422944362383, \"tn_rate\": 0.9988843525928249, \"fp_rate\": 0.0011156474071751204, \"fn_rate\": 0.18857705563761692, \"precision\": 0.7480708125283704, \"recall\": 0.811422944362383, \"specificity\": 0.9988843525928249, \"npv\": 0.9992298364960979, \"accuracy\": 0.9981221221221221, \"f1\": 0.7784600850259802, \"f2\": 0.7979083954681901, \"f0_5\": 0.7599372867287651, \"p4\": 0.8750700916095864, \"phi\": 0.7781665431794722}, {\"truth_threshold\": -9.300000138580799, \"match_probability\": 0.0015839175344616876, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1648.0, \"tn\": 496926.0, \"fp\": 543.0, \"fn\": 383.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.811422944362383, \"tn_rate\": 0.998908474698926, \"fp_rate\": 0.0010915253010740367, \"fn_rate\": 0.18857705563761692, \"precision\": 0.7521679598356915, \"recall\": 0.811422944362383, \"specificity\": 0.998908474698926, \"npv\": 0.9992298550800408, \"accuracy\": 0.9981461461461462, \"f1\": 0.7806726669824727, \"f2\": 0.7988366456616578, \"f0_5\": 0.7633163501621121, \"p4\": 0.8764709202608811, \"phi\": 0.7803084375041911}, {\"truth_threshold\": -9.200000137090683, \"match_probability\": 0.0016974078152024628, \"total_clerical_labels\": 499500.0, 
\"p\": 2031.0, \"n\": 497469.0, \"tp\": 1648.0, \"tn\": 496961.0, \"fp\": 508.0, \"fn\": 383.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.811422944362383, \"tn_rate\": 0.9989788308417208, \"fp_rate\": 0.0010211691582792093, \"fn_rate\": 0.18857705563761692, \"precision\": 0.764378478664193, \"recall\": 0.811422944362383, \"specificity\": 0.9989788308417208, \"npv\": 0.9992299092780852, \"accuracy\": 0.9982162162162163, \"f1\": 0.7871984714592787, \"f2\": 0.8015564202334631, \"f0_5\": 0.7733458470201783, \"p4\": 0.8805824065065767, \"phi\": 0.7866572272274414}, {\"truth_threshold\": -9.100000135600567, \"match_probability\": 0.0018190150448253225, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1648.0, \"tn\": 496968.0, \"fp\": 501.0, \"fn\": 383.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.811422944362383, \"tn_rate\": 0.9989929020702798, \"fp_rate\": 0.001007097929720244, \"fn_rate\": 0.18857705563761692, \"precision\": 0.7668683108422523, \"recall\": 0.811422944362383, \"specificity\": 0.9989929020702798, \"npv\": 0.9992299201167787, \"accuracy\": 0.9982302302302303, \"f1\": 0.7885167464114833, \"f2\": 0.802102599046043, \"f0_5\": 0.77538345723158, \"p4\": 0.8814093367028363, \"phi\": 0.7879455224146075}, {\"truth_threshold\": -9.00000013411045, \"match_probability\": 0.0019493175579394322, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1648.0, \"tn\": 496969.0, \"fp\": 500.0, \"fn\": 383.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.811422944362383, \"tn_rate\": 0.9989949122457882, \"fp_rate\": 0.0010050877542118202, \"fn_rate\": 0.18857705563761692, \"precision\": 0.7672253258845437, \"recall\": 0.811422944362383, \"specificity\": 0.9989949122457882, \"npv\": 0.9992299216651386, \"accuracy\": 0.9982322322322322, \"f1\": 0.7887054319215123, \"f2\": 0.8021806853582555, \"f0_5\": 
0.7756754212557658, \"p4\": 0.8815275963818923, \"phi\": 0.7881300774017567}, {\"truth_threshold\": -8.900000132620335, \"match_probability\": 0.002088934569496736, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1648.0, \"tn\": 496987.0, \"fp\": 482.0, \"fn\": 383.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.811422944362383, \"tn_rate\": 0.9990310954049398, \"fp_rate\": 0.0009689045950601947, \"fn_rate\": 0.18857705563761692, \"precision\": 0.7737089201877935, \"recall\": 0.811422944362383, \"specificity\": 0.9990310954049398, \"npv\": 0.9992299495345517, \"accuracy\": 0.9982682682682683, \"f1\": 0.7921172795001201, \"f2\": 0.8035888433781939, \"f0_5\": 0.7809686285660127, \"p4\": 0.8836617099968711, \"phi\": 0.7914742127572213}, {\"truth_threshold\": -8.800000131130219, \"match_probability\": 0.0022385290160630528, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1647.0, \"tn\": 497006.0, \"fp\": 463.0, \"fn\": 384.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.810930576070901, \"tn_rate\": 0.9990692887395999, \"fp_rate\": 0.0009307112604001455, \"fn_rate\": 0.18906942392909898, \"precision\": 0.7805687203791469, \"recall\": 0.810930576070901, \"specificity\": 0.9990692887395999, \"npv\": 0.9992279700034178, \"accuracy\": 0.9983043043043043, \"f1\": 0.7954600338082589, \"f2\": 0.8046707054914989, \"f0_5\": 0.7864578359278006, \"p4\": 0.8857449722433604, \"phi\": 0.7947548136309421}, {\"truth_threshold\": -8.700000129640102, \"match_probability\": 0.002398810587356977, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1647.0, \"tn\": 497020.0, \"fp\": 449.0, \"fn\": 384.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.810930576070901, \"tn_rate\": 0.9990974311967178, \"fp_rate\": 0.0009025688032822146, \"fn_rate\": 0.18906942392909898, \"precision\": 0.7857824427480916, 
\"recall\": 0.810930576070901, \"specificity\": 0.9990974311967178, \"npv\": 0.9992279917330782, \"accuracy\": 0.9983323323323323, \"f1\": 0.7981584686212745, \"f2\": 0.8057729941291585, \"f0_5\": 0.7906865098415746, \"p4\": 0.887420887274379, \"phi\": 0.7974211170561447}, {\"truth_threshold\": -8.600000128149986, \"match_probability\": 0.0025705389597152823, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1647.0, \"tn\": 497041.0, \"fp\": 428.0, \"fn\": 384.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.810930576070901, \"tn_rate\": 0.9991396448823947, \"fp_rate\": 0.0008603551176053181, \"fn_rate\": 0.18906942392909898, \"precision\": 0.7937349397590362, \"recall\": 0.810930576070901, \"specificity\": 0.9991396448823947, \"npv\": 0.9992280243252751, \"accuracy\": 0.9983743743743744, \"f1\": 0.8022406234778373, \"f2\": 0.8074321011863909, \"f0_5\": 0.7971154776885103, \"p4\": 0.889946684299984, \"phi\": 0.8014709498937302}, {\"truth_threshold\": -8.50000012665987, \"match_probability\": 0.0027545272436909716, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1647.0, \"tn\": 497062.0, \"fp\": 407.0, \"fn\": 384.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.810930576070901, \"tn_rate\": 0.9991818585680716, \"fp_rate\": 0.0008181414319284216, \"fn_rate\": 0.18906942392909898, \"precision\": 0.8018500486854917, \"recall\": 0.810930576070901, \"specificity\": 0.9991818585680716, \"npv\": 0.9992280569147204, \"accuracy\": 0.9984164164164164, \"f1\": 0.8063647490820074, \"f2\": 0.8090980546276282, \"f0_5\": 0.8036498487362155, \"p4\": 0.8924868996122787, \"phi\": 0.8055826056114691}, {\"truth_threshold\": -8.400000125169754, \"match_probability\": 0.0029516456585356845, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1647.0, \"tn\": 497067.0, \"fp\": 402.0, \"fn\": 384.0, \"P_rate\": 0.004066066066066066, 
\"N_rate\": 0.9959339339339339, \"tp_rate\": 0.810930576070901, \"tn_rate\": 0.9991919094456136, \"fp_rate\": 0.0008080905543863035, \"fn_rate\": 0.18906942392909898, \"precision\": 0.8038067349926794, \"recall\": 0.810930576070901, \"specificity\": 0.9991919094456136, \"npv\": 0.9992280646737065, \"accuracy\": 0.9984264264264264, \"f1\": 0.8073529411764706, \"f2\": 0.8094957239752285, \"f0_5\": 0.8052214725726019, \"p4\": 0.8930938515107973, \"phi\": 0.8065708573101243}, {\"truth_threshold\": -8.300000123679638, \"match_probability\": 0.0031628254468557835, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1646.0, \"tn\": 497085.0, \"fp\": 384.0, \"fn\": 385.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.810438207779419, \"tn_rate\": 0.9992280926047653, \"fp_rate\": 0.000771907395234678, \"fn_rate\": 0.189561792220581, \"precision\": 0.8108374384236453, \"recall\": 0.810438207779419, \"specificity\": 0.9992280926047653, \"npv\": 0.9992260839849639, \"accuracy\": 0.9984604604604604, \"f1\": 0.8106377739473036, \"f2\": 0.8105180224542052, \"f0_5\": 0.8107575608314451, \"p4\": 0.8951068551265626, \"phi\": 0.8098648870427458}, {\"truth_threshold\": -8.200000122189522, \"match_probability\": 0.0033890630432542824, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1646.0, \"tn\": 497106.0, \"fp\": 363.0, \"fn\": 385.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.810438207779419, \"tn_rate\": 0.9992703062904422, \"fp_rate\": 0.0007296937095577815, \"fn_rate\": 0.189561792220581, \"precision\": 0.8193130910900945, \"recall\": 0.810438207779419, \"specificity\": 0.9992703062904422, \"npv\": 0.9992261166533666, \"accuracy\": 0.9985025025025025, \"f1\": 0.8148514851485148, \"f2\": 0.8121977696634758, \"f0_5\": 0.8175225985894506, \"p4\": 0.8976782152468817, \"phi\": 0.814111887648055}, {\"truth_threshold\": -8.100000120699406, \"match_probability\": 
0.003631424511270156, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1646.0, \"tn\": 497108.0, \"fp\": 361.0, \"fn\": 385.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.810438207779419, \"tn_rate\": 0.9992743266414591, \"fp_rate\": 0.0007256733585409342, \"fn_rate\": 0.189561792220581, \"precision\": 0.8201295465869457, \"recall\": 0.810438207779419, \"specificity\": 0.9992743266414591, \"npv\": 0.9992261197644992, \"accuracy\": 0.9985065065065065, \"f1\": 0.8152550767706785, \"f2\": 0.8123581087750469, \"f0_5\": 0.8181727805944925, \"p4\": 0.8979238773557973, \"phi\": 0.8145198298344466}, {\"truth_threshold\": -8.00000011920929, \"match_probability\": 0.0038910502633927486, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1642.0, \"tn\": 497108.0, \"fp\": 361.0, \"fn\": 389.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8084687346134909, \"tn_rate\": 0.9992743266414591, \"fp_rate\": 0.0007256733585409342, \"fn_rate\": 0.1915312653865091, \"precision\": 0.8197703444832751, \"recall\": 0.8084687346134909, \"specificity\": 0.9992743266414591, \"npv\": 0.9992180857372004, \"accuracy\": 0.9984984984984985, \"f1\": 0.8140803173029252, \"f2\": 0.8107040584575886, \"f0_5\": 0.8174848152942348, \"p4\": 0.8972092535683982, \"phi\": 0.8133463110869028}, {\"truth_threshold\": -7.900000117719173, \"match_probability\": 0.004169160079349993, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1642.0, \"tn\": 497116.0, \"fp\": 353.0, \"fn\": 389.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8084687346134909, \"tn_rate\": 0.9992904080455265, \"fp_rate\": 0.0007095919544735451, \"fn_rate\": 0.1915312653865091, \"precision\": 0.8230576441102757, \"recall\": 0.8084687346134909, \"specificity\": 0.9992904080455265, \"npv\": 0.9992180983105697, \"accuracy\": 0.9985145145145146, \"f1\": 
0.8156979632389468, \"f2\": 0.8113449945646803, \"f0_5\": 0.8200978923184497, \"p4\": 0.8981940694887678, \"phi\": 0.8149851217827041}, {\"truth_threshold\": -7.800000116229057, \"match_probability\": 0.004467058438231288, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1642.0, \"tn\": 497124.0, \"fp\": 345.0, \"fn\": 389.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8084687346134909, \"tn_rate\": 0.9993064894495939, \"fp_rate\": 0.0006935105504061559, \"fn_rate\": 0.1915312653865091, \"precision\": 0.8263714141922496, \"recall\": 0.8084687346134909, \"specificity\": 0.9993064894495939, \"npv\": 0.9992181108835347, \"accuracy\": 0.9985305305305305, \"f1\": 0.8173220507715281, \"f2\": 0.8119869449114825, \"f0_5\": 0.8227277282292815, \"p4\": 0.8991810496335679, \"phi\": 0.8166338001835506}, {\"truth_threshold\": -7.700000114738941, \"match_probability\": 0.004786140180292905, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1642.0, \"tn\": 497132.0, \"fp\": 337.0, \"fn\": 389.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8084687346134909, \"tn_rate\": 0.9993225708536613, \"fp_rate\": 0.0006774291463387669, \"fn_rate\": 0.1915312653865091, \"precision\": 0.829711975745326, \"recall\": 0.8084687346134909, \"specificity\": 0.9993225708536613, \"npv\": 0.9992181234560953, \"accuracy\": 0.9985465465465465, \"f1\": 0.8189526184538654, \"f2\": 0.8126299119073542, \"f0_5\": 0.8253744847692772, \"p4\": 0.9001702011447774, \"phi\": 0.8182924458742925}, {\"truth_threshold\": -7.600000113248825, \"match_probability\": 0.0051278965144870335, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1642.0, \"tn\": 497140.0, \"fp\": 329.0, \"fn\": 389.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8084687346134909, \"tn_rate\": 0.9993386522577287, \"fp_rate\": 0.0006613477422713777, 
\"fn_rate\": 0.1915312653865091, \"precision\": 0.8330796549974632, \"recall\": 0.8084687346134909, \"specificity\": 0.9993386522577287, \"npv\": 0.9992181360282516, \"accuracy\": 0.9985625625625626, \"f1\": 0.8205897051474262, \"f2\": 0.8132738979692917, \"f0_5\": 0.8280383257690368, \"p4\": 0.9011615311958345, \"phi\": 0.8199611598528135}, {\"truth_threshold\": -7.500000111758709, \"match_probability\": 0.005493921387833209, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1641.0, \"tn\": 497164.0, \"fp\": 305.0, \"fn\": 390.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8079763663220089, \"tn_rate\": 0.9993868964699308, \"fp_rate\": 0.0006131035300692104, \"fn_rate\": 0.19202363367799113, \"precision\": 0.843268242548818, \"recall\": 0.8079763663220089, \"specificity\": 0.9993868964699308, \"npv\": 0.9992161654815357, \"accuracy\": 0.9986086086086086, \"f1\": 0.8252451596680915, \"f2\": 0.8147964250248262, \"f0_5\": 0.8359653591441671, \"p4\": 0.9039711101870394, \"phi\": 0.8247369038176491}, {\"truth_threshold\": -7.400000110268593, \"match_probability\": 0.005885918232687788, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1640.0, \"tn\": 497177.0, \"fp\": 292.0, \"fn\": 391.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8074839980305268, \"tn_rate\": 0.9994130287515403, \"fp_rate\": 0.000586971248459703, \"fn_rate\": 0.19251600196947316, \"precision\": 0.8488612836438924, \"recall\": 0.8074839980305268, \"specificity\": 0.9994130287515403, \"npv\": 0.9992141777606277, \"accuracy\": 0.9986326326326326, \"f1\": 0.8276558163007822, \"f2\": 0.8154335719968179, \"f0_5\": 0.8402500256173788, \"p4\": 0.9054204230029831, \"phi\": 0.827230057658773}, {\"truth_threshold\": -7.300000108778477, \"match_probability\": 0.006305707107734554, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1640.0, \"tn\": 497178.0, 
\"fp\": 291.0, \"fn\": 391.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8074839980305268, \"tn_rate\": 0.9994150389270487, \"fp_rate\": 0.0005849610729512794, \"fn_rate\": 0.19251600196947316, \"precision\": 0.8493008803728638, \"recall\": 0.8074839980305268, \"specificity\": 0.9994150389270487, \"npv\": 0.9992141793399508, \"accuracy\": 0.9986346346346346, \"f1\": 0.8278647147905098, \"f2\": 0.8155146693187469, \"f0_5\": 0.840594566888775, \"p4\": 0.9055458205533815, \"phi\": 0.8274454571307222}, {\"truth_threshold\": -7.200000107288361, \"match_probability\": 0.006755232248084272, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1637.0, \"tn\": 497178.0, \"fp\": 291.0, \"fn\": 394.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8060068931560808, \"tn_rate\": 0.9994150389270487, \"fp_rate\": 0.0005849610729512794, \"fn_rate\": 0.19399310684391924, \"precision\": 0.8490663900414938, \"recall\": 0.8060068931560808, \"specificity\": 0.9994150389270487, \"npv\": 0.9992081547997074, \"accuracy\": 0.9986286286286287, \"f1\": 0.8269765092194998, \"f2\": 0.8142658177477119, \"f0_5\": 0.8400903212562866, \"p4\": 0.9050129705542206, \"phi\": 0.8265705794685275}, {\"truth_threshold\": -7.1000001057982445, \"match_probability\": 0.007236570039195372, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1637.0, \"tn\": 497180.0, \"fp\": 289.0, \"fn\": 394.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8060068931560808, \"tn_rate\": 0.9994190592780655, \"fp_rate\": 0.0005809407219344321, \"fn_rate\": 0.19399310684391924, \"precision\": 0.8499480789200415, \"recall\": 0.8060068931560808, \"specificity\": 0.9994190592780655, \"npv\": 0.9992081579825313, \"accuracy\": 0.9986326326326326, \"f1\": 0.8273944907758403, \"f2\": 0.8144278606965174, \"f0_5\": 0.8407806882383153, \"p4\": 0.9052640325366411, \"phi\": 
0.8270020984693148}, {\"truth_threshold\": -7.000000104308128, \"match_probability\": 0.00775193742836891, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1636.0, \"tn\": 497181.0, \"fp\": 288.0, \"fn\": 395.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8055145248645987, \"tn_rate\": 0.999421069453574, \"fp_rate\": 0.0005789305464260085, \"fn_rate\": 0.1944854751354013, \"precision\": 0.8503118503118503, \"recall\": 0.8055145248645987, \"specificity\": 0.999421069453574, \"npv\": 0.9992061514220943, \"accuracy\": 0.9986326326326326, \"f1\": 0.827307206068268, \"f2\": 0.8140923566878981, \"f0_5\": 0.8409581577053562, \"p4\": 0.9052117869639349, \"phi\": 0.8269264039862914}, {\"truth_threshold\": -6.900000102818012, \"match_probability\": 0.008303700786279804, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1635.0, \"tn\": 497183.0, \"fp\": 286.0, \"fn\": 396.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8050221565731167, \"tn_rate\": 0.9994250898045909, \"fp_rate\": 0.0005749101954091612, \"fn_rate\": 0.19497784342688332, \"precision\": 0.8511192087454451, \"recall\": 0.8050221565731167, \"specificity\": 0.9994250898045909, \"npv\": 0.9992041464772428, \"accuracy\": 0.9986346346346346, \"f1\": 0.8274291497975709, \"f2\": 0.8138377302140368, \"f0_5\": 0.8414822439526506, \"p4\": 0.905285190845045, \"phi\": 0.8270672775775526}, {\"truth_threshold\": -6.800000101327896, \"match_probability\": 0.00889438522932807, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1634.0, \"tn\": 497184.0, \"fp\": 285.0, \"fn\": 397.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8045297882816347, \"tn_rate\": 0.9994270999800993, \"fp_rate\": 0.0005729000199007375, \"fn_rate\": 0.19547021171836534, \"precision\": 0.8514851485148515, \"recall\": 0.8045297882816347, \"specificity\": 
0.9994270999800993, \"npv\": 0.999202139953093, \"accuracy\": 0.9986346346346346, \"f1\": 0.8273417721518987, \"f2\": 0.8135019416509012, \"f0_5\": 0.8416606572576492, \"p4\": 0.9052328915756458, \"phi\": 0.8269920829853818}, {\"truth_threshold\": -6.600000098347664, \"match_probability\": 0.010203470791514735, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1633.0, \"tn\": 497195.0, \"fp\": 274.0, \"fn\": 398.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8040374199901527, \"tn_rate\": 0.9994492119106919, \"fp_rate\": 0.0005507880893080775, \"fn_rate\": 0.19596258000984737, \"precision\": 0.8563188253801783, \"recall\": 0.8040374199901527, \"specificity\": 0.9994492119106919, \"npv\": 0.9992001495197883, \"accuracy\": 0.9986546546546546, \"f1\": 0.8293550025393601, \"f2\": 0.8139766723158209, \"f0_5\": 0.8453256030644994, \"p4\": 0.906440777862351, \"phi\": 0.8290947229778014}, {\"truth_threshold\": -6.500000096857548, \"match_probability\": 0.010927806378730125, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1633.0, \"tn\": 497207.0, \"fp\": 262.0, \"fn\": 398.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8040374199901527, \"tn_rate\": 0.9994733340167931, \"fp_rate\": 0.0005266659832069938, \"fn_rate\": 0.19596258000984737, \"precision\": 0.8617414248021108, \"recall\": 0.8040374199901527, \"specificity\": 0.9994733340167931, \"npv\": 0.9992001688085932, \"accuracy\": 0.9986786786786787, \"f1\": 0.8318899643402955, \"f2\": 0.814951591975247, \"f0_5\": 0.8495473936114868, \"p4\": 0.9079577097588084, \"phi\": 0.8317306228810372}, {\"truth_threshold\": -6.400000095367432, \"match_probability\": 0.011702953955477532, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1630.0, \"tn\": 497222.0, \"fp\": 247.0, \"fn\": 401.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 
0.8025603151157066, \"tn_rate\": 0.9995034866494193, \"fp_rate\": 0.0004965133505806392, \"fn_rate\": 0.19743968488429345, \"precision\": 0.8684070324986681, \"recall\": 0.8025603151157066, \"specificity\": 0.9995034866494193, \"npv\": 0.9991941690798054, \"accuracy\": 0.9987027027027027, \"f1\": 0.834186284544524, \"f2\": 0.8149185081491851, \"f0_5\": 0.8543872523325297, \"p4\": 0.9093287289176903, \"phi\": 0.834189138117982}, {\"truth_threshold\": -6.3000000938773155, \"match_probability\": 0.012532388771145032, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1630.0, \"tn\": 497229.0, \"fp\": 240.0, \"fn\": 401.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8025603151157066, \"tn_rate\": 0.9995175578779784, \"fp_rate\": 0.0004824421220216737, \"fn_rate\": 0.19743968488429345, \"precision\": 0.8716577540106952, \"recall\": 0.8025603151157066, \"specificity\": 0.9995175578779784, \"npv\": 0.9991941804151679, \"accuracy\": 0.9987167167167167, \"f1\": 0.8356831581645732, \"f2\": 0.8154892935761456, \"f0_5\": 0.8569025339081064, \"p4\": 0.9102202694038696, \"phi\": 0.8357577802549805}, {\"truth_threshold\": -6.200000092387199, \"match_probability\": 0.013419810695865477, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1629.0, \"tn\": 497238.0, \"fp\": 231.0, \"fn\": 402.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8020679468242246, \"tn_rate\": 0.9995356494575541, \"fp_rate\": 0.00046435054244586095, \"fn_rate\": 0.19793205317577547, \"precision\": 0.8758064516129033, \"recall\": 0.8020679468242246, \"specificity\": 0.9995356494575541, \"npv\": 0.9991921871232216, \"accuracy\": 0.9987327327327328, \"f1\": 0.8373168851195065, \"f2\": 0.8158052884615384, \"f0_5\": 0.8599936648717137, \"p4\": 0.9111918307693678, \"phi\": 0.837497495848518}, {\"truth_threshold\": -6.100000090897083, \"match_probability\": 0.014369156816028038, 
\"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1627.0, \"tn\": 497246.0, \"fp\": 223.0, \"fn\": 404.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8010832102412605, \"tn_rate\": 0.9995517308616215, \"fp_rate\": 0.0004482691383784718, \"fn_rate\": 0.19891678975873953, \"precision\": 0.8794594594594595, \"recall\": 0.8010832102412605, \"specificity\": 0.9995517308616215, \"npv\": 0.9991881844669949, \"accuracy\": 0.9987447447447447, \"f1\": 0.8384437000772996, \"f2\": 0.8156206135953479, \"f0_5\": 0.8625808503870215, \"p4\": 0.9118611442174754, \"phi\": 0.8387347641806931}, {\"truth_threshold\": -6.000000089406967, \"match_probability\": 0.015384614445865122, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1627.0, \"tn\": 497254.0, \"fp\": 215.0, \"fn\": 404.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8010832102412605, \"tn_rate\": 0.9995678122656889, \"fp_rate\": 0.0004321877343110827, \"fn_rate\": 0.19891678975873953, \"precision\": 0.8832790445168295, \"recall\": 0.8010832102412605, \"specificity\": 0.9995678122656889, \"npv\": 0.9991881975171705, \"accuracy\": 0.9987607607607608, \"f1\": 0.8401755744900594, \"f2\": 0.8162753361428858, \"f0_5\": 0.8655176082561975, \"p4\": 0.9128877625205759, \"phi\": 0.8405642745581147}, {\"truth_threshold\": -5.900000087916851, \"match_probability\": 0.016470634520449206, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1624.0, \"tn\": 497265.0, \"fp\": 204.0, \"fn\": 407.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7996061053668144, \"tn_rate\": 0.9995899241962816, \"fp_rate\": 0.00041007580371842263, \"fn_rate\": 0.20039389463318563, \"precision\": 0.888402625820569, \"recall\": 0.7996061053668144, \"specificity\": 0.9995899241962816, \"npv\": 0.9991821922872897, \"accuracy\": 0.9987767767767768, \"f1\": 0.8416688261207567, 
\"f2\": 0.8159163987138264, \"f0_5\": 0.8690998608583966, \"p4\": 0.9137718615826569, \"phi\": 0.8422317154633179}, {\"truth_threshold\": -5.800000086426735, \"match_probability\": 0.017631945325087592, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1620.0, \"tn\": 497279.0, \"fp\": 190.0, \"fn\": 411.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7976366322008862, \"tn_rate\": 0.9996180666533995, \"fp_rate\": 0.0003819333466004917, \"fn_rate\": 0.20236336779911374, \"precision\": 0.8950276243093923, \"recall\": 0.7976366322008862, \"specificity\": 0.9996180666533995, \"npv\": 0.9991741847334686, \"accuracy\": 0.9987967967967968, \"f1\": 0.8435303306430617, \"f2\": 0.8153815180189249, \"f0_5\": 0.8736921583432208, \"p4\": 0.9148720204448654, \"phi\": 0.8443379619461768}, {\"truth_threshold\": -5.700000084936619, \"match_probability\": 0.01887356650421064, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1620.0, \"tn\": 497295.0, \"fp\": 174.0, \"fn\": 411.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7976366322008862, \"tn_rate\": 0.9996502294615343, \"fp_rate\": 0.00034977053846571343, \"fn_rate\": 0.20236336779911374, \"precision\": 0.903010033444816, \"recall\": 0.7976366322008862, \"specificity\": 0.9996502294615343, \"npv\": 0.9991742112813589, \"accuracy\": 0.9988288288288288, \"f1\": 0.8470588235294118, \"f2\": 0.8166969147005445, \"f0_5\": 0.8797653958944281, \"p4\": 0.9169501145219608, \"phi\": 0.8481153489467179}, {\"truth_threshold\": -5.600000083446503, \"match_probability\": 0.02020082327925431, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1620.0, \"tn\": 497300.0, \"fp\": 169.0, \"fn\": 411.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7976366322008862, \"tn_rate\": 0.9996602803390764, \"fp_rate\": 0.00033971966092359525, \"fn_rate\": 
0.20236336779911374, \"precision\": 0.9055338177752935, \"recall\": 0.7976366322008862, \"specificity\": 0.9996602803390764, \"npv\": 0.9991742195772245, \"accuracy\": 0.9988388388388388, \"f1\": 0.8481675392670157, \"f2\": 0.8171088469686271, \"f0_5\": 0.8816806356808534, \"p4\": 0.9176014562694327, \"phi\": 0.8493061432317865}, {\"truth_threshold\": -5.500000081956387, \"match_probability\": 0.02161936078957948, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1619.0, \"tn\": 497303.0, \"fp\": 166.0, \"fn\": 412.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7971442639094042, \"tn_rate\": 0.9996663108656016, \"fp_rate\": 0.0003336891343983243, \"fn_rate\": 0.20285573609059576, \"precision\": 0.9070028011204482, \"recall\": 0.7971442639094042, \"specificity\": 0.9996663108656016, \"npv\": 0.9991722170318355, \"accuracy\": 0.9988428428428429, \"f1\": 0.8485324947589099, \"f2\": 0.8169341003128469, \"f0_5\": 0.8826736451859121, \"p4\": 0.9178158396820322, \"phi\": 0.8497351570110964}, {\"truth_threshold\": -5.4000000804662704, \"match_probability\": 0.023135158452986655, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1618.0, \"tn\": 497315.0, \"fp\": 154.0, \"fn\": 413.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7966518956179222, \"tn_rate\": 0.9996904329717028, \"fp_rate\": 0.00030956702829724065, \"fn_rate\": 0.2033481043820778, \"precision\": 0.9130925507900677, \"recall\": 0.7966518956179222, \"specificity\": 0.9996904329717028, \"npv\": 0.9991702295229523, \"accuracy\": 0.9988648648648648, \"f1\": 0.8509071785432554, \"f2\": 0.8175020210185934, \"f0_5\": 0.8871586796797895, \"p4\": 0.9192078875605487, \"phi\": 0.8523341285904934}, {\"truth_threshold\": -5.300000078976154, \"match_probability\": 0.024754544222716376, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1618.0, \"tn\": 497330.0, \"fp\": 
139.0, \"fn\": 413.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7966518956179222, \"tn_rate\": 0.9997205856043291, \"fp_rate\": 0.00027941439567088605, \"fn_rate\": 0.2033481043820778, \"precision\": 0.9208878770631759, \"recall\": 0.7966518956179222, \"specificity\": 0.9997205856043291, \"npv\": 0.9991702545289437, \"accuracy\": 0.9988948948948949, \"f1\": 0.8542766631467793, \"f2\": 0.8187430422022063, \"f0_5\": 0.8930345512749751, \"p4\": 0.9211767787183339, \"phi\": 0.8559842154647039}, {\"truth_threshold\": -5.200000077486038, \"match_probability\": 0.02648420859582165, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1615.0, \"tn\": 497341.0, \"fp\": 128.0, \"fn\": 416.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7951747907434761, \"tn_rate\": 0.9997426975349217, \"fp_rate\": 0.000257302465078226, \"fn_rate\": 0.2048252092565239, \"precision\": 0.9265633964429145, \"recall\": 0.7951747907434761, \"specificity\": 0.9997426975349217, \"npv\": 0.9991642508292199, \"accuracy\": 0.9989109109109109, \"f1\": 0.8558558558558559, \"f2\": 0.818384514036688, \"f0_5\": 0.8969232478062867, \"p4\": 0.922097530197822, \"phi\": 0.8578327903498651}, {\"truth_threshold\": -5.100000075995922, \"match_probability\": 0.02833121820332325, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1613.0, \"tn\": 497346.0, \"fp\": 123.0, \"fn\": 418.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.794190054160512, \"tn_rate\": 0.9997527484124639, \"fp_rate\": 0.0002472515875361078, \"fn_rate\": 0.20580994583948795, \"precision\": 0.929147465437788, \"recall\": 0.794190054160512, \"specificity\": 0.9997527484124639, \"npv\": 0.9991602446139134, \"accuracy\": 0.9989169169169169, \"f1\": 0.8563843907618794, \"f2\": 0.8179513184584178, \"f0_5\": 0.8986072423398329, \"p4\": 0.9224054864036791, \"phi\": 0.8585005678323977}, 
{\"truth_threshold\": -5.000000074505806, \"match_probability\": 0.030303028785498974, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1611.0, \"tn\": 497352.0, \"fp\": 117.0, \"fn\": 420.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.793205317577548, \"tn_rate\": 0.9997648094655144, \"fp_rate\": 0.00023519053448556594, \"fn_rate\": 0.206794682422452, \"precision\": 0.9322916666666666, \"recall\": 0.793205317577548, \"specificity\": 0.9997648094655144, \"npv\": 0.9991562402063595, \"accuracy\": 0.9989249249249249, \"f1\": 0.8571428571428571, \"f2\": 0.8176004872107187, \"f0_5\": 0.9007044615900704, \"p4\": 0.9228469812300154, \"phi\": 0.8594244382417212}, {\"truth_threshold\": -4.90000007301569, \"match_probability\": 0.032407497325934585, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1611.0, \"tn\": 497356.0, \"fp\": 113.0, \"fn\": 420.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.793205317577548, \"tn_rate\": 0.9997728501675481, \"fp_rate\": 0.00022714983245187138, \"fn_rate\": 0.206794682422452, \"precision\": 0.9344547563805105, \"recall\": 0.793205317577548, \"specificity\": 0.9997728501675481, \"npv\": 0.9991562469865963, \"accuracy\": 0.998932932932933, \"f1\": 0.8580559254327563, \"f2\": 0.8179325751421609, \"f0_5\": 0.9023188081102274, \"p4\": 0.9233776450607978, \"phi\": 0.8604261402565063}, {\"truth_threshold\": -4.800000071525574, \"match_probability\": 0.03465289308554322, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1606.0, \"tn\": 497375.0, \"fp\": 94.0, \"fn\": 425.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7907434761201378, \"tn_rate\": 0.9998110435022082, \"fp_rate\": 0.0001889564977918222, \"fn_rate\": 0.20925652387986213, \"precision\": 0.9447058823529412, \"recall\": 0.7907434761201378, \"specificity\": 0.9998110435022082, \"npv\": 
0.9991462434712736, \"accuracy\": 0.998960960960961, \"f1\": 0.8608952023586169, \"f2\": 0.817385993485342, \"f0_5\": 0.9092967953799117, \"p4\": 0.9250251791262751, \"phi\": 0.8638093330854282}, {\"truth_threshold\": -4.700000070035458, \"match_probability\": 0.037047907242669466, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1602.0, \"tn\": 497376.0, \"fp\": 93.0, \"fn\": 429.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7887740029542097, \"tn_rate\": 0.9998130536777166, \"fp_rate\": 0.00018694632228339857, \"fn_rate\": 0.21122599704579026, \"precision\": 0.9451327433628318, \"recall\": 0.7887740029542097, \"specificity\": 0.9998130536777166, \"npv\": 0.9991382167716274, \"accuracy\": 0.998954954954955, \"f1\": 0.8599033816425121, \"f2\": 0.8157653528872594, \"f0_5\": 0.9090909090909091, \"p4\": 0.924451040377126, \"phi\": 0.8629251746909673}, {\"truth_threshold\": -4.6000000685453415, \"match_probability\": 0.039601660807737325, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1601.0, \"tn\": 497383.0, \"fp\": 86.0, \"fn\": 430.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7882816346627277, \"tn_rate\": 0.9998271249062756, \"fp_rate\": 0.00017287509372443307, \"fn_rate\": 0.21171836533727229, \"precision\": 0.949021932424422, \"recall\": 0.7882816346627277, \"specificity\": 0.9998271249062756, \"npv\": 0.9991362218343033, \"accuracy\": 0.998966966966967, \"f1\": 0.8612157073695536, \"f2\": 0.815920905106513, \"f0_5\": 0.9118350609408816, \"p4\": 0.9252114585110551, \"phi\": 0.8644372428511393}, {\"truth_threshold\": -4.500000067055225, \"match_probability\": 0.04232371044088178, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1595.0, \"tn\": 497383.0, \"fp\": 86.0, \"fn\": 436.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7853274249138356, \"tn_rate\": 
0.9998271249062756, \"fp_rate\": 0.00017287509372443307, \"fn_rate\": 0.21467257508616444, \"precision\": 0.9488399762046401, \"recall\": 0.7853274249138356, \"specificity\": 0.9998271249062756, \"npv\": 0.9991241796717281, \"accuracy\": 0.998954954954955, \"f1\": 0.859375, \"f2\": 0.8133605303416624, \"f0_5\": 0.910908052541405, \"p4\": 0.9241456149502866, \"phi\": 0.8627272860253764}, {\"truth_threshold\": -4.400000065565109, \"match_probability\": 0.04522405175894309, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1595.0, \"tn\": 497387.0, \"fp\": 82.0, \"fn\": 436.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7853274249138356, \"tn_rate\": 0.9998351656083092, \"fp_rate\": 0.0001648343916907385, \"fn_rate\": 0.21467257508616444, \"precision\": 0.951103160405486, \"recall\": 0.7853274249138356, \"specificity\": 0.9998351656083092, \"npv\": 0.9991241867089307, \"accuracy\": 0.9989629629629629, \"f1\": 0.860302049622438, \"f2\": 0.813692480359147, \"f0_5\": 0.9125758095891978, \"p4\": 0.92468309771844, \"phi\": 0.8637609423013644}, {\"truth_threshold\": -4.300000064074993, \"match_probability\": 0.048313119674570026, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1592.0, \"tn\": 497391.0, \"fp\": 78.0, \"fn\": 439.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7838503200393895, \"tn_rate\": 0.999843206310343, \"fp_rate\": 0.00015679368965704394, \"fn_rate\": 0.21614967996061055, \"precision\": 0.9532934131736527, \"recall\": 0.7838503200393895, \"specificity\": 0.999843206310343, \"npv\": 0.999118172870257, \"accuracy\": 0.998964964964965, \"f1\": 0.8603080248581464, \"f2\": 0.8127424954053503, \"f0_5\": 0.9137871656526231, \"p4\": 0.9246869807716529, \"phi\": 0.8639437449296644}, {\"truth_threshold\": -4.200000062584877, \"match_probability\": 0.05160178526561565, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, 
\"tp\": 1591.0, \"tn\": 497393.0, \"fp\": 76.0, \"fn\": 440.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7833579517479075, \"tn_rate\": 0.9998472266613598, \"fp_rate\": 0.00015277333864019668, \"fn_rate\": 0.21664204825209257, \"precision\": 0.9544091181763648, \"recall\": 0.7833579517479075, \"specificity\": 0.9998472266613598, \"npv\": 0.9991161694785199, \"accuracy\": 0.998966966966967, \"f1\": 0.8604651162790697, \"f2\": 0.8124808497599837, \"f0_5\": 0.9144729279227497, \"p4\": 0.9247781449434443, \"phi\": 0.8641793465684607}, {\"truth_threshold\": -4.100000061094761, \"match_probability\": 0.0551013486283602, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1587.0, \"tn\": 497394.0, \"fp\": 75.0, \"fn\": 444.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7813884785819794, \"tn_rate\": 0.9998492368368682, \"fp_rate\": 0.00015076316313177303, \"fn_rate\": 0.21861152141802068, \"precision\": 0.9548736462093863, \"recall\": 0.7813884785819794, \"specificity\": 0.9998492368368682, \"npv\": 0.9991081436129825, \"accuracy\": 0.998960960960961, \"f1\": 0.859463850528026, \"f2\": 0.8108522378908645, \"f0_5\": 0.9142758382302109, \"p4\": 0.9241982796143317, \"phi\": 0.8632998054164096}, {\"truth_threshold\": -4.000000059604645, \"match_probability\": 0.05882352712444066, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1587.0, \"tn\": 497400.0, \"fp\": 69.0, \"fn\": 444.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7813884785819794, \"tn_rate\": 0.9998612978899187, \"fp_rate\": 0.0001387021100812312, \"fn_rate\": 0.21861152141802068, \"precision\": 0.9583333333333334, \"recall\": 0.7813884785819794, \"specificity\": 0.9998612978899187, \"npv\": 0.9991081543616073, \"accuracy\": 0.9989729729729729, \"f1\": 0.8608624898291294, \"f2\": 0.8113496932515337, \"f0_5\": 0.9168110918544194, \"p4\": 
0.9250088864925864, \"phi\": 0.8648704806192624}, {\"truth_threshold\": -3.9000000581145287, \"match_probability\": 0.06278043839004852, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1582.0, \"tn\": 497408.0, \"fp\": 61.0, \"fn\": 449.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7789266371245692, \"tn_rate\": 0.9998773792939861, \"fp_rate\": 0.00012262070601384208, \"fn_rate\": 0.22107336287543083, \"precision\": 0.9628727936701157, \"recall\": 0.7789266371245692, \"specificity\": 0.9998773792939861, \"npv\": 0.9990981346049167, \"accuracy\": 0.998978978978979, \"f1\": 0.8611867174741427, \"f2\": 0.8098699703081806, \"f0_5\": 0.919446704637917, \"p4\": 0.9251973215687156, \"phi\": 0.8655557882762271}, {\"truth_threshold\": -3.8000000566244125, \"match_probability\": 0.06698457743861425, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1582.0, \"tn\": 497410.0, \"fp\": 59.0, \"fn\": 449.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7789266371245692, \"tn_rate\": 0.999881399645003, \"fp_rate\": 0.00011860035499699478, \"fn_rate\": 0.22107336287543083, \"precision\": 0.9640463132236441, \"recall\": 0.7789266371245692, \"specificity\": 0.999881399645003, \"npv\": 0.9990981382278918, \"accuracy\": 0.9989829829829829, \"f1\": 0.8616557734204793, \"f2\": 0.8100358422939068, \"f0_5\": 0.9203025014543339, \"p4\": 0.9254688030174308, \"phi\": 0.8660858152403128}, {\"truth_threshold\": -3.7000000551342964, \"match_probability\": 0.07144878715678568, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1575.0, \"tn\": 497411.0, \"fp\": 58.0, \"fn\": 456.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7754800590841949, \"tn_rate\": 0.9998834098205114, \"fp_rate\": 0.00011659017948857115, \"fn_rate\": 0.22451994091580502, \"precision\": 0.964482547458665, \"recall\": 0.7754800590841949, 
\"specificity\": 0.9998834098205114, \"npv\": 0.9990840927396273, \"accuracy\": 0.998970970970971, \"f1\": 0.8597161572052402, \"f2\": 0.8071128420621092, \"f0_5\": 0.9196543267546421, \"p4\": 0.9243462872246998, \"phi\": 0.864357780473457}, {\"truth_threshold\": -3.6000000536441803, \"match_probability\": 0.0761862214703254, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1570.0, \"tn\": 497415.0, \"fp\": 54.0, \"fn\": 461.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7730182176267848, \"tn_rate\": 0.9998914505225451, \"fp_rate\": 0.00010854947745487659, \"fn_rate\": 0.22698178237321517, \"precision\": 0.9667487684729064, \"recall\": 0.7730182176267848, \"specificity\": 0.9998914505225451, \"npv\": 0.9990740666350657, \"accuracy\": 0.9989689689689689, \"f1\": 0.8590971272229823, \"f2\": 0.8052933935166188, \"f0_5\": 0.9206051366248388, \"p4\": 0.923987940256248, \"phi\": 0.8639987308870006}, {\"truth_threshold\": -3.500000052154064, \"match_probability\": 0.08121030044424019, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1569.0, \"tn\": 497415.0, \"fp\": 54.0, \"fn\": 462.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7725258493353028, \"tn_rate\": 0.9998914505225451, \"fp_rate\": 0.00010854947745487659, \"fn_rate\": 0.2274741506646972, \"precision\": 0.966728280961183, \"recall\": 0.7725258493353028, \"specificity\": 0.9998914505225451, \"npv\": 0.9990720599666183, \"accuracy\": 0.998966966966967, \"f1\": 0.8587848932676518, \"f2\": 0.8048630347799323, \"f0_5\": 0.9204505455825414, \"p4\": 0.923806889110029, \"phi\": 0.8637134313771212}, {\"truth_threshold\": -3.300000049173832, \"match_probability\": 0.09217307161544283, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1565.0, \"tn\": 497421.0, \"fp\": 48.0, \"fn\": 466.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 
0.7705563761693747, \"tn_rate\": 0.9999035115755956, \"fp_rate\": 9.648842440433474e-05, \"fn_rate\": 0.2294436238306253, \"precision\": 0.9702417854928704, \"recall\": 0.7705563761693747, \"specificity\": 0.9999035115755956, \"npv\": 0.9990640446527023, \"accuracy\": 0.998970970970971, \"f1\": 0.8589462129527992, \"f2\": 0.8036356167197288, \"f0_5\": 0.9224331014971119, \"p4\": 0.9239010780801296, \"phi\": 0.8641824234989411}, {\"truth_threshold\": -3.200000047683716, \"match_probability\": 0.09813940308831819, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1562.0, \"tn\": 497421.0, \"fp\": 48.0, \"fn\": 469.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7690792712949286, \"tn_rate\": 0.9999035115755956, \"fp_rate\": 9.648842440433474e-05, \"fn_rate\": 0.23092072870507138, \"precision\": 0.9701863354037267, \"recall\": 0.7690792712949286, \"specificity\": 0.9999035115755956, \"npv\": 0.99905802486493, \"accuracy\": 0.998964964964965, \"f1\": 0.8580060422960725, \"f2\": 0.8023423053215534, \"f0_5\": 0.92196907094794, \"p4\": 0.9233556465339778, \"phi\": 0.8633262566613811}, {\"truth_threshold\": -3.1000000461935997, \"match_probability\": 0.10444750015659417, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1561.0, \"tn\": 497421.0, \"fp\": 48.0, \"fn\": 470.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7685869030034466, \"tn_rate\": 0.9999035115755956, \"fp_rate\": 9.648842440433474e-05, \"fn_rate\": 0.23141309699655344, \"precision\": 0.9701678060907396, \"recall\": 0.7685869030034466, \"specificity\": 0.9999035115755956, \"npv\": 0.9990560182851267, \"accuracy\": 0.9989629629629629, \"f1\": 0.8576923076923076, \"f2\": 0.8019110243501489, \"f0_5\": 0.9218141018070155, \"p4\": 0.9231735147751612, \"phi\": 0.8630406819000448}, {\"truth_threshold\": -3.0000000447034836, \"match_probability\": 0.11111110805075623, 
\"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1560.0, \"tn\": 497421.0, \"fp\": 48.0, \"fn\": 471.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7680945347119645, \"tn_rate\": 0.9999035115755956, \"fp_rate\": 9.648842440433474e-05, \"fn_rate\": 0.23190546528803546, \"precision\": 0.9701492537313433, \"recall\": 0.7680945347119645, \"specificity\": 0.9999035115755956, \"npv\": 0.9990540117133836, \"accuracy\": 0.998960960960961, \"f1\": 0.8573784006595219, \"f2\": 0.8014796547472256, \"f0_5\": 0.9216589861751152, \"p4\": 0.9229912220379601, \"phi\": 0.8627550140781992}, {\"truth_threshold\": -2.9000000432133675, \"match_probability\": 0.11814376082605058, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1513.0, \"tn\": 497438.0, \"fp\": 31.0, \"fn\": 518.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7449532250123092, \"tn_rate\": 0.9999376845592388, \"fp_rate\": 6.231544076113285e-05, \"fn_rate\": 0.2550467749876908, \"precision\": 0.9799222797927462, \"recall\": 0.7449532250123092, \"specificity\": 0.9999376845592388, \"npv\": 0.9989597474475657, \"accuracy\": 0.9989009009009009, \"f1\": 0.8464335664335665, \"f2\": 0.7824782788580885, \"f0_5\": 0.9217740952845133, \"p4\": 0.916598914625346, \"phi\": 0.8539088497224261}, {\"truth_threshold\": -2.8000000417232513, \"match_probability\": 0.1255586621587546, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1511.0, \"tn\": 497440.0, \"fp\": 29.0, \"fn\": 520.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7439684884293452, \"tn_rate\": 0.9999417049102557, \"fp_rate\": 5.8295089744285574e-05, \"fn_rate\": 0.25603151157065485, \"precision\": 0.9811688311688311, \"recall\": 0.7439684884293452, \"specificity\": 0.9999417049102557, \"npv\": 0.9989557394168206, \"accuracy\": 0.9989009009009009, \"f1\": 0.8462615513861663, \"f2\": 
0.7817673841059603, \"f0_5\": 0.9223538029544622, \"p4\": 0.9164980483864553, \"phi\": 0.8538879661883434}, {\"truth_threshold\": -2.7000000402331352, \"match_probability\": 0.13336855415354743, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1510.0, \"tn\": 497440.0, \"fp\": 29.0, \"fn\": 521.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7434761201378631, \"tn_rate\": 0.9999417049102557, \"fp_rate\": 5.8295089744285574e-05, \"fn_rate\": 0.25652387986213687, \"precision\": 0.9811565951916829, \"recall\": 0.7434761201378631, \"specificity\": 0.9999417049102557, \"npv\": 0.9989537333244973, \"accuracy\": 0.9988988988988989, \"f1\": 0.84593837535014, \"f2\": 0.7813308496326192, \"f0_5\": 0.9221937217540003, \"p4\": 0.916308069744429, \"phi\": 0.8535991381981919}, {\"truth_threshold\": -2.600000038743019, \"match_probability\": 0.1415855743659812, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1509.0, \"tn\": 497441.0, \"fp\": 28.0, \"fn\": 522.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7429837518463811, \"tn_rate\": 0.9999437150857642, \"fp_rate\": 5.6284914235861934e-05, \"fn_rate\": 0.2570162481536189, \"precision\": 0.9817826935588809, \"recall\": 0.7429837518463811, \"specificity\": 0.9999437150857642, \"npv\": 0.9989517293453529, \"accuracy\": 0.9988988988988989, \"f1\": 0.8458520179372198, \"f2\": 0.7809750543422006, \"f0_5\": 0.9224844112972246, \"p4\": 0.9162574066722536, \"phi\": 0.8535892106519398}, {\"truth_threshold\": -2.500000037252903, \"match_probability\": 0.15022110152606716, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1506.0, \"tn\": 497442.0, \"fp\": 27.0, \"fn\": 525.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.741506646971935, \"tn_rate\": 0.9999457252612726, \"fp_rate\": 5.427473872743829e-05, \"fn_rate\": 0.258493353028065, \"precision\": 
0.9823874755381604, \"recall\": 0.741506646971935, \"specificity\": 0.9999457252612726, \"npv\": 0.9989457132701565, \"accuracy\": 0.9988948948948949, \"f1\": 0.8451178451178452, \"f2\": 0.7797452625038832, \"f0_5\": 0.9224549797868431, \"p4\": 0.9158256533199051, \"phi\": 0.8530016534527379}, {\"truth_threshold\": -2.400000035762787, \"match_probability\": 0.1592855907727143, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1504.0, \"tn\": 497443.0, \"fp\": 26.0, \"fn\": 527.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.740521910388971, \"tn_rate\": 0.999947735436781, \"fp_rate\": 5.226456321901465e-05, \"fn_rate\": 0.25947808961102903, \"precision\": 0.9830065359477124, \"recall\": 0.740521910388971, \"specificity\": 0.999947735436781, \"npv\": 0.9989417033154607, \"accuracy\": 0.9988928928928928, \"f1\": 0.8447065431058691, \"f2\": 0.7789517298529107, \"f0_5\": 0.9225861857440805, \"p4\": 0.9155836759722604, \"phi\": 0.8527032689901116}, {\"truth_threshold\": -2.3000000342726707, \"match_probability\": 0.16878839957195682, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1502.0, \"tn\": 497446.0, \"fp\": 23.0, \"fn\": 529.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7395371738060069, \"tn_rate\": 0.9999537659633062, \"fp_rate\": 4.623403669374373e-05, \"fn_rate\": 0.2604628261939931, \"precision\": 0.9849180327868853, \"recall\": 0.7395371738060069, \"specificity\": 0.9999537659633062, \"npv\": 0.9989376976755862, \"accuracy\": 0.9988948948948949, \"f1\": 0.8447694038245219, \"f2\": 0.7783189967872318, \"f0_5\": 0.9236256303037756, \"p4\": 0.9156210235746602, \"phi\": 0.8529668215804862}, {\"truth_threshold\": -2.2000000327825546, \"match_probability\": 0.1787376058900962, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1496.0, \"tn\": 497446.0, \"fp\": 23.0, \"fn\": 535.0, \"P_rate\": 
0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7365829640571148, \"tn_rate\": 0.9999537659633062, \"fp_rate\": 4.623403669374373e-05, \"fn_rate\": 0.2634170359428853, \"precision\": 0.9848584595128373, \"recall\": 0.7365829640571148, \"specificity\": 0.9999537659633062, \"npv\": 0.998925661822439, \"accuracy\": 0.9988828828828828, \"f1\": 0.8428169014084507, \"f2\": 0.7756922119672301, \"f0_5\": 0.9226594301221167, \"p4\": 0.9144704096753229, \"phi\": 0.8512303682166759}, {\"truth_threshold\": -2.0000000298023224, \"match_probability\": 0.19999999669481672, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1495.0, \"tn\": 497448.0, \"fp\": 21.0, \"fn\": 536.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7360905957656327, \"tn_rate\": 0.9999577863143231, \"fp_rate\": 4.221368567689645e-05, \"fn_rate\": 0.2639094042343673, \"precision\": 0.9861477572559367, \"recall\": 0.7360905957656327, \"specificity\": 0.9999577863143231, \"npv\": 0.998923660197918, \"accuracy\": 0.9988848848848849, \"f1\": 0.8429658866647871, \"f2\": 0.7754149377593361, \"f0_5\": 0.9234095120444719, \"p4\": 0.9145585211638112, \"phi\": 0.851504681754668}, {\"truth_threshold\": -1.9000000283122063, \"match_probability\": 0.2113212378007128, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1490.0, \"tn\": 497448.0, \"fp\": 21.0, \"fn\": 541.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7336287543082225, \"tn_rate\": 0.9999577863143231, \"fp_rate\": 4.221368567689645e-05, \"fn_rate\": 0.26637124569177745, \"precision\": 0.986101919258769, \"recall\": 0.7336287543082225, \"specificity\": 0.9999577863143231, \"npv\": 0.9989136306223632, \"accuracy\": 0.9988748748748749, \"f1\": 0.8413325804630153, \"f2\": 0.7732226258432797, \"f0_5\": 0.9226006191950464, \"p4\": 0.913594311460595, \"phi\": 0.8500553803704287}, {\"truth_threshold\": -1.8000000268220901, 
\"match_probability\": 0.22310460998179016, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1452.0, \"tn\": 497454.0, \"fp\": 15.0, \"fn\": 579.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7149187592319055, \"tn_rate\": 0.9999698473673736, \"fp_rate\": 3.0152632626354607e-05, \"fn_rate\": 0.28508124076809455, \"precision\": 0.9897750511247444, \"recall\": 0.7149187592319055, \"specificity\": 0.9999698473673736, \"npv\": 0.9988374264355976, \"accuracy\": 0.9988108108108108, \"f1\": 0.8301886792452831, \"f2\": 0.7569596496715671, \"f0_5\": 0.9191036840106342, \"p4\": 0.906970866885346, \"phi\": 0.8406829346131477}, {\"truth_threshold\": -1.700000025331974, \"match_probability\": 0.2353489599091234, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1451.0, \"tn\": 497454.0, \"fp\": 15.0, \"fn\": 580.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7144263909404235, \"tn_rate\": 0.9999698473673736, \"fp_rate\": 3.0152632626354607e-05, \"fn_rate\": 0.28557360905957657, \"precision\": 0.9897680763983628, \"recall\": 0.7144263909404235, \"specificity\": 0.9999698473673736, \"npv\": 0.99883542087488, \"accuracy\": 0.9988088088088088, \"f1\": 0.8298541607091793, \"f2\": 0.7565172054223149, \"f0_5\": 0.9189360354654845, \"p4\": 0.9067707883321519, \"phi\": 0.8403895642081339}, {\"truth_threshold\": -1.600000023841858, \"match_probability\": 0.24805074388621665, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1450.0, \"tn\": 497454.0, \"fp\": 15.0, \"fn\": 581.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7139340226489415, \"tn_rate\": 0.9999698473673736, \"fp_rate\": 3.0152632626354607e-05, \"fn_rate\": 0.2860659773510586, \"precision\": 0.9897610921501706, \"recall\": 0.7139340226489415, \"specificity\": 0.9999698473673736, \"npv\": 0.9988334153222163, \"accuracy\": 
0.9988068068068068, \"f1\": 0.8295194508009154, \"f2\": 0.7560746688914382, \"f0_5\": 0.9187682169560258, \"p4\": 0.9065705228164346, \"phi\": 0.8400960925759684}, {\"truth_threshold\": -1.5000000223517418, \"match_probability\": 0.2612038719739489, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1448.0, \"tn\": 497454.0, \"fp\": 15.0, \"fn\": 583.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7129492860659774, \"tn_rate\": 0.9999698473673736, \"fp_rate\": 3.0152632626354607e-05, \"fn_rate\": 0.28705071393402265, \"precision\": 0.9897470950102529, \"recall\": 0.7129492860659774, \"specificity\": 0.9999698473673736, \"npv\": 0.9988294042410504, \"accuracy\": 0.9988028028028028, \"f1\": 0.8288494562106469, \"f2\": 0.7551893188693022, \"f0_5\": 0.9184320690092604, \"p4\": 0.9061694298434989, \"phi\": 0.8395088452058233}, {\"truth_threshold\": -1.4000000208616257, \"match_probability\": 0.2747995717943022, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1431.0, \"tn\": 497460.0, \"fp\": 9.0, \"fn\": 600.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7045790251107829, \"tn_rate\": 0.9999819084204242, \"fp_rate\": 1.8091579575812764e-05, \"fn_rate\": 0.29542097488921715, \"precision\": 0.99375, \"recall\": 0.7045790251107829, \"specificity\": 0.9999819084204242, \"npv\": 0.9987953258643537, \"accuracy\": 0.9987807807807808, \"f1\": 0.8245462402765773, \"f2\": 0.7481179422835633, \"f0_5\": 0.9183673469387755, \"p4\": 0.9035870905158078, \"phi\": 0.8362467706144072}, {\"truth_threshold\": -1.3000000193715096, \"match_probability\": 0.2888262766358852, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1429.0, \"tn\": 497460.0, \"fp\": 9.0, \"fn\": 602.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7035942885278188, \"tn_rate\": 0.9999819084204242, \"fp_rate\": 
1.8091579575812764e-05, \"fn_rate\": 0.2964057114721812, \"precision\": 0.9937413073713491, \"recall\": 0.7035942885278188, \"specificity\": 0.9999819084204242, \"npv\": 0.9987913151374729, \"accuracy\": 0.9987767767767768, \"f1\": 0.8238685500144134, \"f2\": 0.7472286132608241, \"f0_5\": 0.9180264679429526, \"p4\": 0.9031791972899718, \"phi\": 0.8356568242357764}, {\"truth_threshold\": -1.2000000178813934, \"match_probability\": 0.3032695424040186, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1429.0, \"tn\": 497461.0, \"fp\": 8.0, \"fn\": 602.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7035942885278188, \"tn_rate\": 0.9999839185959326, \"fp_rate\": 1.6081404067389124e-05, \"fn_rate\": 0.2964057114721812, \"precision\": 0.9944328462073765, \"recall\": 0.7035942885278188, \"specificity\": 0.9999839185959326, \"npv\": 0.9987913175642439, \"accuracy\": 0.9987787787787787, \"f1\": 0.8241061130334487, \"f2\": 0.7473067670745738, \"f0_5\": 0.9184985216608819, \"p4\": 0.9033223408273486, \"phi\": 0.8359490872166939}, {\"truth_threshold\": -1.1000000163912773, \"match_probability\": 0.318111997717226, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1429.0, \"tn\": 497462.0, \"fp\": 7.0, \"fn\": 602.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7035942885278188, \"tn_rate\": 0.999985928771441, \"fp_rate\": 1.4071228558965483e-05, \"fn_rate\": 0.2964057114721812, \"precision\": 0.995125348189415, \"recall\": 0.7035942885278188, \"specificity\": 0.999985928771441, \"npv\": 0.9987913199910051, \"accuracy\": 0.9987807807807808, \"f1\": 0.8243438130948947, \"f2\": 0.7473849372384938, \"f0_5\": 0.9189710610932476, \"p4\": 0.9034655297434823, \"phi\": 0.836241654891537}, {\"truth_threshold\": -1.0000000149011612, \"match_probability\": 0.33333333103806717, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1426.0, 
\"tn\": 497462.0, \"fp\": 7.0, \"fn\": 605.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7021171836533727, \"tn_rate\": 0.999985928771441, \"fp_rate\": 1.4071228558965483e-05, \"fn_rate\": 0.2978828163466273, \"precision\": 0.9951151430565248, \"recall\": 0.7021171836533727, \"specificity\": 0.999985928771441, \"npv\": 0.9987853039852068, \"accuracy\": 0.9987747747747747, \"f1\": 0.8233256351039261, \"f2\": 0.7460500156953018, \"f0_5\": 0.918459358495427, \"p4\": 0.9028524540501471, \"phi\": 0.8353565689622686}, {\"truth_threshold\": -0.9000000134110451, \"match_probability\": 0.34891031813411577, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1424.0, \"tn\": 497465.0, \"fp\": 4.0, \"fn\": 607.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7011324470704087, \"tn_rate\": 0.9999919592979664, \"fp_rate\": 8.040702033694562e-06, \"fn_rate\": 0.29886755292959133, \"precision\": 0.9971988795518207, \"recall\": 0.7011324470704087, \"specificity\": 0.9999919592979664, \"npv\": 0.9987813006954818, \"accuracy\": 0.9987767767767768, \"f1\": 0.8233593524139925, \"f2\": 0.7453936348408711, \"f0_5\": 0.9195402298850575, \"p4\": 0.9028731376295295, \"phi\": 0.8356470622401285}, {\"truth_threshold\": -0.800000011920929, \"match_probability\": 0.36481689239780585, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1423.0, \"tn\": 497465.0, \"fp\": 4.0, \"fn\": 608.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7006400787789266, \"tn_rate\": 0.9999919592979664, \"fp_rate\": 8.040702033694562e-06, \"fn_rate\": 0.29935992122107336, \"precision\": 0.9971969166082691, \"recall\": 0.7006400787789266, \"specificity\": 0.9999919592979664, \"npv\": 0.998779295404489, \"accuracy\": 0.9987747747747747, \"f1\": 0.8230190861769809, \"f2\": 0.7449481729661815, \"f0_5\": 0.919369427574622, \"p4\": 0.9026681098063133, \"phi\": 
0.8353519278116274}, {\"truth_threshold\": -0.7000000104308128, \"match_probability\": 0.38102425962470177, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1420.0, \"tn\": 497465.0, \"fp\": 4.0, \"fn\": 611.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6991629739044806, \"tn_rate\": 0.9999919592979664, \"fp_rate\": 8.040702033694562e-06, \"fn_rate\": 0.30083702609551943, \"precision\": 0.9971910112359551, \"recall\": 0.6991629739044806, \"specificity\": 0.9999919592979664, \"npv\": 0.9987732795798232, \"accuracy\": 0.9987687687687687, \"f1\": 0.8219971056439942, \"f2\": 0.7436112274821952, \"f0_5\": 0.9188559596221043, \"p4\": 0.9020518582388883, \"phi\": 0.8344659053654186}, {\"truth_threshold\": -0.5000000074505806, \"match_probability\": 0.41421356112001384, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1406.0, \"tn\": 497465.0, \"fp\": 4.0, \"fn\": 625.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6922698178237322, \"tn_rate\": 0.9999919592979664, \"fp_rate\": 8.040702033694562e-06, \"fn_rate\": 0.30773018217626785, \"precision\": 0.9971631205673759, \"recall\": 0.6922698178237322, \"specificity\": 0.9999919592979664, \"npv\": 0.9987452066895541, \"accuracy\": 0.9987407407407407, \"f1\": 0.8172043010752689, \"f2\": 0.7373610237046361, \"f0_5\": 0.9164385347412333, \"p4\": 0.8991526501297269, \"phi\": 0.8303187752165915}, {\"truth_threshold\": -0.4000000059604645, \"match_probability\": 0.4311259267559445, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1393.0, \"tn\": 497467.0, \"fp\": 2.0, \"fn\": 638.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6858690300344658, \"tn_rate\": 0.9999959796489831, \"fp_rate\": 4.020351016847281e-06, \"fn_rate\": 0.31413096996553425, \"precision\": 0.9985663082437276, \"recall\": 0.6858690300344658, \"specificity\": 
0.9999959796489831, \"npv\": 0.9987191455616787, \"accuracy\": 0.9987187187187188, \"f1\": 0.8131932282545242, \"f2\": 0.7316945057253913, \"f0_5\": 0.9151228485087374, \"p4\": 0.8967149034605146, \"phi\": 0.8270448581736107}, {\"truth_threshold\": -0.20000000298023224, \"match_probability\": 0.4653980381052749, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1382.0, \"tn\": 497467.0, \"fp\": 2.0, \"fn\": 649.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6804529788281635, \"tn_rate\": 0.9999959796489831, \"fp_rate\": 4.020351016847281e-06, \"fn_rate\": 0.31954702117183653, \"precision\": 0.9985549132947977, \"recall\": 0.6804529788281635, \"specificity\": 0.9999959796489831, \"npv\": 0.9986970906375222, \"accuracy\": 0.9986966966966967, \"f1\": 0.8093704245973645, \"f2\": 0.726756415649979, \"f0_5\": 0.9131756310294701, \"p4\": 0.89438137045544, \"phi\": 0.8237591197765245}, {\"truth_threshold\": -0.10000000149011612, \"match_probability\": 0.48267825490990723, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1373.0, \"tn\": 497467.0, \"fp\": 2.0, \"fn\": 658.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6760216642048252, \"tn_rate\": 0.9999959796489831, \"fp_rate\": 4.020351016847281e-06, \"fn_rate\": 0.32397833579517477, \"precision\": 0.9985454545454545, \"recall\": 0.6760216642048252, \"specificity\": 0.9999959796489831, \"npv\": 0.9986790464240903, \"accuracy\": 0.9986786786786787, \"f1\": 0.806224310041104, \"f2\": 0.7227076534372039, \"f0_5\": 0.9115655291461957, \"p4\": 0.8924535626059765, \"phi\": 0.8210611168108751}, {\"truth_threshold\": -0.0, \"match_probability\": 0.5, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1371.0, \"tn\": 497468.0, \"fp\": 1.0, \"fn\": 660.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6750369276218612, \"tn_rate\": 
0.9999979898244916, \"fp_rate\": 2.0101755084236405e-06, \"fn_rate\": 0.3249630723781389, \"precision\": 0.999271137026239, \"recall\": 0.6750369276218612, \"specificity\": 0.9999979898244916, \"npv\": 0.9986750393473164, \"accuracy\": 0.9986766766766767, \"f1\": 0.8057596238612988, \"f2\": 0.7218828980623421, \"f0_5\": 0.9116903843596222, \"p4\": 0.8921683886776143, \"phi\": 0.8207609364390014}, {\"truth_threshold\": 0.10000000149011612, \"match_probability\": 0.5173217450900928, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1369.0, \"tn\": 497468.0, \"fp\": 1.0, \"fn\": 662.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6740521910388971, \"tn_rate\": 0.9999979898244916, \"fp_rate\": 2.0101755084236405e-06, \"fn_rate\": 0.3259478089611029, \"precision\": 0.9992700729927008, \"recall\": 0.6740521910388971, \"specificity\": 0.9999979898244916, \"npv\": 0.9986710296508944, \"accuracy\": 0.9986726726726727, \"f1\": 0.8050573360776242, \"f2\": 0.7209816726353486, \"f0_5\": 0.9113300492610837, \"p4\": 0.8917369272127385, \"phi\": 0.8201599726443158}, {\"truth_threshold\": 0.30000000447034836, \"match_probability\": 0.5517995194264473, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1366.0, \"tn\": 497468.0, \"fp\": 1.0, \"fn\": 665.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.672575086164451, \"tn_rate\": 0.9999979898244916, \"fp_rate\": 2.0101755084236405e-06, \"fn_rate\": 0.327424913835549, \"precision\": 0.9992684711046086, \"recall\": 0.672575086164451, \"specificity\": 0.9999979898244916, \"npv\": 0.9986650151666322, \"accuracy\": 0.9986666666666667, \"f1\": 0.8040023543260741, \"f2\": 0.7196291223264145, \"f0_5\": 0.9107881050806774, \"p4\": 0.8910881569529707, \"phi\": 0.8192577094503399}, {\"truth_threshold\": 0.4000000059604645, \"match_probability\": 0.5688740732440556, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, 
\"n\": 497469.0, \"tp\": 1360.0, \"tn\": 497468.0, \"fp\": 1.0, \"fn\": 671.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6696208764155588, \"tn_rate\": 0.9999979898244916, \"fp_rate\": 2.0101755084236405e-06, \"fn_rate\": 0.33037912358444116, \"precision\": 0.9992652461425422, \"recall\": 0.6696208764155588, \"specificity\": 0.9999979898244916, \"npv\": 0.9986529864154383, \"accuracy\": 0.9986546546546546, \"f1\": 0.8018867924528302, \"f2\": 0.716921454928835, \"f0_5\": 0.9096989966555183, \"p4\": 0.8897849047168714, \"phi\": 0.8174502281397625}, {\"truth_threshold\": 0.5000000074505806, \"match_probability\": 0.5857864388799862, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1354.0, \"tn\": 497468.0, \"fp\": 1.0, \"fn\": 677.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6666666666666666, \"tn_rate\": 0.9999979898244916, \"fp_rate\": 2.0101755084236405e-06, \"fn_rate\": 0.3333333333333333, \"precision\": 0.9992619926199262, \"recall\": 0.6666666666666666, \"specificity\": 0.9999979898244916, \"npv\": 0.9986409579540094, \"accuracy\": 0.9986426426426427, \"f1\": 0.7997637330183107, \"f2\": 0.7142103597425888, \"f0_5\": 0.908602872097705, \"p4\": 0.888473980588251, \"phi\": 0.8156387850978434}, {\"truth_threshold\": 0.6000000089406967, \"match_probability\": 0.6024989422185573, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1345.0, \"tn\": 497468.0, \"fp\": 1.0, \"fn\": 686.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6622353520433284, \"tn_rate\": 0.9999979898244916, \"fp_rate\": 2.0101755084236405e-06, \"fn_rate\": 0.3377646479566716, \"precision\": 0.9992570579494799, \"recall\": 0.6622353520433284, \"specificity\": 0.9999979898244916, \"npv\": 0.9986229158051526, \"accuracy\": 0.9986246246246246, \"f1\": 0.7965649985193959, \"f2\": 0.7101372756071805, \"f0_5\": 0.9069453809844908, \"p4\": 
0.8864930601329402, \"phi\": 0.8129141343355415}, {\"truth_threshold\": 0.800000011920929, \"match_probability\": 0.6351831076021942, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1338.0, \"tn\": 497468.0, \"fp\": 1.0, \"fn\": 693.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6587887740029542, \"tn_rate\": 0.9999979898244916, \"fp_rate\": 2.0101755084236405e-06, \"fn_rate\": 0.3412112259970458, \"precision\": 0.9992531740104555, \"recall\": 0.6587887740029542, \"specificity\": 0.9999979898244916, \"npv\": 0.9986088834734153, \"accuracy\": 0.9986106106106106, \"f1\": 0.7940652818991097, \"f2\": 0.7069639649159886, \"f0_5\": 0.9056450521185867, \"p4\": 0.884940154458185, \"phi\": 0.8107886997214477}, {\"truth_threshold\": 0.9000000134110451, \"match_probability\": 0.6510896818658842, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1332.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 699.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6558345642540621, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.34416543574593794, \"precision\": 1.0, \"recall\": 0.6558345642540621, \"specificity\": 1.0, \"npv\": 0.9985968588909765, \"accuracy\": 0.9986006006006006, \"f1\": 0.792149866190901, \"f2\": 0.7043147208121827, \"f0_5\": 0.9050142682429678, \"p4\": 0.8837474646304863, \"phi\": 0.8092677775719472}, {\"truth_threshold\": 1.0000000149011612, \"match_probability\": 0.6666666689619328, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1326.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 705.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6528803545051699, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.34711964549483015, \"precision\": 1.0, \"recall\": 0.6528803545051699, \"specificity\": 1.0, \"npv\": 0.9985848318057546, \"accuracy\": 0.9985885885885886, \"f1\": 0.7899910634495085, \"f2\": 
0.7015873015873015, \"f0_5\": 0.9038854805725971, \"p4\": 0.8824000363175218, \"phi\": 0.8074381827687037}, {\"truth_threshold\": 1.1000000163912773, \"match_probability\": 0.681888002282774, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1318.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 713.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6489414081733137, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.35105859182668636, \"precision\": 1.0, \"recall\": 0.6489414081733137, \"specificity\": 1.0, \"npv\": 0.9985687961427752, \"accuracy\": 0.9985725725725726, \"f1\": 0.7871006270528516, \"f2\": 0.6979453505613218, \"f0_5\": 0.9023688894974667, \"p4\": 0.8805909001057858, \"phi\": 0.8049923233962067}, {\"truth_threshold\": 1.2000000178813934, \"match_probability\": 0.6967304575959814, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1313.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 718.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6464795667159035, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3535204332840965, \"precision\": 1.0, \"recall\": 0.6464795667159035, \"specificity\": 1.0, \"npv\": 0.9985587741149408, \"accuracy\": 0.9985625625625626, \"f1\": 0.7852870813397129, \"f2\": 0.6956659955494331, \"f0_5\": 0.9014142523685295, \"p4\": 0.8794528280439022, \"phi\": 0.803459920363294}, {\"truth_threshold\": 1.3000000193715096, \"match_probability\": 0.7111737233641148, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1310.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 721.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6450024618414574, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.35499753815854257, \"precision\": 1.0, \"recall\": 0.6450024618414574, \"specificity\": 1.0, \"npv\": 0.9985527609948012, \"accuracy\": 0.9985565565565565, \"f1\": 0.784196348398683, \"f2\": 
0.6942972228111087, \"f0_5\": 0.9008389492504469, \"p4\": 0.8787672446539555, \"phi\": 0.8025390888425505}, {\"truth_threshold\": 1.4000000208616257, \"match_probability\": 0.7252004282056979, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1307.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 724.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6435253569670113, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3564746430329887, \"precision\": 1.0, \"recall\": 0.6435253569670113, \"specificity\": 1.0, \"npv\": 0.9985467479470808, \"accuracy\": 0.9985505505505505, \"f1\": 0.7831036548831636, \"f2\": 0.6929275792598876, \"f0_5\": 0.9002617440418791, \"p4\": 0.8780795947480086, \"phi\": 0.801617210656616}, {\"truth_threshold\": 1.5000000223517418, \"match_probability\": 0.7387961280260511, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1304.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 727.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6420482520925652, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3579517479074348, \"precision\": 1.0, \"recall\": 0.6420482520925652, \"specificity\": 1.0, \"npv\": 0.9985407349717782, \"accuracy\": 0.9985445445445446, \"f1\": 0.7820089955022489, \"f2\": 0.6915570640644888, \"f0_5\": 0.8996826272940527, \"p4\": 0.8773898689316875, \"phi\": 0.8006942821900601}, {\"truth_threshold\": 1.600000023841858, \"match_probability\": 0.7519492561137834, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1296.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 735.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.638109305760709, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.361890694239291, \"precision\": 1.0, \"recall\": 0.638109305760709, \"specificity\": 1.0, \"npv\": 0.9985247007250042, \"accuracy\": 0.9985285285285286, \"f1\": 0.7790802524797115, \"f2\": 
0.6878980891719745, \"f0_5\": 0.8981288981288982, \"p4\": 0.875540379133403, \"phi\": 0.7982279771873148}, {\"truth_threshold\": 1.700000025331974, \"match_probability\": 0.7646510400908766, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1285.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 746.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6326932545544067, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.36730674544559333, \"precision\": 1.0, \"recall\": 0.6326932545544067, \"specificity\": 1.0, \"npv\": 0.998502654476481, \"accuracy\": 0.9985065065065065, \"f1\": 0.7750301568154403, \"f2\": 0.682856839196514, \"f0_5\": 0.8959698786780086, \"p4\": 0.8729727835275262, \"phi\": 0.7948244423405328}, {\"truth_threshold\": 1.8000000268220901, \"match_probability\": 0.7768953900182098, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1282.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 749.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6312161496799606, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3687838503200394, \"precision\": 1.0, \"recall\": 0.6312161496799606, \"specificity\": 1.0, \"npv\": 0.9984966420322028, \"accuracy\": 0.9985005005005005, \"f1\": 0.7739209175973438, \"f2\": 0.6814799064426962, \"f0_5\": 0.895376449224752, \"p4\": 0.872267541831041, \"phi\": 0.7938936993401176}, {\"truth_threshold\": 1.9000000283122063, \"match_probability\": 0.7886787621992872, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1274.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 757.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6272772033481043, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3727227966518956, \"precision\": 1.0, \"recall\": 0.6272772033481043, \"specificity\": 1.0, \"npv\": 0.9984806092014468, \"accuracy\": 0.9984844844844845, \"f1\": 0.7709531013615734, \"f2\": 
0.6778037880400085, \"f0_5\": 0.8937842009260558, \"p4\": 0.8703763275955726, \"phi\": 0.791406421592089}, {\"truth_threshold\": 2.0000000298023224, \"match_probability\": 0.8000000033051833, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1266.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 765.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6233382570162481, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3766617429837518, \"precision\": 1.0, \"recall\": 0.6233382570162481, \"specificity\": 1.0, \"npv\": 0.9984645768855598, \"accuracy\": 0.9984684684684685, \"f1\": 0.7679708826205641, \"f2\": 0.6741214057507987, \"f0_5\": 0.8921775898520085, \"p4\": 0.8684695887997785, \"phi\": 0.788911382252982}, {\"truth_threshold\": 2.1000000312924385, \"match_probability\": 0.8108601793810092, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1263.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 768.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.621861152141802, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.37813884785819796, \"precision\": 1.0, \"recall\": 0.621861152141802, \"specificity\": 1.0, \"npv\": 0.9984585648998369, \"accuracy\": 0.9984624624624625, \"f1\": 0.7668488160291439, \"f2\": 0.6727388942154043, \"f0_5\": 0.8915713680643795, \"p4\": 0.8677505201016583, \"phi\": 0.7879737264239606}, {\"truth_threshold\": 2.2000000327825546, \"match_probability\": 0.8212623941099038, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1258.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 773.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6193993106843919, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3806006893156081, \"precision\": 1.0, \"recall\": 0.6193993106843919, \"specificity\": 1.0, \"npv\": 0.9984485450845172, \"accuracy\": 0.9984524524524524, \"f1\": 0.7649741562785041, \"f2\": 
0.6704327435514815, \"f0_5\": 0.8905564207843693, \"p4\": 0.8665471327679816, \"phi\": 0.7864085074432906}, {\"truth_threshold\": 2.3000000342726707, \"match_probability\": 0.8312116004280432, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1246.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 785.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6134908911866076, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3865091088133924, \"precision\": 1.0, \"recall\": 0.6134908911866076, \"specificity\": 1.0, \"npv\": 0.998424498348232, \"accuracy\": 0.9984284284284284, \"f1\": 0.7604516325907843, \"f2\": 0.6648879402347919, \"f0_5\": 0.8880969351389879, \"p4\": 0.863633549529319, \"phi\": 0.7826393392068908}, {\"truth_threshold\": 2.400000035762787, \"match_probability\": 0.8407144092272857, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1239.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 792.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6100443131462334, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.38995568685376664, \"precision\": 1.0, \"recall\": 0.6100443131462334, \"specificity\": 1.0, \"npv\": 0.9984104716202954, \"accuracy\": 0.9984144144144144, \"f1\": 0.7577981651376147, \"f2\": 0.6616469080422941, \"f0_5\": 0.8866466294547016, \"p4\": 0.8619171565312959, \"phi\": 0.7804323355663898}, {\"truth_threshold\": 2.500000037252903, \"match_probability\": 0.8497788984739328, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1228.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 803.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.604628261939931, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3953717380600689, \"precision\": 1.0, \"recall\": 0.604628261939931, \"specificity\": 1.0, \"npv\": 0.9983884304155161, \"accuracy\": 0.9983923923923924, \"f1\": 0.7536054004295796, \"f2\": 
0.6565440547476475, \"f0_5\": 0.8843439435402564, \"p4\": 0.8591945586890813, \"phi\": 0.7769516467728665}, {\"truth_threshold\": 2.600000038743019, \"match_probability\": 0.8584144256340188, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1220.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 811.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6006893156080748, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3993106843919252, \"precision\": 1.0, \"recall\": 0.6006893156080748, \"specificity\": 1.0, \"npv\": 0.9983724010596452, \"accuracy\": 0.9983763763763763, \"f1\": 0.750538295908951, \"f2\": 0.6528253424657534, \"f0_5\": 0.8826508464766315, \"p4\": 0.8571947142334491, \"phi\": 0.7744105076214479}, {\"truth_threshold\": 2.7000000402331352, \"match_probability\": 0.8666314458464526, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1216.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 815.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5987198424421467, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4012801575578533, \"precision\": 1.0, \"recall\": 0.5987198424421467, \"specificity\": 1.0, \"npv\": 0.9983643865747245, \"accuracy\": 0.9983683683683684, \"f1\": 0.7489990760702187, \"f2\": 0.6509635974304069, \"f0_5\": 0.8817984046410442, \"p4\": 0.8561884718896848, \"phi\": 0.7731368366789086}, {\"truth_threshold\": 2.8000000417232513, \"match_probability\": 0.8744413378412453, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1212.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 819.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5967503692762186, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4032496307237814, \"precision\": 1.0, \"recall\": 0.5967503692762186, \"specificity\": 1.0, \"npv\": 0.9983563722184761, \"accuracy\": 0.9983603603603604, \"f1\": 0.7474560592044404, \"f2\": 
0.6491002570694088, \"f0_5\": 0.8809419973833406, \"p4\": 0.8551779804239885, \"phi\": 0.7718610845162758}, {\"truth_threshold\": 2.9000000432133675, \"match_probability\": 0.8818562391739494, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1204.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 827.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5928114229443624, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4071885770556376, \"precision\": 1.0, \"recall\": 0.5928114229443624, \"specificity\": 1.0, \"npv\": 0.9983403438919839, \"accuracy\": 0.9983443443443444, \"f1\": 0.7443585780525502, \"f2\": 0.6453687821612349, \"f0_5\": 0.879217175405287, \"p4\": 0.8531441416250695, \"phi\": 0.7693032950958751}, {\"truth_threshold\": 3.0000000447034836, \"match_probability\": 0.8888888919492438, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1190.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 841.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.585918266863614, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.414081733136386, \"precision\": 1.0, \"recall\": 0.585918266863614, \"specificity\": 1.0, \"npv\": 0.9983122955589894, \"accuracy\": 0.9983163163163163, \"f1\": 0.7389009624340267, \"f2\": 0.6388232767876315, \"f0_5\": 0.8761596230304816, \"p4\": 0.8495431030447466, \"phi\": 0.764806779521834}, {\"truth_threshold\": 3.1000000461935997, \"match_probability\": 0.8955524998434058, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1184.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 847.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5829640571147218, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4170359428852782, \"precision\": 1.0, \"recall\": 0.5829640571147218, \"specificity\": 1.0, \"npv\": 0.9983002753273024, \"accuracy\": 0.9983043043043043, \"f1\": 0.736547433903577, \"f2\": 
0.6360120326600773, \"f0_5\": 0.8748337520319196, \"p4\": 0.847983261710312, \"phi\": 0.7628716659593198}, {\"truth_threshold\": 3.200000047683716, \"match_probability\": 0.9018605969116819, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1173.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 858.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5775480059084195, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4224519940915805, \"precision\": 1.0, \"recall\": 0.5775480059084195, \"specificity\": 1.0, \"npv\": 0.9982782389876527, \"accuracy\": 0.9982822822822823, \"f1\": 0.7322097378277154, \"f2\": 0.6308486608583413, \"f0_5\": 0.8723784024988844, \"p4\": 0.8450973470216678, \"phi\": 0.7593112710009562}, {\"truth_threshold\": 3.300000049173832, \"match_probability\": 0.9078269283845571, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1157.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 874.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5696701132447071, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.43032988675529293, \"precision\": 1.0, \"recall\": 0.5696701132447071, \"specificity\": 1.0, \"npv\": 0.9982461878665899, \"accuracy\": 0.9982502502502503, \"f1\": 0.7258469259723965, \"f2\": 0.6233164529684301, \"f0_5\": 0.8687490614206337, \"p4\": 0.8408380238050522, \"phi\": 0.7541027906645469}, {\"truth_threshold\": 3.400000050663948, \"match_probability\": 0.9134653434169965, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1151.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 880.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5667159034958149, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.43328409650418515, \"precision\": 1.0, \"recall\": 0.5667159034958149, \"specificity\": 1.0, \"npv\": 0.9982341692267869, \"accuracy\": 0.9982382382382382, \"f1\": 0.7234443746071653, \"f2\": 
0.6204851752021563, \"f0_5\": 0.8673700075357951, \"p4\": 0.8392216062989525, \"phi\": 0.7521403985385657}, {\"truth_threshold\": 3.500000052154064, \"match_probability\": 0.9187896995557598, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1139.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 892.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5608074839980305, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.43919251600196946, \"precision\": 1.0, \"recall\": 0.5608074839980305, \"specificity\": 1.0, \"npv\": 0.9982101328153687, \"accuracy\": 0.9982142142142142, \"f1\": 0.7186119873817035, \"f2\": 0.6148116161070928, \"f0_5\": 0.8645817519356308, \"p4\": 0.835956812323099, \"phi\": 0.7482003161490423}, {\"truth_threshold\": 3.6000000536441803, \"match_probability\": 0.9238137785296746, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1128.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 903.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5553914327917282, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4446085672082718, \"precision\": 1.0, \"recall\": 0.5553914327917282, \"specificity\": 1.0, \"npv\": 0.9981881004550818, \"accuracy\": 0.9981921921921922, \"f1\": 0.7141500474833808, \"f2\": 0.6095979247730221, \"f0_5\": 0.8619899128839982, \"p4\": 0.8329260492617466, \"phi\": 0.7445704260225499}, {\"truth_threshold\": 3.7000000551342964, \"match_probability\": 0.9285512128432143, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1124.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 907.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5534219596258001, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4465780403741999, \"precision\": 1.0, \"recall\": 0.5534219596258001, \"specificity\": 1.0, \"npv\": 0.9981800889288409, \"accuracy\": 0.9981841841841842, \"f1\": 0.7125198098256735, \"f2\": 
0.6076989619377162, \"f0_5\": 0.8610387620652673, \"p4\": 0.8318147980648825, \"phi\": 0.7432461105679965}, {\"truth_threshold\": 3.8000000566244125, \"match_probability\": 0.9330154225613858, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1114.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 917.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5484982767109798, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.45150172328902016, \"precision\": 1.0, \"recall\": 0.5484982767109798, \"specificity\": 1.0, \"npv\": 0.9981600606758617, \"accuracy\": 0.9981641641641642, \"f1\": 0.7084260731319555, \"f2\": 0.6029443602511366, \"f0_5\": 0.8586403576383537, \"p4\": 0.8290150087466541, \"phi\": 0.7399250456380276}, {\"truth_threshold\": 3.9000000581145287, \"match_probability\": 0.9372195616099515, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1101.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 930.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5420974889217134, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.45790251107828656, \"precision\": 1.0, \"recall\": 0.5420974889217134, \"specificity\": 1.0, \"npv\": 0.9981340251485256, \"accuracy\": 0.9981381381381381, \"f1\": 0.7030651340996169, \"f2\": 0.5967479674796748, \"f0_5\": 0.8554778554778555, \"p4\": 0.8253283172196622, \"phi\": 0.7355854461857835}, {\"truth_threshold\": 4.000000059604645, \"match_probability\": 0.9411764728755594, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1095.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 936.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5391432791728212, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4608567208271787, \"precision\": 1.0, \"recall\": 0.5391432791728212, \"specificity\": 1.0, \"npv\": 0.9981220092093779, \"accuracy\": 0.9981261261261262, \"f1\": 0.7005758157389635, \"f2\": 
0.5938821998047511, \"f0_5\": 0.8540009358914366, \"p4\": 0.8236085674816911, \"phi\": 0.733573972452478}, {\"truth_threshold\": 4.100000061094761, \"match_probability\": 0.9448986513716398, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1093.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 938.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5381585425898572, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.46184145741014276, \"precision\": 1.0, \"recall\": 0.5381585425898572, \"specificity\": 1.0, \"npv\": 0.9981180039606186, \"accuracy\": 0.9981221221221221, \"f1\": 0.6997439180537772, \"f2\": 0.5929261147878919, \"f0_5\": 0.8535061689832891, \"p4\": 0.8230327328837387, \"phi\": 0.7329022652060395}, {\"truth_threshold\": 4.200000062584877, \"match_probability\": 0.9483982147343843, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1089.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 942.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5361890694239291, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4638109305760709, \"precision\": 1.0, \"recall\": 0.5361890694239291, \"specificity\": 1.0, \"npv\": 0.9981099935595322, \"accuracy\": 0.9981141141141141, \"f1\": 0.698076923076923, \"f2\": 0.5910126994464344, \"f0_5\": 0.8525129168623767, \"p4\": 0.8218771602970261, \"phi\": 0.7315570166633695}, {\"truth_threshold\": 4.300000064074993, \"match_probability\": 0.9516868803254299, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1076.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 955.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5297882816346627, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.47021171836533726, \"precision\": 1.0, \"recall\": 0.5297882816346627, \"specificity\": 1.0, \"npv\": 0.9980839606439498, \"accuracy\": 0.9980880880880881, \"f1\": 0.6926295461860316, \"f2\": 
0.5847826086956521, \"f0_5\": 0.8492501973164956, \"p4\": 0.8180852158142232, \"phi\": 0.7271679217599443}, {\"truth_threshold\": 4.400000065565109, \"match_probability\": 0.9547759482410569, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1070.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 961.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5268340718857706, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4731659281142294, \"precision\": 1.0, \"recall\": 0.5268340718857706, \"specificity\": 1.0, \"npv\": 0.9980719459101579, \"accuracy\": 0.9980760760760761, \"f1\": 0.690099967752338, \"f2\": 0.5819012399390907, \"f0_5\": 0.8477261923625415, \"p4\": 0.8163160984614997, \"phi\": 0.7251333031235035}, {\"truth_threshold\": 4.500000067055225, \"match_probability\": 0.9576762895591182, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1065.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 966.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5243722304283605, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4756277695716396, \"precision\": 1.0, \"recall\": 0.5243722304283605, \"specificity\": 1.0, \"npv\": 0.9980619338529597, \"accuracy\": 0.9980660660660661, \"f1\": 0.687984496124031, \"f2\": 0.5794972249428665, \"f0_5\": 0.8464473056747734, \"p4\": 0.8148325462698929, \"phi\": 0.7234334539956797}, {\"truth_threshold\": 4.6000000685453415, \"match_probability\": 0.9603983391922627, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1044.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 987.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5140324963072378, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4859675036927622, \"precision\": 1.0, \"recall\": 0.5140324963072378, \"specificity\": 1.0, \"npv\": 0.9980198854061342, \"accuracy\": 0.998024024024024, \"f1\": 0.6790243902439025, \"f2\": 
0.569371727748691, \"f0_5\": 0.8409859835669405, \"p4\": 0.8085077096122667, \"phi\": 0.7162504122578769}, {\"truth_threshold\": 4.700000070035458, \"match_probability\": 0.9629520927573305, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1027.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1004.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5056622353520434, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.49433776464795665, \"precision\": 1.0, \"recall\": 0.5056622353520434, \"specificity\": 1.0, \"npv\": 0.9979858487821808, \"accuracy\": 0.99798998998999, \"f1\": 0.671680837148463, \"f2\": 0.5611408589225221, \"f0_5\": 0.836455448770158, \"p4\": 0.8032736792214825, \"phi\": 0.7103828229517545}, {\"truth_threshold\": 4.800000071525574, \"match_probability\": 0.9653471069144568, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1006.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1025.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.49532250123092075, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5046774987690793, \"precision\": 1.0, \"recall\": 0.49532250123092075, \"specificity\": 1.0, \"npv\": 0.9979438067459188, \"accuracy\": 0.9979479479479479, \"f1\": 0.6624958840961476, \"f2\": 0.5509309967141293, \"f0_5\": 0.8307184145334434, \"p4\": 0.7966624448561656, \"phi\": 0.703067580283215}, {\"truth_threshold\": 4.90000007301569, \"match_probability\": 0.9675925026740654, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 983.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1048.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.48399803052683404, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5160019694731659, \"precision\": 1.0, \"recall\": 0.48399803052683404, \"specificity\": 1.0, \"npv\": 0.9978977647703088, \"accuracy\": 0.9979019019019019, \"f1\": 0.6522893165228931, \"f2\": 
0.5396947403096519, \"f0_5\": 0.8242495388227402, \"p4\": 0.7892300446691121, \"phi\": 0.694968022872966}, {\"truth_threshold\": 5.000000074505806, \"match_probability\": 0.969696971214501, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 974.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1057.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.4795667159034958, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5204332840965041, \"precision\": 1.0, \"recall\": 0.4795667159034958, \"specificity\": 1.0, \"npv\": 0.9978797495015305, \"accuracy\": 0.9978838838838839, \"f1\": 0.648252911813644, \"f2\": 0.5352824796658606, \"f0_5\": 0.8216635734773072, \"p4\": 0.7862654581671864, \"phi\": 0.6917730222660118}, {\"truth_threshold\": 5.100000075995922, \"match_probability\": 0.9716687817966767, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 966.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1065.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.4756277695716396, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5243722304283605, \"precision\": 1.0, \"recall\": 0.4756277695716396, \"specificity\": 1.0, \"npv\": 0.9978637364753458, \"accuracy\": 0.9978678678678679, \"f1\": 0.6446446446446447, \"f2\": 0.5313531353135313, \"f0_5\": 0.8193384223918575, \"p4\": 0.7836030575983881, \"phi\": 0.6889206799887713}, {\"truth_threshold\": 5.300000078976154, \"match_probability\": 0.9752454557772836, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 961.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1070.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.4731659281142294, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5268340718857706, \"precision\": 1.0, \"recall\": 0.4731659281142294, \"specificity\": 1.0, \"npv\": 0.9978537285949545, \"accuracy\": 0.9978578578578579, \"f1\": 0.642379679144385, \"f2\": 
0.5288937809576224, \"f0_5\": 0.8178723404255319, \"p4\": 0.7819258764213869, \"phi\": 0.6871320001374379}, {\"truth_threshold\": 5.4000000804662704, \"match_probability\": 0.9768648415470134, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 940.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1091.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.46282619399310687, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5371738060068931, \"precision\": 1.0, \"recall\": 0.46282619399310687, \"specificity\": 1.0, \"npv\": 0.9978116976893453, \"accuracy\": 0.9978158158158158, \"f1\": 0.6327835745540222, \"f2\": 0.5185348631950574, \"f0_5\": 0.81160421343464, \"p4\": 0.7747686667633629, \"phi\": 0.6795685324993795}, {\"truth_threshold\": 5.500000081956387, \"match_probability\": 0.9783806392104205, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 936.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1095.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.4608567208271787, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5391432791728212, \"precision\": 1.0, \"recall\": 0.4608567208271787, \"specificity\": 1.0, \"npv\": 0.9978036922040099, \"accuracy\": 0.9978078078078078, \"f1\": 0.6309403437815976, \"f2\": 0.5165562913907285, \"f0_5\": 0.8103896103896104, \"p4\": 0.7733842984586297, \"phi\": 0.6781183802393146}, {\"truth_threshold\": 5.600000083446503, \"match_probability\": 0.9797991767207457, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 931.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1100.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.4583948793697686, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5416051206302314, \"precision\": 1.0, \"recall\": 0.4583948793697686, \"specificity\": 1.0, \"npv\": 0.9977936855279811, \"accuracy\": 0.9977977977977978, \"f1\": 0.6286293045239703, \"f2\": 
0.5140806184428492, \"f0_5\": 0.8088618592528236, \"p4\": 0.7716441702546891, \"phi\": 0.6763013500751833}, {\"truth_threshold\": 5.700000084936619, \"match_probability\": 0.9811264334957893, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 926.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1105.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.45593303791235845, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5440669620876416, \"precision\": 1.0, \"recall\": 0.45593303791235845, \"specificity\": 1.0, \"npv\": 0.9977836790526582, \"accuracy\": 0.9977877877877878, \"f1\": 0.6263104497801826, \"f2\": 0.5116022099447514, \"f0_5\": 0.8073234524847428, \"p4\": 0.7698932061363013, \"phi\": 0.6744794614885231}, {\"truth_threshold\": 5.800000086426735, \"match_probability\": 0.9823680546749124, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 922.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1109.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.45396356474643035, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5460364352535697, \"precision\": 1.0, \"recall\": 0.45396356474643035, \"specificity\": 1.0, \"npv\": 0.997775674016904, \"accuracy\": 0.9977797797797798, \"f1\": 0.6244497121571283, \"f2\": 0.5096175105018793, \"f0_5\": 0.8060849798915894, \"p4\": 0.7684845646216468, \"phi\": 0.6730184260434375}, {\"truth_threshold\": 5.900000087916851, \"match_probability\": 0.9835293654795508, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 886.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1145.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.4362383062530773, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5637616937469226, \"precision\": 1.0, \"recall\": 0.4362383062530773, \"specificity\": 1.0, \"npv\": 0.9977036344747641, \"accuracy\": 0.9977077077077077, \"f1\": 0.6074734316078162, \"f2\": 
0.4916759156492786, \"f0_5\": 0.7946188340807175, \"p4\": 0.7554829114612676, \"phi\": 0.6597245968173465}, {\"truth_threshold\": 6.000000089406967, \"match_probability\": 0.9846153855541349, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 868.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1163.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.4273756770064008, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5726243229935992, \"precision\": 1.0, \"recall\": 0.4273756770064008, \"specificity\": 1.0, \"npv\": 0.9976676186045019, \"accuracy\": 0.9976716716716717, \"f1\": 0.598827181786823, \"f2\": 0.48265124555160144, \"f0_5\": 0.7886607305106306, \"p4\": 0.7487552526173648, \"phi\": 0.6529769321564604}, {\"truth_threshold\": 6.100000090897083, \"match_probability\": 0.985630843183972, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 848.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1183.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.4175283111767602, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5824716888232397, \"precision\": 1.0, \"recall\": 0.4175283111767602, \"specificity\": 1.0, \"npv\": 0.9976276040204392, \"accuracy\": 0.9976316316316316, \"f1\": 0.5890934352205627, \"f2\": 0.4725813642443156, \"f0_5\": 0.7818550617739258, \"p4\": 0.7410941043681163, \"phi\": 0.6453973727014789}, {\"truth_threshold\": 6.200000092387199, \"match_probability\": 0.9865801893041345, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 828.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1203.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.4076809453471196, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5923190546528804, \"precision\": 1.0, \"recall\": 0.4076809453471196, \"specificity\": 1.0, \"npv\": 0.997587592646068, \"accuracy\": 0.9975915915915916, \"f1\": 0.5792235047219307, \"f2\": 
0.4624664879356568, \"f0_5\": 0.7748455923638405, \"p4\": 0.7332296459790487, \"phi\": 0.6377283534832886}, {\"truth_threshold\": 6.3000000938773155, \"match_probability\": 0.987467611228855, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 827.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1204.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.4071885770556376, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5928114229443624, \"precision\": 1.0, \"recall\": 0.4071885770556376, \"specificity\": 1.0, \"npv\": 0.9975855921615969, \"accuracy\": 0.9975895895895895, \"f1\": 0.5787263820853744, \"f2\": 0.46195955759133056, \"f0_5\": 0.7744896047949054, \"p4\": 0.7328309405623654, \"phi\": 0.6373424964361676}, {\"truth_threshold\": 6.400000095367432, \"match_probability\": 0.9882970460445225, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 814.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1217.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.40078778926637126, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5992122107336287, \"precision\": 1.0, \"recall\": 0.40078778926637126, \"specificity\": 1.0, \"npv\": 0.9975595865935679, \"accuracy\": 0.9975635635635636, \"f1\": 0.572231985940246, \"f2\": 0.45535914074737077, \"f0_5\": 0.7698127482504256, \"p4\": 0.727599170768434, \"phi\": 0.6323050698613062}, {\"truth_threshold\": 6.500000096857548, \"match_probability\": 0.9890721936212699, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 785.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1246.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.3865091088133924, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6134908911866076, \"precision\": 1.0, \"recall\": 0.3865091088133924, \"specificity\": 1.0, \"npv\": 0.9975015790581795, \"accuracy\": 0.9975055055055055, \"f1\": 0.5575284090909091, \"f2\": 
0.4405657200583679, \"f0_5\": 0.7590408044865596, \"p4\": 0.7155934837164615, \"phi\": 0.6209214494295786}, {\"truth_threshold\": 6.600000098347664, \"match_probability\": 0.9897965292084853, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 765.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1266.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.3766617429837518, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6233382570162481, \"precision\": 1.0, \"recall\": 0.3766617429837518, \"specificity\": 1.0, \"npv\": 0.9974615777918133, \"accuracy\": 0.9974654654654654, \"f1\": 0.5472103004291845, \"f2\": 0.43030712116098546, \"f0_5\": 0.7513258691809075, \"p4\": 0.7070327147598036, \"phi\": 0.6129482983501852}, {\"truth_threshold\": 6.70000009983778, \"match_probability\": 0.9904733155885336, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 763.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1268.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.37567700640078777, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6243229935992122, \"precision\": 1.0, \"recall\": 0.37567700640078777, \"specificity\": 1.0, \"npv\": 0.9974575778416279, \"accuracy\": 0.9974614614614614, \"f1\": 0.5461703650680029, \"f2\": 0.4292787217283673, \"f0_5\": 0.7505410190832186, \"p4\": 0.7061635750352407, \"phi\": 0.6121453069781092}, {\"truth_threshold\": 6.800000101327896, \"match_probability\": 0.9911056147706719, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 755.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1276.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.37173806006893156, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6282619399310685, \"precision\": 1.0, \"recall\": 0.37173806006893156, \"specificity\": 1.0, \"npv\": 0.9974415783616878, \"accuracy\": 0.9974454454454454, \"f1\": 0.5419956927494616, \"f2\": 
0.425160491046289, \"f0_5\": 0.7473767570778064, \"p4\": 0.702662767543366, \"phi\": 0.608922817253769}, {\"truth_threshold\": 6.900000102818012, \"match_probability\": 0.9916962992137202, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 747.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1284.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.36779911373707536, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6322008862629247, \"precision\": 1.0, \"recall\": 0.36779911373707536, \"specificity\": 1.0, \"npv\": 0.9974255793950112, \"accuracy\": 0.9974294294294295, \"f1\": 0.5377969762419006, \"f2\": 0.4210348326006087, \"f0_5\": 0.7441721458457861, \"p4\": 0.6991226714572598, \"phi\": 0.60568328697445}, {\"truth_threshold\": 7.000000104308128, \"match_probability\": 0.9922480625716311, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 737.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1294.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.36287543082225504, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.637124569177745, \"precision\": 1.0, \"recall\": 0.36287543082225504, \"specificity\": 1.0, \"npv\": 0.9974055814084044, \"accuracy\": 0.9974094094094094, \"f1\": 0.5325144508670521, \"f2\": 0.41586728360230224, \"f0_5\": 0.7401084555131553, \"p4\": 0.6946412849759801, \"phi\": 0.6016094913297965}, {\"truth_threshold\": 7.1000001057982445, \"match_probability\": 0.9927634299608046, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 735.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1296.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.361890694239291, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.638109305760709, \"precision\": 1.0, \"recall\": 0.361890694239291, \"specificity\": 1.0, \"npv\": 0.9974015819073111, \"accuracy\": 0.9974054054054055, \"f1\": 0.5314533622559653, \"f2\": 
0.4148323738570945, \"f0_5\": 0.7392878696439348, \"p4\": 0.6937373991408317, \"phi\": 0.6007914371159129}, {\"truth_threshold\": 7.200000107288361, \"match_probability\": 0.9932447677519157, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 728.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1303.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.3584441161989168, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6415558838010832, \"precision\": 1.0, \"recall\": 0.3584441161989168, \"specificity\": 1.0, \"npv\": 0.9973875839060733, \"accuracy\": 0.9973913913913914, \"f1\": 0.5277274374773469, \"f2\": 0.41120650700406686, \"f0_5\": 0.7363949018814485, \"p4\": 0.6905535559772998, \"phi\": 0.5979194853999872}, {\"truth_threshold\": 7.300000108778477, \"match_probability\": 0.9936942928922654, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 718.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1313.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.3535204332840965, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6464795667159035, \"precision\": 1.0, \"recall\": 0.3535204332840965, \"specificity\": 1.0, \"npv\": 0.9973675874430111, \"accuracy\": 0.9973713713713713, \"f1\": 0.5223717715532921, \"f2\": 0.4060167382945035, \"f0_5\": 0.7322047725882113, \"p4\": 0.6859498407043375, \"phi\": 0.5937927430142332}, {\"truth_threshold\": 7.400000110268593, \"match_probability\": 0.9941140817673122, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 713.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1318.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.35105859182668636, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6489414081733137, \"precision\": 1.0, \"recall\": 0.35105859182668636, \"specificity\": 1.0, \"npv\": 0.9973575895121565, \"accuracy\": 0.9973613613613613, \"f1\": 0.5196793002915452, \"f2\": 
0.4034174493606428, \"f0_5\": 0.7300839647757527, \"p4\": 0.6836231690981912, \"phi\": 0.591718641688595}, {\"truth_threshold\": 7.500000111758709, \"match_probability\": 0.9945060786121668, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 696.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1335.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.34268833087149186, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6573116691285081, \"precision\": 1.0, \"recall\": 0.34268833087149186, \"specificity\": 1.0, \"npv\": 0.9973235980465273, \"accuracy\": 0.9973273273273273, \"f1\": 0.5104510451045104, \"f2\": 0.3945578231292517, \"f0_5\": 0.7227414330218068, \"p4\": 0.6755858601212437, \"phi\": 0.5846119731525476}, {\"truth_threshold\": 7.600000113248825, \"match_probability\": 0.9948721034855129, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 673.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1358.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.3313638601674052, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6686361398325947, \"precision\": 1.0, \"recall\": 0.3313638601674052, \"specificity\": 1.0, \"npv\": 0.9972776132807567, \"accuracy\": 0.9972812812812812, \"f1\": 0.4977810650887574, \"f2\": 0.38251676707968624, \"f0_5\": 0.7124708871479991, \"p4\": 0.6643899763374379, \"phi\": 0.5748580342965106}, {\"truth_threshold\": 7.700000114738941, \"match_probability\": 0.9952138598197071, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 660.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1371.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.3249630723781389, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6750369276218612, \"precision\": 1.0, \"recall\": 0.3249630723781389, \"specificity\": 1.0, \"npv\": 0.9972516237671397, \"accuracy\": 0.9972552552552553, \"f1\": 0.49052396878483834, \"f2\": 
0.3756830601092896, \"f0_5\": 0.7064868336544637, \"p4\": 0.6578916337258148, \"phi\": 0.569271421725575}, {\"truth_threshold\": 7.800000116229057, \"match_probability\": 0.9955329415617687, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 647.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1384.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.3185622845888725, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6814377154111275, \"precision\": 1.0, \"recall\": 0.3185622845888725, \"specificity\": 1.0, \"npv\": 0.997225635608085, \"accuracy\": 0.9972292292292292, \"f1\": 0.4831964152352502, \"f2\": 0.36882909588416374, \"f0_5\": 0.700368045031392, \"p4\": 0.6512657896786949, \"phi\": 0.5636297337170051}, {\"truth_threshold\": 7.900000117719173, \"match_probability\": 0.99583083992065, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 641.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1390.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.3156080748399803, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6843919251600197, \"precision\": 1.0, \"recall\": 0.3156080748399803, \"specificity\": 1.0, \"npv\": 0.9972136415299714, \"accuracy\": 0.9972172172172172, \"f1\": 0.47979041916167664, \"f2\": 0.3656588705077011, \"f0_5\": 0.6974972796517954, \"p4\": 0.6481636593049067, \"phi\": 0.5610068427456483}, {\"truth_threshold\": 8.00000011920929, \"match_probability\": 0.9961089497366072, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 629.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1402.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.30969965534219596, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.690300344657804, \"precision\": 1.0, \"recall\": 0.30969965534219596, \"specificity\": 1.0, \"npv\": 0.9971896542392723, \"accuracy\": 0.9971931931931932, \"f1\": 0.47293233082706765, \"f2\": 
0.35930538101222437, \"f0_5\": 0.6916648339564548, \"p4\": 0.6418739553019761, \"phi\": 0.5557241152124912}, {\"truth_threshold\": 8.100000120699406, \"match_probability\": 0.9963685754887298, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 625.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1406.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.30773018217626785, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6922698178237322, \"precision\": 1.0, \"recall\": 0.30773018217626785, \"specificity\": 1.0, \"npv\": 0.9971816587321474, \"accuracy\": 0.9971851851851852, \"f1\": 0.47063253012048195, \"f2\": 0.35718367813464397, \"f0_5\": 0.689693224453763, \"p4\": 0.6397516426018777, \"phi\": 0.5539520678763431}, {\"truth_threshold\": 8.200000122189522, \"match_probability\": 0.9966109369567457, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 616.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1415.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.3032988675529296, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6967011324470704, \"precision\": 1.0, \"recall\": 0.3032988675529296, \"specificity\": 1.0, \"npv\": 0.9971636693098997, \"accuracy\": 0.9971671671671671, \"f1\": 0.4654325651681148, \"f2\": 0.3524027459954233, \"f0_5\": 0.6852057842046718, \"p4\": 0.6349284636468094, \"phi\": 0.549944189592559}, {\"truth_threshold\": 8.300000123679638, \"match_probability\": 0.9968371745531442, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 607.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1424.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.29886755292959133, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7011324470704087, \"precision\": 1.0, \"recall\": 0.29886755292959133, \"specificity\": 1.0, \"npv\": 0.9971456805367083, \"accuracy\": 0.9971491491491491, \"f1\": 0.46019711902956784, \"f2\": 
0.34761195739319667, \"f0_5\": 0.6806458847275174, \"p4\": 0.6300377179885711, \"phi\": 0.5459070337120764}, {\"truth_threshold\": 8.400000125169754, \"match_probability\": 0.9970483543414643, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 598.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1433.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.2944362383062531, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7055637616937469, \"precision\": 1.0, \"recall\": 0.2944362383062531, \"specificity\": 1.0, \"npv\": 0.997127692412538, \"accuracy\": 0.9971311311311312, \"f1\": 0.45492582731076453, \"f2\": 0.3428112818160972, \"f0_5\": 0.676011756726204, \"p4\": 0.625077973424046, \"phi\": 0.5418399458003648}, {\"truth_threshold\": 8.50000012665987, \"match_probability\": 0.997245472756309, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 588.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1443.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.2895125553914328, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7104874446085672, \"precision\": 1.0, \"recall\": 0.2895125553914328, \"specificity\": 1.0, \"npv\": 0.9971077063690591, \"accuracy\": 0.9971111111111111, \"f1\": 0.44902634593356244, \"f2\": 0.33746556473829203, \"f0_5\": 0.6707734428473648, \"p4\": 0.6194844285298629, \"phi\": 0.5372850268445946}, {\"truth_threshold\": 8.600000128149986, \"match_probability\": 0.9974294610402847, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 580.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1451.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.28557360905957657, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7144263909404235, \"precision\": 1.0, \"recall\": 0.28557360905957657, \"specificity\": 1.0, \"npv\": 0.9970917181111201, \"accuracy\": 0.9970950950950951, \"f1\": 0.44427422443508235, \"f2\": 
0.33318014705882354, \"f0_5\": 0.666513445185015, \"p4\": 0.6149455534747477, \"phi\": 0.5336132311931616}, {\"truth_threshold\": 8.700000129640102, \"match_probability\": 0.997601189412643, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 573.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1458.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.2821270310192024, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7178729689807977, \"precision\": 1.0, \"recall\": 0.2821270310192024, \"specificity\": 1.0, \"npv\": 0.9970777288060177, \"accuracy\": 0.9970810810810811, \"f1\": 0.4400921658986175, \"f2\": 0.32942393928941016, \"f0_5\": 0.6627342123525329, \"p4\": 0.610926407773455, \"phi\": 0.530379655834772}, {\"truth_threshold\": 8.800000131130219, \"match_probability\": 0.997761470983937, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 569.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1462.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.28015755785327423, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7198424421467258, \"precision\": 1.0, \"recall\": 0.28015755785327423, \"specificity\": 1.0, \"npv\": 0.9970697350936302, \"accuracy\": 0.997073073073073, \"f1\": 0.4376923076923077, \"f2\": 0.32727481881974, \"f0_5\": 0.6605525888089158, \"p4\": 0.608609489182534, \"phi\": 0.5285230572011429}, {\"truth_threshold\": 8.900000132620335, \"match_probability\": 0.9979110654305032, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 562.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1469.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.27671097981290005, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7232890201871, \"precision\": 1.0, \"recall\": 0.27671097981290005, \"specificity\": 1.0, \"npv\": 0.997055746405365, \"accuracy\": 0.997059059059059, \"f1\": 0.433474739683764, \"f2\": 
0.3235090950955561, \"f0_5\": 0.6566954896003739, \"p4\": 0.6045189041207862, \"phi\": 0.5252582912395681}, {\"truth_threshold\": 9.00000013411045, \"match_probability\": 0.9980506824420605, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 554.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1477.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.27277203348104384, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7272279665189562, \"precision\": 1.0, \"recall\": 0.27277203348104384, \"specificity\": 1.0, \"npv\": 0.9970397598136872, \"accuracy\": 0.997043043043043, \"f1\": 0.42862669245647966, \"f2\": 0.3191979718829223, \"f0_5\": 0.652225100070638, \"p4\": 0.5997870146256774, \"phi\": 0.5215022173930145}, {\"truth_threshold\": 9.100000135600567, \"match_probability\": 0.9981809849551747, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 552.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1479.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.2717872968980798, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7282127031019202, \"precision\": 1.0, \"recall\": 0.2717872968980798, \"specificity\": 1.0, \"npv\": 0.9970357632458693, \"accuracy\": 0.997039039039039, \"f1\": 0.4274099883855981, \"f2\": 0.318118948824343, \"f0_5\": 0.6510969568294409, \"p4\": 0.5985944216727821, \"phi\": 0.5205589832125738}, {\"truth_threshold\": 9.200000137090683, \"match_probability\": 0.9983025921847976, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 541.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1490.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.26637124569177745, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7336287543082225, \"precision\": 1.0, \"recall\": 0.26637124569177745, \"specificity\": 1.0, \"npv\": 0.9970137826955722, \"accuracy\": 0.9970170170170171, \"f1\": 0.42068429237947125, \"f2\": 
0.31217541834968265, \"f0_5\": 0.6448152562574494, \"p4\": 0.5919651861098707, \"phi\": 0.5153404731519645}, {\"truth_threshold\": 9.300000138580799, \"match_probability\": 0.9984160824655384, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 527.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1504.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.25947808961102903, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.740521910388971, \"precision\": 1.0, \"recall\": 0.25947808961102903, \"specificity\": 1.0, \"npv\": 0.9969858088513808, \"accuracy\": 0.996988988988989, \"f1\": 0.41204065676309615, \"f2\": 0.3045890648479945, \"f0_5\": 0.6366272046388016, \"p4\": 0.5833528667914092, \"phi\": 0.5086216403674374}, {\"truth_threshold\": 9.400000140070915, \"match_probability\": 0.9985219959137808, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 520.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1511.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.25603151157065485, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7439684884293452, \"precision\": 1.0, \"recall\": 0.25603151157065485, \"specificity\": 1.0, \"npv\": 0.9969718225179366, \"accuracy\": 0.996974974974975, \"f1\": 0.40768326146609174, \"f2\": 0.3007866728366497, \"f0_5\": 0.6324495256628557, \"p4\": 0.5789711925859826, \"phi\": 0.5052288617177545}, {\"truth_threshold\": 9.500000141561031, \"match_probability\": 0.9986208369212233, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 510.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1521.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.2511078286558346, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7488921713441654, \"precision\": 1.0, \"recall\": 0.2511078286558346, \"specificity\": 1.0, \"npv\": 0.9969518427222991, \"accuracy\": 0.996954954954955, \"f1\": 0.4014167650531287, \"f2\": 
0.2953439888811675, \"f0_5\": 0.6263817243920413, \"p4\": 0.5726220451730867, \"phi\": 0.5003422953343337}, {\"truth_threshold\": 9.600000143051147, \"match_probability\": 0.9987130764898899, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 504.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1527.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.2481536189069424, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7518463810930576, \"precision\": 1.0, \"recall\": 0.2481536189069424, \"specificity\": 1.0, \"npv\": 0.9969398552293004, \"accuracy\": 0.996942942942943, \"f1\": 0.39763313609467454, \"f2\": 0.29207232267037553, \"f0_5\": 0.6226834692364714, \"p4\": 0.5687609652746577, \"phi\": 0.4973874072669252}, {\"truth_threshold\": 9.700000144541264, \"match_probability\": 0.9987991544181472, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 495.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1536.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.24372230428360414, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7562776957163959, \"precision\": 1.0, \"recall\": 0.24372230428360414, \"specificity\": 1.0, \"npv\": 0.9969218745303153, \"accuracy\": 0.9969249249249249, \"f1\": 0.3919239904988123, \"f2\": 0.28715628263139575, \"f0_5\": 0.6170531039640987, \"p4\": 0.5628952457491997, \"phi\": 0.49292199834381356}, {\"truth_threshold\": 9.900000147521496, \"match_probability\": 0.9989544406735176, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 492.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1539.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.24224519940915806, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.757754800590842, \"precision\": 1.0, \"recall\": 0.24224519940915806, \"specificity\": 1.0, \"npv\": 0.9969158811081185, \"accuracy\": 0.9969189189189189, \"f1\": 0.39001189060642094, \"f2\": 
0.28551532033426186, \"f0_5\": 0.6151537884471118, \"p4\": 0.5609199414148993, \"phi\": 0.4914245480368199}, {\"truth_threshold\": 10.000000149011612, \"match_probability\": 0.9990243903445719, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 491.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1540.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.241752831117676, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.758247168882324, \"precision\": 1.0, \"recall\": 0.241752831117676, \"specificity\": 1.0, \"npv\": 0.9969138833167338, \"accuracy\": 0.9969169169169169, \"f1\": 0.3893735130848533, \"f2\": 0.2849680789320952, \"f0_5\": 0.6145181476846058, \"p4\": 0.5602592524092592, \"phi\": 0.49092438691955087}, {\"truth_threshold\": 10.100000150501728, \"match_probability\": 0.9990896645300149, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 486.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1545.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.23929098966026588, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7607090103397341, \"precision\": 1.0, \"recall\": 0.23929098966026588, \"specificity\": 1.0, \"npv\": 0.9969038944799143, \"accuracy\": 0.9969069069069069, \"f1\": 0.38617401668653156, \"f2\": 0.28222996515679444, \"f0_5\": 0.6113207547169811, \"p4\": 0.5569387697019843, \"phi\": 0.48841592880072204}, {\"truth_threshold\": 10.200000151991844, \"match_probability\": 0.9991505751910027, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 477.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1554.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.23485967503692762, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7651403249630724, \"precision\": 1.0, \"recall\": 0.23485967503692762, \"specificity\": 1.0, \"npv\": 0.9968859150780626, \"accuracy\": 0.9968888888888889, \"f1\": 0.3803827751196172, 
\"f2\": 0.27729333798395533, \"f0_5\": 0.6054836252856055, \"p4\": 0.5508894114536058, \"phi\": 0.48386806266184174}, {\"truth_threshold\": 10.30000015348196, \"match_probability\": 0.9992074135451509, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 469.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1562.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.23092072870507138, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7690792712949286, \"precision\": 1.0, \"recall\": 0.23092072870507138, \"specificity\": 1.0, \"npv\": 0.9968699339319601, \"accuracy\": 0.9968728728728729, \"f1\": 0.3752, \"f2\": 0.27289654369835914, \"f0_5\": 0.6002047606859483, \"p4\": 0.5454324579741688, \"phi\": 0.47978946587825855}, {\"truth_threshold\": 10.400000154972076, \"match_probability\": 0.9992604514366183, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 466.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1565.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.2294436238306253, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7705563761693747, \"precision\": 1.0, \"recall\": 0.2294436238306253, \"specificity\": 1.0, \"npv\": 0.9968639411342715, \"accuracy\": 0.9968668668668669, \"f1\": 0.37324789747697235, \"f2\": 0.2712456344586729, \"f0_5\": 0.5982028241335045, \"p4\": 0.5433664113018031, \"phi\": 0.4782510586709939}, {\"truth_threshold\": 10.500000156462193, \"match_probability\": 0.9993099426168967, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 462.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1569.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.2274741506646972, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7725258493353028, \"precision\": 1.0, \"recall\": 0.2274741506646972, \"specificity\": 1.0, \"npv\": 0.9968559508494343, \"accuracy\": 0.9968588588588588, \"f1\": 0.37063778580024065, \"f2\": 
0.2690426275331936, \"f0_5\": 0.5955143078112916, \"p4\": 0.5405947562424526, \"phi\": 0.47619214688455774}, {\"truth_threshold\": 10.600000157952309, \"match_probability\": 0.9993561239419685, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 455.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1576.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.224027572624323, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.775972427375677, \"precision\": 1.0, \"recall\": 0.224027572624323, \"specificity\": 1.0, \"npv\": 0.996841968159184, \"accuracy\": 0.9968448448448448, \"f1\": 0.3660498793242156, \"f2\": 0.2651824221937289, \"f0_5\": 0.5907556478836666, \"p4\": 0.5356972366472115, \"phi\": 0.4725675469356256}, {\"truth_threshold\": 10.700000159442425, \"match_probability\": 0.9993992164911604, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 447.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1584.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.22008862629246675, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7799113737075333, \"precision\": 1.0, \"recall\": 0.22008862629246675, \"specificity\": 1.0, \"npv\": 0.9968259884220714, \"accuracy\": 0.9968288288288288, \"f1\": 0.36077481840193704, \"f2\": 0.2607630381519076, \"f0_5\": 0.5852317360565593, \"p4\": 0.5300253898179144, \"phi\": 0.4683909290800197}, {\"truth_threshold\": 10.800000160932541, \"match_probability\": 0.9994394266126935, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 435.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1596.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.21418020679468242, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7858197932053176, \"precision\": 1.0, \"recall\": 0.21418020679468242, \"specificity\": 1.0, \"npv\": 0.9968020197769829, \"accuracy\": 0.9968048048048048, \"f1\": 0.35279805352798055, \"f2\": 
0.2541184717840869, \"f0_5\": 0.5767700875099443, \"p4\": 0.5213646257902459, \"phi\": 0.46205547581344747}, {\"truth_threshold\": 10.900000162422657, \"match_probability\": 0.9994769469006325, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 425.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1606.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.20925652387986213, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7907434761201378, \"precision\": 1.0, \"recall\": 0.20925652387986213, \"specificity\": 1.0, \"npv\": 0.9967820467865551, \"accuracy\": 0.9967847847847848, \"f1\": 0.34609120521172637, \"f2\": 0.24856708386945842, \"f0_5\": 0.5695523988206915, \"p4\": 0.5140032559745651, \"phi\": 0.4567090388599821}, {\"truth_threshold\": 11.000000163912773, \"match_probability\": 0.9995119571076428, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 417.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1614.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.20531757754800592, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.794682422451994, \"precision\": 1.0, \"recall\": 0.20531757754800592, \"specificity\": 1.0, \"npv\": 0.9967660689704919, \"accuracy\": 0.9967687687687687, \"f1\": 0.34068627450980393, \"f2\": 0.24411661397962767, \"f0_5\": 0.5636658556366586, \"p4\": 0.5080172733036484, \"phi\": 0.45238655446760345}, {\"truth_threshold\": 11.10000016540289, \"match_probability\": 0.9995446249976983, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 410.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1621.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.2018709995076317, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7981290004923683, \"precision\": 1.0, \"recall\": 0.2018709995076317, \"specificity\": 1.0, \"npv\": 0.9967520888016189, \"accuracy\": 0.9967547547547547, \"f1\": 0.33592789840229414, 
\"f2\": 0.24021560815561285, \"f0_5\": 0.5584309452465268, \"p4\": 0.5027072651849436, \"phi\": 0.44857032941078756}, {\"truth_threshold\": 11.200000166893005, \"match_probability\": 0.9995751071426191, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 400.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1631.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.19694731659281142, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8030526834071886, \"precision\": 1.0, \"recall\": 0.19694731659281142, \"specificity\": 1.0, \"npv\": 0.9967321178120617, \"accuracy\": 0.9967347347347347, \"f1\": 0.3290826820238585, \"f2\": 0.2346316283435007, \"f0_5\": 0.5508124483613329, \"p4\": 0.4950018061197913, \"phi\": 0.44306175186417923}, {\"truth_threshold\": 11.300000168383121, \"match_probability\": 0.9996035496660847, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 393.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1638.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.1935007385524372, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8064992614475628, \"precision\": 1.0, \"recall\": 0.1935007385524372, \"specificity\": 1.0, \"npv\": 0.9967181385955316, \"accuracy\": 0.9967207207207207, \"f1\": 0.32425742574257427, \"f2\": 0.23071504050722086, \"f0_5\": 0.5453788509575354, \"p4\": 0.48952228946698423, \"phi\": 0.43916477083988176}, {\"truth_threshold\": 11.400000169873238, \"match_probability\": 0.99963008893853, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 391.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1640.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.19251600196947316, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8074839980305268, \"precision\": 1.0, \"recall\": 0.19251600196947316, \"specificity\": 1.0, \"npv\": 0.9967141446056873, \"accuracy\": 0.9967167167167167, \"f1\": 
0.3228736581337737, \"f2\": 0.22959483264826777, \"f0_5\": 0.5438108484005564, \"p4\": 0.4879435200969662, \"phi\": 0.438045000229326}, {\"truth_threshold\": 11.500000171363354, \"match_probability\": 0.999654852226126, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 381.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1650.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.18759231905465287, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8124076809453471, \"precision\": 1.0, \"recall\": 0.18759231905465287, \"specificity\": 1.0, \"npv\": 0.9966941751365906, \"accuracy\": 0.9966966966966967, \"f1\": 0.31592039800995025, \"f2\": 0.2239858906525573, \"f0_5\": 0.5358649789029536, \"p4\": 0.4799601373328004, \"phi\": 0.432402788730759}, {\"truth_threshold\": 11.60000017285347, \"match_probability\": 0.9996779582968373, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 374.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1657.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.1841457410142787, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8158542589857213, \"precision\": 1.0, \"recall\": 0.1841457410142787, \"specificity\": 1.0, \"npv\": 0.9966801969843286, \"accuracy\": 0.9966826826826827, \"f1\": 0.31101871101871104, \"f2\": 0.2200517768886797, \"f0_5\": 0.5301956336830167, \"p4\": 0.47428137259755065, \"phi\": 0.42840916590093686}, {\"truth_threshold\": 11.700000174343586, \"match_probability\": 0.9996995179863626, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 366.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1665.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.18020679468242246, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8197932053175776, \"precision\": 1.0, \"recall\": 0.18020679468242246, \"specificity\": 1.0, \"npv\": 0.9966642224332544, \"accuracy\": 0.9966666666666667, \"f1\": 
0.3053817271589487, \"f2\": 0.21554770318021202, \"f0_5\": 0.5236051502145923, \"p4\": 0.46769801178988, \"phi\": 0.42379908553387147}, {\"truth_threshold\": 11.800000175833702, \"match_probability\": 0.9997196347265854, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 363.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1668.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.17872968980797638, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8212703101920237, \"precision\": 1.0, \"recall\": 0.17872968980797638, \"specificity\": 1.0, \"npv\": 0.9966582321086195, \"accuracy\": 0.9966606606606606, \"f1\": 0.3032581453634085, \"f2\": 0.21385648639095087, \"f0_5\": 0.5211024978466839, \"p4\": 0.4652031370900409, \"phi\": 0.42205736182341336}, {\"truth_threshold\": 11.900000177323818, \"match_probability\": 0.9997384050389891, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 347.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1684.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.1708517971442639, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8291482028557361, \"precision\": 1.0, \"recall\": 0.1708517971442639, \"specificity\": 1.0, \"npv\": 0.9966262849266657, \"accuracy\": 0.9966286286286287, \"f1\": 0.29184188393608074, \"f2\": 0.20481643253452958, \"f0_5\": 0.5074583211465341, \"p4\": 0.4516502189669047, \"phi\": 0.4126443891063249}, {\"truth_threshold\": 12.000000178813934, \"match_probability\": 0.9997559189953416, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 343.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1688.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.1688823239783358, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8311176760216642, \"precision\": 1.0, \"recall\": 0.1688823239783358, \"specificity\": 1.0, \"npv\": 0.9966182984511887, \"accuracy\": 0.9966206206206206, 
\"f1\": 0.2889637742207245, \"f2\": 0.20255108066611552, \"f0_5\": 0.5039670878636497, \"p4\": 0.4481955435876482, \"phi\": 0.410257497630173}, {\"truth_threshold\": 12.10000018030405, \"match_probability\": 0.9997722606477963, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 335.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1696.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.16494337764647957, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8350566223535204, \"precision\": 1.0, \"recall\": 0.16494337764647957, \"specificity\": 1.0, \"npv\": 0.9966023258842267, \"accuracy\": 0.9966046046046046, \"f1\": 0.28317836010143704, \"f2\": 0.19801394963943728, \"f0_5\": 0.4968851972708395, \"p4\": 0.4412042490270683, \"phi\": 0.4054416774354629}, {\"truth_threshold\": 12.200000181794167, \"match_probability\": 0.9997875084304283, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 331.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1700.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.16297390448055146, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8370260955194485, \"precision\": 1.0, \"recall\": 0.16297390448055146, \"specificity\": 1.0, \"npv\": 0.9965943397927355, \"accuracy\": 0.9965965965965966, \"f1\": 0.2802709568162574, \"f2\": 0.19574216439976344, \"f0_5\": 0.4932935916542474, \"p4\": 0.4376669787059198, \"phi\": 0.40301224638866684}, {\"truth_threshold\": 12.300000183284283, \"match_probability\": 0.9998017355340825, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 326.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1705.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.1605120630231413, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8394879369768586, \"precision\": 1.0, \"recall\": 0.1605120630231413, \"specificity\": 1.0, \"npv\": 0.996584357358356, \"accuracy\": 
0.9965865865865866, \"f1\": 0.2766228256257955, \"f2\": 0.19289940828402366, \"f0_5\": 0.48875562218890556, \"p4\": 0.43320570701097133, \"phi\": 0.39995476141206426}, {\"truth_threshold\": 12.400000184774399, \"match_probability\": 0.9998150102562988, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 321.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1710.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.15805022156573117, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8419497784342689, \"precision\": 1.0, \"recall\": 0.15805022156573117, \"specificity\": 1.0, \"npv\": 0.9965743751239535, \"accuracy\": 0.9965765765765766, \"f1\": 0.2729591836734694, \"f2\": 0.19005328596802842, \"f0_5\": 0.4841628959276018, \"p4\": 0.428699722910598, \"phi\": 0.39687378446436966}, {\"truth_threshold\": 12.500000186264515, \"match_probability\": 0.9998273963279586, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 314.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1717.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.15460364352535697, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8453963564746431, \"precision\": 1.0, \"recall\": 0.15460364352535697, \"specificity\": 1.0, \"npv\": 0.9965604003317401, \"accuracy\": 0.9965625625625626, \"f1\": 0.2678038379530917, \"f2\": 0.1860630481156672, \"f0_5\": 0.4776391846668695, \"p4\": 0.4223149381488816, \"phi\": 0.39251989616371724}, {\"truth_threshold\": 12.600000187754631, \"match_probability\": 0.9998389532181915, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 307.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1724.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.15115706548498276, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8488429345150172, \"precision\": 1.0, \"recall\": 0.15115706548498276, \"specificity\": 1.0, \"npv\": 0.9965464259314534, 
\"accuracy\": 0.9965485485485486, \"f1\": 0.262617621899059, \"f2\": 0.18206618431977226, \"f0_5\": 0.4710033752684873, \"p4\": 0.4158392884860818, \"phi\": 0.38811729330621975}, {\"truth_threshold\": 12.700000189244747, \"match_probability\": 0.9998497364189812, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 299.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1732.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.14721811915312655, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8527818808468735, \"precision\": 1.0, \"recall\": 0.14721811915312655, \"specificity\": 1.0, \"npv\": 0.9965304556681577, \"accuracy\": 0.9965325325325325, \"f1\": 0.25665236051502144, \"f2\": 0.17749020539000357, \"f0_5\": 0.4632785869228386, \"p4\": 0.4083247717343495, \"phi\": 0.3830239409779424}, {\"truth_threshold\": 12.800000190734863, \"match_probability\": 0.9998597977108138, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 292.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1739.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.14377154111275234, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8562284588872476, \"precision\": 1.0, \"recall\": 0.14377154111275234, \"specificity\": 1.0, \"npv\": 0.9965164821076585, \"accuracy\": 0.9965185185185185, \"f1\": 0.2513990529487731, \"f2\": 0.17347908745247148, \"f0_5\": 0.4563926226945921, \"p4\": 0.40164775399594516, \"phi\": 0.3785111760263844}, {\"truth_threshold\": 12.90000019222498, \"match_probability\": 0.9998691854106266, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 287.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1744.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.1413096996553422, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8586903003446578, \"precision\": 1.0, \"recall\": 0.1413096996553422, \"specificity\": 1.0, \"npv\": 
0.9965065012329406, \"accuracy\": 0.9965085085085085, \"f1\": 0.24762726488352027, \"f2\": 0.17060991558673166, \"f0_5\": 0.45139981126140294, \"p4\": 0.39681906668137246, \"phi\": 0.37525462607917676}, {\"truth_threshold\": 13.000000193715096, \"match_probability\": 0.9998779446032292, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 282.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1749.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.13884785819793205, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8611521418020679, \"precision\": 1.0, \"recall\": 0.13884785819793205, \"specificity\": 1.0, \"npv\": 0.996496520558153, \"accuracy\": 0.9964984984984985, \"f1\": 0.2438391699092088, \"f2\": 0.16773733047822983, \"f0_5\": 0.44634377967711303, \"p4\": 0.39194001015557933, \"phi\": 0.3719696326061996}, {\"truth_threshold\": 13.100000195205212, \"match_probability\": 0.9998861173572945, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 276.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1755.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.1358936484490399, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8641063515509602, \"precision\": 1.0, \"recall\": 0.1358936484490399, \"specificity\": 1.0, \"npv\": 0.9964845440123071, \"accuracy\": 0.9964864864864865, \"f1\": 0.23927178153446033, \"f2\": 0.16428571428571428, \"f0_5\": 0.44019138755980863, \"p4\": 0.3860175372656283, \"phi\": 0.36798902199510014}, {\"truth_threshold\": 13.200000196695328, \"match_probability\": 0.9998937429269453, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 274.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1757.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.13490891186607581, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8650910881339242, \"precision\": 1.0, \"recall\": 0.13490891186607581, \"specificity\": 
1.0, \"npv\": 0.9964805518943324, \"accuracy\": 0.9964824824824825, \"f1\": 0.23774403470715835, \"f2\": 0.16313407954274828, \"f0_5\": 0.4381196034537896, \"p4\": 0.384026766205053, \"phi\": 0.36665256981476496}, {\"truth_threshold\": 13.300000198185444, \"match_probability\": 0.9999008579398913, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 264.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1767.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.12998522895125553, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8700147710487445, \"precision\": 1.0, \"recall\": 0.12998522895125553, \"specificity\": 1.0, \"npv\": 0.9964605917842463, \"accuracy\": 0.9964624624624625, \"f1\": 0.23006535947712417, \"f2\": 0.15736766809728184, \"f0_5\": 0.42759961127308066, \"p4\": 0.3739459236270573, \"phi\": 0.3598960379942781}, {\"truth_threshold\": 13.40000019967556, \"match_probability\": 0.999907496573012, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 257.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1774.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.12653865091088135, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8734613490891187, \"precision\": 1.0, \"recall\": 0.12653865091088135, \"specificity\": 1.0, \"npv\": 0.996446620182957, \"accuracy\": 0.9964484484484485, \"f1\": 0.22465034965034966, \"f2\": 0.15332299248299724, \"f0_5\": 0.4200719189277542, \"p4\": 0.366760839681312, \"phi\": 0.35509014492472013}, {\"truth_threshold\": 13.500000201165676, \"match_probability\": 0.9999136907162209, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 255.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1776.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.1255539143279173, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8744460856720827, \"precision\": 1.0, \"recall\": 0.1255539143279173, 
\"specificity\": 1.0, \"npv\": 0.9964426283688369, \"accuracy\": 0.9964444444444445, \"f1\": 0.2230971128608924, \"f2\": 0.15216612960973863, \"f0_5\": 0.41789577187807275, \"p4\": 0.3646881257141293, \"phi\": 0.3537050641352279}, {\"truth_threshold\": 13.600000202655792, \"match_probability\": 0.9999194701253888, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 247.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1784.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.12161496799606106, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8783850320039389, \"precision\": 1.0, \"recall\": 0.12161496799606106, \"specificity\": 1.0, \"npv\": 0.9964266614321797, \"accuracy\": 0.9964284284284284, \"f1\": 0.21685689201053557, \"f2\": 0.14753315016127105, \"f0_5\": 0.4090758529314342, \"p4\": 0.3563075000027765, \"phi\": 0.34810974783894877}, {\"truth_threshold\": 13.700000204145908, \"match_probability\": 0.9999248625650565, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 240.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1791.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.11816838995568685, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8818316100443131, \"precision\": 1.0, \"recall\": 0.11816838995568685, \"specificity\": 1.0, \"npv\": 0.9964126907823578, \"accuracy\": 0.9964144144144144, \"f1\": 0.21136063408190225, \"f2\": 0.14347202295552366, \"f0_5\": 0.4012036108324975, \"p4\": 0.3488544423770536, \"phi\": 0.34313916040167275}, {\"truth_threshold\": 13.800000205636024, \"match_probability\": 0.999929893941616, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 236.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1795.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.11619891678975874, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8838010832102413, \"precision\": 1.0, \"recall\": 
0.11619891678975874, \"specificity\": 1.0, \"npv\": 0.9964047077297782, \"accuracy\": 0.9964064064064064, \"f1\": 0.20820467578297308, \"f2\": 0.14114832535885166, \"f0_5\": 0.39663865546218485, \"p4\": 0.3445442143793838, \"phi\": 0.3402662894299351}, {\"truth_threshold\": 13.90000020712614, \"match_probability\": 0.9999345884275949, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 231.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1800.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.1137370753323486, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8862629246676514, \"precision\": 1.0, \"recall\": 0.1137370753323486, \"specificity\": 1.0, \"npv\": 0.9963947290939353, \"accuracy\": 0.9963963963963964, \"f1\": 0.20424403183023873, \"f2\": 0.13824057450628366, \"f0_5\": 0.39086294416243655, \"p4\": 0.339102998241512, \"phi\": 0.33664079129498253}, {\"truth_threshold\": 14.000000208616257, \"match_probability\": 0.9999389685776376, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 229.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1802.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.11275233874938453, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8872476612506155, \"precision\": 1.0, \"recall\": 0.11275233874938453, \"specificity\": 1.0, \"npv\": 0.9963907376955601, \"accuracy\": 0.9963923923923924, \"f1\": 0.20265486725663717, \"f2\": 0.1370764994612714, \"f0_5\": 0.38853070919579236, \"p4\": 0.3369096866665178, \"phi\": 0.3351796324113369}, {\"truth_threshold\": 14.100000210106373, \"match_probability\": 0.9999430554367367, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 221.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1810.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.10881339241752831, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8911866075824717, \"precision\": 1.0, 
\"recall\": 0.10881339241752831, \"specificity\": 1.0, \"npv\": 0.9963747724218323, \"accuracy\": 0.9963763763763763, \"f1\": 0.19626998223801065, \"f2\": 0.1324146195326543, \"f0_5\": 0.379073756432247, \"p4\": 0.3280386886676825, \"phi\": 0.3292702827563737}, {\"truth_threshold\": 14.200000211596489, \"match_probability\": 0.9999468686412301, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 207.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1824.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.1019202363367799, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8980797636632201, \"precision\": 1.0, \"recall\": 0.1019202363367799, \"specificity\": 1.0, \"npv\": 0.9963468344238754, \"accuracy\": 0.9963483483483484, \"f1\": 0.18498659517426275, \"f2\": 0.12423478574000721, \"f0_5\": 0.3620146904512067, \"p4\": 0.3121278664528343, \"phi\": 0.3186658200025285}, {\"truth_threshold\": 14.300000213086605, \"match_probability\": 0.9999504265130488, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 204.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1827.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.10044313146233383, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8995568685376661, \"precision\": 1.0, \"recall\": 0.10044313146233383, \"specificity\": 1.0, \"npv\": 0.9963408479138627, \"accuracy\": 0.9963423423423423, \"f1\": 0.1825503355704698, \"f2\": 0.12247838616714697, \"f0_5\": 0.3582718651211802, \"p4\": 0.3086525746763317, \"phi\": 0.31634726926007317}, {\"truth_threshold\": 14.400000214576721, \"match_probability\": 0.9999537461476637, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 194.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1837.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.09551944854751354, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9044805514524865, \"precision\": 
1.0, \"recall\": 0.09551944854751354, \"specificity\": 1.0, \"npv\": 0.9963208934000393, \"accuracy\": 0.9963223223223223, \"f1\": 0.17438202247191012, \"f2\": 0.11661457081029093, \"f0_5\": 0.3455646597791236, \"p4\": 0.2968952581262943, \"phi\": 0.30849314792056204}, {\"truth_threshold\": 14.500000216066837, \"match_probability\": 0.9999568434961527, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 182.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1849.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.0896110290497292, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9103889709502708, \"precision\": 1.0, \"recall\": 0.0896110290497292, \"specificity\": 1.0, \"npv\": 0.9962969490384885, \"accuracy\": 0.9962982982982983, \"f1\": 0.16448260280162674, \"f2\": 0.10955935468336142, \"f0_5\": 0.3298296484233418, \"p4\": 0.28242489359262934, \"phi\": 0.29879624301929325}, {\"truth_threshold\": 14.600000217556953, \"match_probability\": 0.9999597334417798, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 178.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1853.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.08764155588380108, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9123584441161989, \"precision\": 1.0, \"recall\": 0.08764155588380108, \"specificity\": 1.0, \"npv\": 0.9962889678403916, \"accuracy\": 0.9962902902902903, \"f1\": 0.16115889542779538, \"f2\": 0.10720308359431463, \"f0_5\": 0.32446226759022967, \"p4\": 0.2775111124400522, \"phi\": 0.2954933421439444}, {\"truth_threshold\": 14.70000021904707, \"match_probability\": 0.9999624298714548, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 174.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1857.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.08567208271787297, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.914327917282127, 
\"precision\": 1.0, \"recall\": 0.08567208271787297, \"specificity\": 1.0, \"npv\": 0.9962809867701662, \"accuracy\": 0.9962822822822823, \"f1\": 0.15782312925170067, \"f2\": 0.10484454085321765, \"f0_5\": 0.31903190319031904, \"p4\": 0.27255110518838743, \"phi\": 0.2921531569379625}, {\"truth_threshold\": 14.800000220537186, \"match_probability\": 0.9999649457424121, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 171.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1860.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.08419497784342689, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9158050221565731, \"precision\": 1.0, \"recall\": 0.08419497784342689, \"specificity\": 1.0, \"npv\": 0.996275001051411, \"accuracy\": 0.9962762762762762, \"f1\": 0.1553133514986376, \"f2\": 0.10307414104882459, \"f0_5\": 0.3149171270718232, \"p4\": 0.26880036979452815, \"phi\": 0.2896227747251304}, {\"truth_threshold\": 14.900000222027302, \"match_probability\": 0.9999672931444318, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 160.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1871.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.07877892663712457, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9212210733628754, \"precision\": 1.0, \"recall\": 0.07877892663712457, \"specificity\": 1.0, \"npv\": 0.9962530540313214, \"accuracy\": 0.9962542542542543, \"f1\": 0.1460520310360566, \"f2\": 0.09657170449058426, \"f0_5\": 0.2995132909022838, \"p4\": 0.25481746693606766, \"phi\": 0.28014950697001906}, {\"truth_threshold\": 15.100000225007534, \"match_probability\": 0.9999715269079685, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 155.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1876.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.07631708517971443, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 
0.9236829148202855, \"precision\": 1.0, \"recall\": 0.07631708517971443, \"specificity\": 1.0, \"npv\": 0.996243078432747, \"accuracy\": 0.9962442442442443, \"f1\": 0.14181152790484905, \"f2\": 0.09361033941297259, \"f0_5\": 0.29234251225952473, \"p4\": 0.2483392792523921, \"phi\": 0.27573604747376224}, {\"truth_threshold\": 15.20000022649765, \"match_probability\": 0.9999734336151354, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 152.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1879.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.07483998030526834, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9251600196947316, \"precision\": 1.0, \"recall\": 0.07483998030526834, \"specificity\": 1.0, \"npv\": 0.996237093169493, \"accuracy\": 0.9962382382382382, \"f1\": 0.13925790196976637, \"f2\": 0.09183180280328661, \"f0_5\": 0.28798787419477073, \"p4\": 0.24441482751101778, \"phi\": 0.27305377571493616}, {\"truth_threshold\": 15.300000227987766, \"match_probability\": 0.9999752126423825, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 137.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1894.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.06745445593303791, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9325455440669621, \"precision\": 1.0, \"recall\": 0.06745445593303791, \"specificity\": 1.0, \"npv\": 0.9962071679319453, \"accuracy\": 0.9962082082082082, \"f1\": 0.12638376383763839, \"f2\": 0.08291974337247307, \"f0_5\": 0.26560682435052346, \"p4\": 0.2243583028397823, \"phi\": 0.25922695174198596}, {\"truth_threshold\": 15.400000229477882, \"match_probability\": 0.9999768725392036, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 134.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1897.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.06597735105859183, \"tn_rate\": 1.0, \"fp_rate\": 0.0, 
\"fn_rate\": 0.9340226489414082, \"precision\": 1.0, \"recall\": 0.06597735105859183, \"specificity\": 1.0, \"npv\": 0.996201183100171, \"accuracy\": 0.9962022022022022, \"f1\": 0.12378752886836028, \"f2\": 0.08113344635504965, \"f0_5\": 0.2610050642773666, \"p4\": 0.2202578923349157, \"phi\": 0.2563722199895778}, {\"truth_threshold\": 15.500000230967999, \"match_probability\": 0.9999784212826682, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 120.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1911.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.059084194977843424, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9409158050221565, \"precision\": 1.0, \"recall\": 0.059084194977843424, \"specificity\": 1.0, \"npv\": 0.9961732548359966, \"accuracy\": 0.9961741741741742, \"f1\": 0.11157601115760112, \"f2\": 0.07278020378457059, \"f0_5\": 0.23894862604540024, \"p4\": 0.2007141263559989, \"phi\": 0.24260687298681985}, {\"truth_threshold\": 15.600000232458115, \"match_probability\": 0.9999798663157408, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 108.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1923.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.053175775480059084, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.946824224519941, \"precision\": 1.0, \"recall\": 0.053175775480059084, \"specificity\": 1.0, \"npv\": 0.9961493175701653, \"accuracy\": 0.9961501501501502, \"f1\": 0.10098176718092566, \"f2\": 0.06559766763848396, \"f0_5\": 0.2192448233861145, \"p4\": 0.18340697707794523, \"phi\": 0.23015432313064466}, {\"truth_threshold\": 15.70000023394823, \"match_probability\": 0.9999812145830361, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 106.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1925.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.052191038897095025, \"tn_rate\": 1.0, 
\"fp_rate\": 0.0, \"fn_rate\": 0.947808961102905, \"precision\": 1.0, \"recall\": 0.052191038897095025, \"specificity\": 1.0, \"npv\": 0.9961453281377028, \"accuracy\": 0.9961461461461462, \"f1\": 0.09920449227889565, \"f2\": 0.06439854191980558, \"f0_5\": 0.2158859470468432, \"p4\": 0.18047082807204287, \"phi\": 0.22801284956772574}, {\"truth_threshold\": 15.800000235438347, \"match_probability\": 0.9999824725641815, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 103.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1928.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.05071393402264894, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9492860659773511, \"precision\": 1.0, \"recall\": 0.05071393402264894, \"specificity\": 1.0, \"npv\": 0.996139344048923, \"accuracy\": 0.9961401401401402, \"f1\": 0.09653233364573571, \"f2\": 0.06259876017989546, \"f0_5\": 0.21080638559148587, \"p4\": 0.17603834507823987, \"phi\": 0.22476241894823493}, {\"truth_threshold\": 15.900000236928463, \"match_probability\": 0.9999836463049459, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 98.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1933.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.0482520925652388, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9517479074347612, \"precision\": 1.0, \"recall\": 0.0482520925652388, \"specificity\": 1.0, \"npv\": 0.9961293707273899, \"accuracy\": 0.9961301301301301, \"f1\": 0.09206200093940817, \"f2\": 0.059596205302846025, \"f0_5\": 0.2022286421791168, \"p4\": 0.16857454088546495, \"phi\": 0.21923805920344008}, {\"truth_threshold\": 16.100000239908695, \"match_probability\": 0.9999857632514492, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 94.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1937.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.046282619399310686, 
\"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9537173806006893, \"precision\": 1.0, \"recall\": 0.046282619399310686, \"specificity\": 1.0, \"npv\": 0.9961213922139501, \"accuracy\": 0.9961221221221221, \"f1\": 0.08847058823529412, \"f2\": 0.05719153078607934, \"f0_5\": 0.19526381387619443, \"p4\": 0.16253372729609977, \"phi\": 0.2147163414166461}, {\"truth_threshold\": 16.20000024139881, \"match_probability\": 0.9999867166312594, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 92.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1939.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.04529788281634663, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9547021171836534, \"precision\": 1.0, \"recall\": 0.04529788281634663, \"specificity\": 1.0, \"npv\": 0.996117403005158, \"accuracy\": 0.9961181181181181, \"f1\": 0.08666980687706076, \"f2\": 0.05598831548198637, \"f0_5\": 0.1917465610671113, \"p4\": 0.15948973055394525, \"phi\": 0.21241941858655763}, {\"truth_threshold\": 16.300000242888927, \"match_probability\": 0.9999876061677141, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 91.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1940.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.0448055145248646, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9551944854751354, \"precision\": 1.0, \"recall\": 0.0448055145248646, \"specificity\": 1.0, \"npv\": 0.9961154084127438, \"accuracy\": 0.9961161161161162, \"f1\": 0.08576814326107446, \"f2\": 0.05538648813146683, \"f0_5\": 0.18997912317327767, \"p4\": 0.1579617808569265, \"phi\": 0.21126159944504497}, {\"truth_threshold\": 16.400000244379044, \"match_probability\": 0.9999884361359999, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 89.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1942.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 
0.04382077794190054, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9561792220580995, \"precision\": 1.0, \"recall\": 0.04382077794190054, \"specificity\": 1.0, \"npv\": 0.9961114192518787, \"accuracy\": 0.9961121121121121, \"f1\": 0.08396226415094339, \"f2\": 0.05418239376598076, \"f0_5\": 0.1864264767490574, \"p4\": 0.15489390099291858, \"phi\": 0.2089267271280244}, {\"truth_threshold\": 16.600000247359276, \"match_probability\": 0.9999899330566321, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 86.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1945.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.04234367306745446, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9576563269325455, \"precision\": 1.0, \"recall\": 0.04234367306745446, \"specificity\": 1.0, \"npv\": 0.9961054355704886, \"accuracy\": 0.9961061061061061, \"f1\": 0.0812470477090222, \"f2\": 0.05237515225334957, \"f0_5\": 0.18105263157894738, \"p4\": 0.15026189411602864, \"phi\": 0.20537468905517808}, {\"truth_threshold\": 16.700000248849392, \"match_probability\": 0.9999906072033913, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 80.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1951.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.03938946331856229, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9606105366814377, \"precision\": 1.0, \"recall\": 0.03938946331856229, \"specificity\": 1.0, \"npv\": 0.9960934684233711, \"accuracy\": 0.9960940940940941, \"f1\": 0.07579346281383231, \"f2\": 0.04875670404680644, \"f0_5\": 0.1701403658017865, \"p4\": 0.1408876251704697, \"phi\": 0.19807974943522585}, {\"truth_threshold\": 16.800000250339508, \"match_probability\": 0.9999912362053778, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 79.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1952.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, 
\"tp_rate\": 0.038897095027080254, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9611029049729197, \"precision\": 1.0, \"recall\": 0.038897095027080254, \"specificity\": 1.0, \"npv\": 0.9960914739268072, \"accuracy\": 0.996092092092092, \"f1\": 0.07488151658767772, \"f2\": 0.048153114714128975, \"f0_5\": 0.16829995739241585, \"p4\": 0.13931076531403946, \"phi\": 0.19683766081976145}, {\"truth_threshold\": 16.900000251829624, \"match_probability\": 0.999991823085696, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 71.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1960.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.034958148695224026, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9650418513047759, \"precision\": 1.0, \"recall\": 0.034958148695224026, \"specificity\": 1.0, \"npv\": 0.9960755182418322, \"accuracy\": 0.9960760760760761, \"f1\": 0.0675547098001903, \"f2\": 0.043319097010372176, \"f0_5\": 0.15334773218142547, \"p4\": 0.12654393988477125, \"phi\": 0.18660374079414996}, {\"truth_threshold\": 17.00000025331974, \"match_probability\": 0.9999923706650156, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 68.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1963.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.03348104382077794, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.966518956179222, \"precision\": 1.0, \"recall\": 0.03348104382077794, \"specificity\": 1.0, \"npv\": 0.9960695349917507, \"accuracy\": 0.9960700700700701, \"f1\": 0.06479275845640782, \"f2\": 0.04150390625, \"f0_5\": 0.14763352149370387, \"p4\": 0.12168561456520233, \"phi\": 0.18261831164919007}, {\"truth_threshold\": 17.200000256299973, \"match_probability\": 0.999993358271586, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 67.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1964.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 
0.9959339339339339, \"tp_rate\": 0.032988675529295915, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.967011324470704, \"precision\": 1.0, \"recall\": 0.032988675529295915, \"specificity\": 1.0, \"npv\": 0.9960675405910302, \"accuracy\": 0.996068068068068, \"f1\": 0.06387035271687322, \"f2\": 0.04089854718593578, \"f0_5\": 0.14571552849064812, \"p4\": 0.12005745654341737, \"phi\": 0.18127037513565553}, {\"truth_threshold\": 17.30000025779009, \"match_probability\": 0.999993803045519, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 65.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1966.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.032003938946331856, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9679960610536681, \"precision\": 1.0, \"recall\": 0.032003938946331856, \"specificity\": 1.0, \"npv\": 0.9960635518135493, \"accuracy\": 0.9960640640640641, \"f1\": 0.06202290076335878, \"f2\": 0.03968738551715716, \"f0_5\": 0.14185945002182454, \"p4\": 0.11678796022930997, \"phi\": 0.17854399205491986}, {\"truth_threshold\": 17.400000259280205, \"match_probability\": 0.9999942180346287, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 64.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1967.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.03151157065484983, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9684884293451502, \"precision\": 1.0, \"recall\": 0.03151157065484983, \"specificity\": 1.0, \"npv\": 0.9960615574367887, \"accuracy\": 0.996062062062062, \"f1\": 0.06109785202863962, \"f2\": 0.039081582804103565, \"f0_5\": 0.139921294271972, \"p4\": 0.11514658630375932, \"phi\": 0.17716507597082762}, {\"truth_threshold\": 17.50000026077032, \"match_probability\": 0.9999946052334694, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 62.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1969.0, \"P_rate\": 0.004066066066066066, 
\"N_rate\": 0.9959339339339339, \"tp_rate\": 0.03052683407188577, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9694731659281143, \"precision\": 1.0, \"recall\": 0.03052683407188577, \"specificity\": 1.0, \"npv\": 0.996057568707227, \"accuracy\": 0.996058058058058, \"f1\": 0.05924510272336359, \"f2\": 0.03786953334962131, \"f0_5\": 0.13602457218078104, \"p4\": 0.11185049710811956, \"phi\": 0.17437455125668846}, {\"truth_threshold\": 17.700000263750553, \"match_probability\": 0.9999953035796879, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 56.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1975.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.027572624322993598, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9724273756770064, \"precision\": 1.0, \"recall\": 0.027572624322993598, \"specificity\": 1.0, \"npv\": 0.9960456027102138, \"accuracy\": 0.9960460460460461, \"f1\": 0.05366554863440345, \"f2\": 0.034229828850855744, \"f0_5\": 0.12416851441241686, \"p4\": 0.10185418609649406, \"phi\": 0.16572142653289723}, {\"truth_threshold\": 17.80000026524067, \"match_probability\": 0.9999956180835331, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 52.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1979.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.025603151157065487, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9743968488429345, \"precision\": 1.0, \"recall\": 0.025603151157065487, \"specificity\": 1.0, \"npv\": 0.9960376255385947, \"accuracy\": 0.996038038038038, \"f1\": 0.04992798847815651, \"f2\": 0.031800391389432484, \"f0_5\": 0.11612326931665923, \"p4\": 0.09509845799462915, \"phi\": 0.15969252294578237}, {\"truth_threshold\": 18.200000271201134, \"match_probability\": 0.9999966791247992, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 50.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1981.0, \"P_rate\": 
0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.024618414574101428, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9753815854258986, \"precision\": 1.0, \"recall\": 0.024618414574101428, \"specificity\": 1.0, \"npv\": 0.9960336370007008, \"accuracy\": 0.996034034034034, \"f1\": 0.048053820278712155, \"f2\": 0.030584781012967948, \"f0_5\": 0.11205737337516809, \"p4\": 0.09169268375229242, \"phi\": 0.15659108852496462}, {\"truth_threshold\": 18.30000027269125, \"match_probability\": 0.999996901513191, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 48.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1983.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.023633677991137372, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9763663220088626, \"precision\": 1.0, \"recall\": 0.023633677991137372, \"specificity\": 1.0, \"npv\": 0.9960296484947503, \"accuracy\": 0.99603003003003, \"f1\": 0.046176046176046176, \"f2\": 0.02936857562408223, \"f0_5\": 0.10796221322537113, \"p4\": 0.08826809705159724, \"phi\": 0.15342699886966005}, {\"truth_threshold\": 18.400000274181366, \"match_probability\": 0.9999971090089864, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 46.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1985.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.022648941408173313, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9773510585918267, \"precision\": 1.0, \"recall\": 0.022648941408173313, \"specificity\": 1.0, \"npv\": 0.9960256600207427, \"accuracy\": 0.9960260260260261, \"f1\": 0.04429465575349061, \"f2\": 0.028151774785801713, \"f0_5\": 0.1038374717832957, \"p4\": 0.08482454150740923, \"phi\": 0.15019629427801126}, {\"truth_threshold\": 18.500000275671482, \"match_probability\": 0.9999973026094866, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 43.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 
1988.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.02117183653372723, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9788281634662728, \"precision\": 1.0, \"recall\": 0.02117183653372723, \"specificity\": 1.0, \"npv\": 0.9960196773696234, \"accuracy\": 0.99602002002002, \"f1\": 0.041465766634522665, \"f2\": 0.026325456103832495, \"f0_5\": 0.09759418974126191, \"p4\": 0.07962329523364889, \"phi\": 0.145215583852579}, {\"truth_threshold\": 18.600000277161598, \"match_probability\": 0.999997483245208, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 42.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1989.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.0206794682422452, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9793205317577548, \"precision\": 1.0, \"recall\": 0.0206794682422452, \"specificity\": 1.0, \"npv\": 0.9960176831685548, \"accuracy\": 0.996018018018018, \"f1\": 0.04052098408104197, \"f2\": 0.025716385011021307, \"f0_5\": 0.09549795361527967, \"p4\": 0.07787988963301433, \"phi\": 0.14351695386886792}, {\"truth_threshold\": 18.700000278651714, \"match_probability\": 0.9999976517843541, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 40.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1991.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.019694731659281144, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9803052683407188, \"precision\": 1.0, \"recall\": 0.019694731659281144, \"specificity\": 1.0, \"npv\": 0.9960136947903736, \"accuracy\": 0.996014014014014, \"f1\": 0.0386286817962337, \"f2\": 0.02449779519843214, \"f0_5\": 0.09128251939753537, \"p4\": 0.07437847174448221, \"phi\": 0.14005792533043446}, {\"truth_threshold\": 18.80000028014183, \"match_probability\": 0.9999978090369889, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 38.0, \"tn\": 497469.0, \"fp\": 0.0, 
\"fn\": 1993.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.018709995076317085, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9812900049236829, \"precision\": 1.0, \"recall\": 0.018709995076317085, \"specificity\": 1.0, \"npv\": 0.9960097064441339, \"accuracy\": 0.99601001001001, \"f1\": 0.03673272112131464, \"f2\": 0.02327860818426856, \"f0_5\": 0.08703618873110398, \"p4\": 0.07085744184614896, \"phi\": 0.1365113061381136}, {\"truth_threshold\": 19.000000283122063, \"match_probability\": 0.9999980926553794, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 37.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1994.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.018217626784835055, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9817823732151649, \"precision\": 1.0, \"recall\": 0.018217626784835055, \"specificity\": 1.0, \"npv\": 0.9960077122829919, \"accuracy\": 0.996008008008008, \"f1\": 0.035783365570599614, \"f2\": 0.022668790589388556, \"f0_5\": 0.08490133088572739, \"p4\": 0.0690895208026074, \"phi\": 0.13470299468530358}, {\"truth_threshold\": 19.200000286102295, \"match_probability\": 0.9999983395596597, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 35.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1996.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.017232890201871, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.982767109798129, \"precision\": 1.0, \"recall\": 0.017232890201871, \"specificity\": 1.0, \"npv\": 0.9960037239846636, \"accuracy\": 0.996004004004004, \"f1\": 0.03388189738625363, \"f2\": 0.02144870694938105, \"f0_5\": 0.08060801473975127, \"p4\": 0.06553876232920226, \"phi\": 0.13101153695794251}, {\"truth_threshold\": 19.30000028759241, \"match_probability\": 0.9999984507542113, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 34.0, \"tn\": 497469.0, 
\"fp\": 0.0, \"fn\": 1997.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.01674052191038897, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.983259478089611, \"precision\": 1.0, \"recall\": 0.01674052191038897, \"specificity\": 1.0, \"npv\": 0.9960017298474771, \"accuracy\": 0.996002002002002, \"f1\": 0.03292978208232446, \"f2\": 0.02083844079431233, \"f0_5\": 0.07844946931241348, \"p4\": 0.0637558828679807, \"phi\": 0.1291262513251934}, {\"truth_threshold\": 19.500000290572643, \"match_probability\": 0.9999986513029383, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 33.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1998.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.01624815361890694, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.983751846381093, \"precision\": 1.0, \"recall\": 0.01624815361890694, \"specificity\": 1.0, \"npv\": 0.9959997357182757, \"accuracy\": 0.996, \"f1\": 0.03197674418604651, \"f2\": 0.02022802500919456, \"f0_5\": 0.07628294036061026, \"p4\": 0.06196797503850294, \"phi\": 0.12721303671535109}, {\"truth_threshold\": 19.60000029206276, \"match_probability\": 0.9999987416210334, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 31.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 2000.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.015263417035942885, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9847365829640571, \"precision\": 1.0, \"recall\": 0.015263417035942885, \"specificity\": 1.0, \"npv\": 0.9959957474838278, \"accuracy\": 0.995995995995996, \"f1\": 0.030067895247332686, \"f2\": 0.01900674432863274, \"f0_5\": 0.07192575406032482, \"p4\": 0.05837698890038264, \"phi\": 0.12329760119268877}, {\"truth_threshold\": 19.700000293552876, \"match_probability\": 0.9999988258908107, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 30.0, \"tn\": 497469.0, 
\"fp\": 0.0, \"fn\": 2001.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.014771048744460856, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9852289512555391, \"precision\": 1.0, \"recall\": 0.014771048744460856, \"specificity\": 1.0, \"npv\": 0.9959937533785813, \"accuracy\": 0.995993993993994, \"f1\": 0.02911208151382824, \"f2\": 0.01839587932303164, \"f0_5\": 0.0697350069735007, \"p4\": 0.05657386760212808, \"phi\": 0.12129250710713152}, {\"truth_threshold\": 19.900000296533108, \"match_probability\": 0.9999989778784306, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 25.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 2006.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.012309207287050714, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9876907927129492, \"precision\": 1.0, \"recall\": 0.012309207287050714, \"specificity\": 1.0, \"npv\": 0.9959837829721208, \"accuracy\": 0.995983983983984, \"f1\": 0.024319066147859923, \"f2\": 0.015339305436249846, \"f0_5\": 0.05865790708587518, \"p4\": 0.04748110798441443, \"phi\": 0.11072384945956659}, {\"truth_threshold\": 20.10000029951334, \"match_probability\": 0.9999991101913761, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 24.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 2007.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.011816838995568686, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9881831610044313, \"precision\": 1.0, \"recall\": 0.011816838995568686, \"specificity\": 1.0, \"npv\": 0.9959817889147827, \"accuracy\": 0.995981981981982, \"f1\": 0.02335766423357664, \"f2\": 0.014727540500736377, \"f0_5\": 0.056417489421720736, \"p4\": 0.045646971079537135, \"phi\": 0.10848666481242965}, {\"truth_threshold\": 20.200000301003456, \"match_probability\": 0.9999991697791492, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 
18.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 2013.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.008862629246676515, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9911373707533235, \"precision\": 1.0, \"recall\": 0.008862629246676515, \"specificity\": 1.0, \"npv\": 0.995969824738429, \"accuracy\": 0.99596996996997, \"f1\": 0.017569546120058566, \"f2\": 0.01105379513633014, \"f0_5\": 0.042796005706134094, \"p4\": 0.03453116780034147, \"phi\": 0.09395164339985801}, {\"truth_threshold\": 20.40000030398369, \"match_probability\": 0.9999992772506945, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 15.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 2016.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.007385524372230428, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9926144756277696, \"precision\": 1.0, \"recall\": 0.007385524372230428, \"specificity\": 1.0, \"npv\": 0.9959638427580407, \"accuracy\": 0.9959639639639639, \"f1\": 0.01466275659824047, \"f2\": 0.009214891264283081, \"f0_5\": 0.035868005738880916, \"p4\": 0.028900887852257404, \"phi\": 0.08576546644512453}, {\"truth_threshold\": 20.500000305473804, \"match_probability\": 0.9999993256510213, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 14.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 2017.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.006893156080748399, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9931068439192516, \"precision\": 1.0, \"recall\": 0.006893156080748399, \"specificity\": 1.0, \"npv\": 0.9959618487805464, \"accuracy\": 0.995961961961962, \"f1\": 0.013691931540342298, \"f2\": 0.00860162202015237, \"f0_5\": 0.033540967896502155, \"p4\": 0.027013249704693862, \"phi\": 0.082857229461979}, {\"truth_threshold\": 20.700000308454037, \"match_probability\": 0.9999994129450668, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, 
\"n\": 497469.0, \"tp\": 8.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 2023.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.003938946331856229, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9960610536681438, \"precision\": 1.0, \"recall\": 0.003938946331856229, \"specificity\": 1.0, \"npv\": 0.9959498850832446, \"accuracy\": 0.9959499499499499, \"f1\": 0.00784698381559588, \"f2\": 0.004918839153959666, \"f0_5\": 0.019389238972370333, \"p4\": 0.015571529643018172, \"phi\": 0.06263380194879821}, {\"truth_threshold\": 21.000000312924385, \"match_probability\": 0.9999995231631726, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 7.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 2024.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.0034465780403741997, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9965534219596258, \"precision\": 1.0, \"recall\": 0.0034465780403741997, \"specificity\": 1.0, \"npv\": 0.995947891161638, \"accuracy\": 0.9959479479479479, \"f1\": 0.0068694798822374874, \"f2\": 0.004304513589964334, \"f0_5\": 0.016998542982030112, \"p4\": 0.013645034789052582, \"phi\": 0.05858849828280884}, {\"truth_threshold\": 21.40000031888485, \"match_probability\": 0.9999996386252203, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 5.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 2026.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.002461841457410143, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9975381585425899, \"precision\": 1.0, \"recall\": 0.002461841457410143, \"specificity\": 1.0, \"npv\": 0.9959439033423758, \"accuracy\": 0.9959439439439439, \"f1\": 0.004911591355599214, \"f2\": 0.0030754090294009104, \"f0_5\": 0.01218917601170161, \"p4\": 0.009775073777815826, \"phi\": 0.049516219469009755}, {\"truth_threshold\": 21.600000321865082, \"match_probability\": 0.999999685404968, 
\"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 4.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 2027.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.0019694731659281144, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9980305268340719, \"precision\": 1.0, \"recall\": 0.0019694731659281144, \"specificity\": 1.0, \"npv\": 0.9959419094447203, \"accuracy\": 0.995941941941942, \"f1\": 0.003931203931203931, \"f2\": 0.0024606299212598425, \"f0_5\": 0.009770395701025891, \"p4\": 0.007831557688366788, \"phi\": 0.04428860875523846}, {\"truth_threshold\": 21.90000032633543, \"match_probability\": 0.9999997444694171, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 3.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 2028.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.0014771048744460858, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9985228951255539, \"precision\": 1.0, \"recall\": 0.0014771048744460858, \"specificity\": 1.0, \"npv\": 0.9959399155550483, \"accuracy\": 0.9959399399399399, \"f1\": 0.0029498525073746312, \"f2\": 0.0018456995201181247, \"f0_5\": 0.007342143906020558, \"p4\": 0.00588231767637118, \"phi\": 0.038355021886602864}, {\"truth_threshold\": 22.20000033080578, \"match_probability\": 0.9999997924446623, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 2030.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.0004923682914820286, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.999507631708518, \"precision\": 1.0, \"recall\": 0.0004923682914820286, \"specificity\": 1.0, \"npv\": 0.9959359277996552, \"accuracy\": 0.995935935935936, \"f1\": 0.000984251968503937, \"f2\": 0.0006153846153846154, \"f0_5\": 0.002457002457002457, \"p4\": 0.001966564392884295, \"phi\": 0.0221442378779737}]}}, {\"mode\": \"vega-lite\"});\n",
              "</script>"
            ],
            "text/plain": [
              "alt.Chart(...)"
            ]
          },
          "execution_count": 41,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "linker.evaluation.accuracy_analysis_from_labels_column(\"cluster\", output_type=\"roc\")"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 42,
      "metadata": {
        "execution": {
          "iopub.execute_input": "2024-06-07T09:09:21.327370Z",
          "iopub.status.busy": "2024-06-07T09:09:21.327111Z",
          "iopub.status.idle": "2024-06-07T09:09:22.635682Z",
          "shell.execute_reply": "2024-06-07T09:09:22.635098Z"
        }
      },
      "outputs": [
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\n",
            " -- WARNING --\n",
            "You have called predict(), but there are some parameter estimates which have neither been estimated or specified in your settings dictionary.  To produce predictions the following untrained trained parameters will use default values.\n",
            "Comparison: 'email':\n",
            "    m values not fully trained\n"
          ]
        },
        {
          "data": {
            "text/html": [
              "\n",
              "<style>\n",
              "  #altair-viz-e67694ab185e4abfac2bdc8000468b2c.vega-embed {\n",
              "    width: 100%;\n",
              "    display: flex;\n",
              "  }\n",
              "\n",
              "  #altair-viz-e67694ab185e4abfac2bdc8000468b2c.vega-embed details,\n",
              "  #altair-viz-e67694ab185e4abfac2bdc8000468b2c.vega-embed details summary {\n",
              "    position: relative;\n",
              "  }\n",
              "</style>\n",
              "<div id=\"altair-viz-e67694ab185e4abfac2bdc8000468b2c\"></div>\n",
              "<script type=\"text/javascript\">\n",
              "  var VEGA_DEBUG = (typeof VEGA_DEBUG == \"undefined\") ? {} : VEGA_DEBUG;\n",
              "  (function(spec, embedOpt){\n",
              "    let outputDiv = document.currentScript.previousElementSibling;\n",
              "    if (outputDiv.id !== \"altair-viz-e67694ab185e4abfac2bdc8000468b2c\") {\n",
              "      outputDiv = document.getElementById(\"altair-viz-e67694ab185e4abfac2bdc8000468b2c\");\n",
              "    }\n",
              "    const paths = {\n",
              "      \"vega\": \"https://cdn.jsdelivr.net/npm/vega@5?noext\",\n",
              "      \"vega-lib\": \"https://cdn.jsdelivr.net/npm/vega-lib?noext\",\n",
              "      \"vega-lite\": \"https://cdn.jsdelivr.net/npm/vega-lite@5.17.0?noext\",\n",
              "      \"vega-embed\": \"https://cdn.jsdelivr.net/npm/vega-embed@6?noext\",\n",
              "    };\n",
              "\n",
              "    function maybeLoadScript(lib, version) {\n",
              "      var key = `${lib.replace(\"-\", \"\")}_version`;\n",
              "      return (VEGA_DEBUG[key] == version) ?\n",
              "        Promise.resolve(paths[lib]) :\n",
              "        new Promise(function(resolve, reject) {\n",
              "          var s = document.createElement('script');\n",
              "          document.getElementsByTagName(\"head\")[0].appendChild(s);\n",
              "          s.async = true;\n",
              "          s.onload = () => {\n",
              "            VEGA_DEBUG[key] = version;\n",
              "            return resolve(paths[lib]);\n",
              "          };\n",
              "          s.onerror = () => reject(`Error loading script: ${paths[lib]}`);\n",
              "          s.src = paths[lib];\n",
              "        });\n",
              "    }\n",
              "\n",
              "    function showError(err) {\n",
              "      outputDiv.innerHTML = `<div class=\"error\" style=\"color:red;\">${err}</div>`;\n",
              "      throw err;\n",
              "    }\n",
              "\n",
              "    function displayChart(vegaEmbed) {\n",
              "      vegaEmbed(outputDiv, spec, embedOpt)\n",
              "        .catch(err => showError(`Javascript Error: ${err.message}<br>This usually means there's a typo in your chart specification. See the javascript console for the full traceback.`));\n",
              "    }\n",
              "\n",
              "    if(typeof define === \"function\" && define.amd) {\n",
              "      requirejs.config({paths});\n",
              "      require([\"vega-embed\"], displayChart, err => showError(`Error loading script: ${err.message}`));\n",
              "    } else {\n",
              "      maybeLoadScript(\"vega\", \"5\")\n",
              "        .then(() => maybeLoadScript(\"vega-lite\", \"5.17.0\"))\n",
              "        .then(() => maybeLoadScript(\"vega-embed\", \"6\"))\n",
              "        .catch(showError)\n",
              "        .then(() => displayChart(vegaEmbed));\n",
              "    }\n",
              "  })({\"config\": {\"view\": {\"continuousWidth\": 300, \"continuousHeight\": 300, \"discreteHeight\": {\"step\": 150}, \"discreteWidth\": {\"step\": 150}}, \"axis\": {\"gridWidth\": 0.5, \"labelFontSize\": 12, \"titleFontSize\": 16}, \"axisX\": {\"format\": \"+.0f\", \"grid\": false, \"offset\": 20, \"values\": {\"expr\": \"[-25,-20,-15,-10,-5,0,5,10,15,20,25]\"}}, \"axisY\": {\"title\": \"Match probability threshold\", \"titleFontSize\": 16}, \"concat\": {\"spacing\": 40}}, \"hconcat\": [{\"vconcat\": [{\"layer\": [{\"layer\": [{\"mark\": {\"type\": \"rule\"}, \"encoding\": {\"opacity\": {\"condition\": {\"param\": \"threshold\", \"value\": 0.3, \"empty\": false}, \"value\": 0}, \"x\": {\"axis\": {\"orient\": \"bottom\"}, \"field\": \"truth_threshold\", \"scale\": {\"nice\": false}, \"title\": null, \"type\": \"quantitative\"}}, \"params\": [{\"name\": \"threshold\", \"select\": {\"type\": \"point\", \"encodings\": [\"x\"], \"fields\": [\"truth_threshold\"], \"nearest\": true, \"on\": \"mouseover\", \"toggle\": false}, \"value\": null}]}, {\"mark\": {\"type\": \"rule\"}, \"encoding\": {\"opacity\": {\"condition\": {\"param\": \"threshold\", \"value\": 0.3, \"empty\": false}, \"value\": 0}, \"y\": {\"axis\": {\"orient\": \"right\"}, \"field\": \"match_probability\", \"title\": \" \", \"type\": \"quantitative\"}}, \"params\": [{\"name\": \"prob\", \"select\": {\"type\": \"point\", \"encodings\": [\"y\"], \"fields\": [\"match_probability\"], \"nearest\": true, \"on\": \"mouseover\", \"toggle\": false}}]}]}, {\"layer\": [{\"mark\": {\"type\": \"text\", \"fontSize\": 14, \"fontWeight\": \"bold\", \"xOffset\": 25, \"yOffset\": 10}, \"encoding\": {\"text\": {\"aggregate\": \"min\", \"field\": \"truth_threshold\", \"format\": \"+.2f\"}, \"y\": {\"axis\": {\"orient\": \"left\"}, \"field\": \"match_probability\", \"title\": \"Match probability threshold\", \"type\": \"quantitative\"}}, \"transform\": [{\"filter\": {\"param\": \"threshold\", \"empty\": 
false}}]}, {\"mark\": {\"type\": \"text\", \"fontSize\": 14, \"xOffset\": -25, \"yOffset\": -10}, \"encoding\": {\"text\": {\"aggregate\": \"min\", \"field\": \"match_probability\", \"format\": \".3f\"}}, \"transform\": [{\"filter\": {\"param\": \"threshold\", \"empty\": false}}]}, {\"mark\": {\"type\": \"line\", \"color\": \"red\", \"opacity\": 0.5}}, {\"mark\": {\"type\": \"line\", \"color\": \"green\", \"opacity\": 0.5, \"strokeWidth\": 3}, \"transform\": [{\"filter\": \"datum.truth_threshold >= threshold.truth_threshold\"}]}, {\"mark\": {\"type\": \"point\", \"color\": \"green\", \"size\": 100}, \"encoding\": {\"opacity\": {\"condition\": {\"param\": \"threshold\", \"value\": 1, \"empty\": false}, \"value\": 0}}}], \"encoding\": {\"x\": {\"field\": \"truth_threshold\", \"type\": \"quantitative\", \"title\": \"Match weight threshold\", \"axis\": {\"orient\": \"top\"}}, \"y\": {\"field\": \"match_probability\", \"type\": \"quantitative\", \"title\": \"Match probability threshold\", \"axis\": {\"orient\": \"left\", \"titlePadding\": 10}}}}, {\"mark\": {\"type\": \"text\", \"align\": \"left\", \"color\": \"red\", \"fontSize\": 12, \"text\": \"Non-match\", \"x\": 0, \"y\": \"height\", \"yOffset\": 10}, \"data\": {\"values\": [{}]}}, {\"mark\": {\"type\": \"text\", \"align\": \"right\", \"color\": \"green\", \"fontSize\": 12, \"fontWeight\": \"bold\", \"text\": \"Match\", \"x\": \"width\", \"y\": 0, \"yOffset\": -10}, \"data\": {\"values\": [{}]}}], \"description\": \"Match weight vs probability\"}, {\"hconcat\": [{\"layer\": [{\"mark\": {\"type\": \"rect\", \"opacity\": 0.5}, \"encoding\": {\"color\": {\"field\": \"count\", \"legend\": null, \"scale\": {\"scheme\": \"reds\", \"zero\": true}, \"type\": \"quantitative\"}}, \"transform\": [{\"filter\": \"datum.predicted == 0\"}]}, {\"mark\": {\"type\": \"rect\", \"opacity\": 0.5}, \"encoding\": {\"color\": {\"field\": \"count\", \"legend\": null, \"scale\": {\"scheme\": \"greens\", \"zero\": true}, \"type\": 
\"quantitative\"}}, \"transform\": [{\"filter\": \"datum.predicted == 1\"}]}, {\"mark\": {\"type\": \"text\", \"fontSize\": 14, \"yOffset\": -40}, \"encoding\": {\"color\": {\"condition\": [{\"test\": \"datum.predicted==1 && datum.actual==1\", \"value\": \"darkgreen\"}, {\"test\": \"datum.predicted==0 && datum.actual==0\", \"value\": \"darkred\"}], \"value\": \"black\"}, \"opacity\": {\"condition\": {\"test\": \"datum.predicted != datum.actual\", \"value\": 1}, \"value\": 0.5}, \"text\": {\"field\": \"confusion_label\", \"type\": \"nominal\"}}}, {\"mark\": {\"type\": \"text\", \"fontSize\": 28, \"fontWeight\": \"bold\", \"yOffset\": 10}, \"encoding\": {\"color\": {\"condition\": [{\"test\": \"datum.predicted==1 && datum.actual==1\", \"value\": \"darkgreen\"}, {\"test\": \"datum.predicted==0 && datum.actual==0\", \"value\": \"darkred\"}], \"value\": \"black\"}, \"text\": {\"field\": \"count\", \"format\": \",\", \"type\": \"nominal\"}}}], \"description\": \"Confusion matrix\", \"encoding\": {\"x\": {\"field\": \"actual\", \"type\": \"nominal\", \"title\": \"Actual\", \"axis\": {\"domain\": false, \"labelAngle\": 0, \"labelExpr\": \"datum.label == 1 ? 'Match' : 'Non-match'\", \"labelFontSize\": 18, \"labelPadding\": 10, \"orient\": \"top\", \"ticks\": false, \"titleAngle\": 0, \"titleFontSize\": 20}, \"sort\": \"-x\"}, \"y\": {\"field\": \"predicted\", \"type\": \"nominal\", \"title\": \"Predicted\", \"axis\": {\"domain\": false, \"labelExpr\": \"datum.label == 1 ? 
'Match' : 'Non-match'\", \"labelFontSize\": 18, \"labelPadding\": 10, \"ticks\": false, \"titleAngle\": 0, \"titleFontSize\": 20, \"titlePadding\": -30}, \"sort\": \"-y\"}}, \"resolve\": {\"scale\": {\"color\": \"independent\"}}, \"transform\": [{\"filter\": {\"or\": [{\"param\": \"threshold\", \"empty\": false}, {\"and\": [{\"param\": \"threshold\", \"empty\": true}, \"datum.truth_threshold == datum.median_threshold\"]}]}}]}], \"transform\": [{\"fold\": [\"tp\", \"tn\", \"fp\", \"fn\"], \"as\": [\"label\", \"count\"]}, {\"calculate\": \"datum.label === 'tp' ? 'True Positive (TP)' : datum.label === 'tn' ? 'True Negative (TN)' : datum.label === 'fp' ? 'False Positive (FP)' : 'False Negative (FN)'\", \"as\": \"confusion_label\"}, {\"calculate\": \"datum.label === 'tp' || datum.label === 'fp' ? 1 : 0\", \"as\": \"predicted\"}, {\"calculate\": \"datum.label === 'tp' || datum.label === 'fn' ? 1 : 0\", \"as\": \"actual\"}, {\"joinaggregate\": [{\"op\": \"median\", \"field\": \"truth_threshold\", \"as\": \"median_threshold\"}]}]}]}, {\"layer\": [{\"layer\": [{\"mark\": {\"type\": \"point\", \"size\": 100}, \"encoding\": {\"opacity\": {\"condition\": {\"param\": \"threshold\", \"value\": 1, \"empty\": false}, \"value\": 0}, \"tooltip\": [{\"field\": \"truth_threshold\", \"format\": \".3f\", \"title\": \"Match weight threshold\", \"type\": \"quantitative\"}, {\"field\": \"match_probability\", \"format\": \".3%\", \"title\": \"Match probability threshold\", \"type\": \"quantitative\"}, {\"field\": \"precision\", \"format\": \".4f\", \"title\": \"Precision\", \"type\": \"quantitative\"}, {\"field\": \"recall\", \"format\": \".4f\", \"title\": \"Recall (TPR)\", \"type\": \"quantitative\"}, {\"field\": \"fp_rate\", \"format\": \".4f\", \"title\": \"FPR\", \"type\": \"quantitative\"}], \"x\": {\"axis\": {\"orient\": \"top\"}, \"field\": \"truth_threshold\", \"title\": \"Match weight threshold\"}}, \"params\": [{\"name\": \"metric\", \"select\": {\"type\": \"point\", \"fields\": 
[\"metric\"]}, \"bind\": \"legend\", \"value\": [{\"metric\": \"precision\"}, {\"metric\": \"recall\"}]}, {\"name\": \"threshold\", \"select\": {\"type\": \"point\", \"encodings\": [\"x\"], \"fields\": [\"truth_threshold\"], \"nearest\": true, \"on\": \"mouseover\", \"toggle\": false}, \"value\": null}], \"transform\": [{\"filter\": {\"param\": \"metric\", \"empty\": true}}]}, {\"mark\": {\"type\": \"line\"}, \"encoding\": {\"opacity\": {\"condition\": {\"param\": \"metric\", \"value\": 1}, \"value\": 0.1}, \"x\": {\"axis\": {\"orient\": \"bottom\"}, \"field\": \"truth_threshold\", \"title\": null}}}], \"encoding\": {\"color\": {\"field\": \"metric\", \"type\": \"nominal\", \"sort\": [\"precision\", \"recall\", \"f1\"], \"title\": [\"Performance\", \"Metric\"], \"legend\": {\"fillColor\": \"whitesmoke\", \"labelExpr\": \"{'precision': 'Precision (PPV)', 'recall': 'Recall (TPR)', 'specificity': 'Specificity (TNR)', 'accuracy': 'Accuracy', 'npv': 'NPV', 'f1': 'F1', 'f2': 'F2', 'f0_5': 'F0.5', 'p4': 'P4', 'phi': '\\u03c6 (MCC)'}[datum.value]\", \"labelFontSize\": 14, \"legendX\": 800, \"legendY\": 160, \"orient\": \"none\", \"padding\": 10, \"titleFontSize\": 16, \"titlePadding\": 15}}, \"x\": {\"type\": \"quantitative\", \"field\": \"truth_threshold\"}, \"y\": {\"field\": \"value\", \"type\": \"quantitative\", \"axis\": {\"labelFontSize\": 12, \"title\": \"Performance metric score\", \"titleFontSize\": 18, \"titlePadding\": 10, \"values\": {\"expr\": \"[0.5,0.55,0.6,0.65,0.7,0.75,0.8,0.85,0.9,0.95,1.0]\"}}, \"scale\": {\"domain\": [0.5, 1]}}}}, {\"layer\": [{\"mark\": {\"type\": \"rule\", \"color\": \"gray\"}, \"encoding\": {\"x\": {\"field\": \"truth_threshold\", \"title\": null, \"type\": \"quantitative\"}}}, {\"layer\": [{\"mark\": {\"type\": \"rect\", \"fill\": \"whitesmoke\", \"x\": 200, \"x2\": 10, \"y2Offset\": 20, \"yOffset\": -20}, \"encoding\": {\"y2\": {\"field\": \"score_index\"}}}, {\"mark\": {\"type\": \"text\", \"align\": \"right\", \"baseline\": 
\"middle\", \"fontSize\": 16, \"x\": 200, \"xOffset\": -10}}], \"encoding\": {\"color\": {\"field\": \"metric\", \"sort\": [\"precision\", \"recall\", \"f1\"]}, \"text\": {\"field\": \"y_text\"}, \"y\": {\"field\": \"score_index\", \"type\": \"quantitative\"}}, \"transform\": [{\"filter\": {\"param\": \"metric\", \"empty\": true}}]}, {\"mark\": {\"type\": \"text\", \"fontSize\": 14, \"fontWeight\": \"bold\", \"xOffset\": 20, \"y\": 0, \"yOffset\": -10}, \"encoding\": {\"text\": {\"condition\": {\"param\": \"threshold\", \"aggregate\": \"min\", \"empty\": false, \"field\": \"truth_threshold\", \"format\": \"+.2f\", \"type\": \"nominal\"}, \"value\": \"\"}, \"x\": {\"field\": \"truth_threshold\", \"type\": \"quantitative\"}}}], \"transform\": [{\"filter\": {\"param\": \"threshold\", \"empty\": false}}]}], \"description\": \"Accuracy chart\", \"height\": 700, \"transform\": [{\"fold\": [\"precision\", \"recall\", \"f1\"], \"as\": [\"metric\", \"value\"]}, {\"calculate\": \"0.6375 - 0.025*indexof(['precision', 'recall', 'f1'], datum.metric)\", \"as\": \"score_index\"}, {\"calculate\": \"{'precision': 'Precision (PPV)', 'recall': 'Recall (TPR)', 'specificity': 'Specificity (TNR)', 'accuracy': 'Accuracy', 'npv': 'NPV', 'f1': 'F1', 'f2': 'F2', 'f0_5': 'F0.5', 'p4': 'P4', 'phi': '\\u03c6 (MCC)'}[datum.metric]\", \"as\": \"metric_text\"}, {\"calculate\": \"datum.metric_text + ' = ' + format(datum.value, ',.3g')\", \"as\": \"y_text\"}], \"width\": 500}], \"data\": {\"name\": \"data-20e09c93d73c716251959bc719b5e642\"}, \"title\": {\"text\": \"Match Threshold Selection Tool\", \"anchor\": \"middle\", \"baseline\": \"line-bottom\", \"fontSize\": 28, \"subtitle\": [\"Hover over either line graph to show Confusion Matrix (bottom left) and selected performance metrics (right).\", \"\", \"Click a legend value to show a specific evaluation metric. 
Shift + Click to show multiple metrics\"], \"subtitleFontSize\": 14, \"subtitleFontStyle\": \"italic\"}, \"$schema\": \"https://vega.github.io/schema/vega-lite/v5.14.1.json\", \"datasets\": {\"data-20e09c93d73c716251959bc719b5e642\": [{\"truth_threshold\": -15.70000023394823, \"match_probability\": 1.8785416963874395e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 495829.0, \"fp\": 1640.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9967033121661852, \"fp_rate\": 0.0032966878338147702, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5015197568389058, \"recall\": 0.8124076809453471, \"specificity\": 0.9967033121661852, \"npv\": 0.9992321799238226, \"accuracy\": 0.9959539539539539, \"f1\": 0.6201841759067844, \"f2\": 0.722796565621167, \"f0_5\": 0.5430847212165097, \"p4\": 0.7649756675687114, \"phi\": 0.6365248595747179}, {\"truth_threshold\": -15.600000232458115, \"match_probability\": 2.0133684259220603e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 495860.0, \"fp\": 1609.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9967656276069463, \"fp_rate\": 0.0032343723930536375, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5062902730899049, \"recall\": 0.8124076809453471, \"specificity\": 0.9967656276069463, \"npv\": 0.9992322278892716, \"accuracy\": 0.996016016016016, \"f1\": 0.6238185255198487, \"f2\": 0.7247650004392515, \"f0_5\": 0.5475542576491671, \"p4\": 0.7677434382344854, \"phi\": 0.6395743230187952}, {\"truth_threshold\": -15.500000230967999, \"match_probability\": 2.1578717331772276e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 495889.0, \"fp\": 1580.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, 
\"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9968239226966906, \"fp_rate\": 0.003176077303309352, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5108359133126935, \"recall\": 0.8124076809453471, \"specificity\": 0.9968239226966906, \"npv\": 0.9992322727547505, \"accuracy\": 0.9960740740740741, \"f1\": 0.6272571754419312, \"f2\": 0.7266161705125946, \"f0_5\": 0.5518025550130427, \"p4\": 0.7703508352719283, \"phi\": 0.642466572031684}, {\"truth_threshold\": -15.400000229477882, \"match_probability\": 2.312746079632102e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 495895.0, \"fp\": 1574.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9968359837497411, \"fp_rate\": 0.00316401625025881, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5117866004962779, \"recall\": 0.8124076809453471, \"specificity\": 0.9968359837497411, \"npv\": 0.9992322820366086, \"accuracy\": 0.9960860860860861, \"f1\": 0.6279733587059942, \"f2\": 0.7270003524850194, \"f0_5\": 0.552689756816507, \"p4\": 0.7708925093222379, \"phi\": 0.6430698198712967}, {\"truth_threshold\": -15.300000227987766, \"match_probability\": 2.478735761747151e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 495944.0, \"fp\": 1525.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.996934482349654, \"fp_rate\": 0.0030655176503460516, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5196850393700787, \"recall\": 0.8124076809453471, \"specificity\": 0.996934482349654, \"npv\": 0.9992323578300508, \"accuracy\": 0.9961841841841842, \"f1\": 0.6338839800230504, \"f2\": 0.7301531108947694, \"f0_5\": 0.5600434457945829, \"p4\": 0.7753448568153782, \"phi\": 0.6480599731671169}, {\"truth_threshold\": -15.20000022649765, \"match_probability\": 
2.6566384864664307e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 495978.0, \"fp\": 1491.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9970028283169403, \"fp_rate\": 0.0029971716830596478, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5253104106972302, \"recall\": 0.8124076809453471, \"specificity\": 0.9970028283169403, \"npv\": 0.9992324104126248, \"accuracy\": 0.9962522522522522, \"f1\": 0.6380510440835266, \"f2\": 0.7323568575233023, \"f0_5\": 0.5652620760534429, \"p4\": 0.7784645844600595, \"phi\": 0.6515907242774444}, {\"truth_threshold\": -15.100000225007534, \"match_probability\": 2.8473092031487608e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496003.0, \"fp\": 1466.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.997053082704651, \"fp_rate\": 0.002946917295349057, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5295250320924262, \"recall\": 0.8124076809453471, \"specificity\": 0.997053082704651, \"npv\": 0.9992324490716864, \"accuracy\": 0.9963023023023023, \"f1\": 0.641150184573538, \"f2\": 0.7339857651245552, \"f0_5\": 0.5691617799241118, \"p4\": 0.7807745651409332, \"phi\": 0.65422353556621}, {\"truth_threshold\": -14.900000222027302, \"match_probability\": 3.270685556819147e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496025.0, \"fp\": 1444.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9970973065658363, \"fp_rate\": 0.0029026934341637367, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5332902391725921, \"recall\": 0.8124076809453471, \"specificity\": 0.9970973065658363, \"npv\": 0.9992324830884397, \"accuracy\": 0.9963463463463463, 
\"f1\": 0.6439024390243903, \"f2\": 0.7354252094847566, \"f0_5\": 0.5726383008259873, \"p4\": 0.7828187173219892, \"phi\": 0.6565666756951178}, {\"truth_threshold\": -14.800000220537186, \"match_probability\": 3.505425758788192e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496064.0, \"fp\": 1405.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9971757034106647, \"fp_rate\": 0.0028242965893352148, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5400981996726678, \"recall\": 0.8124076809453471, \"specificity\": 0.9971757034106647, \"npv\": 0.9992325433834564, \"accuracy\": 0.9964244244244245, \"f1\": 0.6488399528116398, \"f2\": 0.7379908757491725, \"f0_5\": 0.578906743386429, \"p4\": 0.7864688685019895, \"phi\": 0.6607822699275607}, {\"truth_threshold\": -14.70000021904707, \"match_probability\": 3.757012854526189e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496065.0, \"fp\": 1404.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9971777135861732, \"fp_rate\": 0.002822286413826791, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5402750491159135, \"recall\": 0.8124076809453471, \"specificity\": 0.9971777135861732, \"npv\": 0.9992325449293579, \"accuracy\": 0.9964264264264264, \"f1\": 0.6489675516224189, \"f2\": 0.7380568974771873, \"f0_5\": 0.5790692777426827, \"p4\": 0.7865629097515164, \"phi\": 0.6608914195069355}, {\"truth_threshold\": -14.500000216066837, \"match_probability\": 4.315650384728788e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496071.0, \"fp\": 1398.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9971897746392238, \"fp_rate\": 
0.0028102253607762495, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5413385826771654, \"recall\": 0.8124076809453471, \"specificity\": 0.9971897746392238, \"npv\": 0.9992325542046361, \"accuracy\": 0.9964384384384385, \"f1\": 0.6497341996455995, \"f2\": 0.738453276047261, \"f0_5\": 0.580046403712297, \"p4\": 0.7871276298466465, \"phi\": 0.6615474412179109}, {\"truth_threshold\": -14.400000214576721, \"match_probability\": 4.625385233621647e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496086.0, \"fp\": 1383.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9972199272718502, \"fp_rate\": 0.0027800727281498947, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5440158259149357, \"recall\": 0.8124076809453471, \"specificity\": 0.9972199272718502, \"npv\": 0.9992325773918508, \"accuracy\": 0.9964684684684685, \"f1\": 0.6516587677725119, \"f2\": 0.7394460876579726, \"f0_5\": 0.5825037068417708, \"p4\": 0.7885429839164078, \"phi\": 0.663195978152073}, {\"truth_threshold\": -14.300000213086605, \"match_probability\": 4.957348695121048e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496099.0, \"fp\": 1370.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9972460595534596, \"fp_rate\": 0.0027539404465403874, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5463576158940397, \"recall\": 0.8124076809453471, \"specificity\": 0.9972460595534596, \"npv\": 0.9992325974863036, \"accuracy\": 0.9964944944944945, \"f1\": 0.6533359730746386, \"f2\": 0.7403086862885858, \"f0_5\": 0.5846502728367939, \"p4\": 0.7897737475233195, \"phi\": 0.6646346041570037}, {\"truth_threshold\": -14.200000211596489, \"match_probability\": 5.313135876996633e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, 
\"tp\": 1650.0, \"tn\": 496109.0, \"fp\": 1360.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9972661613085438, \"fp_rate\": 0.002733838691456151, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5481727574750831, \"recall\": 0.8124076809453471, \"specificity\": 0.9972661613085438, \"npv\": 0.9992326129428589, \"accuracy\": 0.9965145145145146, \"f1\": 0.6546320174568538, \"f2\": 0.7409735943955452, \"f0_5\": 0.5863122734702579, \"p4\": 0.7907231061133986, \"phi\": 0.665747556913812}, {\"truth_threshold\": -14.100000210106373, \"match_probability\": 5.694456326333118e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496119.0, \"fp\": 1350.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9972862630636281, \"fp_rate\": 0.0027137369363719145, \"fn_rate\": 0.18759231905465287, \"precision\": 0.55, \"recall\": 0.8124076809453471, \"specificity\": 0.9972862630636281, \"npv\": 0.9992326283987916, \"accuracy\": 0.9965345345345346, \"f1\": 0.655933214072749, \"f2\": 0.7416396979503775, \"f0_5\": 0.5879837502672653, \"p4\": 0.7916747497021438, \"phi\": 0.6668660533170312}, {\"truth_threshold\": -14.000000208616257, \"match_probability\": 6.103142236234761e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496168.0, \"fp\": 1301.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9973847616635408, \"fp_rate\": 0.0026152383364591563, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5591324974584887, \"recall\": 0.8124076809453471, \"specificity\": 0.9973847616635408, \"npv\": 0.9992327041238629, \"accuracy\": 0.9966326326326327, \"f1\": 0.6623845845042152, \"f2\": 0.744920993227991, \"f0_5\": 0.5963136971449223, \"p4\": 
0.7963711088402224, \"phi\": 0.6724283757079088}, {\"truth_threshold\": -13.90000020712614, \"match_probability\": 6.541157240512605e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496172.0, \"fp\": 1297.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9973928023655746, \"fp_rate\": 0.0026071976344254617, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5598914149983033, \"recall\": 0.8124076809453471, \"specificity\": 0.9973928023655746, \"npv\": 0.9992327103048416, \"accuracy\": 0.9966406406406406, \"f1\": 0.6629168340699076, \"f2\": 0.7451901363923765, \"f0_5\": 0.597004124755771, \"p4\": 0.7967569460627082, \"phi\": 0.6728885397309743}, {\"truth_threshold\": -13.700000204145908, \"match_probability\": 7.51374349434771e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496175.0, \"fp\": 1294.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9973988328920999, \"fp_rate\": 0.0026011671079001907, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5604619565217391, \"recall\": 0.8124076809453471, \"specificity\": 0.9973988328920999, \"npv\": 0.9992327149405102, \"accuracy\": 0.9966466466466466, \"f1\": 0.6633165829145728, \"f2\": 0.7453921214311529, \"f0_5\": 0.5975229955819512, \"p4\": 0.7970465694085548, \"phi\": 0.6732342762443393}, {\"truth_threshold\": -13.600000202655792, \"match_probability\": 8.052987461117984e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496182.0, \"fp\": 1287.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9974129041206587, \"fp_rate\": 0.0025870958793412255, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5617977528089888, \"recall\": 
0.8124076809453471, \"specificity\": 0.9974129041206587, \"npv\": 0.9992327257568526, \"accuracy\": 0.9966606606606606, \"f1\": 0.6642512077294686, \"f2\": 0.7458638459452129, \"f0_5\": 0.5987372087959939, \"p4\": 0.797723176701015, \"phi\": 0.6740430478775454}, {\"truth_threshold\": -13.500000201165676, \"match_probability\": 8.630928377906233e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496183.0, \"fp\": 1286.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9974149142961672, \"fp_rate\": 0.0025850857038328015, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5619891008174387, \"recall\": 0.8124076809453471, \"specificity\": 0.9974149142961672, \"npv\": 0.9992327273020195, \"accuracy\": 0.9966626626626627, \"f1\": 0.6643849406080129, \"f2\": 0.7459312839059674, \"f0_5\": 0.5989110707803993, \"p4\": 0.7978199286660785, \"phi\": 0.674158822054482}, {\"truth_threshold\": -13.40000019967556, \"match_probability\": 9.25034269879762e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496206.0, \"fp\": 1263.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9974611483328609, \"fp_rate\": 0.002538851667139058, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5664263645726055, \"recall\": 0.8124076809453471, \"specificity\": 0.9974611483328609, \"npv\": 0.99923276283914, \"accuracy\": 0.9967087087087088, \"f1\": 0.6674757281553398, \"f2\": 0.7474857298178853, \"f0_5\": 0.6029379522034641, \"p4\": 0.8000517191080434, \"phi\": 0.6768380125905187}, {\"truth_threshold\": -13.300000198185444, \"match_probability\": 9.914206010875549e-05, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496228.0, \"fp\": 1241.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 
0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9975053721940462, \"fp_rate\": 0.002494627805953738, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5707367692839848, \"recall\": 0.8124076809453471, \"specificity\": 0.9975053721940462, \"npv\": 0.9992327968280881, \"accuracy\": 0.9967527527527528, \"f1\": 0.6704591629418936, \"f2\": 0.7489786654561961, \"f0_5\": 0.6068407502758367, \"p4\": 0.8021981896092447, \"phi\": 0.6794304905480503}, {\"truth_threshold\": -13.200000196695328, \"match_probability\": 0.00010625707305470121, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496238.0, \"fp\": 1231.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9975254739491305, \"fp_rate\": 0.002474526050869501, \"fn_rate\": 0.18759231905465287, \"precision\": 0.572717806317251, \"recall\": 0.8124076809453471, \"specificity\": 0.9975254739491305, \"npv\": 0.9992328122766144, \"accuracy\": 0.9967727727727728, \"f1\": 0.6718241042345277, \"f2\": 0.7496592457973649, \"f0_5\": 0.6086315012910365, \"p4\": 0.8031776699267482, \"phi\": 0.6806186663334892}, {\"truth_threshold\": -13.100000195205212, \"match_probability\": 0.00011388264270550263, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496258.0, \"fp\": 1211.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9975656774592989, \"fp_rate\": 0.0024343225407010287, \"fn_rate\": 0.18759231905465287, \"precision\": 0.576721426074799, \"recall\": 0.8124076809453471, \"specificity\": 0.9975656774592989, \"npv\": 0.9992328431718008, \"accuracy\": 0.9968128128128129, \"f1\": 0.6745707277187244, \"f2\": 0.7510241238051889, \"f0_5\": 0.6122448979591837, \"p4\": 0.8051438233772689, \"phi\": 0.6830136263275881}, {\"truth_threshold\": -13.000000193715096, 
\"match_probability\": 0.00012205539677081966, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496277.0, \"fp\": 1192.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9976038707939591, \"fp_rate\": 0.0023961292060409793, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5805770584095707, \"recall\": 0.8124076809453471, \"specificity\": 0.9976038707939591, \"npv\": 0.9992328725199232, \"accuracy\": 0.9968508508508509, \"f1\": 0.6772009029345373, \"f2\": 0.7523253693233631, \"f0_5\": 0.6157175908649899, \"p4\": 0.8070206056804747, \"phi\": 0.6853121492181934}, {\"truth_threshold\": -12.90000019222498, \"match_probability\": 0.00013081458937332365, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496323.0, \"fp\": 1146.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9976963388673465, \"fp_rate\": 0.002303661132653492, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5901287553648069, \"recall\": 0.8124076809453471, \"specificity\": 0.9976963388673465, \"npv\": 0.9992329435639737, \"accuracy\": 0.996942942942943, \"f1\": 0.6836544437538844, \"f2\": 0.7554945054945055, \"f0_5\": 0.6242905788876277, \"p4\": 0.8116008322256546, \"phi\": 0.690973430332776}, {\"truth_threshold\": -12.800000190734863, \"match_probability\": 0.00014020228918616167, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496331.0, \"fp\": 1138.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9977124202714139, \"fp_rate\": 0.002287579728586103, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5918220946915351, \"recall\": 0.8124076809453471, \"specificity\": 0.9977124202714139, \"npv\": 0.9992329559181176, \"accuracy\": 
0.9969589589589589, \"f1\": 0.6847893753890849, \"f2\": 0.7560483870967742, \"f0_5\": 0.6258059622240765, \"p4\": 0.8124027054184834, \"phi\": 0.6919722374666267}, {\"truth_threshold\": -12.700000189244747, \"match_probability\": 0.00015026358101882152, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496337.0, \"fp\": 1132.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9977244813244645, \"fp_rate\": 0.002275518675535561, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5930984902947519, \"recall\": 0.8124076809453471, \"specificity\": 0.9977244813244645, \"npv\": 0.9992329651834643, \"accuracy\": 0.996970970970971, \"f1\": 0.6856430500727198, \"f2\": 0.7564643315606089, \"f0_5\": 0.6269473364237405, \"p4\": 0.8130051508717604, \"phi\": 0.6927241606924014}, {\"truth_threshold\": -12.600000187754631, \"match_probability\": 0.0001610467818084837, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496342.0, \"fp\": 1127.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9977345322020066, \"fp_rate\": 0.0022654677979934428, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5941663665826431, \"recall\": 0.8124076809453471, \"specificity\": 0.9977345322020066, \"npv\": 0.9992329729044155, \"accuracy\": 0.996980980980981, \"f1\": 0.6863560732113144, \"f2\": 0.756811301715439, \"f0_5\": 0.6279016667935154, \"p4\": 0.8135078716684839, \"phi\": 0.6933526189264908}, {\"truth_threshold\": -12.500000186264515, \"match_probability\": 0.00017260367204143044, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496347.0, \"fp\": 1122.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9977445830795487, 
\"fp_rate\": 0.0022554169204513246, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5952380952380952, \"recall\": 0.8124076809453471, \"specificity\": 0.9977445830795487, \"npv\": 0.9992329806252114, \"accuracy\": 0.996990990990991, \"f1\": 0.6870705808869456, \"f2\": 0.7571585903083701, \"f0_5\": 0.6288589069288818, \"p4\": 0.8140112145298072, \"phi\": 0.693982772126206}, {\"truth_threshold\": -12.400000184774399, \"match_probability\": 0.00018498974370122882, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496367.0, \"fp\": 1102.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9977847865897171, \"fp_rate\": 0.0022152134102828517, \"fn_rate\": 0.18759231905465287, \"precision\": 0.5995639534883721, \"recall\": 0.8124076809453471, \"specificity\": 0.9977847865897171, \"npv\": 0.9992330115068405, \"accuracy\": 0.9970310310310311, \"f1\": 0.6899435500731759, \"f2\": 0.7585509378447959, \"f0_5\": 0.6327172329166347, \"p4\": 0.8160308297706109, \"phi\": 0.6965204883002213}, {\"truth_threshold\": -12.300000183284283, \"match_probability\": 0.00019826446591752426, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496381.0, \"fp\": 1088.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.997812929046835, \"fp_rate\": 0.002187070953164921, \"fn_rate\": 0.18759231905465287, \"precision\": 0.6026296566837107, \"recall\": 0.8124076809453471, \"specificity\": 0.997812929046835, \"npv\": 0.9992330331225013, \"accuracy\": 0.997059059059059, \"f1\": 0.6919689662403019, \"f2\": 0.7595286319278217, \"f0_5\": 0.6354463529230532, \"p4\": 0.8174505333389831, \"phi\": 0.6983133649489494}, {\"truth_threshold\": -12.200000181794167, \"match_probability\": 0.00021249156957169895, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 
497469.0, \"tp\": 1650.0, \"tn\": 496411.0, \"fp\": 1058.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9978732343120877, \"fp_rate\": 0.0021267656879122116, \"fn_rate\": 0.18759231905465287, \"precision\": 0.6093057607090103, \"recall\": 0.8124076809453471, \"specificity\": 0.9978732343120877, \"npv\": 0.9992330794376721, \"accuracy\": 0.9971191191191191, \"f1\": 0.6963494408102975, \"f2\": 0.7616322008862629, \"f0_5\": 0.641374484956853, \"p4\": 0.8205094509939885, \"phi\": 0.702201828795505}, {\"truth_threshold\": -12.10000018030405, \"match_probability\": 0.0002277393522037113, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496417.0, \"fp\": 1052.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9978852953651384, \"fp_rate\": 0.00211470463486167, \"fn_rate\": 0.18759231905465287, \"precision\": 0.6106587712805329, \"recall\": 0.8124076809453471, \"specificity\": 0.9978852953651384, \"npv\": 0.999233088700035, \"accuracy\": 0.9971311311311312, \"f1\": 0.6972321994506655, \"f2\": 0.7620543136892666, \"f0_5\": 0.6425734091440143, \"p4\": 0.8211239836030536, \"phi\": 0.7029872619409204}, {\"truth_threshold\": -12.000000178813934, \"match_probability\": 0.00024408100465850272, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496442.0, \"fp\": 1027.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9979355497528489, \"fp_rate\": 0.002064450247151079, \"fn_rate\": 0.18759231905465287, \"precision\": 0.616361598804632, \"recall\": 0.8124076809453471, \"specificity\": 0.9979355497528489, \"npv\": 0.9992331272908058, \"accuracy\": 0.9971811811811812, \"f1\": 0.7009345794392523, \"f2\": 0.7638181649847237, \"f0_5\": 0.6476175523981474, 
\"p4\": 0.8236944750682825, \"phi\": 0.7062881983616519}, {\"truth_threshold\": -11.900000177323818, \"match_probability\": 0.0002615949610108224, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496459.0, \"fp\": 1010.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9979697227364921, \"fp_rate\": 0.002030277263507877, \"fn_rate\": 0.18759231905465287, \"precision\": 0.6203007518796992, \"recall\": 0.8124076809453471, \"specificity\": 0.9979697227364921, \"npv\": 0.9992331535303116, \"accuracy\": 0.9972152152152152, \"f1\": 0.70347473886165, \"f2\": 0.7650222551928784, \"f0_5\": 0.6510930471154605, \"p4\": 0.8254516210525357, \"phi\": 0.7085592972779587}, {\"truth_threshold\": -11.800000175833702, \"match_probability\": 0.0002803652734145845, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496482.0, \"fp\": 987.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9980159567731859, \"fp_rate\": 0.001984043226814133, \"fn_rate\": 0.18759231905465287, \"precision\": 0.6257110352673493, \"recall\": 0.8124076809453471, \"specificity\": 0.9980159567731859, \"npv\": 0.9992331890279614, \"accuracy\": 0.9972612612612612, \"f1\": 0.7069408740359897, \"f2\": 0.766657373850014, \"f0_5\": 0.6558549964226091, \"p4\": 0.8278409028277725, \"phi\": 0.711666756020395}, {\"truth_threshold\": -11.700000174343586, \"match_probability\": 0.0003004820136373637, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496492.0, \"fp\": 977.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9980360585282702, \"fp_rate\": 0.0019639414717298968, \"fn_rate\": 0.18759231905465287, \"precision\": 0.6280928816140083, \"recall\": 
0.8124076809453471, \"specificity\": 0.9980360585282702, \"npv\": 0.9992332044606972, \"accuracy\": 0.9972812812812812, \"f1\": 0.7084585659081151, \"f2\": 0.7673704771649149, \"f0_5\": 0.6579472047212697, \"p4\": 0.8288840404075595, \"phi\": 0.7130305036025687}, {\"truth_threshold\": -11.60000017285347, \"match_probability\": 0.0003220417031628006, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1650.0, \"tn\": 496506.0, \"fp\": 963.0, \"fn\": 381.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8124076809453471, \"tn_rate\": 0.9980642009853881, \"fp_rate\": 0.0019357990146119657, \"fn_rate\": 0.18759231905465287, \"precision\": 0.6314580941446614, \"recall\": 0.8124076809453471, \"specificity\": 0.9980642009853881, \"npv\": 0.9992332260654837, \"accuracy\": 0.9973093093093093, \"f1\": 0.710594315245478, \"f2\": 0.7683710533668623, \"f0_5\": 0.6608988223984619, \"p4\": 0.8303488570615297, \"phi\": 0.7149528524372053}, {\"truth_threshold\": -11.400000169873238, \"match_probability\": 0.0003699110614699968, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1649.0, \"tn\": 496508.0, \"fp\": 961.0, \"fn\": 382.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8119153126538651, \"tn_rate\": 0.9980682213364048, \"fp_rate\": 0.0019317786635951184, \"fn_rate\": 0.1880846873461349, \"precision\": 0.6318007662835249, \"recall\": 0.8119153126538651, \"specificity\": 0.9980682213364048, \"npv\": 0.9992312181770613, \"accuracy\": 0.9973113113113113, \"f1\": 0.7106227106227107, \"f2\": 0.7681199925470468, \"f0_5\": 0.6611338304867292, \"p4\": 0.8303685921194651, \"phi\": 0.7149304279678399}, {\"truth_threshold\": -11.300000168383121, \"match_probability\": 0.00039645033391533577, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1649.0, \"tn\": 496514.0, \"fp\": 955.0, \"fn\": 382.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 
0.9959339339339339, \"tp_rate\": 0.8119153126538651, \"tn_rate\": 0.9980802823894555, \"fp_rate\": 0.0019197176105445767, \"fn_rate\": 0.1880846873461349, \"precision\": 0.6332565284178188, \"recall\": 0.8119153126538651, \"specificity\": 0.9980802823894555, \"npv\": 0.9992312274600721, \"accuracy\": 0.9973233233233233, \"f1\": 0.7115426105717367, \"f2\": 0.7685495898583147, \"f0_5\": 0.6624086125170724, \"p4\": 0.8309983673070499, \"phi\": 0.7157599431233398}, {\"truth_threshold\": -11.200000166893005, \"match_probability\": 0.00042489285738089063, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1649.0, \"tn\": 496547.0, \"fp\": 922.0, \"fn\": 382.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8119153126538651, \"tn_rate\": 0.9981466181812334, \"fp_rate\": 0.0018533818187665965, \"fn_rate\": 0.1880846873461349, \"precision\": 0.6413846752236484, \"recall\": 0.8119153126538651, \"specificity\": 0.9981466181812334, \"npv\": 0.9992312785126245, \"accuracy\": 0.9973893893893894, \"f1\": 0.7166449369839201, \"f2\": 0.7709209911173446, \"f0_5\": 0.6695087291920422, \"p4\": 0.8344792770642273, \"phi\": 0.7203739281840877}, {\"truth_threshold\": -11.000000163912773, \"match_probability\": 0.00048804289235713973, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1649.0, \"tn\": 496585.0, \"fp\": 884.0, \"fn\": 382.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8119153126538651, \"tn_rate\": 0.9982230048505535, \"fp_rate\": 0.0017769951494464981, \"fn_rate\": 0.1880846873461349, \"precision\": 0.6510067114093959, \"recall\": 0.8119153126538651, \"specificity\": 0.9982230048505535, \"npv\": 0.9992313372920133, \"accuracy\": 0.9974654654654654, \"f1\": 0.7226117440841368, \"f2\": 0.7736698883363048, \"f0_5\": 0.6778755241305598, \"p4\": 0.8385238948508511, \"phi\": 0.7257980151201269}, {\"truth_threshold\": -10.900000162422657, 
\"match_probability\": 0.0005230530993675534, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1649.0, \"tn\": 496594.0, \"fp\": 875.0, \"fn\": 382.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8119153126538651, \"tn_rate\": 0.9982410964301293, \"fp_rate\": 0.0017589035698706854, \"fn_rate\": 0.1880846873461349, \"precision\": 0.6533280507131537, \"recall\": 0.8119153126538651, \"specificity\": 0.9982410964301293, \"npv\": 0.9992313512121309, \"accuracy\": 0.9974834834834835, \"f1\": 0.72403951701427, \"f2\": 0.7743238166791886, \"f0_5\": 0.6798878535499299, \"p4\": 0.8394875795924727, \"phi\": 0.7271005307230821}, {\"truth_threshold\": -10.800000160932541, \"match_probability\": 0.0005605733873065377, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1649.0, \"tn\": 496612.0, \"fp\": 857.0, \"fn\": 382.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8119153126538651, \"tn_rate\": 0.9982772795892809, \"fp_rate\": 0.00172272041071906, \"fn_rate\": 0.1880846873461349, \"precision\": 0.6580207501995211, \"recall\": 0.8119153126538651, \"specificity\": 0.9982772795892809, \"npv\": 0.9992313790508537, \"accuracy\": 0.9975195195195196, \"f1\": 0.7269120564249504, \"f2\": 0.7756349952963312, \"f0_5\": 0.683948569058482, \"p4\": 0.8414216092003565, \"phi\": 0.7297265284712408}, {\"truth_threshold\": -10.700000159442425, \"match_probability\": 0.0006007835088396779, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1649.0, \"tn\": 496616.0, \"fp\": 853.0, \"fn\": 382.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8119153126538651, \"tn_rate\": 0.9982853202913147, \"fp_rate\": 0.0017146797086853654, \"fn_rate\": 0.1880846873461349, \"precision\": 0.6590727418065547, \"recall\": 0.8119153126538651, \"specificity\": 0.9982853202913147, \"npv\": 0.9992313852369628, \"accuracy\": 
0.9975275275275275, \"f1\": 0.727553496580631, \"f2\": 0.7759269715791455, \"f0_5\": 0.6848575463078329, \"p4\": 0.8418526042931959, \"phi\": 0.7303139190793791}, {\"truth_threshold\": -10.600000157952309, \"match_probability\": 0.0006438760580315065, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1649.0, \"tn\": 496623.0, \"fp\": 846.0, \"fn\": 382.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8119153126538651, \"tn_rate\": 0.9982993915198736, \"fp_rate\": 0.0017006084801263997, \"fn_rate\": 0.1880846873461349, \"precision\": 0.6609218436873747, \"recall\": 0.8119153126538651, \"specificity\": 0.9982993915198736, \"npv\": 0.9992313960624138, \"accuracy\": 0.9975415415415415, \"f1\": 0.728678745028723, \"f2\": 0.7764384593652887, \"f0_5\": 0.6864540837565565, \"p4\": 0.8426079090367605, \"phi\": 0.7313452412186806}, {\"truth_threshold\": -10.500000156462193, \"match_probability\": 0.0006900573831033208, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1649.0, \"tn\": 496636.0, \"fp\": 833.0, \"fn\": 382.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8119153126538651, \"tn_rate\": 0.9983255238014831, \"fp_rate\": 0.0016744761985168924, \"fn_rate\": 0.1880846873461349, \"precision\": 0.6643835616438356, \"recall\": 0.8119153126538651, \"specificity\": 0.9983255238014831, \"npv\": 0.9992314161660141, \"accuracy\": 0.9975675675675676, \"f1\": 0.7307777531575449, \"f2\": 0.7773901565151801, \"f0_5\": 0.6894389162973493, \"p4\": 0.8440142193773499, \"phi\": 0.7332720877716197}, {\"truth_threshold\": -10.400000154972076, \"match_probability\": 0.0007395485633816526, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1649.0, \"tn\": 496669.0, \"fp\": 800.0, \"fn\": 382.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8119153126538651, \"tn_rate\": 0.998391859593261, \"fp_rate\": 
0.0016081404067389124, \"fn_rate\": 0.1880846873461349, \"precision\": 0.6733360555328706, \"recall\": 0.8119153126538651, \"specificity\": 0.998391859593261, \"npv\": 0.9992314671935073, \"accuracy\": 0.9976336336336337, \"f1\": 0.7361607142857143, \"f2\": 0.7798165137614679, \"f0_5\": 0.6971336771793354, \"p4\": 0.8476052551686003, \"phi\": 0.7382318656666521}, {\"truth_threshold\": -10.30000015348196, \"match_probability\": 0.0007925864548491303, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1649.0, \"tn\": 496673.0, \"fp\": 796.0, \"fn\": 382.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8119153126538651, \"tn_rate\": 0.9983999002952948, \"fp_rate\": 0.0016000997047052178, \"fn_rate\": 0.1880846873461349, \"precision\": 0.6744376278118609, \"recall\": 0.8119153126538651, \"specificity\": 0.9983999002952948, \"npv\": 0.9992314733781976, \"accuracy\": 0.9976416416416416, \"f1\": 0.7368185880250223, \"f2\": 0.7801116472703189, \"f0_5\": 0.6980780628227923, \"p4\": 0.8480426096573597, \"phi\": 0.7388398481322535}, {\"truth_threshold\": -10.200000151991844, \"match_probability\": 0.0008494248089972806, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1649.0, \"tn\": 496699.0, \"fp\": 770.0, \"fn\": 382.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8119153126538651, \"tn_rate\": 0.9984521648585138, \"fp_rate\": 0.0015478351414862032, \"fn_rate\": 0.1880846873461349, \"precision\": 0.6816866473749483, \"recall\": 0.8119153126538651, \"specificity\": 0.9984521648585138, \"npv\": 0.9992315135762582, \"accuracy\": 0.9976936936936937, \"f1\": 0.741123595505618, \"f2\": 0.7820354737740681, \"f0_5\": 0.7042794909028787, \"p4\": 0.8508964516877607, \"phi\": 0.7428283331176843}, {\"truth_threshold\": -10.100000150501728, \"match_probability\": 0.0009103354699850551, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 
1649.0, \"tn\": 496744.0, \"fp\": 725.0, \"fn\": 382.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8119153126538651, \"tn_rate\": 0.9985426227563928, \"fp_rate\": 0.0014573772436071394, \"fn_rate\": 0.1880846873461349, \"precision\": 0.6946082561078348, \"recall\": 0.8119153126538651, \"specificity\": 0.9985426227563928, \"npv\": 0.999231583139888, \"accuracy\": 0.9977837837837837, \"f1\": 0.7486946651532349, \"f2\": 0.7853876928938845, \"f0_5\": 0.715277175327492, \"p4\": 0.8558814478534577, \"phi\": 0.7498853269814624}, {\"truth_threshold\": -10.000000149011612, \"match_probability\": 0.0009756096554280922, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1649.0, \"tn\": 496756.0, \"fp\": 713.0, \"fn\": 382.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8119153126538651, \"tn_rate\": 0.998566744862494, \"fp_rate\": 0.0014332551375060556, \"fn_rate\": 0.1880846873461349, \"precision\": 0.6981371718882303, \"recall\": 0.8119153126538651, \"specificity\": 0.998566744862494, \"npv\": 0.9992316016880625, \"accuracy\": 0.9978078078078078, \"f1\": 0.7507398133394036, \"f2\": 0.7862864772077055, \"f0_5\": 0.7182681418242007, \"p4\": 0.8572206597924804, \"phi\": 0.7518010896878068}, {\"truth_threshold\": -9.900000147521496, \"match_probability\": 0.0010455593264824352, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1648.0, \"tn\": 496801.0, \"fp\": 668.0, \"fn\": 383.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.811422944362383, \"tn_rate\": 0.998657202760373, \"fp_rate\": 0.0013427972396269918, \"fn_rate\": 0.18857705563761692, \"precision\": 0.7115716753022453, \"recall\": 0.811422944362383, \"specificity\": 0.998657202760373, \"npv\": 0.999229661453305, \"accuracy\": 0.9978958958958959, \"f1\": 0.7582240625718887, \"f2\": 0.789272030651341, \"f0_5\": 0.7295263390880921, \"p4\": 0.862095301480666, 
\"phi\": 0.7588188528588281}, {\"truth_threshold\": -9.80000014603138, \"match_probability\": 0.0011205186532430977, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1648.0, \"tn\": 496805.0, \"fp\": 664.0, \"fn\": 383.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.811422944362383, \"tn_rate\": 0.9986652434624067, \"fp_rate\": 0.0013347565375932972, \"fn_rate\": 0.18857705563761692, \"precision\": 0.71280276816609, \"recall\": 0.811422944362383, \"specificity\": 0.9986652434624067, \"npv\": 0.9992296676508685, \"accuracy\": 0.9979039039039039, \"f1\": 0.7589224038682938, \"f2\": 0.7895745496358758, \"f0_5\": 0.7305612199663091, \"p4\": 0.8625480141454344, \"phi\": 0.7594794701658701}, {\"truth_threshold\": -9.700000144541264, \"match_probability\": 0.001200845581852835, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1648.0, \"tn\": 496862.0, \"fp\": 607.0, \"fn\": 383.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.811422944362383, \"tn_rate\": 0.9987798234663868, \"fp_rate\": 0.0012201765336131497, \"fn_rate\": 0.18857705563761692, \"precision\": 0.730820399113082, \"recall\": 0.811422944362383, \"specificity\": 0.9987798234663868, \"npv\": 0.9992297559553138, \"accuracy\": 0.998018018018018, \"f1\": 0.7690153989734018, \"f2\": 0.79391078138549, \"f0_5\": 0.7456338792869424, \"p4\": 0.8690512188802841, \"phi\": 0.7690829970102296}, {\"truth_threshold\": -9.600000143051147, \"match_probability\": 0.001286923510110021, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1648.0, \"tn\": 496870.0, \"fp\": 599.0, \"fn\": 383.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.811422944362383, \"tn_rate\": 0.9987959048704542, \"fp_rate\": 0.0012040951295457607, \"fn_rate\": 0.18857705563761692, \"precision\": 0.7334223408989764, \"recall\": 0.811422944362383, \"specificity\": 
0.9987959048704542, \"npv\": 0.9992297683473, \"accuracy\": 0.998034034034034, \"f1\": 0.7704534829359514, \"f2\": 0.7945231896634847, \"f0_5\": 0.7477992558308376, \"p4\": 0.8699718046721923, \"phi\": 0.7704599592720761}, {\"truth_threshold\": -9.500000141561031, \"match_probability\": 0.0013791630787767571, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1648.0, \"tn\": 496883.0, \"fp\": 586.0, \"fn\": 383.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.811422944362383, \"tn_rate\": 0.9988220371520637, \"fp_rate\": 0.0011779628479362534, \"fn_rate\": 0.18857705563761692, \"precision\": 0.7376902417188899, \"recall\": 0.811422944362383, \"specificity\": 0.9988220371520637, \"npv\": 0.9992297884834274, \"accuracy\": 0.9980600600600601, \"f1\": 0.7728018757327081, \"f2\": 0.7955203707279398, \"f0_5\": 0.7513449439226771, \"p4\": 0.8714719232830772, \"phi\": 0.7727132432208614}, {\"truth_threshold\": -9.400000140070915, \"match_probability\": 0.001478004086219237, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1648.0, \"tn\": 496914.0, \"fp\": 555.0, \"fn\": 383.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.811422944362383, \"tn_rate\": 0.9988843525928249, \"fp_rate\": 0.0011156474071751204, \"fn_rate\": 0.18857705563761692, \"precision\": 0.7480708125283704, \"recall\": 0.811422944362383, \"specificity\": 0.9988843525928249, \"npv\": 0.9992298364960979, \"accuracy\": 0.9981221221221221, \"f1\": 0.7784600850259802, \"f2\": 0.7979083954681901, \"f0_5\": 0.7599372867287651, \"p4\": 0.8750700916095864, \"phi\": 0.7781665431794722}, {\"truth_threshold\": -9.300000138580799, \"match_probability\": 0.0015839175344616876, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1648.0, \"tn\": 496926.0, \"fp\": 543.0, \"fn\": 383.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 
0.811422944362383, \"tn_rate\": 0.998908474698926, \"fp_rate\": 0.0010915253010740367, \"fn_rate\": 0.18857705563761692, \"precision\": 0.7521679598356915, \"recall\": 0.811422944362383, \"specificity\": 0.998908474698926, \"npv\": 0.9992298550800408, \"accuracy\": 0.9981461461461462, \"f1\": 0.7806726669824727, \"f2\": 0.7988366456616578, \"f0_5\": 0.7633163501621121, \"p4\": 0.8764709202608811, \"phi\": 0.7803084375041911}, {\"truth_threshold\": -9.200000137090683, \"match_probability\": 0.0016974078152024628, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1648.0, \"tn\": 496961.0, \"fp\": 508.0, \"fn\": 383.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.811422944362383, \"tn_rate\": 0.9989788308417208, \"fp_rate\": 0.0010211691582792093, \"fn_rate\": 0.18857705563761692, \"precision\": 0.764378478664193, \"recall\": 0.811422944362383, \"specificity\": 0.9989788308417208, \"npv\": 0.9992299092780852, \"accuracy\": 0.9982162162162163, \"f1\": 0.7871984714592787, \"f2\": 0.8015564202334631, \"f0_5\": 0.7733458470201783, \"p4\": 0.8805824065065767, \"phi\": 0.7866572272274414}, {\"truth_threshold\": -9.100000135600567, \"match_probability\": 0.0018190150448253225, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1648.0, \"tn\": 496968.0, \"fp\": 501.0, \"fn\": 383.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.811422944362383, \"tn_rate\": 0.9989929020702798, \"fp_rate\": 0.001007097929720244, \"fn_rate\": 0.18857705563761692, \"precision\": 0.7668683108422523, \"recall\": 0.811422944362383, \"specificity\": 0.9989929020702798, \"npv\": 0.9992299201167787, \"accuracy\": 0.9982302302302303, \"f1\": 0.7885167464114833, \"f2\": 0.802102599046043, \"f0_5\": 0.77538345723158, \"p4\": 0.8814093367028363, \"phi\": 0.7879455224146075}, {\"truth_threshold\": -9.00000013411045, \"match_probability\": 0.0019493175579394322, 
\"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1648.0, \"tn\": 496969.0, \"fp\": 500.0, \"fn\": 383.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.811422944362383, \"tn_rate\": 0.9989949122457882, \"fp_rate\": 0.0010050877542118202, \"fn_rate\": 0.18857705563761692, \"precision\": 0.7672253258845437, \"recall\": 0.811422944362383, \"specificity\": 0.9989949122457882, \"npv\": 0.9992299216651386, \"accuracy\": 0.9982322322322322, \"f1\": 0.7887054319215123, \"f2\": 0.8021806853582555, \"f0_5\": 0.7756754212557658, \"p4\": 0.8815275963818923, \"phi\": 0.7881300774017567}, {\"truth_threshold\": -8.900000132620335, \"match_probability\": 0.002088934569496736, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1648.0, \"tn\": 496987.0, \"fp\": 482.0, \"fn\": 383.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.811422944362383, \"tn_rate\": 0.9990310954049398, \"fp_rate\": 0.0009689045950601947, \"fn_rate\": 0.18857705563761692, \"precision\": 0.7737089201877935, \"recall\": 0.811422944362383, \"specificity\": 0.9990310954049398, \"npv\": 0.9992299495345517, \"accuracy\": 0.9982682682682683, \"f1\": 0.7921172795001201, \"f2\": 0.8035888433781939, \"f0_5\": 0.7809686285660127, \"p4\": 0.8836617099968711, \"phi\": 0.7914742127572213}, {\"truth_threshold\": -8.800000131130219, \"match_probability\": 0.0022385290160630528, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1647.0, \"tn\": 497006.0, \"fp\": 463.0, \"fn\": 384.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.810930576070901, \"tn_rate\": 0.9990692887395999, \"fp_rate\": 0.0009307112604001455, \"fn_rate\": 0.18906942392909898, \"precision\": 0.7805687203791469, \"recall\": 0.810930576070901, \"specificity\": 0.9990692887395999, \"npv\": 0.9992279700034178, \"accuracy\": 0.9983043043043043, \"f1\": 0.7954600338082589, \"f2\": 
0.8046707054914989, \"f0_5\": 0.7864578359278006, \"p4\": 0.8857449722433604, \"phi\": 0.7947548136309421}, {\"truth_threshold\": -8.700000129640102, \"match_probability\": 0.002398810587356977, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1647.0, \"tn\": 497020.0, \"fp\": 449.0, \"fn\": 384.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.810930576070901, \"tn_rate\": 0.9990974311967178, \"fp_rate\": 0.0009025688032822146, \"fn_rate\": 0.18906942392909898, \"precision\": 0.7857824427480916, \"recall\": 0.810930576070901, \"specificity\": 0.9990974311967178, \"npv\": 0.9992279917330782, \"accuracy\": 0.9983323323323323, \"f1\": 0.7981584686212745, \"f2\": 0.8057729941291585, \"f0_5\": 0.7906865098415746, \"p4\": 0.887420887274379, \"phi\": 0.7974211170561447}, {\"truth_threshold\": -8.600000128149986, \"match_probability\": 0.0025705389597152823, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1647.0, \"tn\": 497041.0, \"fp\": 428.0, \"fn\": 384.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.810930576070901, \"tn_rate\": 0.9991396448823947, \"fp_rate\": 0.0008603551176053181, \"fn_rate\": 0.18906942392909898, \"precision\": 0.7937349397590362, \"recall\": 0.810930576070901, \"specificity\": 0.9991396448823947, \"npv\": 0.9992280243252751, \"accuracy\": 0.9983743743743744, \"f1\": 0.8022406234778373, \"f2\": 0.8074321011863909, \"f0_5\": 0.7971154776885103, \"p4\": 0.889946684299984, \"phi\": 0.8014709498937302}, {\"truth_threshold\": -8.50000012665987, \"match_probability\": 0.0027545272436909716, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1647.0, \"tn\": 497062.0, \"fp\": 407.0, \"fn\": 384.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.810930576070901, \"tn_rate\": 0.9991818585680716, \"fp_rate\": 0.0008181414319284216, \"fn_rate\": 0.18906942392909898, 
\"precision\": 0.8018500486854917, \"recall\": 0.810930576070901, \"specificity\": 0.9991818585680716, \"npv\": 0.9992280569147204, \"accuracy\": 0.9984164164164164, \"f1\": 0.8063647490820074, \"f2\": 0.8090980546276282, \"f0_5\": 0.8036498487362155, \"p4\": 0.8924868996122787, \"phi\": 0.8055826056114691}, {\"truth_threshold\": -8.400000125169754, \"match_probability\": 0.0029516456585356845, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1647.0, \"tn\": 497067.0, \"fp\": 402.0, \"fn\": 384.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.810930576070901, \"tn_rate\": 0.9991919094456136, \"fp_rate\": 0.0008080905543863035, \"fn_rate\": 0.18906942392909898, \"precision\": 0.8038067349926794, \"recall\": 0.810930576070901, \"specificity\": 0.9991919094456136, \"npv\": 0.9992280646737065, \"accuracy\": 0.9984264264264264, \"f1\": 0.8073529411764706, \"f2\": 0.8094957239752285, \"f0_5\": 0.8052214725726019, \"p4\": 0.8930938515107973, \"phi\": 0.8065708573101243}, {\"truth_threshold\": -8.300000123679638, \"match_probability\": 0.0031628254468557835, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1646.0, \"tn\": 497085.0, \"fp\": 384.0, \"fn\": 385.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.810438207779419, \"tn_rate\": 0.9992280926047653, \"fp_rate\": 0.000771907395234678, \"fn_rate\": 0.189561792220581, \"precision\": 0.8108374384236453, \"recall\": 0.810438207779419, \"specificity\": 0.9992280926047653, \"npv\": 0.9992260839849639, \"accuracy\": 0.9984604604604604, \"f1\": 0.8106377739473036, \"f2\": 0.8105180224542052, \"f0_5\": 0.8107575608314451, \"p4\": 0.8951068551265626, \"phi\": 0.8098648870427458}, {\"truth_threshold\": -8.200000122189522, \"match_probability\": 0.0033890630432542824, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1646.0, \"tn\": 497106.0, \"fp\": 363.0, \"fn\": 385.0, 
\"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.810438207779419, \"tn_rate\": 0.9992703062904422, \"fp_rate\": 0.0007296937095577815, \"fn_rate\": 0.189561792220581, \"precision\": 0.8193130910900945, \"recall\": 0.810438207779419, \"specificity\": 0.9992703062904422, \"npv\": 0.9992261166533666, \"accuracy\": 0.9985025025025025, \"f1\": 0.8148514851485148, \"f2\": 0.8121977696634758, \"f0_5\": 0.8175225985894506, \"p4\": 0.8976782152468817, \"phi\": 0.814111887648055}, {\"truth_threshold\": -8.100000120699406, \"match_probability\": 0.003631424511270156, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1646.0, \"tn\": 497108.0, \"fp\": 361.0, \"fn\": 385.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.810438207779419, \"tn_rate\": 0.9992743266414591, \"fp_rate\": 0.0007256733585409342, \"fn_rate\": 0.189561792220581, \"precision\": 0.8201295465869457, \"recall\": 0.810438207779419, \"specificity\": 0.9992743266414591, \"npv\": 0.9992261197644992, \"accuracy\": 0.9985065065065065, \"f1\": 0.8152550767706785, \"f2\": 0.8123581087750469, \"f0_5\": 0.8181727805944925, \"p4\": 0.8979238773557973, \"phi\": 0.8145198298344466}, {\"truth_threshold\": -8.00000011920929, \"match_probability\": 0.0038910502633927486, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1642.0, \"tn\": 497108.0, \"fp\": 361.0, \"fn\": 389.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8084687346134909, \"tn_rate\": 0.9992743266414591, \"fp_rate\": 0.0007256733585409342, \"fn_rate\": 0.1915312653865091, \"precision\": 0.8197703444832751, \"recall\": 0.8084687346134909, \"specificity\": 0.9992743266414591, \"npv\": 0.9992180857372004, \"accuracy\": 0.9984984984984985, \"f1\": 0.8140803173029252, \"f2\": 0.8107040584575886, \"f0_5\": 0.8174848152942348, \"p4\": 0.8972092535683982, \"phi\": 0.8133463110869028}, {\"truth_threshold\": 
-7.900000117719173, \"match_probability\": 0.004169160079349993, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1642.0, \"tn\": 497116.0, \"fp\": 353.0, \"fn\": 389.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8084687346134909, \"tn_rate\": 0.9992904080455265, \"fp_rate\": 0.0007095919544735451, \"fn_rate\": 0.1915312653865091, \"precision\": 0.8230576441102757, \"recall\": 0.8084687346134909, \"specificity\": 0.9992904080455265, \"npv\": 0.9992180983105697, \"accuracy\": 0.9985145145145146, \"f1\": 0.8156979632389468, \"f2\": 0.8113449945646803, \"f0_5\": 0.8200978923184497, \"p4\": 0.8981940694887678, \"phi\": 0.8149851217827041}, {\"truth_threshold\": -7.800000116229057, \"match_probability\": 0.004467058438231288, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1642.0, \"tn\": 497124.0, \"fp\": 345.0, \"fn\": 389.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8084687346134909, \"tn_rate\": 0.9993064894495939, \"fp_rate\": 0.0006935105504061559, \"fn_rate\": 0.1915312653865091, \"precision\": 0.8263714141922496, \"recall\": 0.8084687346134909, \"specificity\": 0.9993064894495939, \"npv\": 0.9992181108835347, \"accuracy\": 0.9985305305305305, \"f1\": 0.8173220507715281, \"f2\": 0.8119869449114825, \"f0_5\": 0.8227277282292815, \"p4\": 0.8991810496335679, \"phi\": 0.8166338001835506}, {\"truth_threshold\": -7.700000114738941, \"match_probability\": 0.004786140180292905, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1642.0, \"tn\": 497132.0, \"fp\": 337.0, \"fn\": 389.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8084687346134909, \"tn_rate\": 0.9993225708536613, \"fp_rate\": 0.0006774291463387669, \"fn_rate\": 0.1915312653865091, \"precision\": 0.829711975745326, \"recall\": 0.8084687346134909, \"specificity\": 0.9993225708536613, \"npv\": 0.9992181234560953, 
\"accuracy\": 0.9985465465465465, \"f1\": 0.8189526184538654, \"f2\": 0.8126299119073542, \"f0_5\": 0.8253744847692772, \"p4\": 0.9001702011447774, \"phi\": 0.8182924458742925}, {\"truth_threshold\": -7.600000113248825, \"match_probability\": 0.0051278965144870335, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1642.0, \"tn\": 497140.0, \"fp\": 329.0, \"fn\": 389.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8084687346134909, \"tn_rate\": 0.9993386522577287, \"fp_rate\": 0.0006613477422713777, \"fn_rate\": 0.1915312653865091, \"precision\": 0.8330796549974632, \"recall\": 0.8084687346134909, \"specificity\": 0.9993386522577287, \"npv\": 0.9992181360282516, \"accuracy\": 0.9985625625625626, \"f1\": 0.8205897051474262, \"f2\": 0.8132738979692917, \"f0_5\": 0.8280383257690368, \"p4\": 0.9011615311958345, \"phi\": 0.8199611598528135}, {\"truth_threshold\": -7.500000111758709, \"match_probability\": 0.005493921387833209, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1641.0, \"tn\": 497164.0, \"fp\": 305.0, \"fn\": 390.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8079763663220089, \"tn_rate\": 0.9993868964699308, \"fp_rate\": 0.0006131035300692104, \"fn_rate\": 0.19202363367799113, \"precision\": 0.843268242548818, \"recall\": 0.8079763663220089, \"specificity\": 0.9993868964699308, \"npv\": 0.9992161654815357, \"accuracy\": 0.9986086086086086, \"f1\": 0.8252451596680915, \"f2\": 0.8147964250248262, \"f0_5\": 0.8359653591441671, \"p4\": 0.9039711101870394, \"phi\": 0.8247369038176491}, {\"truth_threshold\": -7.400000110268593, \"match_probability\": 0.005885918232687788, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1640.0, \"tn\": 497177.0, \"fp\": 292.0, \"fn\": 391.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8074839980305268, \"tn_rate\": 0.9994130287515403, 
\"fp_rate\": 0.000586971248459703, \"fn_rate\": 0.19251600196947316, \"precision\": 0.8488612836438924, \"recall\": 0.8074839980305268, \"specificity\": 0.9994130287515403, \"npv\": 0.9992141777606277, \"accuracy\": 0.9986326326326326, \"f1\": 0.8276558163007822, \"f2\": 0.8154335719968179, \"f0_5\": 0.8402500256173788, \"p4\": 0.9054204230029831, \"phi\": 0.827230057658773}, {\"truth_threshold\": -7.300000108778477, \"match_probability\": 0.006305707107734554, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1640.0, \"tn\": 497178.0, \"fp\": 291.0, \"fn\": 391.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8074839980305268, \"tn_rate\": 0.9994150389270487, \"fp_rate\": 0.0005849610729512794, \"fn_rate\": 0.19251600196947316, \"precision\": 0.8493008803728638, \"recall\": 0.8074839980305268, \"specificity\": 0.9994150389270487, \"npv\": 0.9992141793399508, \"accuracy\": 0.9986346346346346, \"f1\": 0.8278647147905098, \"f2\": 0.8155146693187469, \"f0_5\": 0.840594566888775, \"p4\": 0.9055458205533815, \"phi\": 0.8274454571307222}, {\"truth_threshold\": -7.200000107288361, \"match_probability\": 0.006755232248084272, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1637.0, \"tn\": 497178.0, \"fp\": 291.0, \"fn\": 394.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8060068931560808, \"tn_rate\": 0.9994150389270487, \"fp_rate\": 0.0005849610729512794, \"fn_rate\": 0.19399310684391924, \"precision\": 0.8490663900414938, \"recall\": 0.8060068931560808, \"specificity\": 0.9994150389270487, \"npv\": 0.9992081547997074, \"accuracy\": 0.9986286286286287, \"f1\": 0.8269765092194998, \"f2\": 0.8142658177477119, \"f0_5\": 0.8400903212562866, \"p4\": 0.9050129705542206, \"phi\": 0.8265705794685275}, {\"truth_threshold\": -7.1000001057982445, \"match_probability\": 0.007236570039195372, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 
497469.0, \"tp\": 1637.0, \"tn\": 497180.0, \"fp\": 289.0, \"fn\": 394.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8060068931560808, \"tn_rate\": 0.9994190592780655, \"fp_rate\": 0.0005809407219344321, \"fn_rate\": 0.19399310684391924, \"precision\": 0.8499480789200415, \"recall\": 0.8060068931560808, \"specificity\": 0.9994190592780655, \"npv\": 0.9992081579825313, \"accuracy\": 0.9986326326326326, \"f1\": 0.8273944907758403, \"f2\": 0.8144278606965174, \"f0_5\": 0.8407806882383153, \"p4\": 0.9052640325366411, \"phi\": 0.8270020984693148}, {\"truth_threshold\": -7.000000104308128, \"match_probability\": 0.00775193742836891, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1636.0, \"tn\": 497181.0, \"fp\": 288.0, \"fn\": 395.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8055145248645987, \"tn_rate\": 0.999421069453574, \"fp_rate\": 0.0005789305464260085, \"fn_rate\": 0.1944854751354013, \"precision\": 0.8503118503118503, \"recall\": 0.8055145248645987, \"specificity\": 0.999421069453574, \"npv\": 0.9992061514220943, \"accuracy\": 0.9986326326326326, \"f1\": 0.827307206068268, \"f2\": 0.8140923566878981, \"f0_5\": 0.8409581577053562, \"p4\": 0.9052117869639349, \"phi\": 0.8269264039862914}, {\"truth_threshold\": -6.900000102818012, \"match_probability\": 0.008303700786279804, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1635.0, \"tn\": 497183.0, \"fp\": 286.0, \"fn\": 396.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8050221565731167, \"tn_rate\": 0.9994250898045909, \"fp_rate\": 0.0005749101954091612, \"fn_rate\": 0.19497784342688332, \"precision\": 0.8511192087454451, \"recall\": 0.8050221565731167, \"specificity\": 0.9994250898045909, \"npv\": 0.9992041464772428, \"accuracy\": 0.9986346346346346, \"f1\": 0.8274291497975709, \"f2\": 0.8138377302140368, \"f0_5\": 0.8414822439526506, \"p4\": 
0.905285190845045, \"phi\": 0.8270672775775526}, {\"truth_threshold\": -6.800000101327896, \"match_probability\": 0.00889438522932807, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1634.0, \"tn\": 497184.0, \"fp\": 285.0, \"fn\": 397.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8045297882816347, \"tn_rate\": 0.9994270999800993, \"fp_rate\": 0.0005729000199007375, \"fn_rate\": 0.19547021171836534, \"precision\": 0.8514851485148515, \"recall\": 0.8045297882816347, \"specificity\": 0.9994270999800993, \"npv\": 0.999202139953093, \"accuracy\": 0.9986346346346346, \"f1\": 0.8273417721518987, \"f2\": 0.8135019416509012, \"f0_5\": 0.8416606572576492, \"p4\": 0.9052328915756458, \"phi\": 0.8269920829853818}, {\"truth_threshold\": -6.600000098347664, \"match_probability\": 0.010203470791514735, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1633.0, \"tn\": 497195.0, \"fp\": 274.0, \"fn\": 398.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8040374199901527, \"tn_rate\": 0.9994492119106919, \"fp_rate\": 0.0005507880893080775, \"fn_rate\": 0.19596258000984737, \"precision\": 0.8563188253801783, \"recall\": 0.8040374199901527, \"specificity\": 0.9994492119106919, \"npv\": 0.9992001495197883, \"accuracy\": 0.9986546546546546, \"f1\": 0.8293550025393601, \"f2\": 0.8139766723158209, \"f0_5\": 0.8453256030644994, \"p4\": 0.906440777862351, \"phi\": 0.8290947229778014}, {\"truth_threshold\": -6.500000096857548, \"match_probability\": 0.010927806378730125, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1633.0, \"tn\": 497207.0, \"fp\": 262.0, \"fn\": 398.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8040374199901527, \"tn_rate\": 0.9994733340167931, \"fp_rate\": 0.0005266659832069938, \"fn_rate\": 0.19596258000984737, \"precision\": 0.8617414248021108, \"recall\": 
0.8040374199901527, \"specificity\": 0.9994733340167931, \"npv\": 0.9992001688085932, \"accuracy\": 0.9986786786786787, \"f1\": 0.8318899643402955, \"f2\": 0.814951591975247, \"f0_5\": 0.8495473936114868, \"p4\": 0.9079577097588084, \"phi\": 0.8317306228810372}, {\"truth_threshold\": -6.400000095367432, \"match_probability\": 0.011702953955477532, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1630.0, \"tn\": 497222.0, \"fp\": 247.0, \"fn\": 401.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8025603151157066, \"tn_rate\": 0.9995034866494193, \"fp_rate\": 0.0004965133505806392, \"fn_rate\": 0.19743968488429345, \"precision\": 0.8684070324986681, \"recall\": 0.8025603151157066, \"specificity\": 0.9995034866494193, \"npv\": 0.9991941690798054, \"accuracy\": 0.9987027027027027, \"f1\": 0.834186284544524, \"f2\": 0.8149185081491851, \"f0_5\": 0.8543872523325297, \"p4\": 0.9093287289176903, \"phi\": 0.834189138117982}, {\"truth_threshold\": -6.3000000938773155, \"match_probability\": 0.012532388771145032, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1630.0, \"tn\": 497229.0, \"fp\": 240.0, \"fn\": 401.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8025603151157066, \"tn_rate\": 0.9995175578779784, \"fp_rate\": 0.0004824421220216737, \"fn_rate\": 0.19743968488429345, \"precision\": 0.8716577540106952, \"recall\": 0.8025603151157066, \"specificity\": 0.9995175578779784, \"npv\": 0.9991941804151679, \"accuracy\": 0.9987167167167167, \"f1\": 0.8356831581645732, \"f2\": 0.8154892935761456, \"f0_5\": 0.8569025339081064, \"p4\": 0.9102202694038696, \"phi\": 0.8357577802549805}, {\"truth_threshold\": -6.200000092387199, \"match_probability\": 0.013419810695865477, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1629.0, \"tn\": 497238.0, \"fp\": 231.0, \"fn\": 402.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 
0.9959339339339339, \"tp_rate\": 0.8020679468242246, \"tn_rate\": 0.9995356494575541, \"fp_rate\": 0.00046435054244586095, \"fn_rate\": 0.19793205317577547, \"precision\": 0.8758064516129033, \"recall\": 0.8020679468242246, \"specificity\": 0.9995356494575541, \"npv\": 0.9991921871232216, \"accuracy\": 0.9987327327327328, \"f1\": 0.8373168851195065, \"f2\": 0.8158052884615384, \"f0_5\": 0.8599936648717137, \"p4\": 0.9111918307693678, \"phi\": 0.837497495848518}, {\"truth_threshold\": -6.100000090897083, \"match_probability\": 0.014369156816028038, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1627.0, \"tn\": 497246.0, \"fp\": 223.0, \"fn\": 404.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8010832102412605, \"tn_rate\": 0.9995517308616215, \"fp_rate\": 0.0004482691383784718, \"fn_rate\": 0.19891678975873953, \"precision\": 0.8794594594594595, \"recall\": 0.8010832102412605, \"specificity\": 0.9995517308616215, \"npv\": 0.9991881844669949, \"accuracy\": 0.9987447447447447, \"f1\": 0.8384437000772996, \"f2\": 0.8156206135953479, \"f0_5\": 0.8625808503870215, \"p4\": 0.9118611442174754, \"phi\": 0.8387347641806931}, {\"truth_threshold\": -6.000000089406967, \"match_probability\": 0.015384614445865122, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1627.0, \"tn\": 497254.0, \"fp\": 215.0, \"fn\": 404.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.8010832102412605, \"tn_rate\": 0.9995678122656889, \"fp_rate\": 0.0004321877343110827, \"fn_rate\": 0.19891678975873953, \"precision\": 0.8832790445168295, \"recall\": 0.8010832102412605, \"specificity\": 0.9995678122656889, \"npv\": 0.9991881975171705, \"accuracy\": 0.9987607607607608, \"f1\": 0.8401755744900594, \"f2\": 0.8162753361428858, \"f0_5\": 0.8655176082561975, \"p4\": 0.9128877625205759, \"phi\": 0.8405642745581147}, {\"truth_threshold\": -5.900000087916851, \"match_probability\": 
0.016470634520449206, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1624.0, \"tn\": 497265.0, \"fp\": 204.0, \"fn\": 407.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7996061053668144, \"tn_rate\": 0.9995899241962816, \"fp_rate\": 0.00041007580371842263, \"fn_rate\": 0.20039389463318563, \"precision\": 0.888402625820569, \"recall\": 0.7996061053668144, \"specificity\": 0.9995899241962816, \"npv\": 0.9991821922872897, \"accuracy\": 0.9987767767767768, \"f1\": 0.8416688261207567, \"f2\": 0.8159163987138264, \"f0_5\": 0.8690998608583966, \"p4\": 0.9137718615826569, \"phi\": 0.8422317154633179}, {\"truth_threshold\": -5.800000086426735, \"match_probability\": 0.017631945325087592, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1620.0, \"tn\": 497279.0, \"fp\": 190.0, \"fn\": 411.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7976366322008862, \"tn_rate\": 0.9996180666533995, \"fp_rate\": 0.0003819333466004917, \"fn_rate\": 0.20236336779911374, \"precision\": 0.8950276243093923, \"recall\": 0.7976366322008862, \"specificity\": 0.9996180666533995, \"npv\": 0.9991741847334686, \"accuracy\": 0.9987967967967968, \"f1\": 0.8435303306430617, \"f2\": 0.8153815180189249, \"f0_5\": 0.8736921583432208, \"p4\": 0.9148720204448654, \"phi\": 0.8443379619461768}, {\"truth_threshold\": -5.700000084936619, \"match_probability\": 0.01887356650421064, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1620.0, \"tn\": 497295.0, \"fp\": 174.0, \"fn\": 411.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7976366322008862, \"tn_rate\": 0.9996502294615343, \"fp_rate\": 0.00034977053846571343, \"fn_rate\": 0.20236336779911374, \"precision\": 0.903010033444816, \"recall\": 0.7976366322008862, \"specificity\": 0.9996502294615343, \"npv\": 0.9991742112813589, \"accuracy\": 0.9988288288288288, \"f1\": 
0.8470588235294118, \"f2\": 0.8166969147005445, \"f0_5\": 0.8797653958944281, \"p4\": 0.9169501145219608, \"phi\": 0.8481153489467179}, {\"truth_threshold\": -5.600000083446503, \"match_probability\": 0.02020082327925431, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1620.0, \"tn\": 497300.0, \"fp\": 169.0, \"fn\": 411.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7976366322008862, \"tn_rate\": 0.9996602803390764, \"fp_rate\": 0.00033971966092359525, \"fn_rate\": 0.20236336779911374, \"precision\": 0.9055338177752935, \"recall\": 0.7976366322008862, \"specificity\": 0.9996602803390764, \"npv\": 0.9991742195772245, \"accuracy\": 0.9988388388388388, \"f1\": 0.8481675392670157, \"f2\": 0.8171088469686271, \"f0_5\": 0.8816806356808534, \"p4\": 0.9176014562694327, \"phi\": 0.8493061432317865}, {\"truth_threshold\": -5.500000081956387, \"match_probability\": 0.02161936078957948, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1619.0, \"tn\": 497303.0, \"fp\": 166.0, \"fn\": 412.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7971442639094042, \"tn_rate\": 0.9996663108656016, \"fp_rate\": 0.0003336891343983243, \"fn_rate\": 0.20285573609059576, \"precision\": 0.9070028011204482, \"recall\": 0.7971442639094042, \"specificity\": 0.9996663108656016, \"npv\": 0.9991722170318355, \"accuracy\": 0.9988428428428429, \"f1\": 0.8485324947589099, \"f2\": 0.8169341003128469, \"f0_5\": 0.8826736451859121, \"p4\": 0.9178158396820322, \"phi\": 0.8497351570110964}, {\"truth_threshold\": -5.4000000804662704, \"match_probability\": 0.023135158452986655, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1618.0, \"tn\": 497315.0, \"fp\": 154.0, \"fn\": 413.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7966518956179222, \"tn_rate\": 0.9996904329717028, \"fp_rate\": 0.00030956702829724065, 
\"fn_rate\": 0.2033481043820778, \"precision\": 0.9130925507900677, \"recall\": 0.7966518956179222, \"specificity\": 0.9996904329717028, \"npv\": 0.9991702295229523, \"accuracy\": 0.9988648648648648, \"f1\": 0.8509071785432554, \"f2\": 0.8175020210185934, \"f0_5\": 0.8871586796797895, \"p4\": 0.9192078875605487, \"phi\": 0.8523341285904934}, {\"truth_threshold\": -5.300000078976154, \"match_probability\": 0.024754544222716376, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1618.0, \"tn\": 497330.0, \"fp\": 139.0, \"fn\": 413.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7966518956179222, \"tn_rate\": 0.9997205856043291, \"fp_rate\": 0.00027941439567088605, \"fn_rate\": 0.2033481043820778, \"precision\": 0.9208878770631759, \"recall\": 0.7966518956179222, \"specificity\": 0.9997205856043291, \"npv\": 0.9991702545289437, \"accuracy\": 0.9988948948948949, \"f1\": 0.8542766631467793, \"f2\": 0.8187430422022063, \"f0_5\": 0.8930345512749751, \"p4\": 0.9211767787183339, \"phi\": 0.8559842154647039}, {\"truth_threshold\": -5.200000077486038, \"match_probability\": 0.02648420859582165, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1615.0, \"tn\": 497341.0, \"fp\": 128.0, \"fn\": 416.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7951747907434761, \"tn_rate\": 0.9997426975349217, \"fp_rate\": 0.000257302465078226, \"fn_rate\": 0.2048252092565239, \"precision\": 0.9265633964429145, \"recall\": 0.7951747907434761, \"specificity\": 0.9997426975349217, \"npv\": 0.9991642508292199, \"accuracy\": 0.9989109109109109, \"f1\": 0.8558558558558559, \"f2\": 0.818384514036688, \"f0_5\": 0.8969232478062867, \"p4\": 0.922097530197822, \"phi\": 0.8578327903498651}, {\"truth_threshold\": -5.100000075995922, \"match_probability\": 0.02833121820332325, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1613.0, \"tn\": 497346.0, 
\"fp\": 123.0, \"fn\": 418.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.794190054160512, \"tn_rate\": 0.9997527484124639, \"fp_rate\": 0.0002472515875361078, \"fn_rate\": 0.20580994583948795, \"precision\": 0.929147465437788, \"recall\": 0.794190054160512, \"specificity\": 0.9997527484124639, \"npv\": 0.9991602446139134, \"accuracy\": 0.9989169169169169, \"f1\": 0.8563843907618794, \"f2\": 0.8179513184584178, \"f0_5\": 0.8986072423398329, \"p4\": 0.9224054864036791, \"phi\": 0.8585005678323977}, {\"truth_threshold\": -5.000000074505806, \"match_probability\": 0.030303028785498974, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1611.0, \"tn\": 497352.0, \"fp\": 117.0, \"fn\": 420.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.793205317577548, \"tn_rate\": 0.9997648094655144, \"fp_rate\": 0.00023519053448556594, \"fn_rate\": 0.206794682422452, \"precision\": 0.9322916666666666, \"recall\": 0.793205317577548, \"specificity\": 0.9997648094655144, \"npv\": 0.9991562402063595, \"accuracy\": 0.9989249249249249, \"f1\": 0.8571428571428571, \"f2\": 0.8176004872107187, \"f0_5\": 0.9007044615900704, \"p4\": 0.9228469812300154, \"phi\": 0.8594244382417212}, {\"truth_threshold\": -4.90000007301569, \"match_probability\": 0.032407497325934585, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1611.0, \"tn\": 497356.0, \"fp\": 113.0, \"fn\": 420.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.793205317577548, \"tn_rate\": 0.9997728501675481, \"fp_rate\": 0.00022714983245187138, \"fn_rate\": 0.206794682422452, \"precision\": 0.9344547563805105, \"recall\": 0.793205317577548, \"specificity\": 0.9997728501675481, \"npv\": 0.9991562469865963, \"accuracy\": 0.998932932932933, \"f1\": 0.8580559254327563, \"f2\": 0.8179325751421609, \"f0_5\": 0.9023188081102274, \"p4\": 0.9233776450607978, \"phi\": 0.8604261402565063}, 
{\"truth_threshold\": -4.800000071525574, \"match_probability\": 0.03465289308554322, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1606.0, \"tn\": 497375.0, \"fp\": 94.0, \"fn\": 425.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7907434761201378, \"tn_rate\": 0.9998110435022082, \"fp_rate\": 0.0001889564977918222, \"fn_rate\": 0.20925652387986213, \"precision\": 0.9447058823529412, \"recall\": 0.7907434761201378, \"specificity\": 0.9998110435022082, \"npv\": 0.9991462434712736, \"accuracy\": 0.998960960960961, \"f1\": 0.8608952023586169, \"f2\": 0.817385993485342, \"f0_5\": 0.9092967953799117, \"p4\": 0.9250251791262751, \"phi\": 0.8638093330854282}, {\"truth_threshold\": -4.700000070035458, \"match_probability\": 0.037047907242669466, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1602.0, \"tn\": 497376.0, \"fp\": 93.0, \"fn\": 429.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7887740029542097, \"tn_rate\": 0.9998130536777166, \"fp_rate\": 0.00018694632228339857, \"fn_rate\": 0.21122599704579026, \"precision\": 0.9451327433628318, \"recall\": 0.7887740029542097, \"specificity\": 0.9998130536777166, \"npv\": 0.9991382167716274, \"accuracy\": 0.998954954954955, \"f1\": 0.8599033816425121, \"f2\": 0.8157653528872594, \"f0_5\": 0.9090909090909091, \"p4\": 0.924451040377126, \"phi\": 0.8629251746909673}, {\"truth_threshold\": -4.6000000685453415, \"match_probability\": 0.039601660807737325, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1601.0, \"tn\": 497383.0, \"fp\": 86.0, \"fn\": 430.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7882816346627277, \"tn_rate\": 0.9998271249062756, \"fp_rate\": 0.00017287509372443307, \"fn_rate\": 0.21171836533727229, \"precision\": 0.949021932424422, \"recall\": 0.7882816346627277, \"specificity\": 0.9998271249062756, \"npv\": 
0.9991362218343033, \"accuracy\": 0.998966966966967, \"f1\": 0.8612157073695536, \"f2\": 0.815920905106513, \"f0_5\": 0.9118350609408816, \"p4\": 0.9252114585110551, \"phi\": 0.8644372428511393}, {\"truth_threshold\": -4.500000067055225, \"match_probability\": 0.04232371044088178, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1595.0, \"tn\": 497383.0, \"fp\": 86.0, \"fn\": 436.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7853274249138356, \"tn_rate\": 0.9998271249062756, \"fp_rate\": 0.00017287509372443307, \"fn_rate\": 0.21467257508616444, \"precision\": 0.9488399762046401, \"recall\": 0.7853274249138356, \"specificity\": 0.9998271249062756, \"npv\": 0.9991241796717281, \"accuracy\": 0.998954954954955, \"f1\": 0.859375, \"f2\": 0.8133605303416624, \"f0_5\": 0.910908052541405, \"p4\": 0.9241456149502866, \"phi\": 0.8627272860253764}, {\"truth_threshold\": -4.400000065565109, \"match_probability\": 0.04522405175894309, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1595.0, \"tn\": 497387.0, \"fp\": 82.0, \"fn\": 436.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7853274249138356, \"tn_rate\": 0.9998351656083092, \"fp_rate\": 0.0001648343916907385, \"fn_rate\": 0.21467257508616444, \"precision\": 0.951103160405486, \"recall\": 0.7853274249138356, \"specificity\": 0.9998351656083092, \"npv\": 0.9991241867089307, \"accuracy\": 0.9989629629629629, \"f1\": 0.860302049622438, \"f2\": 0.813692480359147, \"f0_5\": 0.9125758095891978, \"p4\": 0.92468309771844, \"phi\": 0.8637609423013644}, {\"truth_threshold\": -4.300000064074993, \"match_probability\": 0.048313119674570026, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1592.0, \"tn\": 497391.0, \"fp\": 78.0, \"fn\": 439.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7838503200393895, \"tn_rate\": 0.999843206310343, 
\"fp_rate\": 0.00015679368965704394, \"fn_rate\": 0.21614967996061055, \"precision\": 0.9532934131736527, \"recall\": 0.7838503200393895, \"specificity\": 0.999843206310343, \"npv\": 0.999118172870257, \"accuracy\": 0.998964964964965, \"f1\": 0.8603080248581464, \"f2\": 0.8127424954053503, \"f0_5\": 0.9137871656526231, \"p4\": 0.9246869807716529, \"phi\": 0.8639437449296644}, {\"truth_threshold\": -4.200000062584877, \"match_probability\": 0.05160178526561565, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1591.0, \"tn\": 497393.0, \"fp\": 76.0, \"fn\": 440.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7833579517479075, \"tn_rate\": 0.9998472266613598, \"fp_rate\": 0.00015277333864019668, \"fn_rate\": 0.21664204825209257, \"precision\": 0.9544091181763648, \"recall\": 0.7833579517479075, \"specificity\": 0.9998472266613598, \"npv\": 0.9991161694785199, \"accuracy\": 0.998966966966967, \"f1\": 0.8604651162790697, \"f2\": 0.8124808497599837, \"f0_5\": 0.9144729279227497, \"p4\": 0.9247781449434443, \"phi\": 0.8641793465684607}, {\"truth_threshold\": -4.100000061094761, \"match_probability\": 0.0551013486283602, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1587.0, \"tn\": 497394.0, \"fp\": 75.0, \"fn\": 444.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7813884785819794, \"tn_rate\": 0.9998492368368682, \"fp_rate\": 0.00015076316313177303, \"fn_rate\": 0.21861152141802068, \"precision\": 0.9548736462093863, \"recall\": 0.7813884785819794, \"specificity\": 0.9998492368368682, \"npv\": 0.9991081436129825, \"accuracy\": 0.998960960960961, \"f1\": 0.859463850528026, \"f2\": 0.8108522378908645, \"f0_5\": 0.9142758382302109, \"p4\": 0.9241982796143317, \"phi\": 0.8632998054164096}, {\"truth_threshold\": -4.000000059604645, \"match_probability\": 0.05882352712444066, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, 
\"tp\": 1587.0, \"tn\": 497400.0, \"fp\": 69.0, \"fn\": 444.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7813884785819794, \"tn_rate\": 0.9998612978899187, \"fp_rate\": 0.0001387021100812312, \"fn_rate\": 0.21861152141802068, \"precision\": 0.9583333333333334, \"recall\": 0.7813884785819794, \"specificity\": 0.9998612978899187, \"npv\": 0.9991081543616073, \"accuracy\": 0.9989729729729729, \"f1\": 0.8608624898291294, \"f2\": 0.8113496932515337, \"f0_5\": 0.9168110918544194, \"p4\": 0.9250088864925864, \"phi\": 0.8648704806192624}, {\"truth_threshold\": -3.9000000581145287, \"match_probability\": 0.06278043839004852, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1582.0, \"tn\": 497408.0, \"fp\": 61.0, \"fn\": 449.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7789266371245692, \"tn_rate\": 0.9998773792939861, \"fp_rate\": 0.00012262070601384208, \"fn_rate\": 0.22107336287543083, \"precision\": 0.9628727936701157, \"recall\": 0.7789266371245692, \"specificity\": 0.9998773792939861, \"npv\": 0.9990981346049167, \"accuracy\": 0.998978978978979, \"f1\": 0.8611867174741427, \"f2\": 0.8098699703081806, \"f0_5\": 0.919446704637917, \"p4\": 0.9251973215687156, \"phi\": 0.8655557882762271}, {\"truth_threshold\": -3.8000000566244125, \"match_probability\": 0.06698457743861425, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1582.0, \"tn\": 497410.0, \"fp\": 59.0, \"fn\": 449.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7789266371245692, \"tn_rate\": 0.999881399645003, \"fp_rate\": 0.00011860035499699478, \"fn_rate\": 0.22107336287543083, \"precision\": 0.9640463132236441, \"recall\": 0.7789266371245692, \"specificity\": 0.999881399645003, \"npv\": 0.9990981382278918, \"accuracy\": 0.9989829829829829, \"f1\": 0.8616557734204793, \"f2\": 0.8100358422939068, \"f0_5\": 0.9203025014543339, \"p4\": 
0.9254688030174308, \"phi\": 0.8660858152403128}, {\"truth_threshold\": -3.7000000551342964, \"match_probability\": 0.07144878715678568, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1575.0, \"tn\": 497411.0, \"fp\": 58.0, \"fn\": 456.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7754800590841949, \"tn_rate\": 0.9998834098205114, \"fp_rate\": 0.00011659017948857115, \"fn_rate\": 0.22451994091580502, \"precision\": 0.964482547458665, \"recall\": 0.7754800590841949, \"specificity\": 0.9998834098205114, \"npv\": 0.9990840927396273, \"accuracy\": 0.998970970970971, \"f1\": 0.8597161572052402, \"f2\": 0.8071128420621092, \"f0_5\": 0.9196543267546421, \"p4\": 0.9243462872246998, \"phi\": 0.864357780473457}, {\"truth_threshold\": -3.6000000536441803, \"match_probability\": 0.0761862214703254, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1570.0, \"tn\": 497415.0, \"fp\": 54.0, \"fn\": 461.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7730182176267848, \"tn_rate\": 0.9998914505225451, \"fp_rate\": 0.00010854947745487659, \"fn_rate\": 0.22698178237321517, \"precision\": 0.9667487684729064, \"recall\": 0.7730182176267848, \"specificity\": 0.9998914505225451, \"npv\": 0.9990740666350657, \"accuracy\": 0.9989689689689689, \"f1\": 0.8590971272229823, \"f2\": 0.8052933935166188, \"f0_5\": 0.9206051366248388, \"p4\": 0.923987940256248, \"phi\": 0.8639987308870006}, {\"truth_threshold\": -3.500000052154064, \"match_probability\": 0.08121030044424019, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1569.0, \"tn\": 497415.0, \"fp\": 54.0, \"fn\": 462.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7725258493353028, \"tn_rate\": 0.9998914505225451, \"fp_rate\": 0.00010854947745487659, \"fn_rate\": 0.2274741506646972, \"precision\": 0.966728280961183, \"recall\": 0.7725258493353028, 
\"specificity\": 0.9998914505225451, \"npv\": 0.9990720599666183, \"accuracy\": 0.998966966966967, \"f1\": 0.8587848932676518, \"f2\": 0.8048630347799323, \"f0_5\": 0.9204505455825414, \"p4\": 0.923806889110029, \"phi\": 0.8637134313771212}, {\"truth_threshold\": -3.300000049173832, \"match_probability\": 0.09217307161544283, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1565.0, \"tn\": 497421.0, \"fp\": 48.0, \"fn\": 466.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7705563761693747, \"tn_rate\": 0.9999035115755956, \"fp_rate\": 9.648842440433474e-05, \"fn_rate\": 0.2294436238306253, \"precision\": 0.9702417854928704, \"recall\": 0.7705563761693747, \"specificity\": 0.9999035115755956, \"npv\": 0.9990640446527023, \"accuracy\": 0.998970970970971, \"f1\": 0.8589462129527992, \"f2\": 0.8036356167197288, \"f0_5\": 0.9224331014971119, \"p4\": 0.9239010780801296, \"phi\": 0.8641824234989411}, {\"truth_threshold\": -3.200000047683716, \"match_probability\": 0.09813940308831819, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1562.0, \"tn\": 497421.0, \"fp\": 48.0, \"fn\": 469.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7690792712949286, \"tn_rate\": 0.9999035115755956, \"fp_rate\": 9.648842440433474e-05, \"fn_rate\": 0.23092072870507138, \"precision\": 0.9701863354037267, \"recall\": 0.7690792712949286, \"specificity\": 0.9999035115755956, \"npv\": 0.99905802486493, \"accuracy\": 0.998964964964965, \"f1\": 0.8580060422960725, \"f2\": 0.8023423053215534, \"f0_5\": 0.92196907094794, \"p4\": 0.9233556465339778, \"phi\": 0.8633262566613811}, {\"truth_threshold\": -3.1000000461935997, \"match_probability\": 0.10444750015659417, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1561.0, \"tn\": 497421.0, \"fp\": 48.0, \"fn\": 470.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 
0.7685869030034466, \"tn_rate\": 0.9999035115755956, \"fp_rate\": 9.648842440433474e-05, \"fn_rate\": 0.23141309699655344, \"precision\": 0.9701678060907396, \"recall\": 0.7685869030034466, \"specificity\": 0.9999035115755956, \"npv\": 0.9990560182851267, \"accuracy\": 0.9989629629629629, \"f1\": 0.8576923076923076, \"f2\": 0.8019110243501489, \"f0_5\": 0.9218141018070155, \"p4\": 0.9231735147751612, \"phi\": 0.8630406819000448}, {\"truth_threshold\": -3.0000000447034836, \"match_probability\": 0.11111110805075623, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1560.0, \"tn\": 497421.0, \"fp\": 48.0, \"fn\": 471.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7680945347119645, \"tn_rate\": 0.9999035115755956, \"fp_rate\": 9.648842440433474e-05, \"fn_rate\": 0.23190546528803546, \"precision\": 0.9701492537313433, \"recall\": 0.7680945347119645, \"specificity\": 0.9999035115755956, \"npv\": 0.9990540117133836, \"accuracy\": 0.998960960960961, \"f1\": 0.8573784006595219, \"f2\": 0.8014796547472256, \"f0_5\": 0.9216589861751152, \"p4\": 0.9229912220379601, \"phi\": 0.8627550140781992}, {\"truth_threshold\": -2.9000000432133675, \"match_probability\": 0.11814376082605058, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1513.0, \"tn\": 497438.0, \"fp\": 31.0, \"fn\": 518.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7449532250123092, \"tn_rate\": 0.9999376845592388, \"fp_rate\": 6.231544076113285e-05, \"fn_rate\": 0.2550467749876908, \"precision\": 0.9799222797927462, \"recall\": 0.7449532250123092, \"specificity\": 0.9999376845592388, \"npv\": 0.9989597474475657, \"accuracy\": 0.9989009009009009, \"f1\": 0.8464335664335665, \"f2\": 0.7824782788580885, \"f0_5\": 0.9217740952845133, \"p4\": 0.916598914625346, \"phi\": 0.8539088497224261}, {\"truth_threshold\": -2.8000000417232513, \"match_probability\": 0.1255586621587546, 
\"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1511.0, \"tn\": 497440.0, \"fp\": 29.0, \"fn\": 520.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7439684884293452, \"tn_rate\": 0.9999417049102557, \"fp_rate\": 5.8295089744285574e-05, \"fn_rate\": 0.25603151157065485, \"precision\": 0.9811688311688311, \"recall\": 0.7439684884293452, \"specificity\": 0.9999417049102557, \"npv\": 0.9989557394168206, \"accuracy\": 0.9989009009009009, \"f1\": 0.8462615513861663, \"f2\": 0.7817673841059603, \"f0_5\": 0.9223538029544622, \"p4\": 0.9164980483864553, \"phi\": 0.8538879661883434}, {\"truth_threshold\": -2.7000000402331352, \"match_probability\": 0.13336855415354743, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1510.0, \"tn\": 497440.0, \"fp\": 29.0, \"fn\": 521.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7434761201378631, \"tn_rate\": 0.9999417049102557, \"fp_rate\": 5.8295089744285574e-05, \"fn_rate\": 0.25652387986213687, \"precision\": 0.9811565951916829, \"recall\": 0.7434761201378631, \"specificity\": 0.9999417049102557, \"npv\": 0.9989537333244973, \"accuracy\": 0.9988988988988989, \"f1\": 0.84593837535014, \"f2\": 0.7813308496326192, \"f0_5\": 0.9221937217540003, \"p4\": 0.916308069744429, \"phi\": 0.8535991381981919}, {\"truth_threshold\": -2.600000038743019, \"match_probability\": 0.1415855743659812, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1509.0, \"tn\": 497441.0, \"fp\": 28.0, \"fn\": 522.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7429837518463811, \"tn_rate\": 0.9999437150857642, \"fp_rate\": 5.6284914235861934e-05, \"fn_rate\": 0.2570162481536189, \"precision\": 0.9817826935588809, \"recall\": 0.7429837518463811, \"specificity\": 0.9999437150857642, \"npv\": 0.9989517293453529, \"accuracy\": 0.9988988988988989, \"f1\": 0.8458520179372198, \"f2\": 
0.7809750543422006, \"f0_5\": 0.9224844112972246, \"p4\": 0.9162574066722536, \"phi\": 0.8535892106519398}, {\"truth_threshold\": -2.500000037252903, \"match_probability\": 0.15022110152606716, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1506.0, \"tn\": 497442.0, \"fp\": 27.0, \"fn\": 525.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.741506646971935, \"tn_rate\": 0.9999457252612726, \"fp_rate\": 5.427473872743829e-05, \"fn_rate\": 0.258493353028065, \"precision\": 0.9823874755381604, \"recall\": 0.741506646971935, \"specificity\": 0.9999457252612726, \"npv\": 0.9989457132701565, \"accuracy\": 0.9988948948948949, \"f1\": 0.8451178451178452, \"f2\": 0.7797452625038832, \"f0_5\": 0.9224549797868431, \"p4\": 0.9158256533199051, \"phi\": 0.8530016534527379}, {\"truth_threshold\": -2.400000035762787, \"match_probability\": 0.1592855907727143, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1504.0, \"tn\": 497443.0, \"fp\": 26.0, \"fn\": 527.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.740521910388971, \"tn_rate\": 0.999947735436781, \"fp_rate\": 5.226456321901465e-05, \"fn_rate\": 0.25947808961102903, \"precision\": 0.9830065359477124, \"recall\": 0.740521910388971, \"specificity\": 0.999947735436781, \"npv\": 0.9989417033154607, \"accuracy\": 0.9988928928928928, \"f1\": 0.8447065431058691, \"f2\": 0.7789517298529107, \"f0_5\": 0.9225861857440805, \"p4\": 0.9155836759722604, \"phi\": 0.8527032689901116}, {\"truth_threshold\": -2.3000000342726707, \"match_probability\": 0.16878839957195682, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1502.0, \"tn\": 497446.0, \"fp\": 23.0, \"fn\": 529.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7395371738060069, \"tn_rate\": 0.9999537659633062, \"fp_rate\": 4.623403669374373e-05, \"fn_rate\": 0.2604628261939931, \"precision\": 
0.9849180327868853, \"recall\": 0.7395371738060069, \"specificity\": 0.9999537659633062, \"npv\": 0.9989376976755862, \"accuracy\": 0.9988948948948949, \"f1\": 0.8447694038245219, \"f2\": 0.7783189967872318, \"f0_5\": 0.9236256303037756, \"p4\": 0.9156210235746602, \"phi\": 0.8529668215804862}, {\"truth_threshold\": -2.2000000327825546, \"match_probability\": 0.1787376058900962, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1496.0, \"tn\": 497446.0, \"fp\": 23.0, \"fn\": 535.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7365829640571148, \"tn_rate\": 0.9999537659633062, \"fp_rate\": 4.623403669374373e-05, \"fn_rate\": 0.2634170359428853, \"precision\": 0.9848584595128373, \"recall\": 0.7365829640571148, \"specificity\": 0.9999537659633062, \"npv\": 0.998925661822439, \"accuracy\": 0.9988828828828828, \"f1\": 0.8428169014084507, \"f2\": 0.7756922119672301, \"f0_5\": 0.9226594301221167, \"p4\": 0.9144704096753229, \"phi\": 0.8512303682166759}, {\"truth_threshold\": -2.0000000298023224, \"match_probability\": 0.19999999669481672, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1495.0, \"tn\": 497448.0, \"fp\": 21.0, \"fn\": 536.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7360905957656327, \"tn_rate\": 0.9999577863143231, \"fp_rate\": 4.221368567689645e-05, \"fn_rate\": 0.2639094042343673, \"precision\": 0.9861477572559367, \"recall\": 0.7360905957656327, \"specificity\": 0.9999577863143231, \"npv\": 0.998923660197918, \"accuracy\": 0.9988848848848849, \"f1\": 0.8429658866647871, \"f2\": 0.7754149377593361, \"f0_5\": 0.9234095120444719, \"p4\": 0.9145585211638112, \"phi\": 0.851504681754668}, {\"truth_threshold\": -1.9000000283122063, \"match_probability\": 0.2113212378007128, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1490.0, \"tn\": 497448.0, \"fp\": 21.0, \"fn\": 541.0, \"P_rate\": 
0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7336287543082225, \"tn_rate\": 0.9999577863143231, \"fp_rate\": 4.221368567689645e-05, \"fn_rate\": 0.26637124569177745, \"precision\": 0.986101919258769, \"recall\": 0.7336287543082225, \"specificity\": 0.9999577863143231, \"npv\": 0.9989136306223632, \"accuracy\": 0.9988748748748749, \"f1\": 0.8413325804630153, \"f2\": 0.7732226258432797, \"f0_5\": 0.9226006191950464, \"p4\": 0.913594311460595, \"phi\": 0.8500553803704287}, {\"truth_threshold\": -1.8000000268220901, \"match_probability\": 0.22310460998179016, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1452.0, \"tn\": 497454.0, \"fp\": 15.0, \"fn\": 579.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7149187592319055, \"tn_rate\": 0.9999698473673736, \"fp_rate\": 3.0152632626354607e-05, \"fn_rate\": 0.28508124076809455, \"precision\": 0.9897750511247444, \"recall\": 0.7149187592319055, \"specificity\": 0.9999698473673736, \"npv\": 0.9988374264355976, \"accuracy\": 0.9988108108108108, \"f1\": 0.8301886792452831, \"f2\": 0.7569596496715671, \"f0_5\": 0.9191036840106342, \"p4\": 0.906970866885346, \"phi\": 0.8406829346131477}, {\"truth_threshold\": -1.700000025331974, \"match_probability\": 0.2353489599091234, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1451.0, \"tn\": 497454.0, \"fp\": 15.0, \"fn\": 580.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7144263909404235, \"tn_rate\": 0.9999698473673736, \"fp_rate\": 3.0152632626354607e-05, \"fn_rate\": 0.28557360905957657, \"precision\": 0.9897680763983628, \"recall\": 0.7144263909404235, \"specificity\": 0.9999698473673736, \"npv\": 0.99883542087488, \"accuracy\": 0.9988088088088088, \"f1\": 0.8298541607091793, \"f2\": 0.7565172054223149, \"f0_5\": 0.9189360354654845, \"p4\": 0.9067707883321519, \"phi\": 0.8403895642081339}, {\"truth_threshold\": 
-1.600000023841858, \"match_probability\": 0.24805074388621665, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1450.0, \"tn\": 497454.0, \"fp\": 15.0, \"fn\": 581.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7139340226489415, \"tn_rate\": 0.9999698473673736, \"fp_rate\": 3.0152632626354607e-05, \"fn_rate\": 0.2860659773510586, \"precision\": 0.9897610921501706, \"recall\": 0.7139340226489415, \"specificity\": 0.9999698473673736, \"npv\": 0.9988334153222163, \"accuracy\": 0.9988068068068068, \"f1\": 0.8295194508009154, \"f2\": 0.7560746688914382, \"f0_5\": 0.9187682169560258, \"p4\": 0.9065705228164346, \"phi\": 0.8400960925759684}, {\"truth_threshold\": -1.5000000223517418, \"match_probability\": 0.2612038719739489, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1448.0, \"tn\": 497454.0, \"fp\": 15.0, \"fn\": 583.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7129492860659774, \"tn_rate\": 0.9999698473673736, \"fp_rate\": 3.0152632626354607e-05, \"fn_rate\": 0.28705071393402265, \"precision\": 0.9897470950102529, \"recall\": 0.7129492860659774, \"specificity\": 0.9999698473673736, \"npv\": 0.9988294042410504, \"accuracy\": 0.9988028028028028, \"f1\": 0.8288494562106469, \"f2\": 0.7551893188693022, \"f0_5\": 0.9184320690092604, \"p4\": 0.9061694298434989, \"phi\": 0.8395088452058233}, {\"truth_threshold\": -1.4000000208616257, \"match_probability\": 0.2747995717943022, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1431.0, \"tn\": 497460.0, \"fp\": 9.0, \"fn\": 600.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7045790251107829, \"tn_rate\": 0.9999819084204242, \"fp_rate\": 1.8091579575812764e-05, \"fn_rate\": 0.29542097488921715, \"precision\": 0.99375, \"recall\": 0.7045790251107829, \"specificity\": 0.9999819084204242, \"npv\": 0.9987953258643537, \"accuracy\": 
0.9987807807807808, \"f1\": 0.8245462402765773, \"f2\": 0.7481179422835633, \"f0_5\": 0.9183673469387755, \"p4\": 0.9035870905158078, \"phi\": 0.8362467706144072}, {\"truth_threshold\": -1.3000000193715096, \"match_probability\": 0.2888262766358852, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1429.0, \"tn\": 497460.0, \"fp\": 9.0, \"fn\": 602.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7035942885278188, \"tn_rate\": 0.9999819084204242, \"fp_rate\": 1.8091579575812764e-05, \"fn_rate\": 0.2964057114721812, \"precision\": 0.9937413073713491, \"recall\": 0.7035942885278188, \"specificity\": 0.9999819084204242, \"npv\": 0.9987913151374729, \"accuracy\": 0.9987767767767768, \"f1\": 0.8238685500144134, \"f2\": 0.7472286132608241, \"f0_5\": 0.9180264679429526, \"p4\": 0.9031791972899718, \"phi\": 0.8356568242357764}, {\"truth_threshold\": -1.2000000178813934, \"match_probability\": 0.3032695424040186, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1429.0, \"tn\": 497461.0, \"fp\": 8.0, \"fn\": 602.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7035942885278188, \"tn_rate\": 0.9999839185959326, \"fp_rate\": 1.6081404067389124e-05, \"fn_rate\": 0.2964057114721812, \"precision\": 0.9944328462073765, \"recall\": 0.7035942885278188, \"specificity\": 0.9999839185959326, \"npv\": 0.9987913175642439, \"accuracy\": 0.9987787787787787, \"f1\": 0.8241061130334487, \"f2\": 0.7473067670745738, \"f0_5\": 0.9184985216608819, \"p4\": 0.9033223408273486, \"phi\": 0.8359490872166939}, {\"truth_threshold\": -1.1000000163912773, \"match_probability\": 0.318111997717226, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1429.0, \"tn\": 497462.0, \"fp\": 7.0, \"fn\": 602.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7035942885278188, \"tn_rate\": 0.999985928771441, \"fp_rate\": 
1.4071228558965483e-05, \"fn_rate\": 0.2964057114721812, \"precision\": 0.995125348189415, \"recall\": 0.7035942885278188, \"specificity\": 0.999985928771441, \"npv\": 0.9987913199910051, \"accuracy\": 0.9987807807807808, \"f1\": 0.8243438130948947, \"f2\": 0.7473849372384938, \"f0_5\": 0.9189710610932476, \"p4\": 0.9034655297434823, \"phi\": 0.836241654891537}, {\"truth_threshold\": -1.0000000149011612, \"match_probability\": 0.33333333103806717, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1426.0, \"tn\": 497462.0, \"fp\": 7.0, \"fn\": 605.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7021171836533727, \"tn_rate\": 0.999985928771441, \"fp_rate\": 1.4071228558965483e-05, \"fn_rate\": 0.2978828163466273, \"precision\": 0.9951151430565248, \"recall\": 0.7021171836533727, \"specificity\": 0.999985928771441, \"npv\": 0.9987853039852068, \"accuracy\": 0.9987747747747747, \"f1\": 0.8233256351039261, \"f2\": 0.7460500156953018, \"f0_5\": 0.918459358495427, \"p4\": 0.9028524540501471, \"phi\": 0.8353565689622686}, {\"truth_threshold\": -0.9000000134110451, \"match_probability\": 0.34891031813411577, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1424.0, \"tn\": 497465.0, \"fp\": 4.0, \"fn\": 607.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7011324470704087, \"tn_rate\": 0.9999919592979664, \"fp_rate\": 8.040702033694562e-06, \"fn_rate\": 0.29886755292959133, \"precision\": 0.9971988795518207, \"recall\": 0.7011324470704087, \"specificity\": 0.9999919592979664, \"npv\": 0.9987813006954818, \"accuracy\": 0.9987767767767768, \"f1\": 0.8233593524139925, \"f2\": 0.7453936348408711, \"f0_5\": 0.9195402298850575, \"p4\": 0.9028731376295295, \"phi\": 0.8356470622401285}, {\"truth_threshold\": -0.800000011920929, \"match_probability\": 0.36481689239780585, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1423.0, 
\"tn\": 497465.0, \"fp\": 4.0, \"fn\": 608.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.7006400787789266, \"tn_rate\": 0.9999919592979664, \"fp_rate\": 8.040702033694562e-06, \"fn_rate\": 0.29935992122107336, \"precision\": 0.9971969166082691, \"recall\": 0.7006400787789266, \"specificity\": 0.9999919592979664, \"npv\": 0.998779295404489, \"accuracy\": 0.9987747747747747, \"f1\": 0.8230190861769809, \"f2\": 0.7449481729661815, \"f0_5\": 0.919369427574622, \"p4\": 0.9026681098063133, \"phi\": 0.8353519278116274}, {\"truth_threshold\": -0.7000000104308128, \"match_probability\": 0.38102425962470177, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1420.0, \"tn\": 497465.0, \"fp\": 4.0, \"fn\": 611.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6991629739044806, \"tn_rate\": 0.9999919592979664, \"fp_rate\": 8.040702033694562e-06, \"fn_rate\": 0.30083702609551943, \"precision\": 0.9971910112359551, \"recall\": 0.6991629739044806, \"specificity\": 0.9999919592979664, \"npv\": 0.9987732795798232, \"accuracy\": 0.9987687687687687, \"f1\": 0.8219971056439942, \"f2\": 0.7436112274821952, \"f0_5\": 0.9188559596221043, \"p4\": 0.9020518582388883, \"phi\": 0.8344659053654186}, {\"truth_threshold\": -0.5000000074505806, \"match_probability\": 0.41421356112001384, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1406.0, \"tn\": 497465.0, \"fp\": 4.0, \"fn\": 625.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6922698178237322, \"tn_rate\": 0.9999919592979664, \"fp_rate\": 8.040702033694562e-06, \"fn_rate\": 0.30773018217626785, \"precision\": 0.9971631205673759, \"recall\": 0.6922698178237322, \"specificity\": 0.9999919592979664, \"npv\": 0.9987452066895541, \"accuracy\": 0.9987407407407407, \"f1\": 0.8172043010752689, \"f2\": 0.7373610237046361, \"f0_5\": 0.9164385347412333, \"p4\": 0.8991526501297269, \"phi\": 
0.8303187752165915}, {\"truth_threshold\": -0.4000000059604645, \"match_probability\": 0.4311259267559445, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1393.0, \"tn\": 497467.0, \"fp\": 2.0, \"fn\": 638.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6858690300344658, \"tn_rate\": 0.9999959796489831, \"fp_rate\": 4.020351016847281e-06, \"fn_rate\": 0.31413096996553425, \"precision\": 0.9985663082437276, \"recall\": 0.6858690300344658, \"specificity\": 0.9999959796489831, \"npv\": 0.9987191455616787, \"accuracy\": 0.9987187187187188, \"f1\": 0.8131932282545242, \"f2\": 0.7316945057253913, \"f0_5\": 0.9151228485087374, \"p4\": 0.8967149034605146, \"phi\": 0.8270448581736107}, {\"truth_threshold\": -0.20000000298023224, \"match_probability\": 0.4653980381052749, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1382.0, \"tn\": 497467.0, \"fp\": 2.0, \"fn\": 649.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6804529788281635, \"tn_rate\": 0.9999959796489831, \"fp_rate\": 4.020351016847281e-06, \"fn_rate\": 0.31954702117183653, \"precision\": 0.9985549132947977, \"recall\": 0.6804529788281635, \"specificity\": 0.9999959796489831, \"npv\": 0.9986970906375222, \"accuracy\": 0.9986966966966967, \"f1\": 0.8093704245973645, \"f2\": 0.726756415649979, \"f0_5\": 0.9131756310294701, \"p4\": 0.89438137045544, \"phi\": 0.8237591197765245}, {\"truth_threshold\": -0.10000000149011612, \"match_probability\": 0.48267825490990723, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1373.0, \"tn\": 497467.0, \"fp\": 2.0, \"fn\": 658.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6760216642048252, \"tn_rate\": 0.9999959796489831, \"fp_rate\": 4.020351016847281e-06, \"fn_rate\": 0.32397833579517477, \"precision\": 0.9985454545454545, \"recall\": 0.6760216642048252, \"specificity\": 
0.9999959796489831, \"npv\": 0.9986790464240903, \"accuracy\": 0.9986786786786787, \"f1\": 0.806224310041104, \"f2\": 0.7227076534372039, \"f0_5\": 0.9115655291461957, \"p4\": 0.8924535626059765, \"phi\": 0.8210611168108751}, {\"truth_threshold\": -0.0, \"match_probability\": 0.5, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1371.0, \"tn\": 497468.0, \"fp\": 1.0, \"fn\": 660.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6750369276218612, \"tn_rate\": 0.9999979898244916, \"fp_rate\": 2.0101755084236405e-06, \"fn_rate\": 0.3249630723781389, \"precision\": 0.999271137026239, \"recall\": 0.6750369276218612, \"specificity\": 0.9999979898244916, \"npv\": 0.9986750393473164, \"accuracy\": 0.9986766766766767, \"f1\": 0.8057596238612988, \"f2\": 0.7218828980623421, \"f0_5\": 0.9116903843596222, \"p4\": 0.8921683886776143, \"phi\": 0.8207609364390014}, {\"truth_threshold\": 0.10000000149011612, \"match_probability\": 0.5173217450900928, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1369.0, \"tn\": 497468.0, \"fp\": 1.0, \"fn\": 662.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6740521910388971, \"tn_rate\": 0.9999979898244916, \"fp_rate\": 2.0101755084236405e-06, \"fn_rate\": 0.3259478089611029, \"precision\": 0.9992700729927008, \"recall\": 0.6740521910388971, \"specificity\": 0.9999979898244916, \"npv\": 0.9986710296508944, \"accuracy\": 0.9986726726726727, \"f1\": 0.8050573360776242, \"f2\": 0.7209816726353486, \"f0_5\": 0.9113300492610837, \"p4\": 0.8917369272127385, \"phi\": 0.8201599726443158}, {\"truth_threshold\": 0.30000000447034836, \"match_probability\": 0.5517995194264473, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1366.0, \"tn\": 497468.0, \"fp\": 1.0, \"fn\": 665.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.672575086164451, \"tn_rate\": 
0.9999979898244916, \"fp_rate\": 2.0101755084236405e-06, \"fn_rate\": 0.327424913835549, \"precision\": 0.9992684711046086, \"recall\": 0.672575086164451, \"specificity\": 0.9999979898244916, \"npv\": 0.9986650151666322, \"accuracy\": 0.9986666666666667, \"f1\": 0.8040023543260741, \"f2\": 0.7196291223264145, \"f0_5\": 0.9107881050806774, \"p4\": 0.8910881569529707, \"phi\": 0.8192577094503399}, {\"truth_threshold\": 0.4000000059604645, \"match_probability\": 0.5688740732440556, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1360.0, \"tn\": 497468.0, \"fp\": 1.0, \"fn\": 671.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6696208764155588, \"tn_rate\": 0.9999979898244916, \"fp_rate\": 2.0101755084236405e-06, \"fn_rate\": 0.33037912358444116, \"precision\": 0.9992652461425422, \"recall\": 0.6696208764155588, \"specificity\": 0.9999979898244916, \"npv\": 0.9986529864154383, \"accuracy\": 0.9986546546546546, \"f1\": 0.8018867924528302, \"f2\": 0.716921454928835, \"f0_5\": 0.9096989966555183, \"p4\": 0.8897849047168714, \"phi\": 0.8174502281397625}, {\"truth_threshold\": 0.5000000074505806, \"match_probability\": 0.5857864388799862, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1354.0, \"tn\": 497468.0, \"fp\": 1.0, \"fn\": 677.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6666666666666666, \"tn_rate\": 0.9999979898244916, \"fp_rate\": 2.0101755084236405e-06, \"fn_rate\": 0.3333333333333333, \"precision\": 0.9992619926199262, \"recall\": 0.6666666666666666, \"specificity\": 0.9999979898244916, \"npv\": 0.9986409579540094, \"accuracy\": 0.9986426426426427, \"f1\": 0.7997637330183107, \"f2\": 0.7142103597425888, \"f0_5\": 0.908602872097705, \"p4\": 0.888473980588251, \"phi\": 0.8156387850978434}, {\"truth_threshold\": 0.6000000089406967, \"match_probability\": 0.6024989422185573, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 
497469.0, \"tp\": 1345.0, \"tn\": 497468.0, \"fp\": 1.0, \"fn\": 686.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6622353520433284, \"tn_rate\": 0.9999979898244916, \"fp_rate\": 2.0101755084236405e-06, \"fn_rate\": 0.3377646479566716, \"precision\": 0.9992570579494799, \"recall\": 0.6622353520433284, \"specificity\": 0.9999979898244916, \"npv\": 0.9986229158051526, \"accuracy\": 0.9986246246246246, \"f1\": 0.7965649985193959, \"f2\": 0.7101372756071805, \"f0_5\": 0.9069453809844908, \"p4\": 0.8864930601329402, \"phi\": 0.8129141343355415}, {\"truth_threshold\": 0.800000011920929, \"match_probability\": 0.6351831076021942, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1338.0, \"tn\": 497468.0, \"fp\": 1.0, \"fn\": 693.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6587887740029542, \"tn_rate\": 0.9999979898244916, \"fp_rate\": 2.0101755084236405e-06, \"fn_rate\": 0.3412112259970458, \"precision\": 0.9992531740104555, \"recall\": 0.6587887740029542, \"specificity\": 0.9999979898244916, \"npv\": 0.9986088834734153, \"accuracy\": 0.9986106106106106, \"f1\": 0.7940652818991097, \"f2\": 0.7069639649159886, \"f0_5\": 0.9056450521185867, \"p4\": 0.884940154458185, \"phi\": 0.8107886997214477}, {\"truth_threshold\": 0.9000000134110451, \"match_probability\": 0.6510896818658842, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1332.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 699.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6558345642540621, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.34416543574593794, \"precision\": 1.0, \"recall\": 0.6558345642540621, \"specificity\": 1.0, \"npv\": 0.9985968588909765, \"accuracy\": 0.9986006006006006, \"f1\": 0.792149866190901, \"f2\": 0.7043147208121827, \"f0_5\": 0.9050142682429678, \"p4\": 0.8837474646304863, \"phi\": 0.8092677775719472}, {\"truth_threshold\": 
1.0000000149011612, \"match_probability\": 0.6666666689619328, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1326.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 705.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6528803545051699, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.34711964549483015, \"precision\": 1.0, \"recall\": 0.6528803545051699, \"specificity\": 1.0, \"npv\": 0.9985848318057546, \"accuracy\": 0.9985885885885886, \"f1\": 0.7899910634495085, \"f2\": 0.7015873015873015, \"f0_5\": 0.9038854805725971, \"p4\": 0.8824000363175218, \"phi\": 0.8074381827687037}, {\"truth_threshold\": 1.1000000163912773, \"match_probability\": 0.681888002282774, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1318.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 713.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6489414081733137, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.35105859182668636, \"precision\": 1.0, \"recall\": 0.6489414081733137, \"specificity\": 1.0, \"npv\": 0.9985687961427752, \"accuracy\": 0.9985725725725726, \"f1\": 0.7871006270528516, \"f2\": 0.6979453505613218, \"f0_5\": 0.9023688894974667, \"p4\": 0.8805909001057858, \"phi\": 0.8049923233962067}, {\"truth_threshold\": 1.2000000178813934, \"match_probability\": 0.6967304575959814, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1313.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 718.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6464795667159035, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3535204332840965, \"precision\": 1.0, \"recall\": 0.6464795667159035, \"specificity\": 1.0, \"npv\": 0.9985587741149408, \"accuracy\": 0.9985625625625626, \"f1\": 0.7852870813397129, \"f2\": 0.6956659955494331, \"f0_5\": 0.9014142523685295, \"p4\": 0.8794528280439022, \"phi\": 0.803459920363294}, {\"truth_threshold\": 
1.3000000193715096, \"match_probability\": 0.7111737233641148, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1310.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 721.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6450024618414574, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.35499753815854257, \"precision\": 1.0, \"recall\": 0.6450024618414574, \"specificity\": 1.0, \"npv\": 0.9985527609948012, \"accuracy\": 0.9985565565565565, \"f1\": 0.784196348398683, \"f2\": 0.6942972228111087, \"f0_5\": 0.9008389492504469, \"p4\": 0.8787672446539555, \"phi\": 0.8025390888425505}, {\"truth_threshold\": 1.4000000208616257, \"match_probability\": 0.7252004282056979, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1307.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 724.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6435253569670113, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3564746430329887, \"precision\": 1.0, \"recall\": 0.6435253569670113, \"specificity\": 1.0, \"npv\": 0.9985467479470808, \"accuracy\": 0.9985505505505505, \"f1\": 0.7831036548831636, \"f2\": 0.6929275792598876, \"f0_5\": 0.9002617440418791, \"p4\": 0.8780795947480086, \"phi\": 0.801617210656616}, {\"truth_threshold\": 1.5000000223517418, \"match_probability\": 0.7387961280260511, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1304.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 727.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6420482520925652, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3579517479074348, \"precision\": 1.0, \"recall\": 0.6420482520925652, \"specificity\": 1.0, \"npv\": 0.9985407349717782, \"accuracy\": 0.9985445445445446, \"f1\": 0.7820089955022489, \"f2\": 0.6915570640644888, \"f0_5\": 0.8996826272940527, \"p4\": 0.8773898689316875, \"phi\": 0.8006942821900601}, {\"truth_threshold\": 
1.600000023841858, \"match_probability\": 0.7519492561137834, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1296.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 735.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.638109305760709, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.361890694239291, \"precision\": 1.0, \"recall\": 0.638109305760709, \"specificity\": 1.0, \"npv\": 0.9985247007250042, \"accuracy\": 0.9985285285285286, \"f1\": 0.7790802524797115, \"f2\": 0.6878980891719745, \"f0_5\": 0.8981288981288982, \"p4\": 0.875540379133403, \"phi\": 0.7982279771873148}, {\"truth_threshold\": 1.700000025331974, \"match_probability\": 0.7646510400908766, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1285.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 746.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6326932545544067, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.36730674544559333, \"precision\": 1.0, \"recall\": 0.6326932545544067, \"specificity\": 1.0, \"npv\": 0.998502654476481, \"accuracy\": 0.9985065065065065, \"f1\": 0.7750301568154403, \"f2\": 0.682856839196514, \"f0_5\": 0.8959698786780086, \"p4\": 0.8729727835275262, \"phi\": 0.7948244423405328}, {\"truth_threshold\": 1.8000000268220901, \"match_probability\": 0.7768953900182098, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1282.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 749.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6312161496799606, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3687838503200394, \"precision\": 1.0, \"recall\": 0.6312161496799606, \"specificity\": 1.0, \"npv\": 0.9984966420322028, \"accuracy\": 0.9985005005005005, \"f1\": 0.7739209175973438, \"f2\": 0.6814799064426962, \"f0_5\": 0.895376449224752, \"p4\": 0.872267541831041, \"phi\": 0.7938936993401176}, {\"truth_threshold\": 
1.9000000283122063, \"match_probability\": 0.7886787621992872, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1274.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 757.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6272772033481043, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3727227966518956, \"precision\": 1.0, \"recall\": 0.6272772033481043, \"specificity\": 1.0, \"npv\": 0.9984806092014468, \"accuracy\": 0.9984844844844845, \"f1\": 0.7709531013615734, \"f2\": 0.6778037880400085, \"f0_5\": 0.8937842009260558, \"p4\": 0.8703763275955726, \"phi\": 0.791406421592089}, {\"truth_threshold\": 2.0000000298023224, \"match_probability\": 0.8000000033051833, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1266.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 765.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6233382570162481, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3766617429837518, \"precision\": 1.0, \"recall\": 0.6233382570162481, \"specificity\": 1.0, \"npv\": 0.9984645768855598, \"accuracy\": 0.9984684684684685, \"f1\": 0.7679708826205641, \"f2\": 0.6741214057507987, \"f0_5\": 0.8921775898520085, \"p4\": 0.8684695887997785, \"phi\": 0.788911382252982}, {\"truth_threshold\": 2.1000000312924385, \"match_probability\": 0.8108601793810092, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1263.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 768.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.621861152141802, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.37813884785819796, \"precision\": 1.0, \"recall\": 0.621861152141802, \"specificity\": 1.0, \"npv\": 0.9984585648998369, \"accuracy\": 0.9984624624624625, \"f1\": 0.7668488160291439, \"f2\": 0.6727388942154043, \"f0_5\": 0.8915713680643795, \"p4\": 0.8677505201016583, \"phi\": 0.7879737264239606}, {\"truth_threshold\": 
2.2000000327825546, \"match_probability\": 0.8212623941099038, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1258.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 773.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6193993106843919, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3806006893156081, \"precision\": 1.0, \"recall\": 0.6193993106843919, \"specificity\": 1.0, \"npv\": 0.9984485450845172, \"accuracy\": 0.9984524524524524, \"f1\": 0.7649741562785041, \"f2\": 0.6704327435514815, \"f0_5\": 0.8905564207843693, \"p4\": 0.8665471327679816, \"phi\": 0.7864085074432906}, {\"truth_threshold\": 2.3000000342726707, \"match_probability\": 0.8312116004280432, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1246.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 785.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6134908911866076, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3865091088133924, \"precision\": 1.0, \"recall\": 0.6134908911866076, \"specificity\": 1.0, \"npv\": 0.998424498348232, \"accuracy\": 0.9984284284284284, \"f1\": 0.7604516325907843, \"f2\": 0.6648879402347919, \"f0_5\": 0.8880969351389879, \"p4\": 0.863633549529319, \"phi\": 0.7826393392068908}, {\"truth_threshold\": 2.400000035762787, \"match_probability\": 0.8407144092272857, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1239.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 792.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6100443131462334, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.38995568685376664, \"precision\": 1.0, \"recall\": 0.6100443131462334, \"specificity\": 1.0, \"npv\": 0.9984104716202954, \"accuracy\": 0.9984144144144144, \"f1\": 0.7577981651376147, \"f2\": 0.6616469080422941, \"f0_5\": 0.8866466294547016, \"p4\": 0.8619171565312959, \"phi\": 0.7804323355663898}, {\"truth_threshold\": 
2.500000037252903, \"match_probability\": 0.8497788984739328, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1228.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 803.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.604628261939931, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3953717380600689, \"precision\": 1.0, \"recall\": 0.604628261939931, \"specificity\": 1.0, \"npv\": 0.9983884304155161, \"accuracy\": 0.9983923923923924, \"f1\": 0.7536054004295796, \"f2\": 0.6565440547476475, \"f0_5\": 0.8843439435402564, \"p4\": 0.8591945586890813, \"phi\": 0.7769516467728665}, {\"truth_threshold\": 2.600000038743019, \"match_probability\": 0.8584144256340188, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1220.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 811.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.6006893156080748, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3993106843919252, \"precision\": 1.0, \"recall\": 0.6006893156080748, \"specificity\": 1.0, \"npv\": 0.9983724010596452, \"accuracy\": 0.9983763763763763, \"f1\": 0.750538295908951, \"f2\": 0.6528253424657534, \"f0_5\": 0.8826508464766315, \"p4\": 0.8571947142334491, \"phi\": 0.7744105076214479}, {\"truth_threshold\": 2.7000000402331352, \"match_probability\": 0.8666314458464526, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1216.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 815.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5987198424421467, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4012801575578533, \"precision\": 1.0, \"recall\": 0.5987198424421467, \"specificity\": 1.0, \"npv\": 0.9983643865747245, \"accuracy\": 0.9983683683683684, \"f1\": 0.7489990760702187, \"f2\": 0.6509635974304069, \"f0_5\": 0.8817984046410442, \"p4\": 0.8561884718896848, \"phi\": 0.7731368366789086}, {\"truth_threshold\": 
2.8000000417232513, \"match_probability\": 0.8744413378412453, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1212.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 819.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5967503692762186, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4032496307237814, \"precision\": 1.0, \"recall\": 0.5967503692762186, \"specificity\": 1.0, \"npv\": 0.9983563722184761, \"accuracy\": 0.9983603603603604, \"f1\": 0.7474560592044404, \"f2\": 0.6491002570694088, \"f0_5\": 0.8809419973833406, \"p4\": 0.8551779804239885, \"phi\": 0.7718610845162758}, {\"truth_threshold\": 2.9000000432133675, \"match_probability\": 0.8818562391739494, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1204.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 827.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5928114229443624, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4071885770556376, \"precision\": 1.0, \"recall\": 0.5928114229443624, \"specificity\": 1.0, \"npv\": 0.9983403438919839, \"accuracy\": 0.9983443443443444, \"f1\": 0.7443585780525502, \"f2\": 0.6453687821612349, \"f0_5\": 0.879217175405287, \"p4\": 0.8531441416250695, \"phi\": 0.7693032950958751}, {\"truth_threshold\": 3.0000000447034836, \"match_probability\": 0.8888888919492438, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1190.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 841.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.585918266863614, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.414081733136386, \"precision\": 1.0, \"recall\": 0.585918266863614, \"specificity\": 1.0, \"npv\": 0.9983122955589894, \"accuracy\": 0.9983163163163163, \"f1\": 0.7389009624340267, \"f2\": 0.6388232767876315, \"f0_5\": 0.8761596230304816, \"p4\": 0.8495431030447466, \"phi\": 0.764806779521834}, {\"truth_threshold\": 
3.1000000461935997, \"match_probability\": 0.8955524998434058, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1184.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 847.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5829640571147218, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4170359428852782, \"precision\": 1.0, \"recall\": 0.5829640571147218, \"specificity\": 1.0, \"npv\": 0.9983002753273024, \"accuracy\": 0.9983043043043043, \"f1\": 0.736547433903577, \"f2\": 0.6360120326600773, \"f0_5\": 0.8748337520319196, \"p4\": 0.847983261710312, \"phi\": 0.7628716659593198}, {\"truth_threshold\": 3.200000047683716, \"match_probability\": 0.9018605969116819, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1173.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 858.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5775480059084195, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4224519940915805, \"precision\": 1.0, \"recall\": 0.5775480059084195, \"specificity\": 1.0, \"npv\": 0.9982782389876527, \"accuracy\": 0.9982822822822823, \"f1\": 0.7322097378277154, \"f2\": 0.6308486608583413, \"f0_5\": 0.8723784024988844, \"p4\": 0.8450973470216678, \"phi\": 0.7593112710009562}, {\"truth_threshold\": 3.300000049173832, \"match_probability\": 0.9078269283845571, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1157.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 874.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5696701132447071, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.43032988675529293, \"precision\": 1.0, \"recall\": 0.5696701132447071, \"specificity\": 1.0, \"npv\": 0.9982461878665899, \"accuracy\": 0.9982502502502503, \"f1\": 0.7258469259723965, \"f2\": 0.6233164529684301, \"f0_5\": 0.8687490614206337, \"p4\": 0.8408380238050522, \"phi\": 0.7541027906645469}, {\"truth_threshold\": 
3.400000050663948, \"match_probability\": 0.9134653434169965, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1151.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 880.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5667159034958149, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.43328409650418515, \"precision\": 1.0, \"recall\": 0.5667159034958149, \"specificity\": 1.0, \"npv\": 0.9982341692267869, \"accuracy\": 0.9982382382382382, \"f1\": 0.7234443746071653, \"f2\": 0.6204851752021563, \"f0_5\": 0.8673700075357951, \"p4\": 0.8392216062989525, \"phi\": 0.7521403985385657}, {\"truth_threshold\": 3.500000052154064, \"match_probability\": 0.9187896995557598, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1139.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 892.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5608074839980305, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.43919251600196946, \"precision\": 1.0, \"recall\": 0.5608074839980305, \"specificity\": 1.0, \"npv\": 0.9982101328153687, \"accuracy\": 0.9982142142142142, \"f1\": 0.7186119873817035, \"f2\": 0.6148116161070928, \"f0_5\": 0.8645817519356308, \"p4\": 0.835956812323099, \"phi\": 0.7482003161490423}, {\"truth_threshold\": 3.6000000536441803, \"match_probability\": 0.9238137785296746, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1128.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 903.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5553914327917282, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4446085672082718, \"precision\": 1.0, \"recall\": 0.5553914327917282, \"specificity\": 1.0, \"npv\": 0.9981881004550818, \"accuracy\": 0.9981921921921922, \"f1\": 0.7141500474833808, \"f2\": 0.6095979247730221, \"f0_5\": 0.8619899128839982, \"p4\": 0.8329260492617466, \"phi\": 0.7445704260225499}, {\"truth_threshold\": 
3.7000000551342964, \"match_probability\": 0.9285512128432143, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1124.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 907.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5534219596258001, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4465780403741999, \"precision\": 1.0, \"recall\": 0.5534219596258001, \"specificity\": 1.0, \"npv\": 0.9981800889288409, \"accuracy\": 0.9981841841841842, \"f1\": 0.7125198098256735, \"f2\": 0.6076989619377162, \"f0_5\": 0.8610387620652673, \"p4\": 0.8318147980648825, \"phi\": 0.7432461105679965}, {\"truth_threshold\": 3.8000000566244125, \"match_probability\": 0.9330154225613858, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1114.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 917.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5484982767109798, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.45150172328902016, \"precision\": 1.0, \"recall\": 0.5484982767109798, \"specificity\": 1.0, \"npv\": 0.9981600606758617, \"accuracy\": 0.9981641641641642, \"f1\": 0.7084260731319555, \"f2\": 0.6029443602511366, \"f0_5\": 0.8586403576383537, \"p4\": 0.8290150087466541, \"phi\": 0.7399250456380276}, {\"truth_threshold\": 3.9000000581145287, \"match_probability\": 0.9372195616099515, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1101.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 930.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5420974889217134, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.45790251107828656, \"precision\": 1.0, \"recall\": 0.5420974889217134, \"specificity\": 1.0, \"npv\": 0.9981340251485256, \"accuracy\": 0.9981381381381381, \"f1\": 0.7030651340996169, \"f2\": 0.5967479674796748, \"f0_5\": 0.8554778554778555, \"p4\": 0.8253283172196622, \"phi\": 0.7355854461857835}, {\"truth_threshold\": 
4.000000059604645, \"match_probability\": 0.9411764728755594, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1095.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 936.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5391432791728212, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4608567208271787, \"precision\": 1.0, \"recall\": 0.5391432791728212, \"specificity\": 1.0, \"npv\": 0.9981220092093779, \"accuracy\": 0.9981261261261262, \"f1\": 0.7005758157389635, \"f2\": 0.5938821998047511, \"f0_5\": 0.8540009358914366, \"p4\": 0.8236085674816911, \"phi\": 0.733573972452478}, {\"truth_threshold\": 4.100000061094761, \"match_probability\": 0.9448986513716398, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1093.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 938.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5381585425898572, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.46184145741014276, \"precision\": 1.0, \"recall\": 0.5381585425898572, \"specificity\": 1.0, \"npv\": 0.9981180039606186, \"accuracy\": 0.9981221221221221, \"f1\": 0.6997439180537772, \"f2\": 0.5929261147878919, \"f0_5\": 0.8535061689832891, \"p4\": 0.8230327328837387, \"phi\": 0.7329022652060395}, {\"truth_threshold\": 4.200000062584877, \"match_probability\": 0.9483982147343843, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1089.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 942.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5361890694239291, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4638109305760709, \"precision\": 1.0, \"recall\": 0.5361890694239291, \"specificity\": 1.0, \"npv\": 0.9981099935595322, \"accuracy\": 0.9981141141141141, \"f1\": 0.698076923076923, \"f2\": 0.5910126994464344, \"f0_5\": 0.8525129168623767, \"p4\": 0.8218771602970261, \"phi\": 0.7315570166633695}, {\"truth_threshold\": 
4.300000064074993, \"match_probability\": 0.9516868803254299, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1076.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 955.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5297882816346627, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.47021171836533726, \"precision\": 1.0, \"recall\": 0.5297882816346627, \"specificity\": 1.0, \"npv\": 0.9980839606439498, \"accuracy\": 0.9980880880880881, \"f1\": 0.6926295461860316, \"f2\": 0.5847826086956521, \"f0_5\": 0.8492501973164956, \"p4\": 0.8180852158142232, \"phi\": 0.7271679217599443}, {\"truth_threshold\": 4.400000065565109, \"match_probability\": 0.9547759482410569, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1070.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 961.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5268340718857706, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4731659281142294, \"precision\": 1.0, \"recall\": 0.5268340718857706, \"specificity\": 1.0, \"npv\": 0.9980719459101579, \"accuracy\": 0.9980760760760761, \"f1\": 0.690099967752338, \"f2\": 0.5819012399390907, \"f0_5\": 0.8477261923625415, \"p4\": 0.8163160984614997, \"phi\": 0.7251333031235035}, {\"truth_threshold\": 4.500000067055225, \"match_probability\": 0.9576762895591182, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1065.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 966.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5243722304283605, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4756277695716396, \"precision\": 1.0, \"recall\": 0.5243722304283605, \"specificity\": 1.0, \"npv\": 0.9980619338529597, \"accuracy\": 0.9980660660660661, \"f1\": 0.687984496124031, \"f2\": 0.5794972249428665, \"f0_5\": 0.8464473056747734, \"p4\": 0.8148325462698929, \"phi\": 0.7234334539956797}, {\"truth_threshold\": 
4.6000000685453415, \"match_probability\": 0.9603983391922627, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1044.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 987.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5140324963072378, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4859675036927622, \"precision\": 1.0, \"recall\": 0.5140324963072378, \"specificity\": 1.0, \"npv\": 0.9980198854061342, \"accuracy\": 0.998024024024024, \"f1\": 0.6790243902439025, \"f2\": 0.569371727748691, \"f0_5\": 0.8409859835669405, \"p4\": 0.8085077096122667, \"phi\": 0.7162504122578769}, {\"truth_threshold\": 4.700000070035458, \"match_probability\": 0.9629520927573305, \"total_clerical_labels\": 499500.0, \"p\": 2031.0, \"n\": 497469.0, \"tp\": 1027.0, \"tn\": 497469.0, \"fp\": 0.0, \"fn\": 1004.0, \"P_rate\": 0.004066066066066066, \"N_rate\": 0.9959339339339339, \"tp_rate\": 0.5056622353520434, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.49433776464795665, \"precision\": 1.0, \"recall\": 0.5056622353520434, \"specificity\": 1.0, \"npv\": 0.9979858487821808, \"accuracy\": 0.99798998998999, \"f1\": 0.671680837148463, \"f2\": 0.5611408589225221, \"f0_5\": 0.836455448770158, \"p4\": 0.8032736792214825, \"phi\": 0.7103828229517545}]}}, {\"mode\": \"vega-lite\"});\n",
              "</script>"
            ],
            "text/plain": [
              "alt.HConcatChart(...)"
            ]
          },
          "execution_count": 42,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "linker.evaluation.accuracy_analysis_from_labels_column(\n",
        "    \"cluster\",\n",
        "    output_type=\"threshold_selection\",\n",
        "    threshold_match_probability=0.5,\n",
        "    add_metrics=[\"f1\"],\n",
        ")"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 43,
      "metadata": {
        "execution": {
          "iopub.execute_input": "2024-06-07T09:09:22.638822Z",
          "iopub.status.busy": "2024-06-07T09:09:22.638569Z",
          "iopub.status.idle": "2024-06-07T09:09:22.853941Z",
          "shell.execute_reply": "2024-06-07T09:09:22.853250Z"
        }
      },
      "outputs": [
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\n",
            " -- WARNING --\n",
            "You have called predict(), but there are some parameter estimates which have neither been estimated or specified in your settings dictionary.  To produce predictions the following untrained trained parameters will use default values.\n",
            "Comparison: 'email':\n",
            "    m values not fully trained\n"
          ]
        },
        {
          "data": {
            "text/html": [
              "<div>\n",
              "<style scoped>\n",
              "    .dataframe tbody tr th:only-of-type {\n",
              "        vertical-align: middle;\n",
              "    }\n",
              "\n",
              "    .dataframe tbody tr th {\n",
              "        vertical-align: top;\n",
              "    }\n",
              "\n",
              "    .dataframe thead th {\n",
              "        text-align: right;\n",
              "    }\n",
              "</style>\n",
              "<table border=\"1\" class=\"dataframe\">\n",
              "  <thead>\n",
              "    <tr style=\"text-align: right;\">\n",
              "      <th></th>\n",
              "      <th>clerical_match_score</th>\n",
              "      <th>found_by_blocking_rules</th>\n",
              "      <th>match_weight</th>\n",
              "      <th>match_probability</th>\n",
              "      <th>unique_id_l</th>\n",
              "      <th>unique_id_r</th>\n",
              "      <th>surname_l</th>\n",
              "      <th>surname_r</th>\n",
              "      <th>first_name_l</th>\n",
              "      <th>first_name_r</th>\n",
              "      <th>...</th>\n",
              "      <th>email_l</th>\n",
              "      <th>email_r</th>\n",
              "      <th>gamma_email</th>\n",
              "      <th>tf_email_l</th>\n",
              "      <th>tf_email_r</th>\n",
              "      <th>bf_email</th>\n",
              "      <th>bf_tf_adj_email</th>\n",
              "      <th>cluster_l</th>\n",
              "      <th>cluster_r</th>\n",
              "      <th>match_key</th>\n",
              "    </tr>\n",
              "  </thead>\n",
              "  <tbody>\n",
              "    <tr>\n",
              "      <th>0</th>\n",
              "      <td>1.0</td>\n",
              "      <td>False</td>\n",
              "      <td>-15.568945</td>\n",
              "      <td>0.000021</td>\n",
              "      <td>452</td>\n",
              "      <td>454</td>\n",
              "      <td>Daves</td>\n",
              "      <td>Reuben</td>\n",
              "      <td>None</td>\n",
              "      <td>Davies</td>\n",
              "      <td>...</td>\n",
              "      <td>rd@lewis.com</td>\n",
              "      <td>idlewrs.cocm</td>\n",
              "      <td>0</td>\n",
              "      <td>0.003802</td>\n",
              "      <td>0.001267</td>\n",
              "      <td>0.01099</td>\n",
              "      <td>1.0</td>\n",
              "      <td>115</td>\n",
              "      <td>115</td>\n",
              "      <td>4</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1</th>\n",
              "      <td>1.0</td>\n",
              "      <td>False</td>\n",
              "      <td>-14.884057</td>\n",
              "      <td>0.000033</td>\n",
              "      <td>715</td>\n",
              "      <td>717</td>\n",
              "      <td>Joes</td>\n",
              "      <td>Jones</td>\n",
              "      <td>None</td>\n",
              "      <td>Mia</td>\n",
              "      <td>...</td>\n",
              "      <td>None</td>\n",
              "      <td>mia.j63@martinez.biz</td>\n",
              "      <td>-1</td>\n",
              "      <td>NaN</td>\n",
              "      <td>0.005070</td>\n",
              "      <td>1.00000</td>\n",
              "      <td>1.0</td>\n",
              "      <td>182</td>\n",
              "      <td>182</td>\n",
              "      <td>4</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>2</th>\n",
              "      <td>1.0</td>\n",
              "      <td>False</td>\n",
              "      <td>-14.884057</td>\n",
              "      <td>0.000033</td>\n",
              "      <td>626</td>\n",
              "      <td>628</td>\n",
              "      <td>Davidson</td>\n",
              "      <td>None</td>\n",
              "      <td>geeorGe</td>\n",
              "      <td>Geeorge</td>\n",
              "      <td>...</td>\n",
              "      <td>None</td>\n",
              "      <td>gdavidson@johnson-brown.com</td>\n",
              "      <td>-1</td>\n",
              "      <td>NaN</td>\n",
              "      <td>0.005070</td>\n",
              "      <td>1.00000</td>\n",
              "      <td>1.0</td>\n",
              "      <td>158</td>\n",
              "      <td>158</td>\n",
              "      <td>4</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>3</th>\n",
              "      <td>1.0</td>\n",
              "      <td>False</td>\n",
              "      <td>-13.761589</td>\n",
              "      <td>0.000072</td>\n",
              "      <td>983</td>\n",
              "      <td>984</td>\n",
              "      <td>Milller</td>\n",
              "      <td>Miller</td>\n",
              "      <td>Jessica</td>\n",
              "      <td>aessicJ</td>\n",
              "      <td>...</td>\n",
              "      <td>None</td>\n",
              "      <td>jessica.miller@johnson.com</td>\n",
              "      <td>-1</td>\n",
              "      <td>NaN</td>\n",
              "      <td>0.007605</td>\n",
              "      <td>1.00000</td>\n",
              "      <td>1.0</td>\n",
              "      <td>246</td>\n",
              "      <td>246</td>\n",
              "      <td>4</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>4</th>\n",
              "      <td>1.0</td>\n",
              "      <td>True</td>\n",
              "      <td>-11.637585</td>\n",
              "      <td>0.000314</td>\n",
              "      <td>594</td>\n",
              "      <td>595</td>\n",
              "      <td>Kik</td>\n",
              "      <td>Kiirk</td>\n",
              "      <td>Grace</td>\n",
              "      <td>Grace</td>\n",
              "      <td>...</td>\n",
              "      <td>gk@frey-robinson.org</td>\n",
              "      <td>rgk@frey-robinon.org</td>\n",
              "      <td>0</td>\n",
              "      <td>0.001267</td>\n",
              "      <td>0.001267</td>\n",
              "      <td>0.01099</td>\n",
              "      <td>1.0</td>\n",
              "      <td>146</td>\n",
              "      <td>146</td>\n",
              "      <td>0</td>\n",
              "    </tr>\n",
              "  </tbody>\n",
              "</table>\n",
              "<p>5 rows × 38 columns</p>\n",
              "</div>"
            ],
            "text/plain": [
              "   clerical_match_score  found_by_blocking_rules  match_weight  \\\n",
              "0                   1.0                    False    -15.568945   \n",
              "1                   1.0                    False    -14.884057   \n",
              "2                   1.0                    False    -14.884057   \n",
              "3                   1.0                    False    -13.761589   \n",
              "4                   1.0                     True    -11.637585   \n",
              "\n",
              "   match_probability  unique_id_l  unique_id_r surname_l surname_r  \\\n",
              "0           0.000021          452          454     Daves    Reuben   \n",
              "1           0.000033          715          717      Joes     Jones   \n",
              "2           0.000033          626          628  Davidson      None   \n",
              "3           0.000072          983          984   Milller    Miller   \n",
              "4           0.000314          594          595       Kik     Kiirk   \n",
              "\n",
              "  first_name_l first_name_r  ...               email_l  \\\n",
              "0         None       Davies  ...          rd@lewis.com   \n",
              "1         None          Mia  ...                  None   \n",
              "2      geeorGe      Geeorge  ...                  None   \n",
              "3      Jessica      aessicJ  ...                  None   \n",
              "4        Grace        Grace  ...  gk@frey-robinson.org   \n",
              "\n",
              "                       email_r  gamma_email  tf_email_l  tf_email_r  bf_email  \\\n",
              "0                 idlewrs.cocm            0    0.003802    0.001267   0.01099   \n",
              "1         mia.j63@martinez.biz           -1         NaN    0.005070   1.00000   \n",
              "2  gdavidson@johnson-brown.com           -1         NaN    0.005070   1.00000   \n",
              "3   jessica.miller@johnson.com           -1         NaN    0.007605   1.00000   \n",
              "4         rgk@frey-robinon.org            0    0.001267    0.001267   0.01099   \n",
              "\n",
              "   bf_tf_adj_email cluster_l cluster_r  match_key  \n",
              "0              1.0       115       115          4  \n",
              "1              1.0       182       182          4  \n",
              "2              1.0       158       158          4  \n",
              "3              1.0       246       246          4  \n",
              "4              1.0       146       146          0  \n",
              "\n",
              "[5 rows x 38 columns]"
            ]
          },
          "execution_count": 43,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "# Plot some false positives\n",
        "linker.evaluation.prediction_errors_from_labels_column(\n",
        "    \"cluster\", include_false_negatives=True, include_false_positives=True\n",
        ").as_pandas_dataframe(limit=5)"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 44,
      "metadata": {
        "execution": {
          "iopub.execute_input": "2024-06-07T09:09:22.857193Z",
          "iopub.status.busy": "2024-06-07T09:09:22.856931Z",
          "iopub.status.idle": "2024-06-07T09:09:23.602967Z",
          "shell.execute_reply": "2024-06-07T09:09:23.602410Z"
        }
      },
      "outputs": [
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\n",
            " -- WARNING --\n",
            "You have called predict(), but there are some parameter estimates which have neither been estimated or specified in your settings dictionary.  To produce predictions the following untrained trained parameters will use default values.\n",
            "Comparison: 'email':\n",
            "    m values not fully trained\n"
          ]
        },
        {
          "data": {
            "text/html": [
              "\n",
              "<style>\n",
              "  #altair-viz-f048efdc54af4a52b2dbc943367085cc.vega-embed {\n",
              "    width: 100%;\n",
              "    display: flex;\n",
              "  }\n",
              "\n",
              "  #altair-viz-f048efdc54af4a52b2dbc943367085cc.vega-embed details,\n",
              "  #altair-viz-f048efdc54af4a52b2dbc943367085cc.vega-embed details summary {\n",
              "    position: relative;\n",
              "  }\n",
              "</style>\n",
              "<div id=\"altair-viz-f048efdc54af4a52b2dbc943367085cc\"></div>\n",
              "<script type=\"text/javascript\">\n",
              "  var VEGA_DEBUG = (typeof VEGA_DEBUG == \"undefined\") ? {} : VEGA_DEBUG;\n",
              "  (function(spec, embedOpt){\n",
              "    let outputDiv = document.currentScript.previousElementSibling;\n",
              "    if (outputDiv.id !== \"altair-viz-f048efdc54af4a52b2dbc943367085cc\") {\n",
              "      outputDiv = document.getElementById(\"altair-viz-f048efdc54af4a52b2dbc943367085cc\");\n",
              "    }\n",
              "    const paths = {\n",
              "      \"vega\": \"https://cdn.jsdelivr.net/npm/vega@5?noext\",\n",
              "      \"vega-lib\": \"https://cdn.jsdelivr.net/npm/vega-lib?noext\",\n",
              "      \"vega-lite\": \"https://cdn.jsdelivr.net/npm/vega-lite@5.17.0?noext\",\n",
              "      \"vega-embed\": \"https://cdn.jsdelivr.net/npm/vega-embed@6?noext\",\n",
              "    };\n",
              "\n",
              "    function maybeLoadScript(lib, version) {\n",
              "      var key = `${lib.replace(\"-\", \"\")}_version`;\n",
              "      return (VEGA_DEBUG[key] == version) ?\n",
              "        Promise.resolve(paths[lib]) :\n",
              "        new Promise(function(resolve, reject) {\n",
              "          var s = document.createElement('script');\n",
              "          document.getElementsByTagName(\"head\")[0].appendChild(s);\n",
              "          s.async = true;\n",
              "          s.onload = () => {\n",
              "            VEGA_DEBUG[key] = version;\n",
              "            return resolve(paths[lib]);\n",
              "          };\n",
              "          s.onerror = () => reject(`Error loading script: ${paths[lib]}`);\n",
              "          s.src = paths[lib];\n",
              "        });\n",
              "    }\n",
              "\n",
              "    function showError(err) {\n",
              "      outputDiv.innerHTML = `<div class=\"error\" style=\"color:red;\">${err}</div>`;\n",
              "      throw err;\n",
              "    }\n",
              "\n",
              "    function displayChart(vegaEmbed) {\n",
              "      vegaEmbed(outputDiv, spec, embedOpt)\n",
              "        .catch(err => showError(`Javascript Error: ${err.message}<br>This usually means there's a typo in your chart specification. See the javascript console for the full traceback.`));\n",
              "    }\n",
              "\n",
              "    if(typeof define === \"function\" && define.amd) {\n",
              "      requirejs.config({paths});\n",
              "      require([\"vega-embed\"], displayChart, err => showError(`Error loading script: ${err.message}`));\n",
              "    } else {\n",
              "      maybeLoadScript(\"vega\", \"5\")\n",
              "        .then(() => maybeLoadScript(\"vega-lite\", \"5.17.0\"))\n",
              "        .then(() => maybeLoadScript(\"vega-embed\", \"6\"))\n",
              "        .catch(showError)\n",
              "        .then(() => displayChart(vegaEmbed));\n",
              "    }\n",
              "  })({\"config\": {\"view\": {\"continuousWidth\": 400, \"continuousHeight\": 300}}, \"layer\": [{\"layer\": [{\"mark\": \"rule\", \"encoding\": {\"color\": {\"value\": \"black\"}, \"size\": {\"value\": 0.5}, \"y\": {\"field\": \"zero\", \"type\": \"quantitative\"}}}, {\"mark\": {\"type\": \"bar\", \"width\": 60}, \"encoding\": {\"color\": {\"condition\": {\"test\": \"(datum.log2_bayes_factor < 0)\", \"value\": \"red\"}, \"value\": \"green\"}, \"opacity\": {\"condition\": {\"test\": \"datum.column_name == 'Prior match weight' || datum.column_name == 'Final score'\", \"value\": 1}, \"value\": 0.5}, \"tooltip\": [{\"field\": \"column_name\", \"title\": \"Comparison column\", \"type\": \"nominal\"}, {\"field\": \"value_l\", \"title\": \"Value (L)\", \"type\": \"nominal\"}, {\"field\": \"value_r\", \"title\": \"Value (R)\", \"type\": \"nominal\"}, {\"field\": \"label_for_charts\", \"title\": \"Label\", \"type\": \"ordinal\"}, {\"field\": \"sql_condition\", \"title\": \"SQL condition\", \"type\": \"nominal\"}, {\"field\": \"comparison_vector_value\", \"title\": \"Comparison vector value\", \"type\": \"nominal\"}, {\"field\": \"bayes_factor\", \"format\": \",.4f\", \"title\": \"Bayes factor = m/u\", \"type\": \"quantitative\"}, {\"field\": \"log2_bayes_factor\", \"format\": \",.4f\", \"title\": \"Match weight = log2(m/u)\", \"type\": \"quantitative\"}, {\"field\": \"prob\", \"format\": \".4f\", \"title\": \"Cumulative match probability\", \"type\": \"quantitative\"}, {\"field\": \"bayes_factor_description\", \"title\": \"Match weight description\", \"type\": \"nominal\"}], \"x\": {\"axis\": {\"grid\": true, \"labelAlign\": \"center\", \"labelAngle\": -20, \"labelExpr\": \"datum.value == 'Prior' || datum.value == 'Final score' ? 
'' : datum.value\", \"labelPadding\": 10, \"tickBand\": \"extent\", \"title\": \"Column\"}, \"field\": \"column_name\", \"sort\": {\"field\": \"bar_sort_order\", \"order\": \"ascending\"}, \"type\": \"nominal\"}, \"y\": {\"axis\": {\"grid\": false, \"orient\": \"left\", \"title\": \"Match Weight\"}, \"field\": \"previous_sum\", \"type\": \"quantitative\"}, \"y2\": {\"field\": \"sum\"}}}, {\"mark\": {\"type\": \"text\", \"fontWeight\": \"bold\"}, \"encoding\": {\"color\": {\"value\": \"white\"}, \"text\": {\"condition\": {\"test\": \"abs(datum.log2_bayes_factor) > 1\", \"field\": \"log2_bayes_factor\", \"format\": \".2f\", \"type\": \"nominal\"}, \"value\": \"\"}, \"x\": {\"axis\": {\"labelAngle\": -20, \"title\": \"Column\"}, \"field\": \"column_name\", \"sort\": {\"field\": \"bar_sort_order\", \"order\": \"ascending\"}, \"type\": \"nominal\"}, \"y\": {\"axis\": {\"orient\": \"left\"}, \"field\": \"center\", \"type\": \"quantitative\"}}}, {\"mark\": {\"type\": \"text\", \"baseline\": \"bottom\", \"dy\": -25, \"fontWeight\": \"bold\"}, \"encoding\": {\"color\": {\"value\": \"black\"}, \"text\": {\"field\": \"column_name\", \"type\": \"nominal\"}, \"x\": {\"axis\": {\"labelAngle\": -20, \"title\": \"Column\"}, \"field\": \"column_name\", \"sort\": {\"field\": \"bar_sort_order\", \"order\": \"ascending\"}, \"type\": \"nominal\"}, \"y\": {\"field\": \"sum_top\", \"type\": \"quantitative\"}}}, {\"mark\": {\"type\": \"text\", \"baseline\": \"bottom\", \"dy\": -13, \"fontSize\": 8}, \"encoding\": {\"color\": {\"value\": \"grey\"}, \"text\": {\"field\": \"value_l\", \"type\": \"nominal\"}, \"x\": {\"axis\": {\"labelAngle\": -20, \"title\": \"Column\"}, \"field\": \"column_name\", \"sort\": {\"field\": \"bar_sort_order\", \"order\": \"ascending\"}, \"type\": \"nominal\"}, \"y\": {\"field\": \"sum_top\", \"type\": \"quantitative\"}}}, {\"mark\": {\"type\": \"text\", \"baseline\": \"bottom\", \"dy\": -5, \"fontSize\": 8}, \"encoding\": {\"color\": {\"value\": \"grey\"}, 
\"text\": {\"field\": \"value_r\", \"type\": \"nominal\"}, \"x\": {\"axis\": {\"labelAngle\": -20, \"title\": \"Column\"}, \"field\": \"column_name\", \"sort\": {\"field\": \"bar_sort_order\", \"order\": \"ascending\"}, \"type\": \"nominal\"}, \"y\": {\"field\": \"sum_top\", \"type\": \"quantitative\"}}}]}, {\"mark\": {\"type\": \"rule\", \"color\": \"black\", \"strokeWidth\": 2, \"x2Offset\": 30, \"xOffset\": -30}, \"encoding\": {\"x\": {\"axis\": {\"labelAngle\": -20, \"title\": \"Column\"}, \"field\": \"column_name\", \"sort\": {\"field\": \"bar_sort_order\", \"order\": \"ascending\"}, \"type\": \"nominal\"}, \"x2\": {\"field\": \"lead\"}, \"y\": {\"axis\": {\"labelExpr\": \"format(1 / (1 + pow(2, -1*datum.value)), '.2r')\", \"orient\": \"right\", \"title\": \"Probability\"}, \"field\": \"sum\", \"scale\": {\"zero\": false}, \"type\": \"quantitative\"}}}], \"data\": {\"name\": \"data-0ead9d481700204d4138fa8762f4591e\"}, \"height\": 450, \"params\": [{\"name\": \"record_number\", \"bind\": {\"input\": \"range\", \"max\": 4, \"min\": 0, \"step\": 1}, \"value\": 0}], \"resolve\": {\"axis\": {\"y\": \"independent\"}}, \"title\": {\"text\": \"Match weights waterfall chart\", \"subtitle\": \"How each comparison contributes to the final match score\"}, \"transform\": [{\"filter\": \"(datum.record_number == record_number)\"}, {\"filter\": \"(datum.bayes_factor !== 1.0)\"}, {\"window\": [{\"op\": \"sum\", \"field\": \"log2_bayes_factor\", \"as\": \"sum\"}, {\"op\": \"lead\", \"field\": \"column_name\", \"as\": \"lead\"}], \"frame\": [null, 0]}, {\"calculate\": \"datum.column_name === \\\"Final score\\\" ? datum.sum - datum.log2_bayes_factor : datum.sum\", \"as\": \"sum\"}, {\"calculate\": \"datum.lead === null ? datum.column_name : datum.lead\", \"as\": \"lead\"}, {\"calculate\": \"datum.column_name === \\\"Final score\\\" || datum.column_name === \\\"Prior match weight\\\" ? 
0 : datum.sum - datum.log2_bayes_factor\", \"as\": \"previous_sum\"}, {\"calculate\": \"datum.sum > datum.previous_sum ? datum.column_name : \\\"\\\"\", \"as\": \"top_label\"}, {\"calculate\": \"datum.sum < datum.previous_sum ? datum.column_name : \\\"\\\"\", \"as\": \"bottom_label\"}, {\"calculate\": \"datum.sum > datum.previous_sum ? datum.sum : datum.previous_sum\", \"as\": \"sum_top\"}, {\"calculate\": \"datum.sum < datum.previous_sum ? datum.sum : datum.previous_sum\", \"as\": \"sum_bottom\"}, {\"calculate\": \"(datum.sum + datum.previous_sum) / 2\", \"as\": \"center\"}, {\"calculate\": \"(datum.log2_bayes_factor > 0 ? \\\"+\\\" : \\\"\\\") + datum.log2_bayes_factor\", \"as\": \"text_log2_bayes_factor\"}, {\"calculate\": \"datum.sum < datum.previous_sum ? 4 : -4\", \"as\": \"dy\"}, {\"calculate\": \"datum.sum < datum.previous_sum ? \\\"top\\\" : \\\"bottom\\\"\", \"as\": \"baseline\"}, {\"calculate\": \"1. / (1 + pow(2, -1.*datum.sum))\", \"as\": \"prob\"}, {\"calculate\": \"0*datum.sum\", \"as\": \"zero\"}], \"width\": {\"step\": 75}, \"$schema\": \"https://vega.github.io/schema/vega-lite/v5.9.3.json\", \"datasets\": {\"data-0ead9d481700204d4138fa8762f4591e\": [{\"column_name\": \"Prior\", \"label_for_charts\": \"Starting match weight (prior)\", \"sql_condition\": null, \"log2_bayes_factor\": -8.224622793739668, \"bayes_factor\": 0.0033430420247643373, \"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": null, \"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 0, \"record_number\": 0}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.33827597476760074, \"u_probability\": 0.9920225858150832, \"bayes_factor\": 0.3409962430337818, \"log2_bayes_factor\": -1.5521722506126663, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  2.93 
times less likely to be a match\", \"column_name\": \"first_name_surname\", \"value_l\": \"Daves, None\", \"value_r\": \"Reuben, Davies\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 1, \"record_number\": 0}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.33827597476760074, \"u_probability\": 0.9920225858150832, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  2.93 times less likely to be a match\", \"column_name\": \"tf_first_name_surname\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 2, \"record_number\": 0}, {\"sql_condition\": \"ABS(EPOCH(try_strptime(\\\"dob_l\\\", '%Y-%m-%d')) - EPOCH(try_strptime(\\\"dob_r\\\", '%Y-%m-%d'))) <= 31557600.0\", \"label_for_charts\": \"Abs difference of 'transformed dob <= 1 year'\", \"m_probability\": 0.11816342853625729, \"u_probability\": 0.033053053053053054, \"bayes_factor\": 3.5749626016874934, \"log2_bayes_factor\": 1.8379281496834179, \"comparison_vector_value\": 2, \"bayes_factor_description\": \"If comparison level is `abs difference of 'transformed dob <= 1 year'` then comparison is 3.57 times more likely to be a match\", \"column_name\": \"dob\", \"value_l\": \"1986-12-21\", \"value_r\": \"1987-11-21\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 3, \"record_number\": 0}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.43397775853820786, \"u_probability\": 0.9448524288198547, \"bayes_factor\": 0.4593074487624035, \"log2_bayes_factor\": -1.1224679144083194, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  2.18 times less likely to be a match\", \"column_name\": \"city\", \"value_l\": \"Swanse\", \"value_r\": \"Swansea\", 
\"term_frequency_adjustment\": false, \"bar_sort_order\": 4, \"record_number\": 0}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.43397775853820786, \"u_probability\": 0.9448524288198547, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  2.18 times less likely to be a match\", \"column_name\": \"tf_city\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 5, \"record_number\": 0}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.010939329773323148, \"u_probability\": 0.9953516949425154, \"bayes_factor\": 0.010990416582306546, \"log2_bayes_factor\": -6.50761011833404, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  90.99 times less likely to be a match\", \"column_name\": \"email\", \"value_l\": \"rd@lewis.com\", \"value_r\": \"idlewrs.cocm\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 6, \"record_number\": 0}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.010939329773323148, \"u_probability\": 0.9953516949425154, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  90.99 times less likely to be a match\", \"column_name\": \"tf_email\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 7, \"record_number\": 0}, {\"column_name\": \"Final score\", \"label_for_charts\": \"Final score\", \"sql_condition\": null, \"log2_bayes_factor\": -15.568944927411275, \"bayes_factor\": 2.0572192335180428e-05, \"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": 
null, \"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 8, \"record_number\": 0}, {\"column_name\": \"Prior\", \"label_for_charts\": \"Starting match weight (prior)\", \"sql_condition\": null, \"log2_bayes_factor\": -8.224622793739668, \"bayes_factor\": 0.0033430420247643373, \"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": null, \"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 0, \"record_number\": 1}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.33827597476760074, \"u_probability\": 0.9920225858150832, \"bayes_factor\": 0.3409962430337818, \"log2_bayes_factor\": -1.5521722506126663, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  2.93 times less likely to be a match\", \"column_name\": \"first_name_surname\", \"value_l\": \"Joes, None\", \"value_r\": \"Jones, Mia\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 1, \"record_number\": 1}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.33827597476760074, \"u_probability\": 0.9920225858150832, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  2.93 times less likely to be a match\", \"column_name\": \"tf_first_name_surname\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 2, \"record_number\": 1}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.04128709823358695, \"u_probability\": 0.6536676676676677, \"bayes_factor\": 0.063162215718734, \"log2_bayes_factor\": -3.984794408216209, 
\"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  15.83 times less likely to be a match\", \"column_name\": \"dob\", \"value_l\": \"2021-01-21\", \"value_r\": \"2031-02-20\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 3, \"record_number\": 1}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.43397775853820786, \"u_probability\": 0.9448524288198547, \"bayes_factor\": 0.4593074487624035, \"log2_bayes_factor\": -1.1224679144083194, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  2.18 times less likely to be a match\", \"column_name\": \"city\", \"value_l\": \"Ipswich\", \"value_r\": \"Ipsich\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 4, \"record_number\": 1}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.43397775853820786, \"u_probability\": 0.9448524288198547, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  2.18 times less likely to be a match\", \"column_name\": \"tf_city\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 5, \"record_number\": 1}, {\"sql_condition\": \"\\\"email_l\\\" IS NULL OR \\\"email_r\\\" IS NULL\", \"label_for_charts\": \"email is NULL\", \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": -1, \"bayes_factor_description\": \"If comparison level is `email is null` then comparison is 1.00 times more likely to be a match\", \"column_name\": \"email\", \"value_l\": \"None\", \"value_r\": \"mia.j63@martinez.biz\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 6, \"record_number\": 1}, {\"sql_condition\": \"\\\"email_l\\\" IS NULL OR 
\\\"email_r\\\" IS NULL\", \"label_for_charts\": \"email is NULL\", \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": -1, \"bayes_factor_description\": \"If comparison level is `email is null` then comparison is 1.00 times more likely to be a match\", \"column_name\": \"tf_email\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 7, \"record_number\": 1}, {\"column_name\": \"Final score\", \"label_for_charts\": \"Final score\", \"sql_condition\": null, \"log2_bayes_factor\": -14.884057366976863, \"bayes_factor\": 3.3071376790898506e-05, \"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": null, \"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 8, \"record_number\": 1}, {\"column_name\": \"Prior\", \"label_for_charts\": \"Starting match weight (prior)\", \"sql_condition\": null, \"log2_bayes_factor\": -8.224622793739668, \"bayes_factor\": 0.0033430420247643373, \"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": null, \"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 0, \"record_number\": 2}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.33827597476760074, \"u_probability\": 0.9920225858150832, \"bayes_factor\": 0.3409962430337818, \"log2_bayes_factor\": -1.5521722506126663, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  2.93 times less likely to be a match\", \"column_name\": \"first_name_surname\", \"value_l\": \"Davidson, geeorGe\", \"value_r\": \"None, Geeorge\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 1, \"record_number\": 2}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 
0.33827597476760074, \"u_probability\": 0.9920225858150832, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  2.93 times less likely to be a match\", \"column_name\": \"tf_first_name_surname\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 2, \"record_number\": 2}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.04128709823358695, \"u_probability\": 0.6536676676676677, \"bayes_factor\": 0.063162215718734, \"log2_bayes_factor\": -3.984794408216209, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  15.83 times less likely to be a match\", \"column_name\": \"dob\", \"value_l\": \"1999-05-07\", \"value_r\": \"2009-06-05\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 3, \"record_number\": 2}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.43397775853820786, \"u_probability\": 0.9448524288198547, \"bayes_factor\": 0.4593074487624035, \"log2_bayes_factor\": -1.1224679144083194, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  2.18 times less likely to be a match\", \"column_name\": \"city\", \"value_l\": \"Southamptn\", \"value_r\": \"Suthammpton\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 4, \"record_number\": 2}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.43397775853820786, \"u_probability\": 0.9448524288198547, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  2.18 times less likely to be a match\", 
\"column_name\": \"tf_city\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 5, \"record_number\": 2}, {\"sql_condition\": \"\\\"email_l\\\" IS NULL OR \\\"email_r\\\" IS NULL\", \"label_for_charts\": \"email is NULL\", \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": -1, \"bayes_factor_description\": \"If comparison level is `email is null` then comparison is 1.00 times more likely to be a match\", \"column_name\": \"email\", \"value_l\": \"None\", \"value_r\": \"gdavidson@johnson-brown.com\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 6, \"record_number\": 2}, {\"sql_condition\": \"\\\"email_l\\\" IS NULL OR \\\"email_r\\\" IS NULL\", \"label_for_charts\": \"email is NULL\", \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": -1, \"bayes_factor_description\": \"If comparison level is `email is null` then comparison is 1.00 times more likely to be a match\", \"column_name\": \"tf_email\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 7, \"record_number\": 2}, {\"column_name\": \"Final score\", \"label_for_charts\": \"Final score\", \"sql_condition\": null, \"log2_bayes_factor\": -14.884057366976863, \"bayes_factor\": 3.3071376790898506e-05, \"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": null, \"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 8, \"record_number\": 2}, {\"column_name\": \"Prior\", \"label_for_charts\": \"Starting match weight (prior)\", \"sql_condition\": null, \"log2_bayes_factor\": -8.224622793739668, \"bayes_factor\": 0.0033430420247643373, \"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": null, \"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 0, 
\"record_number\": 3}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.33827597476760074, \"u_probability\": 0.9920225858150832, \"bayes_factor\": 0.3409962430337818, \"log2_bayes_factor\": -1.5521722506126663, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  2.93 times less likely to be a match\", \"column_name\": \"first_name_surname\", \"value_l\": \"Milller, Jessica\", \"value_r\": \"Miller, aessicJ\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 1, \"record_number\": 3}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.33827597476760074, \"u_probability\": 0.9920225858150832, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  2.93 times less likely to be a match\", \"column_name\": \"tf_first_name_surname\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 2, \"record_number\": 3}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.04128709823358695, \"u_probability\": 0.6536676676676677, \"bayes_factor\": 0.063162215718734, \"log2_bayes_factor\": -3.984794408216209, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  15.83 times less likely to be a match\", \"column_name\": \"dob\", \"value_l\": \"2001-04-21\", \"value_r\": \"2011-05-20\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 3, \"record_number\": 3}, {\"sql_condition\": \"\\\"city_l\\\" IS NULL OR \\\"city_r\\\" IS NULL\", \"label_for_charts\": \"city is NULL\", \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": -1, \"bayes_factor_description\": \"If comparison 
level is `city is null` then comparison is 1.00 times more likely to be a match\", \"column_name\": \"city\", \"value_l\": \"None\", \"value_r\": \"Coventry\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 4, \"record_number\": 3}, {\"sql_condition\": \"\\\"city_l\\\" IS NULL OR \\\"city_r\\\" IS NULL\", \"label_for_charts\": \"city is NULL\", \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": -1, \"bayes_factor_description\": \"If comparison level is `city is null` then comparison is 1.00 times more likely to be a match\", \"column_name\": \"tf_city\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 5, \"record_number\": 3}, {\"sql_condition\": \"\\\"email_l\\\" IS NULL OR \\\"email_r\\\" IS NULL\", \"label_for_charts\": \"email is NULL\", \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": -1, \"bayes_factor_description\": \"If comparison level is `email is null` then comparison is 1.00 times more likely to be a match\", \"column_name\": \"email\", \"value_l\": \"None\", \"value_r\": \"jessica.miller@johnson.com\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 6, \"record_number\": 3}, {\"sql_condition\": \"\\\"email_l\\\" IS NULL OR \\\"email_r\\\" IS NULL\", \"label_for_charts\": \"email is NULL\", \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": -1, \"bayes_factor_description\": \"If comparison level is `email is null` then comparison is 1.00 times more likely to be a match\", \"column_name\": \"tf_email\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 7, \"record_number\": 3}, {\"column_name\": \"Final score\", \"label_for_charts\": \"Final score\", \"sql_condition\": null, \"log2_bayes_factor\": -13.761589452568543, \"bayes_factor\": 7.200270076178563e-05, \"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": null, 
\"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 8, \"record_number\": 3}, {\"column_name\": \"Prior\", \"label_for_charts\": \"Starting match weight (prior)\", \"sql_condition\": null, \"log2_bayes_factor\": -8.224622793739668, \"bayes_factor\": 0.0033430420247643373, \"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": null, \"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 0, \"record_number\": 4}, {\"sql_condition\": \"\\\"first_name_l\\\" = \\\"first_name_r\\\"\", \"label_for_charts\": \"Exact match on first_name\", \"m_probability\": 0.15244196901590418, \"u_probability\": 0.003551511527801817, \"bayes_factor\": 42.923123808711686, \"log2_bayes_factor\": 5.4236831695685, \"comparison_vector_value\": 1, \"bayes_factor_description\": \"If comparison level is `exact match on first_name` then comparison is 42.92 times more likely to be a match\", \"column_name\": \"first_name_surname\", \"value_l\": \"Kik, Grace\", \"value_r\": \"Kiirk, Grace\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 1, \"record_number\": 4}, {\"sql_condition\": \"\\\"first_name_l\\\" = \\\"first_name_r\\\"\", \"label_for_charts\": \"Term freq adjustment on first_name with weight {cl.tf_adjustment_weight}\", \"m_probability\": null, \"u_probability\": null, \"bayes_factor\": 0.5902612159206619, \"log2_bayes_factor\": -0.7605745446267813, \"comparison_vector_value\": 1, \"bayes_factor_description\": \"Term frequency adjustment on first_name makes comparison  1.69 times less likely to be a match\", \"column_name\": \"tf_first_name_surname\", \"value_l\": \"Grace\", \"value_r\": \"Grace\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 2, \"record_number\": 4}, {\"sql_condition\": \"ABS(EPOCH(try_strptime(\\\"dob_l\\\", '%Y-%m-%d')) - EPOCH(try_strptime(\\\"dob_r\\\", '%Y-%m-%d'))) <= 
315576000.0\", \"label_for_charts\": \"Abs difference of 'transformed dob <= 10 year'\", \"m_probability\": 0.22571427556533818, \"u_probability\": 0.30747947947947946, \"bayes_factor\": 0.7340791520378578, \"log2_bayes_factor\": -0.4459924649307459, \"comparison_vector_value\": 1, \"bayes_factor_description\": \"If comparison level is `abs difference of 'transformed dob <= 10 year'` then comparison is  1.36 times less likely to be a match\", \"column_name\": \"dob\", \"value_l\": \"2020-05-12\", \"value_r\": \"2011-03-17\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 3, \"record_number\": 4}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.43397775853820786, \"u_probability\": 0.9448524288198547, \"bayes_factor\": 0.4593074487624035, \"log2_bayes_factor\": -1.1224679144083194, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  2.18 times less likely to be a match\", \"column_name\": \"city\", \"value_l\": \"Edinburgh\", \"value_r\": \"Edhibnurgh\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 4, \"record_number\": 4}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.43397775853820786, \"u_probability\": 0.9448524288198547, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  2.18 times less likely to be a match\", \"column_name\": \"tf_city\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 5, \"record_number\": 4}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.010939329773323148, \"u_probability\": 0.9953516949425154, \"bayes_factor\": 0.010990416582306546, \"log2_bayes_factor\": -6.50761011833404, \"comparison_vector_value\": 0, 
\"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  90.99 times less likely to be a match\", \"column_name\": \"email\", \"value_l\": \"gk@frey-robinson.org\", \"value_r\": \"rgk@frey-robinon.org\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 6, \"record_number\": 4}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.010939329773323148, \"u_probability\": 0.9953516949425154, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  90.99 times less likely to be a match\", \"column_name\": \"tf_email\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 7, \"record_number\": 4}, {\"column_name\": \"Final score\", \"label_for_charts\": \"Final score\", \"sql_condition\": null, \"log2_bayes_factor\": -11.637584666471055, \"bayes_factor\": 0.00031386142218499933, \"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": null, \"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 8, \"record_number\": 4}]}}, {\"mode\": \"vega-lite\"});\n",
              "</script>"
            ],
            "text/plain": [
              "alt.LayerChart(...)"
            ]
          },
          "execution_count": 44,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "# Pull the highest-weight prediction errors (both FNs and FPs) judged\n",
        "# against the ground-truth `cluster` column, then inspect them visually.\n",
        "prediction_errors = linker.evaluation.prediction_errors_from_labels_column(\n",
        "    \"cluster\", include_false_negatives=True, include_false_positives=True\n",
        ")\n",
        "error_records = prediction_errors.as_record_dict(limit=5)\n",
        "\n",
        "linker.visualisations.waterfall_chart(error_records)"
      ]
    }
  ],
  "metadata": {
    "kernelspec": {
      "display_name": ".venv",
      "language": "python",
      "name": "python3"
    },
    "language_info": {
      "codemirror_mode": {
        "name": "ipython",
        "version": 3
      },
      "file_extension": ".py",
      "mimetype": "text/x-python",
      "name": "python",
      "nbconvert_exporter": "python",
      "pygments_lexer": "ipython3",
      "version": "3.10.8"
    }
  },
  "nbformat": 4,
  "nbformat_minor": 4
}