{
  "cells": [
    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "## Deduplicating the febrl3 dataset\n",
        "\n",
        "See A.2 [here](https://arxiv.org/pdf/2008.04443.pdf) and [here](https://recordlinkage.readthedocs.io/en/latest/ref-datasets.html) for the source of this data\n"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "<a target=\"_blank\" href=\"https://colab.research.google.com/github/moj-analytical-services/splink/blob/master/docs/demos/examples/duckdb/febrl3.ipynb\">\n",
        "  <img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/>\n",
        "</a>\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 29,
      "metadata": {
        "execution": {
          "iopub.execute_input": "2024-06-07T09:11:24.420657Z",
          "iopub.status.busy": "2024-06-07T09:11:24.420336Z",
          "iopub.status.idle": "2024-06-07T09:11:24.443364Z",
          "shell.execute_reply": "2024-06-07T09:11:24.442120Z"
        },
        "tags": [
          "hide_input"
        ]
      },
      "outputs": [],
      "source": [
        "# Uncomment and run this cell if you're running in Google Colab.\n",
        "# %pip install splink"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 30,
      "metadata": {
        "execution": {
          "iopub.execute_input": "2024-06-07T09:11:24.447798Z",
          "iopub.status.busy": "2024-06-07T09:11:24.447495Z",
          "iopub.status.idle": "2024-06-07T09:11:26.149918Z",
          "shell.execute_reply": "2024-06-07T09:11:26.149230Z"
        },
        "tags": [
          "hide_output"
        ]
      },
      "outputs": [],
      "source": [
        "from splink.datasets import splink_datasets\n",
        "\n",
        "df = splink_datasets.febrl3"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 31,
      "metadata": {},
      "outputs": [
        {
          "data": {
            "text/html": [
              "<div>\n",
              "<style scoped>\n",
              "    .dataframe tbody tr th:only-of-type {\n",
              "        vertical-align: middle;\n",
              "    }\n",
              "\n",
              "    .dataframe tbody tr th {\n",
              "        vertical-align: top;\n",
              "    }\n",
              "\n",
              "    .dataframe thead th {\n",
              "        text-align: right;\n",
              "    }\n",
              "</style>\n",
              "<table border=\"1\" class=\"dataframe\">\n",
              "  <thead>\n",
              "    <tr style=\"text-align: right;\">\n",
              "      <th></th>\n",
              "      <th>rec_id</th>\n",
              "      <th>given_name</th>\n",
              "      <th>surname</th>\n",
              "      <th>street_number</th>\n",
              "      <th>address_1</th>\n",
              "      <th>address_2</th>\n",
              "      <th>suburb</th>\n",
              "      <th>postcode</th>\n",
              "      <th>state</th>\n",
              "      <th>date_of_birth</th>\n",
              "      <th>soc_sec_id</th>\n",
              "      <th>cluster</th>\n",
              "    </tr>\n",
              "  </thead>\n",
              "  <tbody>\n",
              "    <tr>\n",
              "      <th>0</th>\n",
              "      <td>rec-1496-org</td>\n",
              "      <td>mitchell</td>\n",
              "      <td>green</td>\n",
              "      <td>7</td>\n",
              "      <td>wallaby place</td>\n",
              "      <td>delmar</td>\n",
              "      <td>cleveland</td>\n",
              "      <td>2119</td>\n",
              "      <td>sa</td>\n",
              "      <td>19560409</td>\n",
              "      <td>1804974</td>\n",
              "      <td>rec-1496</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1</th>\n",
              "      <td>rec-552-dup-3</td>\n",
              "      <td>harley</td>\n",
              "      <td>mccarthy</td>\n",
              "      <td>177</td>\n",
              "      <td>pridhamstreet</td>\n",
              "      <td>milton</td>\n",
              "      <td>marsden</td>\n",
              "      <td>3165</td>\n",
              "      <td>nsw</td>\n",
              "      <td>19080419</td>\n",
              "      <td>6089216</td>\n",
              "      <td>rec-552</td>\n",
              "    </tr>\n",
              "  </tbody>\n",
              "</table>\n",
              "</div>"
            ],
            "text/plain": [
              "          rec_id given_name    surname street_number       address_1  \\\n",
              "0   rec-1496-org   mitchell      green             7   wallaby place   \n",
              "1  rec-552-dup-3     harley   mccarthy           177   pridhamstreet   \n",
              "\n",
              "  address_2      suburb  postcode state date_of_birth soc_sec_id   cluster  \n",
              "0    delmar   cleveland      2119    sa      19560409    1804974  rec-1496  \n",
              "1    milton     marsden      3165   nsw      19080419    6089216   rec-552  "
            ]
          },
          "execution_count": 31,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "df = df.rename(columns=lambda x: x.strip())\n",
        "\n",
        "df[\"cluster\"] = df[\"rec_id\"].apply(lambda x: \"-\".join(x.split(\"-\")[:2]))\n",
        "\n",
        "df[\"date_of_birth\"] = df[\"date_of_birth\"].astype(str).str.strip()\n",
        "df[\"soc_sec_id\"] = df[\"soc_sec_id\"].astype(str).str.strip()\n",
        "\n",
        "df.head(2)"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 34,
      "metadata": {
        "execution": {
          "iopub.execute_input": "2024-06-07T09:11:26.174301Z",
          "iopub.status.busy": "2024-06-07T09:11:26.174024Z",
          "iopub.status.idle": "2024-06-07T09:11:26.331196Z",
          "shell.execute_reply": "2024-06-07T09:11:26.330465Z"
        }
      },
      "outputs": [],
      "source": [
        "from splink import DuckDBAPI, Linker, SettingsCreator\n",
        "\n",
        "settings = SettingsCreator(\n",
        "    unique_id_column_name=\"rec_id\",\n",
        "    link_type=\"dedupe_only\",\n",
        ")\n",
        "\n",
        "linker = Linker(df, settings, db_api=DuckDBAPI())"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "It's usually a good idea to perform exploratory analysis on your data so you understand what's in each column and how often it's missing:\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 35,
      "metadata": {
        "execution": {
          "iopub.execute_input": "2024-06-07T09:11:26.334644Z",
          "iopub.status.busy": "2024-06-07T09:11:26.334398Z",
          "iopub.status.idle": "2024-06-07T09:11:26.630134Z",
          "shell.execute_reply": "2024-06-07T09:11:26.629629Z"
        }
      },
      "outputs": [
        {
          "data": {
            "text/html": [
              "\n",
              "<style>\n",
              "  #altair-viz-ebfe75bbb39a46c880f07502980337ca.vega-embed {\n",
              "    width: 100%;\n",
              "    display: flex;\n",
              "  }\n",
              "\n",
              "  #altair-viz-ebfe75bbb39a46c880f07502980337ca.vega-embed details,\n",
              "  #altair-viz-ebfe75bbb39a46c880f07502980337ca.vega-embed details summary {\n",
              "    position: relative;\n",
              "  }\n",
              "</style>\n",
              "<div id=\"altair-viz-ebfe75bbb39a46c880f07502980337ca\"></div>\n",
              "<script type=\"text/javascript\">\n",
              "  var VEGA_DEBUG = (typeof VEGA_DEBUG == \"undefined\") ? {} : VEGA_DEBUG;\n",
              "  (function(spec, embedOpt){\n",
              "    let outputDiv = document.currentScript.previousElementSibling;\n",
              "    if (outputDiv.id !== \"altair-viz-ebfe75bbb39a46c880f07502980337ca\") {\n",
              "      outputDiv = document.getElementById(\"altair-viz-ebfe75bbb39a46c880f07502980337ca\");\n",
              "    }\n",
              "    const paths = {\n",
              "      \"vega\": \"https://cdn.jsdelivr.net/npm/vega@5?noext\",\n",
              "      \"vega-lib\": \"https://cdn.jsdelivr.net/npm/vega-lib?noext\",\n",
              "      \"vega-lite\": \"https://cdn.jsdelivr.net/npm/vega-lite@5.17.0?noext\",\n",
              "      \"vega-embed\": \"https://cdn.jsdelivr.net/npm/vega-embed@6?noext\",\n",
              "    };\n",
              "\n",
              "    function maybeLoadScript(lib, version) {\n",
              "      var key = `${lib.replace(\"-\", \"\")}_version`;\n",
              "      return (VEGA_DEBUG[key] == version) ?\n",
              "        Promise.resolve(paths[lib]) :\n",
              "        new Promise(function(resolve, reject) {\n",
              "          var s = document.createElement('script');\n",
              "          document.getElementsByTagName(\"head\")[0].appendChild(s);\n",
              "          s.async = true;\n",
              "          s.onload = () => {\n",
              "            VEGA_DEBUG[key] = version;\n",
              "            return resolve(paths[lib]);\n",
              "          };\n",
              "          s.onerror = () => reject(`Error loading script: ${paths[lib]}`);\n",
              "          s.src = paths[lib];\n",
              "        });\n",
              "    }\n",
              "\n",
              "    function showError(err) {\n",
              "      outputDiv.innerHTML = `<div class=\"error\" style=\"color:red;\">${err}</div>`;\n",
              "      throw err;\n",
              "    }\n",
              "\n",
              "    function displayChart(vegaEmbed) {\n",
              "      vegaEmbed(outputDiv, spec, embedOpt)\n",
              "        .catch(err => showError(`Javascript Error: ${err.message}<br>This usually means there's a typo in your chart specification. See the javascript console for the full traceback.`));\n",
              "    }\n",
              "\n",
              "    if(typeof define === \"function\" && define.amd) {\n",
              "      requirejs.config({paths});\n",
              "      require([\"vega-embed\"], displayChart, err => showError(`Error loading script: ${err.message}`));\n",
              "    } else {\n",
              "      maybeLoadScript(\"vega\", \"5\")\n",
              "        .then(() => maybeLoadScript(\"vega-lite\", \"5.17.0\"))\n",
              "        .then(() => maybeLoadScript(\"vega-embed\", \"6\"))\n",
              "        .catch(showError)\n",
              "        .then(() => displayChart(vegaEmbed));\n",
              "    }\n",
              "  })({\"config\": {\"view\": {\"continuousWidth\": 400, \"continuousHeight\": 300}}, \"layer\": [{\"mark\": \"rect\", \"encoding\": {\"color\": {\"field\": \"completeness\", \"legend\": null, \"scale\": {\"scheme\": \"darkred\", \"zero\": true}, \"type\": \"quantitative\"}, \"tooltip\": [{\"field\": \"source_dataset\", \"title\": \"Source dataset\", \"type\": \"nominal\"}, {\"field\": \"total_rows_inc_nulls\", \"format\": \",\", \"title\": \"# of records\", \"type\": \"quantitative\"}, {\"field\": \"column_name\", \"title\": \"Column name\", \"type\": \"nominal\"}, {\"field\": \"total_null_rows\", \"format\": \",\", \"title\": \"# of nulls\", \"type\": \"quantitative\"}, {\"field\": \"completeness\", \"format\": \".1%\", \"type\": \"quantitative\"}], \"x\": {\"axis\": {\"labelAngle\": 20}, \"field\": \"column_name\", \"sort\": {\"field\": \"mean_comp\", \"order\": \"descending\"}, \"title\": \"Column name\", \"type\": \"nominal\"}, \"y\": {\"field\": \"source_dataset\", \"title\": \"Source dataset\", \"type\": \"nominal\"}}, \"title\": \"Column completeness by source dataset\", \"transform\": [{\"joinaggregate\": [{\"op\": \"mean\", \"field\": \"completeness\", \"as\": \"mean_comp\"}], \"groupby\": [\"column_name\"]}]}, {\"mark\": {\"type\": \"text\"}, \"encoding\": {\"color\": {\"condition\": {\"test\": \"datum['completeness'] < 0.5\", \"value\": \"white\"}, \"value\": \"black\"}, \"text\": {\"field\": \"completeness\", \"format\": \".0%\", \"type\": \"quantitative\"}, \"x\": {\"axis\": {\"labelAngle\": 0}, \"field\": \"column_name\", \"sort\": {\"field\": \"mean_comp\", \"order\": \"descending\"}, \"type\": \"nominal\"}, \"y\": {\"field\": \"source_dataset\", \"type\": \"nominal\"}}, \"transform\": [{\"joinaggregate\": [{\"op\": \"mean\", \"field\": \"completeness\", \"as\": \"mean_comp\"}], \"groupby\": [\"column_name\"]}]}], \"data\": {\"name\": \"data-f284bee0b7d37d94cca34fa76f5ff473\"}, \"height\": {\"step\": 40}, \"width\": {\"step\": 40}, 
\"$schema\": \"https://vega.github.io/schema/vega-lite/v5.9.3.json\", \"datasets\": {\"data-f284bee0b7d37d94cca34fa76f5ff473\": [{\"source_dataset\": \"input_data_1\", \"column_name\": \"rec_id\", \"total_null_rows\": 0, \"total_rows_inc_nulls\": 5000, \"completeness\": 1.0}, {\"source_dataset\": \"input_data_1\", \"column_name\": \"given_name\", \"total_null_rows\": 0, \"total_rows_inc_nulls\": 5000, \"completeness\": 1.0}, {\"source_dataset\": \"input_data_1\", \"column_name\": \"surname\", \"total_null_rows\": 0, \"total_rows_inc_nulls\": 5000, \"completeness\": 1.0}, {\"source_dataset\": \"input_data_1\", \"column_name\": \"street_number\", \"total_null_rows\": 0, \"total_rows_inc_nulls\": 5000, \"completeness\": 1.0}, {\"source_dataset\": \"input_data_1\", \"column_name\": \"address_1\", \"total_null_rows\": 0, \"total_rows_inc_nulls\": 5000, \"completeness\": 1.0}, {\"source_dataset\": \"input_data_1\", \"column_name\": \"address_2\", \"total_null_rows\": 0, \"total_rows_inc_nulls\": 5000, \"completeness\": 1.0}, {\"source_dataset\": \"input_data_1\", \"column_name\": \"suburb\", \"total_null_rows\": 0, \"total_rows_inc_nulls\": 5000, \"completeness\": 1.0}, {\"source_dataset\": \"input_data_1\", \"column_name\": \"postcode\", \"total_null_rows\": 0, \"total_rows_inc_nulls\": 5000, \"completeness\": 1.0}, {\"source_dataset\": \"input_data_1\", \"column_name\": \"state\", \"total_null_rows\": 0, \"total_rows_inc_nulls\": 5000, \"completeness\": 1.0}, {\"source_dataset\": \"input_data_1\", \"column_name\": \"date_of_birth\", \"total_null_rows\": 0, \"total_rows_inc_nulls\": 5000, \"completeness\": 1.0}, {\"source_dataset\": \"input_data_1\", \"column_name\": \"soc_sec_id\", \"total_null_rows\": 0, \"total_rows_inc_nulls\": 5000, \"completeness\": 1.0}, {\"source_dataset\": \"input_data_1\", \"column_name\": \"cluster\", \"total_null_rows\": 0, \"total_rows_inc_nulls\": 5000, \"completeness\": 1.0}]}}, {\"mode\": \"vega-lite\"});\n",
              "</script>"
            ],
            "text/plain": [
              "alt.LayerChart(...)"
            ]
          },
          "execution_count": 35,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "from splink.exploratory import completeness_chart\n",
        "\n",
        "completeness_chart(df, db_api=DuckDBAPI())"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 36,
      "metadata": {
        "execution": {
          "iopub.execute_input": "2024-06-07T09:11:26.633200Z",
          "iopub.status.busy": "2024-06-07T09:11:26.632979Z",
          "iopub.status.idle": "2024-06-07T09:11:27.047469Z",
          "shell.execute_reply": "2024-06-07T09:11:27.046951Z"
        }
      },
      "outputs": [
        {
          "data": {
            "text/html": [
              "\n",
              "<style>\n",
              "  #altair-viz-ab24a0f3fd3046f69992e84408104c8b.vega-embed {\n",
              "    width: 100%;\n",
              "    display: flex;\n",
              "  }\n",
              "\n",
              "  #altair-viz-ab24a0f3fd3046f69992e84408104c8b.vega-embed details,\n",
              "  #altair-viz-ab24a0f3fd3046f69992e84408104c8b.vega-embed details summary {\n",
              "    position: relative;\n",
              "  }\n",
              "</style>\n",
              "<div id=\"altair-viz-ab24a0f3fd3046f69992e84408104c8b\"></div>\n",
              "<script type=\"text/javascript\">\n",
              "  var VEGA_DEBUG = (typeof VEGA_DEBUG == \"undefined\") ? {} : VEGA_DEBUG;\n",
              "  (function(spec, embedOpt){\n",
              "    let outputDiv = document.currentScript.previousElementSibling;\n",
              "    if (outputDiv.id !== \"altair-viz-ab24a0f3fd3046f69992e84408104c8b\") {\n",
              "      outputDiv = document.getElementById(\"altair-viz-ab24a0f3fd3046f69992e84408104c8b\");\n",
              "    }\n",
              "    const paths = {\n",
              "      \"vega\": \"https://cdn.jsdelivr.net/npm/vega@5?noext\",\n",
              "      \"vega-lib\": \"https://cdn.jsdelivr.net/npm/vega-lib?noext\",\n",
              "      \"vega-lite\": \"https://cdn.jsdelivr.net/npm/vega-lite@5.17.0?noext\",\n",
              "      \"vega-embed\": \"https://cdn.jsdelivr.net/npm/vega-embed@6?noext\",\n",
              "    };\n",
              "\n",
              "    function maybeLoadScript(lib, version) {\n",
              "      var key = `${lib.replace(\"-\", \"\")}_version`;\n",
              "      return (VEGA_DEBUG[key] == version) ?\n",
              "        Promise.resolve(paths[lib]) :\n",
              "        new Promise(function(resolve, reject) {\n",
              "          var s = document.createElement('script');\n",
              "          document.getElementsByTagName(\"head\")[0].appendChild(s);\n",
              "          s.async = true;\n",
              "          s.onload = () => {\n",
              "            VEGA_DEBUG[key] = version;\n",
              "            return resolve(paths[lib]);\n",
              "          };\n",
              "          s.onerror = () => reject(`Error loading script: ${paths[lib]}`);\n",
              "          s.src = paths[lib];\n",
              "        });\n",
              "    }\n",
              "\n",
              "    function showError(err) {\n",
              "      outputDiv.innerHTML = `<div class=\"error\" style=\"color:red;\">${err}</div>`;\n",
              "      throw err;\n",
              "    }\n",
              "\n",
              "    function displayChart(vegaEmbed) {\n",
              "      vegaEmbed(outputDiv, spec, embedOpt)\n",
              "        .catch(err => showError(`Javascript Error: ${err.message}<br>This usually means there's a typo in your chart specification. See the javascript console for the full traceback.`));\n",
              "    }\n",
              "\n",
              "    if(typeof define === \"function\" && define.amd) {\n",
              "      requirejs.config({paths});\n",
              "      require([\"vega-embed\"], displayChart, err => showError(`Error loading script: ${err.message}`));\n",
              "    } else {\n",
              "      maybeLoadScript(\"vega\", \"5\")\n",
              "        .then(() => maybeLoadScript(\"vega-lite\", \"5.17.0\"))\n",
              "        .then(() => maybeLoadScript(\"vega-embed\", \"6\"))\n",
              "        .catch(showError)\n",
              "        .then(() => displayChart(vegaEmbed));\n",
              "    }\n",
              "  })({\"config\": {\"view\": {\"continuousWidth\": 400, \"continuousHeight\": 300}}, \"vconcat\": [{\"hconcat\": [{\"mark\": {\"type\": \"line\", \"interpolate\": \"step-after\"}, \"data\": {\"values\": [{\"percentile_ex_nulls\": 0.9688000082969666, \"percentile_inc_nulls\": 0.9688000082969666, \"value_count\": 156, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 156.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.9526000022888184, \"percentile_inc_nulls\": 0.9526000022888184, \"value_count\": 81, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 81.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.9387999773025513, \"percentile_inc_nulls\": 0.9387999773025513, \"value_count\": 69, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 69.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.9265999794006348, \"percentile_inc_nulls\": 0.9265999794006348, \"value_count\": 61, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 61.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.9157999753952026, \"percentile_inc_nulls\": 0.9157999753952026, \"value_count\": 54, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 54.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.9056000113487244, \"percentile_inc_nulls\": 0.9056000113487244, \"value_count\": 51, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 51.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.895799994468689, \"percentile_inc_nulls\": 
0.895799994468689, \"value_count\": 49, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 49.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.8863999843597412, \"percentile_inc_nulls\": 0.8863999843597412, \"value_count\": 47, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 47.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.8772000074386597, \"percentile_inc_nulls\": 0.8772000074386597, \"value_count\": 46, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 46.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.8682000041007996, \"percentile_inc_nulls\": 0.8682000041007996, \"value_count\": 45, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 45.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.8597999811172485, \"percentile_inc_nulls\": 0.8597999811172485, \"value_count\": 42, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 42.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.836400032043457, \"percentile_inc_nulls\": 0.836400032043457, \"value_count\": 39, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 117.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.8141999840736389, \"percentile_inc_nulls\": 0.8141999840736389, \"value_count\": 37, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 111.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.8073999881744385, \"percentile_inc_nulls\": 
0.8073999881744385, \"value_count\": 34, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 34.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.7946000099182129, \"percentile_inc_nulls\": 0.7946000099182129, \"value_count\": 32, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 64.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.7821999788284302, \"percentile_inc_nulls\": 0.7821999788284302, \"value_count\": 31, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 62.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.7702000141143799, \"percentile_inc_nulls\": 0.7702000141143799, \"value_count\": 30, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 60.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.7644000053405762, \"percentile_inc_nulls\": 0.7644000053405762, \"value_count\": 29, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 29.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.7588000297546387, \"percentile_inc_nulls\": 0.7588000297546387, \"value_count\": 28, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 28.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.7425999641418457, \"percentile_inc_nulls\": 0.7425999641418457, \"value_count\": 27, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 81.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.7373999953269958, \"percentile_inc_nulls\": 
0.7373999953269958, \"value_count\": 26, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 26.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.727400004863739, \"percentile_inc_nulls\": 0.727400004863739, \"value_count\": 25, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 50.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.7081999778747559, \"percentile_inc_nulls\": 0.7081999778747559, \"value_count\": 24, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 96.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.6759999990463257, \"percentile_inc_nulls\": 0.6759999990463257, \"value_count\": 23, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 161.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.6540000438690186, \"percentile_inc_nulls\": 0.6540000438690186, \"value_count\": 22, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 110.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.6288000345230103, \"percentile_inc_nulls\": 0.6288000345230103, \"value_count\": 21, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 126.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.6208000183105469, \"percentile_inc_nulls\": 0.6208000183105469, \"value_count\": 20, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 40.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.6094000339508057, \"percentile_inc_nulls\": 
0.6094000339508057, \"value_count\": 19, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 57.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.5914000272750854, \"percentile_inc_nulls\": 0.5914000272750854, \"value_count\": 18, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 90.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.5845999717712402, \"percentile_inc_nulls\": 0.5845999717712402, \"value_count\": 17, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 34.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.5717999935150146, \"percentile_inc_nulls\": 0.5717999935150146, \"value_count\": 16, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 64.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.5478000044822693, \"percentile_inc_nulls\": 0.5478000044822693, \"value_count\": 15, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 120.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.522599995136261, \"percentile_inc_nulls\": 0.522599995136261, \"value_count\": 14, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 126.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.509600043296814, \"percentile_inc_nulls\": 0.509600043296814, \"value_count\": 13, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 65.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.5024000406265259, \"percentile_inc_nulls\": 
0.5024000406265259, \"value_count\": 12, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 36.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.48919999599456787, \"percentile_inc_nulls\": 0.48919999599456787, \"value_count\": 11, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 66.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.4631999731063843, \"percentile_inc_nulls\": 0.4631999731063843, \"value_count\": 10, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 130.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.4326000213623047, \"percentile_inc_nulls\": 0.4326000213623047, \"value_count\": 9, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 153.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.40380001068115234, \"percentile_inc_nulls\": 0.40380001068115234, \"value_count\": 8, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 144.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.3659999966621399, \"percentile_inc_nulls\": 0.3659999966621399, \"value_count\": 7, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 189.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.33240002393722534, \"percentile_inc_nulls\": 0.33240002393722534, \"value_count\": 6, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 168.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.2784000039100647, \"percentile_inc_nulls\": 
0.2784000039100647, \"value_count\": 5, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 270.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.23680001497268677, \"percentile_inc_nulls\": 0.23680001497268677, \"value_count\": 4, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 208.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.1905999779701233, \"percentile_inc_nulls\": 0.1905999779701233, \"value_count\": 3, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 231.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.14020001888275146, \"percentile_inc_nulls\": 0.14020001888275146, \"value_count\": 2, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 252.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 0.0, \"percentile_inc_nulls\": 0.0, \"value_count\": 1, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 701.0, \"distinct_value_count\": 1214}, {\"percentile_ex_nulls\": 1.0, \"percentile_inc_nulls\": 1.0, \"value_count\": 156, \"group_name\": \"given_name\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 156.0, \"distinct_value_count\": 1214}]}, \"encoding\": {\"tooltip\": [{\"field\": \"value_count\", \"type\": \"quantitative\"}, {\"field\": \"percentile_ex_nulls\", \"type\": \"quantitative\"}, {\"field\": \"percentile_inc_nulls\", \"type\": \"quantitative\"}, {\"field\": \"total_non_null_rows\", \"type\": \"quantitative\"}, {\"field\": \"total_rows_inc_nulls\", \"type\": \"quantitative\"}], \"x\": {\"field\": \"percentile_ex_nulls\", \"sort\": 
\"descending\", \"title\": \"Percentile\", \"type\": \"quantitative\"}, \"y\": {\"field\": \"value_count\", \"title\": \"Count of values\", \"type\": \"quantitative\"}}, \"title\": {\"text\": \"Distribution of counts of values in column given_name\", \"subtitle\": \"In this col, 0 values (0.0%) are null and there are 1214 distinct values\"}}, {\"mark\": \"bar\", \"data\": {\"values\": [{\"value_count\": 156, \"group_name\": \"given_name\", \"value\": \" \", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1214}, {\"value_count\": 81, \"group_name\": \"given_name\", \"value\": \" joshua\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1214}, {\"value_count\": 69, \"group_name\": \"given_name\", \"value\": \" emiily\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1214}, {\"value_count\": 61, \"group_name\": \"given_name\", \"value\": \" jack\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1214}, {\"value_count\": 54, \"group_name\": \"given_name\", \"value\": \" benjamin\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1214}, {\"value_count\": 51, \"group_name\": \"given_name\", \"value\": \" isabella\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1214}, {\"value_count\": 49, \"group_name\": \"given_name\", \"value\": \" samuel\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1214}, {\"value_count\": 47, \"group_name\": \"given_name\", \"value\": \" thomas\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1214}, {\"value_count\": 46, \"group_name\": \"given_name\", \"value\": \" sophie\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1214}, {\"value_count\": 45, \"group_name\": 
\"given_name\", \"value\": \" james\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1214}]}, \"encoding\": {\"tooltip\": [{\"field\": \"value\", \"type\": \"nominal\"}, {\"field\": \"value_count\", \"type\": \"quantitative\"}, {\"field\": \"total_non_null_rows\", \"type\": \"quantitative\"}, {\"field\": \"total_rows_inc_nulls\", \"type\": \"quantitative\"}], \"x\": {\"field\": \"value\", \"sort\": \"-y\", \"title\": null, \"type\": \"nominal\"}, \"y\": {\"field\": \"value_count\", \"title\": \"Value count\", \"type\": \"quantitative\"}}, \"title\": \"Top 10 values by value count\"}, {\"mark\": \"bar\", \"data\": {\"values\": [{\"value_count\": 1, \"group_name\": \"given_name\", \"value\": \" jaco b\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1214}, {\"value_count\": 1, \"group_name\": \"given_name\", \"value\": \" zarlia\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1214}, {\"value_count\": 1, \"group_name\": \"given_name\", \"value\": \" jaxin\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1214}, {\"value_count\": 1, \"group_name\": \"given_name\", \"value\": \" michel\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1214}, {\"value_count\": 1, \"group_name\": \"given_name\", \"value\": \" bullock\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1214}, {\"value_count\": 1, \"group_name\": \"given_name\", \"value\": \" felicigy\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1214}, {\"value_count\": 1, \"group_name\": \"given_name\", \"value\": \" sopihe\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1214}, {\"value_count\": 1, \"group_name\": \"given_name\", \"value\": \" larsisa\", \"total_non_null_rows\": 5000, 
\"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1214}, {\"value_count\": 1, \"group_name\": \"given_name\", \"value\": \" victora\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1214}, {\"value_count\": 1, \"group_name\": \"given_name\", \"value\": \" finey\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1214}]}, \"encoding\": {\"tooltip\": [{\"field\": \"value\", \"type\": \"nominal\"}, {\"field\": \"value_count\", \"type\": \"quantitative\"}, {\"field\": \"total_non_null_rows\", \"type\": \"quantitative\"}, {\"field\": \"total_rows_inc_nulls\", \"type\": \"quantitative\"}], \"x\": {\"field\": \"value\", \"sort\": \"-y\", \"title\": null, \"type\": \"nominal\"}, \"y\": {\"field\": \"value_count\", \"scale\": {\"domain\": [0, 156]}, \"title\": \"Value count\", \"type\": \"quantitative\"}}, \"title\": \"Bottom 10 values by value count\"}]}, {\"hconcat\": [{\"mark\": {\"type\": \"line\", \"interpolate\": \"step-after\"}, \"data\": {\"values\": [{\"percentile_ex_nulls\": 0.9753999710083008, \"percentile_inc_nulls\": 0.9753999710083008, \"value_count\": 123, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 123.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.9581999778747559, \"percentile_inc_nulls\": 0.9581999778747559, \"value_count\": 86, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 86.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.9423999786376953, \"percentile_inc_nulls\": 0.9423999786376953, \"value_count\": 79, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 79.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.9277999997138977, \"percentile_inc_nulls\": 0.9277999997138977, 
\"value_count\": 73, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 73.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.9139999747276306, \"percentile_inc_nulls\": 0.9139999747276306, \"value_count\": 69, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 69.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.9025999903678894, \"percentile_inc_nulls\": 0.9025999903678894, \"value_count\": 57, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 57.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.8925999999046326, \"percentile_inc_nulls\": 0.8925999999046326, \"value_count\": 50, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 50.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.8831999897956848, \"percentile_inc_nulls\": 0.8831999897956848, \"value_count\": 47, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 47.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.8740000128746033, \"percentile_inc_nulls\": 0.8740000128746033, \"value_count\": 46, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 46.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.847599983215332, \"percentile_inc_nulls\": 0.847599983215332, \"value_count\": 44, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 132.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.8395999670028687, \"percentile_inc_nulls\": 0.8395999670028687, \"value_count\": 40, 
\"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 40.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.8317999839782715, \"percentile_inc_nulls\": 0.8317999839782715, \"value_count\": 39, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 39.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.8242000341415405, \"percentile_inc_nulls\": 0.8242000341415405, \"value_count\": 38, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 38.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.8181999921798706, \"percentile_inc_nulls\": 0.8181999921798706, \"value_count\": 30, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 30.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.8123999834060669, \"percentile_inc_nulls\": 0.8123999834060669, \"value_count\": 29, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 29.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.8068000078201294, \"percentile_inc_nulls\": 0.8068000078201294, \"value_count\": 28, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 28.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.7906000018119812, \"percentile_inc_nulls\": 0.7906000018119812, \"value_count\": 27, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 81.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.7802000045776367, \"percentile_inc_nulls\": 0.7802000045776367, \"value_count\": 26, \"group_name\": \"surname\", 
\"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 52.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.7752000093460083, \"percentile_inc_nulls\": 0.7752000093460083, \"value_count\": 25, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 25.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.7703999876976013, \"percentile_inc_nulls\": 0.7703999876976013, \"value_count\": 24, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 24.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.7660000324249268, \"percentile_inc_nulls\": 0.7660000324249268, \"value_count\": 22, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 22.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.7576000094413757, \"percentile_inc_nulls\": 0.7576000094413757, \"value_count\": 21, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 42.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.75, \"percentile_inc_nulls\": 0.75, \"value_count\": 19, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 38.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.7427999973297119, \"percentile_inc_nulls\": 0.7427999973297119, \"value_count\": 18, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 36.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.7394000291824341, \"percentile_inc_nulls\": 0.7394000291824341, \"value_count\": 17, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 
5000, \"sum_tokens_in_value_count_group\": 17.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.7265999913215637, \"percentile_inc_nulls\": 0.7265999913215637, \"value_count\": 16, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 64.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.7175999879837036, \"percentile_inc_nulls\": 0.7175999879837036, \"value_count\": 15, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 45.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.7120000123977661, \"percentile_inc_nulls\": 0.7120000123977661, \"value_count\": 14, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 28.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.7015999555587769, \"percentile_inc_nulls\": 0.7015999555587769, \"value_count\": 13, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 52.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.6872000098228455, \"percentile_inc_nulls\": 0.6872000098228455, \"value_count\": 12, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 72.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.6762000322341919, \"percentile_inc_nulls\": 0.6762000322341919, \"value_count\": 11, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 55.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.6582000255584717, \"percentile_inc_nulls\": 0.6582000255584717, \"value_count\": 10, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, 
\"sum_tokens_in_value_count_group\": 90.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.6402000188827515, \"percentile_inc_nulls\": 0.6402000188827515, \"value_count\": 9, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 90.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.6161999702453613, \"percentile_inc_nulls\": 0.6161999702453613, \"value_count\": 8, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 120.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.5839999914169312, \"percentile_inc_nulls\": 0.5839999914169312, \"value_count\": 7, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 161.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.5371999740600586, \"percentile_inc_nulls\": 0.5371999740600586, \"value_count\": 6, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 234.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.46219998598098755, \"percentile_inc_nulls\": 0.46219998598098755, \"value_count\": 5, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 375.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.3845999836921692, \"percentile_inc_nulls\": 0.3845999836921692, \"value_count\": 4, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 388.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.3011999726295471, \"percentile_inc_nulls\": 0.3011999726295471, \"value_count\": 3, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, 
\"sum_tokens_in_value_count_group\": 417.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.20959997177124023, \"percentile_inc_nulls\": 0.20959997177124023, \"value_count\": 2, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 458.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 0.0, \"percentile_inc_nulls\": 0.0, \"value_count\": 1, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 1048.0, \"distinct_value_count\": 1741}, {\"percentile_ex_nulls\": 1.0, \"percentile_inc_nulls\": 1.0, \"value_count\": 123, \"group_name\": \"surname\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"sum_tokens_in_value_count_group\": 123.0, \"distinct_value_count\": 1741}]}, \"encoding\": {\"tooltip\": [{\"field\": \"value_count\", \"type\": \"quantitative\"}, {\"field\": \"percentile_ex_nulls\", \"type\": \"quantitative\"}, {\"field\": \"percentile_inc_nulls\", \"type\": \"quantitative\"}, {\"field\": \"total_non_null_rows\", \"type\": \"quantitative\"}, {\"field\": \"total_rows_inc_nulls\", \"type\": \"quantitative\"}], \"x\": {\"field\": \"percentile_ex_nulls\", \"sort\": \"descending\", \"title\": \"Percentile\", \"type\": \"quantitative\"}, \"y\": {\"field\": \"value_count\", \"title\": \"Count of values\", \"type\": \"quantitative\"}}, \"title\": {\"text\": \"Distribution of counts of values in column surname\", \"subtitle\": \"In this col, 0 values (0.0%) are null and there are 1741 distinct values\"}}, {\"mark\": \"bar\", \"data\": {\"values\": [{\"value_count\": 123, \"group_name\": \"surname\", \"value\": \" white\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1741}, {\"value_count\": 86, \"group_name\": \"surname\", \"value\": \" clarke\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 
1741}, {\"value_count\": 79, \"group_name\": \"surname\", \"value\": \" \", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1741}, {\"value_count\": 73, \"group_name\": \"surname\", \"value\": \" campbell\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1741}, {\"value_count\": 69, \"group_name\": \"surname\", \"value\": \" ryan\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1741}, {\"value_count\": 57, \"group_name\": \"surname\", \"value\": \" green\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1741}, {\"value_count\": 50, \"group_name\": \"surname\", \"value\": \" reid\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1741}, {\"value_count\": 47, \"group_name\": \"surname\", \"value\": \" dixon\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1741}, {\"value_count\": 46, \"group_name\": \"surname\", \"value\": \" nguyen\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1741}, {\"value_count\": 44, \"group_name\": \"surname\", \"value\": \" morrison\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1741}]}, \"encoding\": {\"tooltip\": [{\"field\": \"value\", \"type\": \"nominal\"}, {\"field\": \"value_count\", \"type\": \"quantitative\"}, {\"field\": \"total_non_null_rows\", \"type\": \"quantitative\"}, {\"field\": \"total_rows_inc_nulls\", \"type\": \"quantitative\"}], \"x\": {\"field\": \"value\", \"sort\": \"-y\", \"title\": null, \"type\": \"nominal\"}, \"y\": {\"field\": \"value_count\", \"title\": \"Value count\", \"type\": \"quantitative\"}}, \"title\": \"Top 10 values by value count\"}, {\"mark\": \"bar\", \"data\": {\"values\": [{\"value_count\": 1, \"group_name\": \"surname\", \"value\": \" matthiessen\", 
\"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1741}, {\"value_count\": 1, \"group_name\": \"surname\", \"value\": \" jennion\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1741}, {\"value_count\": 1, \"group_name\": \"surname\", \"value\": \" pitno\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1741}, {\"value_count\": 1, \"group_name\": \"surname\", \"value\": \" grifefn\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1741}, {\"value_count\": 1, \"group_name\": \"surname\", \"value\": \" daykin\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1741}, {\"value_count\": 1, \"group_name\": \"surname\", \"value\": \" colegte\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1741}, {\"value_count\": 1, \"group_name\": \"surname\", \"value\": \" lazavroff\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1741}, {\"value_count\": 1, \"group_name\": \"surname\", \"value\": \" loyck\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1741}, {\"value_count\": 1, \"group_name\": \"surname\", \"value\": \" stingle\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1741}, {\"value_count\": 1, \"group_name\": \"surname\", \"value\": \" wisowaty\", \"total_non_null_rows\": 5000, \"total_rows_inc_nulls\": 5000, \"distinct_value_count\": 1741}]}, \"encoding\": {\"tooltip\": [{\"field\": \"value\", \"type\": \"nominal\"}, {\"field\": \"value_count\", \"type\": \"quantitative\"}, {\"field\": \"total_non_null_rows\", \"type\": \"quantitative\"}, {\"field\": \"total_rows_inc_nulls\", \"type\": \"quantitative\"}], \"x\": {\"field\": \"value\", \"sort\": \"-y\", \"title\": null, \"type\": \"nominal\"}, \"y\": 
{\"field\": \"value_count\", \"scale\": {\"domain\": [0, 123]}, \"title\": \"Value count\", \"type\": \"quantitative\"}}, \"title\": \"Bottom 10 values by value count\"}]}], \"$schema\": \"https://vega.github.io/schema/vega-lite/v5.9.3.json\"}, {\"mode\": \"vega-lite\"});\n",
              "</script>"
            ],
            "text/plain": [
              "alt.VConcatChart(...)"
            ]
          },
          "execution_count": 36,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "from splink import DuckDBAPI\n",
        "from splink.exploratory import profile_columns\n",
        "\n",
        "# Profile value distributions in the name columns (skew, null %, distinct\n",
        "# counts) to inform how the comparisons should be designed.\n",
        "profile_columns(df, db_api=DuckDBAPI(), column_expressions=[\"given_name\", \"surname\"])"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 37,
      "metadata": {
        "execution": {
          "iopub.execute_input": "2024-06-07T09:11:27.050491Z",
          "iopub.status.busy": "2024-06-07T09:11:27.050266Z",
          "iopub.status.idle": "2024-06-07T09:11:27.428593Z",
          "shell.execute_reply": "2024-06-07T09:11:27.428055Z"
        }
      },
      "outputs": [
        {
          "data": {
            "text/html": [
              "\n",
              "<style>\n",
              "  #altair-viz-f28e60a14c9545e3adae9e0a1fd07164.vega-embed {\n",
              "    width: 100%;\n",
              "    display: flex;\n",
              "  }\n",
              "\n",
              "  #altair-viz-f28e60a14c9545e3adae9e0a1fd07164.vega-embed details,\n",
              "  #altair-viz-f28e60a14c9545e3adae9e0a1fd07164.vega-embed details summary {\n",
              "    position: relative;\n",
              "  }\n",
              "</style>\n",
              "<div id=\"altair-viz-f28e60a14c9545e3adae9e0a1fd07164\"></div>\n",
              "<script type=\"text/javascript\">\n",
              "  var VEGA_DEBUG = (typeof VEGA_DEBUG == \"undefined\") ? {} : VEGA_DEBUG;\n",
              "  (function(spec, embedOpt){\n",
              "    let outputDiv = document.currentScript.previousElementSibling;\n",
              "    if (outputDiv.id !== \"altair-viz-f28e60a14c9545e3adae9e0a1fd07164\") {\n",
              "      outputDiv = document.getElementById(\"altair-viz-f28e60a14c9545e3adae9e0a1fd07164\");\n",
              "    }\n",
              "    const paths = {\n",
              "      \"vega\": \"https://cdn.jsdelivr.net/npm/vega@5?noext\",\n",
              "      \"vega-lib\": \"https://cdn.jsdelivr.net/npm/vega-lib?noext\",\n",
              "      \"vega-lite\": \"https://cdn.jsdelivr.net/npm/vega-lite@5.17.0?noext\",\n",
              "      \"vega-embed\": \"https://cdn.jsdelivr.net/npm/vega-embed@6?noext\",\n",
              "    };\n",
              "\n",
              "    function maybeLoadScript(lib, version) {\n",
              "      var key = `${lib.replace(\"-\", \"\")}_version`;\n",
              "      return (VEGA_DEBUG[key] == version) ?\n",
              "        Promise.resolve(paths[lib]) :\n",
              "        new Promise(function(resolve, reject) {\n",
              "          var s = document.createElement('script');\n",
              "          document.getElementsByTagName(\"head\")[0].appendChild(s);\n",
              "          s.async = true;\n",
              "          s.onload = () => {\n",
              "            VEGA_DEBUG[key] = version;\n",
              "            return resolve(paths[lib]);\n",
              "          };\n",
              "          s.onerror = () => reject(`Error loading script: ${paths[lib]}`);\n",
              "          s.src = paths[lib];\n",
              "        });\n",
              "    }\n",
              "\n",
              "    function showError(err) {\n",
              "      outputDiv.innerHTML = `<div class=\"error\" style=\"color:red;\">${err}</div>`;\n",
              "      throw err;\n",
              "    }\n",
              "\n",
              "    function displayChart(vegaEmbed) {\n",
              "      vegaEmbed(outputDiv, spec, embedOpt)\n",
              "        .catch(err => showError(`Javascript Error: ${err.message}<br>This usually means there's a typo in your chart specification. See the javascript console for the full traceback.`));\n",
              "    }\n",
              "\n",
              "    if(typeof define === \"function\" && define.amd) {\n",
              "      requirejs.config({paths});\n",
              "      require([\"vega-embed\"], displayChart, err => showError(`Error loading script: ${err.message}`));\n",
              "    } else {\n",
              "      maybeLoadScript(\"vega\", \"5\")\n",
              "        .then(() => maybeLoadScript(\"vega-lite\", \"5.17.0\"))\n",
              "        .then(() => maybeLoadScript(\"vega-embed\", \"6\"))\n",
              "        .catch(showError)\n",
              "        .then(() => displayChart(vegaEmbed));\n",
              "    }\n",
              "  })({\"config\": {\"view\": {\"continuousWidth\": 300, \"continuousHeight\": 300}}, \"data\": {\"name\": \"data-4ab5b177c099ba5baa7c18ee8b59ed20\"}, \"mark\": \"bar\", \"encoding\": {\"order\": {\"field\": \"cumulative_rows\"}, \"tooltip\": [{\"field\": \"blocking_rule\", \"title\": \"SQL Condition\", \"type\": \"nominal\"}, {\"field\": \"row_count\", \"format\": \",\", \"title\": \"Comparisons Generated\", \"type\": \"quantitative\"}, {\"field\": \"cumulative_rows\", \"format\": \",\", \"title\": \"Cumulative Comparisons\", \"type\": \"quantitative\"}, {\"field\": \"cartesian\", \"format\": \",\", \"title\": \"Total comparisons in Cartesian product\", \"type\": \"quantitative\"}], \"x\": {\"field\": \"start\", \"title\": \"Comparisons Generated by Rule(s)\", \"type\": \"quantitative\"}, \"x2\": {\"field\": \"cumulative_rows\"}, \"y\": {\"field\": \"blocking_rule\", \"sort\": [\"-x2\"], \"title\": \"SQL Blocking Rule\"}}, \"height\": {\"step\": 20}, \"title\": {\"text\": \"Count of Additional Comparisons Generated by Each Blocking Rule\", \"subtitle\": \"(Counts exclude comparisons already generated by previous rules)\"}, \"width\": 450, \"$schema\": \"https://vega.github.io/schema/vega-lite/v5.9.3.json\", \"datasets\": {\"data-4ab5b177c099ba5baa7c18ee8b59ed20\": [{\"blocking_rule\": \"l.\\\"soc_sec_id\\\" = r.\\\"soc_sec_id\\\"\", \"row_count\": 5601, \"cumulative_rows\": 5601, \"cartesian\": 12497500, \"match_key\": \"0\", \"start\": 0}, {\"blocking_rule\": \"l.\\\"given_name\\\" = r.\\\"given_name\\\"\", \"row_count\": 48681, \"cumulative_rows\": 54282, \"cartesian\": 12497500, \"match_key\": \"1\", \"start\": 5601}, {\"blocking_rule\": \"l.\\\"surname\\\" = r.\\\"surname\\\"\", \"row_count\": 36675, \"cumulative_rows\": 90957, \"cartesian\": 12497500, \"match_key\": \"2\", \"start\": 54282}, {\"blocking_rule\": \"l.\\\"date_of_birth\\\" = r.\\\"date_of_birth\\\"\", \"row_count\": 12256, \"cumulative_rows\": 103213, \"cartesian\": 12497500, 
\"match_key\": \"3\", \"start\": 90957}, {\"blocking_rule\": \"l.\\\"postcode\\\" = r.\\\"postcode\\\"\", \"row_count\": 11037, \"cumulative_rows\": 114250, \"cartesian\": 12497500, \"match_key\": \"4\", \"start\": 103213}]}}, {\"mode\": \"vega-lite\"});\n",
              "</script>"
            ],
            "text/plain": [
              "alt.Chart(...)"
            ]
          },
          "execution_count": 37,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "from splink import DuckDBAPI, block_on\n",
        "from splink.blocking_analysis import (\n",
        "    cumulative_comparisons_to_be_scored_from_blocking_rules_chart,\n",
        ")\n",
        "\n",
        "# Candidate blocking rules: each restricts comparisons to record pairs that\n",
        "# agree exactly on the given column. This list is also reused below when\n",
        "# building the linker settings.\n",
        "blocking_rules = [\n",
        "    block_on(\"soc_sec_id\"),\n",
        "    block_on(\"given_name\"),\n",
        "    block_on(\"surname\"),\n",
        "    block_on(\"date_of_birth\"),\n",
        "    block_on(\"postcode\"),\n",
        "]\n",
        "\n",
        "db_api = DuckDBAPI()\n",
        "# Chart the number of additional comparisons each rule generates (excluding\n",
        "# pairs already produced by earlier rules) vs. the full Cartesian product.\n",
        "cumulative_comparisons_to_be_scored_from_blocking_rules_chart(\n",
        "    table_or_tables=df,\n",
        "    blocking_rules=blocking_rules,\n",
        "    db_api=db_api,\n",
        "    link_type=\"dedupe_only\",\n",
        "    unique_id_column_name=\"rec_id\",\n",
        ")"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 38,
      "metadata": {
        "execution": {
          "iopub.execute_input": "2024-06-07T09:11:27.431702Z",
          "iopub.status.busy": "2024-06-07T09:11:27.431466Z",
          "iopub.status.idle": "2024-06-07T09:11:27.591229Z",
          "shell.execute_reply": "2024-06-07T09:11:27.590491Z"
        }
      },
      "outputs": [],
      "source": [
        "import splink.comparison_library as cl\n",
        "\n",
        "# SettingsCreator was used below but never imported anywhere in the\n",
        "# notebook, so this cell raised NameError on Restart & Run All.\n",
        "from splink import Linker, SettingsCreator\n",
        "\n",
        "settings = SettingsCreator(\n",
        "    unique_id_column_name=\"rec_id\",\n",
        "    link_type=\"dedupe_only\",\n",
        "    blocking_rules_to_generate_predictions=blocking_rules,\n",
        "    comparisons=[\n",
        "        cl.NameComparison(\"given_name\"),\n",
        "        cl.NameComparison(\"surname\"),\n",
        "        cl.DateOfBirthComparison(\n",
        "            \"date_of_birth\",\n",
        "            input_is_string=True,\n",
        "            datetime_format=\"%Y%m%d\",\n",
        "        ),\n",
        "        cl.DamerauLevenshteinAtThresholds(\"soc_sec_id\", [2]),\n",
        "        cl.ExactMatch(\"street_number\").configure(term_frequency_adjustments=True),\n",
        "        cl.ExactMatch(\"postcode\").configure(term_frequency_adjustments=True),\n",
        "    ],\n",
        "    # Keep intermediate columns so per-comparison charts can be built later.\n",
        "    retain_intermediate_calculation_columns=True,\n",
        ")\n",
        "\n",
        "linker = Linker(df, settings, db_api=DuckDBAPI())"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 39,
      "metadata": {
        "execution": {
          "iopub.execute_input": "2024-06-07T09:11:27.594493Z",
          "iopub.status.busy": "2024-06-07T09:11:27.594264Z",
          "iopub.status.idle": "2024-06-07T09:11:27.787352Z",
          "shell.execute_reply": "2024-06-07T09:11:27.786769Z"
        }
      },
      "outputs": [
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "Probability two random records match is estimated to be  0.000528.\n",
            "This means that amongst all possible pairwise record comparisons, one in 1,893.56 are expected to match.  With 12,497,500 total possible comparisons, we expect a total of around 6,600.00 matching pairs\n"
          ]
        }
      ],
      "source": [
        "from splink import block_on\n",
        "\n",
        "# High-precision deterministic rules used to estimate the prior probability\n",
        "# that two random records match; the raw SQL rule also catches records\n",
        "# where given_name and surname have been swapped.\n",
        "deterministic_rules = [\n",
        "    block_on(\"soc_sec_id\"),\n",
        "    block_on(\"given_name\", \"surname\", \"date_of_birth\"),\n",
        "    \"l.given_name = r.surname and l.surname = r.given_name and l.date_of_birth = r.date_of_birth\",\n",
        "]\n",
        "\n",
        "# recall=0.9 assumes these rules jointly recover ~90% of true matches --\n",
        "# an estimate to be confirmed for this dataset, not a measured value.\n",
        "linker.training.estimate_probability_two_random_records_match(\n",
        "    deterministic_rules, recall=0.9\n",
        ")"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 40,
      "metadata": {
        "execution": {
          "iopub.execute_input": "2024-06-07T09:11:27.790368Z",
          "iopub.status.busy": "2024-06-07T09:11:27.790145Z",
          "iopub.status.idle": "2024-06-07T09:11:35.433199Z",
          "shell.execute_reply": "2024-06-07T09:11:35.431006Z"
        }
      },
      "outputs": [
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "You are using the default value for `max_pairs`, which may be too small and thus lead to inaccurate estimates for your model's u-parameters. Consider increasing to 1e8 or 1e9, which will result in more accurate estimates, but with a longer run time.\n",
            "----- Estimating u probabilities using random sampling -----\n"
          ]
        },
        {
          "data": {
            "application/vnd.jupyter.widget-view+json": {
              "model_id": "80ec855655d34fb49588ee24a928ae25",
              "version_major": 2,
              "version_minor": 0
            },
            "text/plain": [
              "FloatProgress(value=0.0, layout=Layout(width='auto'), style=ProgressStyle(bar_color='black'))"
            ]
          },
          "metadata": {},
          "output_type": "display_data"
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "u probability not trained for date_of_birth - Abs difference of 'transformed date_of_birth <= 1 month' (comparison vector value: 3). This usually means the comparison level was never observed in the training data.\n",
            "u probability not trained for date_of_birth - Abs difference of 'transformed date_of_birth <= 1 year' (comparison vector value: 2). This usually means the comparison level was never observed in the training data.\n",
            "u probability not trained for date_of_birth - Abs difference of 'transformed date_of_birth <= 10 year' (comparison vector value: 1). This usually means the comparison level was never observed in the training data.\n",
            "\n",
            "Estimated u probabilities using random sampling\n",
            "\n",
            "Your model is not yet fully trained. Missing estimates for:\n",
            "    - given_name (no m values are trained).\n",
            "    - surname (no m values are trained).\n",
            "    - date_of_birth (some u values are not trained, no m values are trained).\n",
            "    - soc_sec_id (no m values are trained).\n",
            "    - street_number (no m values are trained).\n",
            "    - postcode (no m values are trained).\n"
          ]
        }
      ],
      "source": [
        "# Estimate u probabilities from random record pairs. max_pairs=1e6 keeps\n",
        "# the demo fast; the emitted warning notes that 1e8-1e9 pairs would give\n",
        "# more accurate estimates at the cost of a longer run time.\n",
        "linker.training.estimate_u_using_random_sampling(max_pairs=1e6)"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 41,
      "metadata": {
        "execution": {
          "iopub.execute_input": "2024-06-07T09:11:35.446472Z",
          "iopub.status.busy": "2024-06-07T09:11:35.440198Z",
          "iopub.status.idle": "2024-06-07T09:11:36.895235Z",
          "shell.execute_reply": "2024-06-07T09:11:36.894603Z"
        }
      },
      "outputs": [
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\n",
            "----- Starting EM training session -----\n",
            "\n",
            "Estimating the m probabilities of the model by blocking on:\n",
            "l.\"date_of_birth\" = r.\"date_of_birth\"\n",
            "\n",
            "Parameter estimates will be made for the following comparison(s):\n",
            "    - given_name\n",
            "    - surname\n",
            "    - soc_sec_id\n",
            "    - street_number\n",
            "    - postcode\n",
            "\n",
            "Parameter estimates cannot be made for the following comparison(s) since they are used in the blocking rules: \n",
            "    - date_of_birth\n",
            "\n",
            "Iteration 1: Largest change in params was -0.376 in the m_probability of surname, level `Exact match on surname`\n",
            "Iteration 2: Largest change in params was 0.0156 in the m_probability of surname, level `All other comparisons`\n",
            "Iteration 3: Largest change in params was 0.000699 in the m_probability of postcode, level `All other comparisons`\n",
            "Iteration 4: Largest change in params was -3.77e-05 in the m_probability of postcode, level `Exact match on postcode`\n",
            "\n",
            "EM converged after 4 iterations\n",
            "\n",
            "Your model is not yet fully trained. Missing estimates for:\n",
            "    - date_of_birth (some u values are not trained, no m values are trained).\n"
          ]
        }
      ],
      "source": [
        "# First EM training session, blocking on date_of_birth: trains m values\n",
        "# for every comparison except date_of_birth itself, which cannot be\n",
        "# estimated while it is held fixed by the blocking rule (see log output).\n",
        "em_blocking_rule_1 = block_on(\"date_of_birth\")\n",
        "session_dob = linker.training.estimate_parameters_using_expectation_maximisation(\n",
        "    em_blocking_rule_1\n",
        ")"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 42,
      "metadata": {
        "execution": {
          "iopub.execute_input": "2024-06-07T09:11:36.898638Z",
          "iopub.status.busy": "2024-06-07T09:11:36.898156Z",
          "iopub.status.idle": "2024-06-07T09:11:37.517318Z",
          "shell.execute_reply": "2024-06-07T09:11:37.516459Z"
        }
      },
      "outputs": [
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\n",
            "----- Starting EM training session -----\n",
            "\n",
            "Estimating the m probabilities of the model by blocking on:\n",
            "l.\"postcode\" = r.\"postcode\"\n",
            "\n",
            "Parameter estimates will be made for the following comparison(s):\n",
            "    - given_name\n",
            "    - surname\n",
            "    - date_of_birth\n",
            "    - soc_sec_id\n",
            "    - street_number\n",
            "\n",
            "Parameter estimates cannot be made for the following comparison(s) since they are used in the blocking rules: \n",
            "    - postcode\n",
            "\n",
            "WARNING:\n",
            "Level Abs difference of 'transformed date_of_birth <= 1 month' on comparison date_of_birth not observed in dataset, unable to train m value\n",
            "\n",
            "WARNING:\n",
            "Level Abs difference of 'transformed date_of_birth <= 1 year' on comparison date_of_birth not observed in dataset, unable to train m value\n",
            "\n",
            "WARNING:\n",
            "Level Abs difference of 'transformed date_of_birth <= 10 year' on comparison date_of_birth not observed in dataset, unable to train m value\n",
            "\n",
            "Iteration 1: Largest change in params was 0.0681 in probability_two_random_records_match\n",
            "Iteration 2: Largest change in params was -0.00185 in the m_probability of date_of_birth, level `Exact match on date_of_birth`\n",
            "Iteration 3: Largest change in params was -5.7e-05 in the m_probability of date_of_birth, level `Exact match on date_of_birth`\n",
            "\n",
            "EM converged after 3 iterations\n",
            "m probability not trained for date_of_birth - Abs difference of 'transformed date_of_birth <= 1 month' (comparison vector value: 3). This usually means the comparison level was never observed in the training data.\n",
            "m probability not trained for date_of_birth - Abs difference of 'transformed date_of_birth <= 1 year' (comparison vector value: 2). This usually means the comparison level was never observed in the training data.\n",
            "m probability not trained for date_of_birth - Abs difference of 'transformed date_of_birth <= 10 year' (comparison vector value: 1). This usually means the comparison level was never observed in the training data.\n",
            "\n",
            "Your model is not yet fully trained. Missing estimates for:\n",
            "    - date_of_birth (some u values are not trained, some m values are not trained).\n"
          ]
        }
      ],
      "source": [
        "# Second EM training session, blocking on postcode: this covers the\n",
        "# date_of_birth comparison left untrained by the previous session.\n",
        "em_blocking_rule_2 = block_on(\"postcode\")\n",
        "session_postcode = linker.training.estimate_parameters_using_expectation_maximisation(\n",
        "    em_blocking_rule_2\n",
        ")"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 43,
      "metadata": {
        "execution": {
          "iopub.execute_input": "2024-06-07T09:11:37.523135Z",
          "iopub.status.busy": "2024-06-07T09:11:37.522810Z",
          "iopub.status.idle": "2024-06-07T09:11:37.957335Z",
          "shell.execute_reply": "2024-06-07T09:11:37.956712Z"
        }
      },
      "outputs": [
        {
          "data": {
            "text/html": [
              "\n",
              "<style>\n",
              "  #altair-viz-757202117df24c2b9d49624042ac61c7.vega-embed {\n",
              "    width: 100%;\n",
              "    display: flex;\n",
              "  }\n",
              "\n",
              "  #altair-viz-757202117df24c2b9d49624042ac61c7.vega-embed details,\n",
              "  #altair-viz-757202117df24c2b9d49624042ac61c7.vega-embed details summary {\n",
              "    position: relative;\n",
              "  }\n",
              "</style>\n",
              "<div id=\"altair-viz-757202117df24c2b9d49624042ac61c7\"></div>\n",
              "<script type=\"text/javascript\">\n",
              "  var VEGA_DEBUG = (typeof VEGA_DEBUG == \"undefined\") ? {} : VEGA_DEBUG;\n",
              "  (function(spec, embedOpt){\n",
              "    let outputDiv = document.currentScript.previousElementSibling;\n",
              "    if (outputDiv.id !== \"altair-viz-757202117df24c2b9d49624042ac61c7\") {\n",
              "      outputDiv = document.getElementById(\"altair-viz-757202117df24c2b9d49624042ac61c7\");\n",
              "    }\n",
              "    const paths = {\n",
              "      \"vega\": \"https://cdn.jsdelivr.net/npm/vega@5?noext\",\n",
              "      \"vega-lib\": \"https://cdn.jsdelivr.net/npm/vega-lib?noext\",\n",
              "      \"vega-lite\": \"https://cdn.jsdelivr.net/npm/vega-lite@5.17.0?noext\",\n",
              "      \"vega-embed\": \"https://cdn.jsdelivr.net/npm/vega-embed@6?noext\",\n",
              "    };\n",
              "\n",
              "    function maybeLoadScript(lib, version) {\n",
              "      var key = `${lib.replace(\"-\", \"\")}_version`;\n",
              "      return (VEGA_DEBUG[key] == version) ?\n",
              "        Promise.resolve(paths[lib]) :\n",
              "        new Promise(function(resolve, reject) {\n",
              "          var s = document.createElement('script');\n",
              "          document.getElementsByTagName(\"head\")[0].appendChild(s);\n",
              "          s.async = true;\n",
              "          s.onload = () => {\n",
              "            VEGA_DEBUG[key] = version;\n",
              "            return resolve(paths[lib]);\n",
              "          };\n",
              "          s.onerror = () => reject(`Error loading script: ${paths[lib]}`);\n",
              "          s.src = paths[lib];\n",
              "        });\n",
              "    }\n",
              "\n",
              "    function showError(err) {\n",
              "      outputDiv.innerHTML = `<div class=\"error\" style=\"color:red;\">${err}</div>`;\n",
              "      throw err;\n",
              "    }\n",
              "\n",
              "    function displayChart(vegaEmbed) {\n",
              "      vegaEmbed(outputDiv, spec, embedOpt)\n",
              "        .catch(err => showError(`Javascript Error: ${err.message}<br>This usually means there's a typo in your chart specification. See the javascript console for the full traceback.`));\n",
              "    }\n",
              "\n",
              "    if(typeof define === \"function\" && define.amd) {\n",
              "      requirejs.config({paths});\n",
              "      require([\"vega-embed\"], displayChart, err => showError(`Error loading script: ${err.message}`));\n",
              "    } else {\n",
              "      maybeLoadScript(\"vega\", \"5\")\n",
              "        .then(() => maybeLoadScript(\"vega-lite\", \"5.17.0\"))\n",
              "        .then(() => maybeLoadScript(\"vega-embed\", \"6\"))\n",
              "        .catch(showError)\n",
              "        .then(() => displayChart(vegaEmbed));\n",
              "    }\n",
              "  })({\"config\": {\"view\": {\"continuousWidth\": 300, \"continuousHeight\": 300, \"discreteHeight\": 60, \"discreteWidth\": 400}, \"header\": {\"title\": null}, \"mark\": {\"tooltip\": null}, \"title\": {\"anchor\": \"middle\"}}, \"vconcat\": [{\"mark\": {\"type\": \"bar\", \"clip\": true, \"height\": 15}, \"encoding\": {\"color\": {\"field\": \"log2_bayes_factor\", \"scale\": {\"domain\": [-10, 0, 10], \"interpolate\": \"lab\", \"range\": [\"red\", \"#bbbbbb\", \"green\"]}, \"title\": \"Match weight\", \"type\": \"quantitative\"}, \"tooltip\": [{\"field\": \"comparison_name\", \"title\": \"Comparison name\", \"type\": \"nominal\"}, {\"field\": \"probability_two_random_records_match\", \"format\": \".4f\", \"title\": \"Probability two random records match\", \"type\": \"nominal\"}, {\"field\": \"log2_bayes_factor\", \"format\": \",.4f\", \"title\": \"Equivalent match weight\", \"type\": \"quantitative\"}, {\"field\": \"bayes_factor_description\", \"title\": \"Match weight description\", \"type\": \"nominal\"}], \"x\": {\"axis\": {\"domain\": false, \"gridColor\": {\"condition\": {\"test\": \"abs(datum.value / 10)  <= 1 & datum.value % 10 === 0\", \"value\": \"#aaa\"}, \"value\": \"#ddd\"}, \"gridDash\": {\"condition\": {\"test\": \"abs(datum.value / 10) == 1\", \"value\": [3]}, \"value\": null}, \"gridWidth\": {\"condition\": {\"test\": \"abs(datum.value / 10)  <= 1 & datum.value % 10 === 0\", \"value\": 2}, \"value\": 1}, \"labels\": false, \"ticks\": false, \"title\": \"\"}, \"field\": \"log2_bayes_factor\", \"scale\": {\"domain\": [-11, 11]}, \"type\": \"quantitative\"}, \"y\": {\"axis\": {\"title\": \"Prior (starting) match weight\", \"titleAlign\": \"right\", \"titleAngle\": 0, \"titleFontWeight\": \"normal\"}, \"field\": \"label_for_charts\", \"sort\": {\"field\": \"comparison_vector_value\", \"order\": \"descending\"}, \"type\": \"nominal\"}}, \"height\": 20, \"transform\": [{\"filter\": \"(datum.comparison_name == 
'probability_two_random_records_match')\"}]}, {\"mark\": {\"type\": \"bar\", \"clip\": true}, \"encoding\": {\"color\": {\"field\": \"log2_bayes_factor\", \"scale\": {\"domain\": [-10, 0, 10], \"interpolate\": \"lab\", \"range\": [\"red\", \"#bbbbbb\", \"green\"]}, \"title\": \"Match weight\", \"type\": \"quantitative\"}, \"row\": {\"field\": \"comparison_name\", \"header\": {\"labelAlign\": \"left\", \"labelAnchor\": \"middle\", \"labelAngle\": 0}, \"sort\": {\"field\": \"comparison_sort_order\"}, \"type\": \"nominal\"}, \"tooltip\": [{\"field\": \"comparison_name\", \"title\": \"Comparison name\", \"type\": \"nominal\"}, {\"field\": \"label_for_charts\", \"title\": \"Label\", \"type\": \"ordinal\"}, {\"field\": \"sql_condition\", \"title\": \"SQL condition\", \"type\": \"nominal\"}, {\"field\": \"m_probability\", \"format\": \".4f\", \"title\": \"M probability\", \"type\": \"quantitative\"}, {\"field\": \"u_probability\", \"format\": \".4f\", \"title\": \"U probability\", \"type\": \"quantitative\"}, {\"field\": \"bayes_factor\", \"format\": \",.4f\", \"title\": \"Bayes factor = m/u\", \"type\": \"quantitative\"}, {\"field\": \"log2_bayes_factor\", \"format\": \",.4f\", \"title\": \"Match weight = log2(m/u)\", \"type\": \"quantitative\"}, {\"field\": \"bayes_factor_description\", \"title\": \"Match weight description\", \"type\": \"nominal\"}], \"x\": {\"axis\": {\"gridColor\": {\"condition\": {\"test\": \"abs(datum.value / 10)  <= 1 & datum.value % 10 === 0\", \"value\": \"#aaa\"}, \"value\": \"#ddd\"}, \"gridDash\": {\"condition\": {\"test\": \"abs(datum.value / 10) == 1\", \"value\": [3]}, \"value\": null}, \"gridWidth\": {\"condition\": {\"test\": \"abs(datum.value / 10)  <= 1 & datum.value % 10 === 0\", \"value\": 2}, \"value\": 1}, \"title\": \"Comparison level match weight = log2(m/u)\"}, \"field\": \"log2_bayes_factor\", \"scale\": {\"domain\": [-11, 11]}, \"type\": \"quantitative\"}, \"y\": {\"axis\": {\"title\": null}, \"field\": \"label_for_charts\", 
\"sort\": {\"field\": \"comparison_vector_value\", \"order\": \"descending\"}, \"type\": \"nominal\"}}, \"height\": {\"step\": 12}, \"resolve\": {\"axis\": {\"y\": \"independent\"}, \"scale\": {\"y\": \"independent\"}}, \"transform\": [{\"filter\": \"(datum.comparison_name != 'probability_two_random_records_match')\"}]}], \"data\": {\"name\": \"data-0471c8df175b56f506570c64c91f5282\"}, \"params\": [{\"name\": \"mouse_zoom\", \"select\": {\"type\": \"interval\", \"encodings\": [\"x\"]}, \"bind\": \"scales\", \"views\": []}], \"resolve\": {\"axis\": {\"y\": \"independent\"}, \"scale\": {\"y\": \"independent\"}}, \"title\": {\"text\": \"Model parameters (components of final match weight)\", \"subtitle\": \"Use mousewheel to zoom\"}, \"$schema\": \"https://vega.github.io/schema/vega-lite/v5.9.3.json\", \"datasets\": {\"data-0471c8df175b56f506570c64c91f5282\": [{\"comparison_name\": \"probability_two_random_records_match\", \"sql_condition\": null, \"label_for_charts\": \"\", \"m_probability\": null, \"u_probability\": null, \"m_probability_description\": null, \"u_probability_description\": null, \"has_tf_adjustments\": false, \"tf_adjustment_column\": null, \"tf_adjustment_weight\": null, \"is_null_level\": false, \"bayes_factor\": 0.0005283846640354178, \"log2_bayes_factor\": -10.886123785487664, \"comparison_vector_value\": 0, \"max_comparison_vector_value\": 0, \"bayes_factor_description\": \"The probability that two random records drawn at random match is 0.001 or one in  1,893.6 records.This is equivalent to a starting match weight of -10.886.\", \"probability_two_random_records_match\": 0.0005281056211242248, \"comparison_sort_order\": -1}, {\"comparison_name\": \"given_name\", \"sql_condition\": \"\\\"given_name_l\\\" = \\\"given_name_r\\\"\", \"label_for_charts\": \"Exact match on given_name\", \"m_probability\": 0.5749906728478607, \"u_probability\": 0.00404815928514168, \"m_probability_description\": \"Amongst matching record comparisons, 57.50% of records 
are in the exact match on given_name comparison level\", \"u_probability_description\": \"Amongst non-matching record comparisons, 0.40% of records are in the exact match on given_name comparison level\", \"has_tf_adjustments\": true, \"tf_adjustment_column\": \"given_name\", \"tf_adjustment_weight\": 1.0, \"is_null_level\": false, \"bayes_factor\": 142.0375613574051, \"log2_bayes_factor\": 7.15012868583073, \"comparison_vector_value\": 4, \"max_comparison_vector_value\": 4, \"bayes_factor_description\": \"If comparison level is `exact match on given_name` then comparison is 142.04 times more likely to be a match\", \"probability_two_random_records_match\": 0.0005281056211242248, \"comparison_sort_order\": 0}, {\"comparison_name\": \"given_name\", \"sql_condition\": \"jaro_winkler_similarity(\\\"given_name_l\\\", \\\"given_name_r\\\") >= 0.92\", \"label_for_charts\": \"Jaro-Winkler distance of given_name >= 0.92\", \"m_probability\": 0.16393316504876188, \"u_probability\": 0.0018169243699836326, \"m_probability_description\": \"Amongst matching record comparisons, 16.39% of records are in the jaro-winkler distance of given_name >= 0.92 comparison level\", \"u_probability_description\": \"Amongst non-matching record comparisons, 0.18% of records are in the jaro-winkler distance of given_name >= 0.92 comparison level\", \"has_tf_adjustments\": false, \"tf_adjustment_column\": null, \"tf_adjustment_weight\": 1.0, \"is_null_level\": false, \"bayes_factor\": 90.22564051482155, \"log2_bayes_factor\": 6.495465574786281, \"comparison_vector_value\": 3, \"max_comparison_vector_value\": 4, \"bayes_factor_description\": \"If comparison level is `jaro-winkler distance of given_name >= 0.92` then comparison is 90.23 times more likely to be a match\", \"probability_two_random_records_match\": 0.0005281056211242248, \"comparison_sort_order\": 0}, {\"comparison_name\": \"given_name\", \"sql_condition\": \"jaro_winkler_similarity(\\\"given_name_l\\\", \\\"given_name_r\\\") >= 
0.88\", \"label_for_charts\": \"Jaro-Winkler distance of given_name >= 0.88\", \"m_probability\": 0.007490693133682454, \"u_probability\": 0.002347759755988352, \"m_probability_description\": \"Amongst matching record comparisons, 0.75% of records are in the jaro-winkler distance of given_name >= 0.88 comparison level\", \"u_probability_description\": \"Amongst non-matching record comparisons, 0.23% of records are in the jaro-winkler distance of given_name >= 0.88 comparison level\", \"has_tf_adjustments\": false, \"tf_adjustment_column\": null, \"tf_adjustment_weight\": 1.0, \"is_null_level\": false, \"bayes_factor\": 3.190570549041994, \"log2_bayes_factor\": 1.6738144348590274, \"comparison_vector_value\": 2, \"max_comparison_vector_value\": 4, \"bayes_factor_description\": \"If comparison level is `jaro-winkler distance of given_name >= 0.88` then comparison is 3.19 times more likely to be a match\", \"probability_two_random_records_match\": 0.0005281056211242248, \"comparison_sort_order\": 0}, {\"comparison_name\": \"given_name\", \"sql_condition\": \"jaro_winkler_similarity(\\\"given_name_l\\\", \\\"given_name_r\\\") >= 0.7\", \"label_for_charts\": \"Jaro-Winkler distance of given_name >= 0.7\", \"m_probability\": 0.059118979327504914, \"u_probability\": 0.11246912900918281, \"m_probability_description\": \"Amongst matching record comparisons, 5.91% of records are in the jaro-winkler distance of given_name >= 0.7 comparison level\", \"u_probability_description\": \"Amongst non-matching record comparisons, 11.25% of records are in the jaro-winkler distance of given_name >= 0.7 comparison level\", \"has_tf_adjustments\": false, \"tf_adjustment_column\": null, \"tf_adjustment_weight\": 1.0, \"is_null_level\": false, \"bayes_factor\": 0.5256462804355674, \"log2_bayes_factor\": -0.9278357918003998, \"comparison_vector_value\": 1, \"max_comparison_vector_value\": 4, \"bayes_factor_description\": \"If comparison level is `jaro-winkler distance of given_name >= 0.7` 
then comparison is  1.90 times less likely to be a match\", \"probability_two_random_records_match\": 0.0005281056211242248, \"comparison_sort_order\": 0}, {\"comparison_name\": \"given_name\", \"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.19446648964219015, \"u_probability\": 0.8793180275797036, \"m_probability_description\": \"Amongst matching record comparisons, 19.45% of records are in the all other comparisons comparison level\", \"u_probability_description\": \"Amongst non-matching record comparisons, 87.93% of records are in the all other comparisons comparison level\", \"has_tf_adjustments\": false, \"tf_adjustment_column\": null, \"tf_adjustment_weight\": 1.0, \"is_null_level\": false, \"bayes_factor\": 0.2211560363176601, \"log2_bayes_factor\": -2.1768634746417304, \"comparison_vector_value\": 0, \"max_comparison_vector_value\": 4, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.52 times less likely to be a match\", \"probability_two_random_records_match\": 0.0005281056211242248, \"comparison_sort_order\": 0}, {\"comparison_name\": \"surname\", \"sql_condition\": \"\\\"surname_l\\\" = \\\"surname_r\\\"\", \"label_for_charts\": \"Exact match on surname\", \"m_probability\": 0.5618584392848496, \"u_probability\": 0.0027005710796134397, \"m_probability_description\": \"Amongst matching record comparisons, 56.19% of records are in the exact match on surname comparison level\", \"u_probability_description\": \"Amongst non-matching record comparisons, 0.27% of records are in the exact match on surname comparison level\", \"has_tf_adjustments\": true, \"tf_adjustment_column\": \"surname\", \"tf_adjustment_weight\": 1.0, \"is_null_level\": false, \"bayes_factor\": 208.05171303444237, \"log2_bayes_factor\": 7.70079835691789, \"comparison_vector_value\": 4, \"max_comparison_vector_value\": 4, \"bayes_factor_description\": \"If comparison level is `exact match 
on surname` then comparison is 208.05 times more likely to be a match\", \"probability_two_random_records_match\": 0.0005281056211242248, \"comparison_sort_order\": 1}, {\"comparison_name\": \"surname\", \"sql_condition\": \"jaro_winkler_similarity(\\\"surname_l\\\", \\\"surname_r\\\") >= 0.92\", \"label_for_charts\": \"Jaro-Winkler distance of surname >= 0.92\", \"m_probability\": 0.22716362508192303, \"u_probability\": 0.0010551971697410883, \"m_probability_description\": \"Amongst matching record comparisons, 22.72% of records are in the jaro-winkler distance of surname >= 0.92 comparison level\", \"u_probability_description\": \"Amongst non-matching record comparisons, 0.11% of records are in the jaro-winkler distance of surname >= 0.92 comparison level\", \"has_tf_adjustments\": false, \"tf_adjustment_column\": null, \"tf_adjustment_weight\": 1.0, \"is_null_level\": false, \"bayes_factor\": 215.28073766314378, \"log2_bayes_factor\": 7.7500754292934895, \"comparison_vector_value\": 3, \"max_comparison_vector_value\": 4, \"bayes_factor_description\": \"If comparison level is `jaro-winkler distance of surname >= 0.92` then comparison is 215.28 times more likely to be a match\", \"probability_two_random_records_match\": 0.0005281056211242248, \"comparison_sort_order\": 1}, {\"comparison_name\": \"surname\", \"sql_condition\": \"jaro_winkler_similarity(\\\"surname_l\\\", \\\"surname_r\\\") >= 0.88\", \"label_for_charts\": \"Jaro-Winkler distance of surname >= 0.88\", \"m_probability\": 0.008468347693889898, \"u_probability\": 0.0005329932534275027, \"m_probability_description\": \"Amongst matching record comparisons, 0.85% of records are in the jaro-winkler distance of surname >= 0.88 comparison level\", \"u_probability_description\": \"Amongst non-matching record comparisons, 0.05% of records are in the jaro-winkler distance of surname >= 0.88 comparison level\", \"has_tf_adjustments\": false, \"tf_adjustment_column\": null, \"tf_adjustment_weight\": 1.0, 
\"is_null_level\": false, \"bayes_factor\": 15.888283086948597, \"log2_bayes_factor\": 3.989891328083966, \"comparison_vector_value\": 2, \"max_comparison_vector_value\": 4, \"bayes_factor_description\": \"If comparison level is `jaro-winkler distance of surname >= 0.88` then comparison is 15.89 times more likely to be a match\", \"probability_two_random_records_match\": 0.0005281056211242248, \"comparison_sort_order\": 1}, {\"comparison_name\": \"surname\", \"sql_condition\": \"jaro_winkler_similarity(\\\"surname_l\\\", \\\"surname_r\\\") >= 0.7\", \"label_for_charts\": \"Jaro-Winkler distance of surname >= 0.7\", \"m_probability\": 0.03213333956467282, \"u_probability\": 0.07473881712181485, \"m_probability_description\": \"Amongst matching record comparisons, 3.21% of records are in the jaro-winkler distance of surname >= 0.7 comparison level\", \"u_probability_description\": \"Amongst non-matching record comparisons, 7.47% of records are in the jaro-winkler distance of surname >= 0.7 comparison level\", \"has_tf_adjustments\": false, \"tf_adjustment_column\": null, \"tf_adjustment_weight\": 1.0, \"is_null_level\": false, \"bayes_factor\": 0.42994177325953026, \"log2_bayes_factor\": -1.217786805113863, \"comparison_vector_value\": 1, \"max_comparison_vector_value\": 4, \"bayes_factor_description\": \"If comparison level is `jaro-winkler distance of surname >= 0.7` then comparison is  2.33 times less likely to be a match\", \"probability_two_random_records_match\": 0.0005281056211242248, \"comparison_sort_order\": 1}, {\"comparison_name\": \"surname\", \"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.1703762483746646, \"u_probability\": 0.9209724213754031, \"m_probability_description\": \"Amongst matching record comparisons, 17.04% of records are in the all other comparisons comparison level\", \"u_probability_description\": \"Amongst non-matching record comparisons, 92.10% of records are in the all other 
comparisons comparison level\", \"has_tf_adjustments\": false, \"tf_adjustment_column\": null, \"tf_adjustment_weight\": 1.0, \"is_null_level\": false, \"bayes_factor\": 0.18499603725399344, \"log2_bayes_factor\": -2.4344337273633045, \"comparison_vector_value\": 0, \"max_comparison_vector_value\": 4, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  5.41 times less likely to be a match\", \"probability_two_random_records_match\": 0.0005281056211242248, \"comparison_sort_order\": 1}, {\"comparison_name\": \"date_of_birth\", \"sql_condition\": \"\\\"date_of_birth_l\\\" = \\\"date_of_birth_r\\\"\", \"label_for_charts\": \"Exact match on date_of_birth\", \"m_probability\": 0.9298272546074744, \"u_probability\": 0.0005239066567033322, \"m_probability_description\": \"Amongst matching record comparisons, 92.98% of records are in the exact match on date_of_birth comparison level\", \"u_probability_description\": \"Amongst non-matching record comparisons, 0.05% of records are in the exact match on date_of_birth comparison level\", \"has_tf_adjustments\": false, \"tf_adjustment_column\": null, \"tf_adjustment_weight\": 1.0, \"is_null_level\": false, \"bayes_factor\": 1774.7956486340258, \"log2_bayes_factor\": 10.793437205800899, \"comparison_vector_value\": 5, \"max_comparison_vector_value\": 5, \"bayes_factor_description\": \"If comparison level is `exact match on date_of_birth` then comparison is 1,774.80 times more likely to be a match\", \"probability_two_random_records_match\": 0.0005281056211242248, \"comparison_sort_order\": 2}, {\"comparison_name\": \"date_of_birth\", \"sql_condition\": \"damerau_levenshtein(\\\"date_of_birth_l\\\", \\\"date_of_birth_r\\\") <= 1\", \"label_for_charts\": \"Levenshtein distance of date_of_birth <= 1\", \"m_probability\": 0.012193577393529345, \"u_probability\": 0.0011024611732895517, \"m_probability_description\": \"Amongst matching record comparisons, 1.22% of records are in the 
levenshtein distance of date_of_birth <= 1 comparison level\", \"u_probability_description\": \"Amongst non-matching record comparisons, 0.11% of records are in the levenshtein distance of date_of_birth <= 1 comparison level\", \"has_tf_adjustments\": false, \"tf_adjustment_column\": null, \"tf_adjustment_weight\": 1.0, \"is_null_level\": false, \"bayes_factor\": 11.0603236548874, \"log2_bayes_factor\": 3.4673216982239112, \"comparison_vector_value\": 4, \"max_comparison_vector_value\": 5, \"bayes_factor_description\": \"If comparison level is `levenshtein distance of date_of_birth <= 1` then comparison is 11.06 times more likely to be a match\", \"probability_two_random_records_match\": 0.0005281056211242248, \"comparison_sort_order\": 2}, {\"comparison_name\": \"date_of_birth\", \"sql_condition\": \"ABS(EPOCH(try_strptime(\\\"date_of_birth_l\\\", '%Y-%m-%d')) - EPOCH(try_strptime(\\\"date_of_birth_r\\\", '%Y-%m-%d'))) <= 2629800.0\", \"label_for_charts\": \"Abs difference of 'transformed date_of_birth <= 1 month'\", \"m_probability\": 0.010000000000000009, \"u_probability\": 0.0050000000000000044, \"m_probability_description\": \"Amongst matching record comparisons, 1.00% of records are in the abs difference of 'transformed date_of_birth <= 1 month' comparison level\", \"u_probability_description\": \"Amongst non-matching record comparisons, 0.50% of records are in the abs difference of 'transformed date_of_birth <= 1 month' comparison level\", \"has_tf_adjustments\": false, \"tf_adjustment_column\": null, \"tf_adjustment_weight\": 1.0, \"is_null_level\": false, \"bayes_factor\": 2.0, \"log2_bayes_factor\": 1.0, \"comparison_vector_value\": 3, \"max_comparison_vector_value\": 5, \"bayes_factor_description\": \"If comparison level is `abs difference of 'transformed date_of_birth <= 1 month'` then comparison is 2.00 times more likely to be a match\", \"probability_two_random_records_match\": 0.0005281056211242248, \"comparison_sort_order\": 2}, 
{\"comparison_name\": \"date_of_birth\", \"sql_condition\": \"ABS(EPOCH(try_strptime(\\\"date_of_birth_l\\\", '%Y-%m-%d')) - EPOCH(try_strptime(\\\"date_of_birth_r\\\", '%Y-%m-%d'))) <= 31557600.0\", \"label_for_charts\": \"Abs difference of 'transformed date_of_birth <= 1 year'\", \"m_probability\": 0.010000000000000009, \"u_probability\": 0.020000000000000018, \"m_probability_description\": \"Amongst matching record comparisons, 1.00% of records are in the abs difference of 'transformed date_of_birth <= 1 year' comparison level\", \"u_probability_description\": \"Amongst non-matching record comparisons, 2.00% of records are in the abs difference of 'transformed date_of_birth <= 1 year' comparison level\", \"has_tf_adjustments\": false, \"tf_adjustment_column\": null, \"tf_adjustment_weight\": 1.0, \"is_null_level\": false, \"bayes_factor\": 0.5, \"log2_bayes_factor\": -1.0, \"comparison_vector_value\": 2, \"max_comparison_vector_value\": 5, \"bayes_factor_description\": \"If comparison level is `abs difference of 'transformed date_of_birth <= 1 year'` then comparison is  2.00 times less likely to be a match\", \"probability_two_random_records_match\": 0.0005281056211242248, \"comparison_sort_order\": 2}, {\"comparison_name\": \"date_of_birth\", \"sql_condition\": \"ABS(EPOCH(try_strptime(\\\"date_of_birth_l\\\", '%Y-%m-%d')) - EPOCH(try_strptime(\\\"date_of_birth_r\\\", '%Y-%m-%d'))) <= 315576000.0\", \"label_for_charts\": \"Abs difference of 'transformed date_of_birth <= 10 year'\", \"m_probability\": 0.010000000000000009, \"u_probability\": 0.08000000000000007, \"m_probability_description\": \"Amongst matching record comparisons, 1.00% of records are in the abs difference of 'transformed date_of_birth <= 10 year' comparison level\", \"u_probability_description\": \"Amongst non-matching record comparisons, 8.00% of records are in the abs difference of 'transformed date_of_birth <= 10 year' comparison level\", \"has_tf_adjustments\": false, 
\"tf_adjustment_column\": null, \"tf_adjustment_weight\": 1.0, \"is_null_level\": false, \"bayes_factor\": 0.125, \"log2_bayes_factor\": -3.0, \"comparison_vector_value\": 1, \"max_comparison_vector_value\": 5, \"bayes_factor_description\": \"If comparison level is `abs difference of 'transformed date_of_birth <= 10 year'` then comparison is  8.00 times less likely to be a match\", \"probability_two_random_records_match\": 0.0005281056211242248, \"comparison_sort_order\": 2}, {\"comparison_name\": \"date_of_birth\", \"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.0579791679989962, \"u_probability\": 0.9983736321700071, \"m_probability_description\": \"Amongst matching record comparisons, 5.80% of records are in the all other comparisons comparison level\", \"u_probability_description\": \"Amongst non-matching record comparisons, 99.84% of records are in the all other comparisons comparison level\", \"has_tf_adjustments\": false, \"tf_adjustment_column\": null, \"tf_adjustment_weight\": 1.0, \"is_null_level\": false, \"bayes_factor\": 0.05807361706155645, \"log2_bayes_factor\": -4.105973296008658, \"comparison_vector_value\": 0, \"max_comparison_vector_value\": 5, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  17.22 times less likely to be a match\", \"probability_two_random_records_match\": 0.0005281056211242248, \"comparison_sort_order\": 2}, {\"comparison_name\": \"soc_sec_id\", \"sql_condition\": \"\\\"soc_sec_id_l\\\" = \\\"soc_sec_id_r\\\"\", \"label_for_charts\": \"Exact match on soc_sec_id\", \"m_probability\": 0.8590418481224762, \"u_probability\": 0.0004563889599186916, \"m_probability_description\": \"Amongst matching record comparisons, 85.90% of records are in the exact match on soc_sec_id comparison level\", \"u_probability_description\": \"Amongst non-matching record comparisons, 0.05% of records are in the exact match on soc_sec_id comparison 
level\", \"has_tf_adjustments\": false, \"tf_adjustment_column\": null, \"tf_adjustment_weight\": 1.0, \"is_null_level\": false, \"bayes_factor\": 1882.2581691623734, \"log2_bayes_factor\": 10.878248805303441, \"comparison_vector_value\": 2, \"max_comparison_vector_value\": 2, \"bayes_factor_description\": \"If comparison level is `exact match on soc_sec_id` then comparison is 1,882.26 times more likely to be a match\", \"probability_two_random_records_match\": 0.0005281056211242248, \"comparison_sort_order\": 3}, {\"comparison_name\": \"soc_sec_id\", \"sql_condition\": \"damerau_levenshtein(\\\"soc_sec_id_l\\\", \\\"soc_sec_id_r\\\") <= 2\", \"label_for_charts\": \"Damerau-Levenshtein distance of soc_sec_id <= 2\", \"m_probability\": 0.07831886200889254, \"u_probability\": 0.0002967067706327191, \"m_probability_description\": \"Amongst matching record comparisons, 7.83% of records are in the damerau-levenshtein distance of soc_sec_id <= 2 comparison level\", \"u_probability_description\": \"Amongst non-matching record comparisons, 0.03% of records are in the damerau-levenshtein distance of soc_sec_id <= 2 comparison level\", \"has_tf_adjustments\": false, \"tf_adjustment_column\": null, \"tf_adjustment_weight\": 1.0, \"is_null_level\": false, \"bayes_factor\": 263.96048139339626, \"log2_bayes_factor\": 8.044178143731921, \"comparison_vector_value\": 1, \"max_comparison_vector_value\": 2, \"bayes_factor_description\": \"If comparison level is `damerau-levenshtein distance of soc_sec_id <= 2` then comparison is 263.96 times more likely to be a match\", \"probability_two_random_records_match\": 0.0005281056211242248, \"comparison_sort_order\": 3}, {\"comparison_name\": \"soc_sec_id\", \"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.06263928986863126, \"u_probability\": 0.9992469042694486, \"m_probability_description\": \"Amongst matching record comparisons, 6.26% of records are in the all other comparisons comparison 
level\", \"u_probability_description\": \"Amongst non-matching record comparisons, 99.92% of records are in the all other comparisons comparison level\", \"has_tf_adjustments\": false, \"tf_adjustment_column\": null, \"tf_adjustment_weight\": 1.0, \"is_null_level\": false, \"bayes_factor\": 0.0626864988032432, \"log2_bayes_factor\": -3.9957014358545835, \"comparison_vector_value\": 0, \"max_comparison_vector_value\": 2, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  15.95 times less likely to be a match\", \"probability_two_random_records_match\": 0.0005281056211242248, \"comparison_sort_order\": 3}, {\"comparison_name\": \"street_number\", \"sql_condition\": \"\\\"street_number_l\\\" = \\\"street_number_r\\\"\", \"label_for_charts\": \"Exact match on street_number\", \"m_probability\": 0.768227813758807, \"u_probability\": 0.014992862853499144, \"m_probability_description\": \"Amongst matching record comparisons, 76.82% of records are in the exact match on street_number comparison level\", \"u_probability_description\": \"Amongst non-matching record comparisons, 1.50% of records are in the exact match on street_number comparison level\", \"has_tf_adjustments\": true, \"tf_adjustment_column\": \"street_number\", \"tf_adjustment_weight\": 1.0, \"is_null_level\": false, \"bayes_factor\": 51.239567870756076, \"log2_bayes_factor\": 5.679186403639793, \"comparison_vector_value\": 1, \"max_comparison_vector_value\": 1, \"bayes_factor_description\": \"If comparison level is `exact match on street_number` then comparison is 51.24 times more likely to be a match\", \"probability_two_random_records_match\": 0.0005281056211242248, \"comparison_sort_order\": 4}, {\"comparison_name\": \"street_number\", \"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.23177218624119303, \"u_probability\": 0.9850071371465009, \"m_probability_description\": \"Amongst matching record comparisons, 
23.18% of records are in the all other comparisons comparison level\", \"u_probability_description\": \"Amongst non-matching record comparisons, 98.50% of records are in the all other comparisons comparison level\", \"has_tf_adjustments\": false, \"tf_adjustment_column\": null, \"tf_adjustment_weight\": 1.0, \"is_null_level\": false, \"bayes_factor\": 0.23530000697519957, \"log2_bayes_factor\": -2.0874267315581068, \"comparison_vector_value\": 0, \"max_comparison_vector_value\": 1, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.25 times less likely to be a match\", \"probability_two_random_records_match\": 0.0005281056211242248, \"comparison_sort_order\": 4}, {\"comparison_name\": \"postcode\", \"sql_condition\": \"\\\"postcode_l\\\" = \\\"postcode_r\\\"\", \"label_for_charts\": \"Exact match on postcode\", \"m_probability\": 0.7701234367130003, \"u_probability\": 0.0012882468514016968, \"m_probability_description\": \"Amongst matching record comparisons, 77.01% of records are in the exact match on postcode comparison level\", \"u_probability_description\": \"Amongst non-matching record comparisons, 0.13% of records are in the exact match on postcode comparison level\", \"has_tf_adjustments\": true, \"tf_adjustment_column\": \"postcode\", \"tf_adjustment_weight\": 1.0, \"is_null_level\": false, \"bayes_factor\": 597.8073502567117, \"log2_bayes_factor\": 9.223536825354994, \"comparison_vector_value\": 1, \"max_comparison_vector_value\": 1, \"bayes_factor_description\": \"If comparison level is `exact match on postcode` then comparison is 597.81 times more likely to be a match\", \"probability_two_random_records_match\": 0.0005281056211242248, \"comparison_sort_order\": 5}, {\"comparison_name\": \"postcode\", \"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.22987656328699965, \"u_probability\": 0.9987117531485983, \"m_probability_description\": \"Amongst matching 
record comparisons, 22.99% of records are in the all other comparisons comparison level\", \"u_probability_description\": \"Amongst non-matching record comparisons, 99.87% of records are in the all other comparisons comparison level\", \"has_tf_adjustments\": false, \"tf_adjustment_column\": null, \"tf_adjustment_weight\": 1.0, \"is_null_level\": false, \"bayes_factor\": 0.23017308303649886, \"log2_bayes_factor\": -2.119208963590452, \"comparison_vector_value\": 0, \"max_comparison_vector_value\": 1, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.34 times less likely to be a match\", \"probability_two_random_records_match\": 0.0005281056211242248, \"comparison_sort_order\": 5}]}}, {\"mode\": \"vega-lite\"});\n",
              "</script>"
            ],
            "text/plain": [
              "alt.VConcatChart(...)"
            ]
          },
          "execution_count": 43,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "linker.visualisations.match_weights_chart()"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 44,
      "metadata": {
        "execution": {
          "iopub.execute_input": "2024-06-07T09:11:37.960629Z",
          "iopub.status.busy": "2024-06-07T09:11:37.960358Z",
          "iopub.status.idle": "2024-06-07T09:11:44.496784Z",
          "shell.execute_reply": "2024-06-07T09:11:44.496254Z"
        }
      },
      "outputs": [
        {
          "data": {
            "application/vnd.jupyter.widget-view+json": {
              "model_id": "7317c56423e44b84abdfb32562eda774",
              "version_major": 2,
              "version_minor": 0
            },
            "text/plain": [
              "FloatProgress(value=0.0, layout=Layout(width='auto'), style=ProgressStyle(bar_color='black'))"
            ]
          },
          "metadata": {},
          "output_type": "display_data"
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\n",
            " -- WARNING --\n",
            "You have called predict(), but there are some parameter estimates which have neither been estimated or specified in your settings dictionary.  To produce predictions the following untrained trained parameters will use default values.\n",
            "Comparison: 'date_of_birth':\n",
            "    m values not fully trained\n",
            "Comparison: 'date_of_birth':\n",
            "    u values not fully trained\n"
          ]
        }
      ],
      "source": [
        "results = linker.inference.predict(threshold_match_probability=0.2)"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 45,
      "metadata": {
        "execution": {
          "iopub.execute_input": "2024-06-07T09:11:44.499943Z",
          "iopub.status.busy": "2024-06-07T09:11:44.499693Z",
          "iopub.status.idle": "2024-06-07T09:11:47.310831Z",
          "shell.execute_reply": "2024-06-07T09:11:47.310208Z"
        }
      },
      "outputs": [
        {
          "data": {
            "application/vnd.jupyter.widget-view+json": {
              "model_id": "1c4e16cfc8fc4df7bbdd87024c2d86cf",
              "version_major": 2,
              "version_minor": 0
            },
            "text/plain": [
              "FloatProgress(value=0.0, layout=Layout(width='auto'), style=ProgressStyle(bar_color='black'))"
            ]
          },
          "metadata": {},
          "output_type": "display_data"
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\n",
            " -- WARNING --\n",
            "You have called predict(), but there are some parameter estimates which have neither been estimated or specified in your settings dictionary.  To produce predictions the following untrained trained parameters will use default values.\n",
            "Comparison: 'date_of_birth':\n",
            "    m values not fully trained\n",
            "Comparison: 'date_of_birth':\n",
            "    u values not fully trained\n"
          ]
        },
        {
          "data": {
            "text/html": [
              "\n",
              "<style>\n",
              "  #altair-viz-8acddffca1f84e23a9a13bfaf4f1e5e3.vega-embed {\n",
              "    width: 100%;\n",
              "    display: flex;\n",
              "  }\n",
              "\n",
              "  #altair-viz-8acddffca1f84e23a9a13bfaf4f1e5e3.vega-embed details,\n",
              "  #altair-viz-8acddffca1f84e23a9a13bfaf4f1e5e3.vega-embed details summary {\n",
              "    position: relative;\n",
              "  }\n",
              "</style>\n",
              "<div id=\"altair-viz-8acddffca1f84e23a9a13bfaf4f1e5e3\"></div>\n",
              "<script type=\"text/javascript\">\n",
              "  var VEGA_DEBUG = (typeof VEGA_DEBUG == \"undefined\") ? {} : VEGA_DEBUG;\n",
              "  (function(spec, embedOpt){\n",
              "    let outputDiv = document.currentScript.previousElementSibling;\n",
              "    if (outputDiv.id !== \"altair-viz-8acddffca1f84e23a9a13bfaf4f1e5e3\") {\n",
              "      outputDiv = document.getElementById(\"altair-viz-8acddffca1f84e23a9a13bfaf4f1e5e3\");\n",
              "    }\n",
              "    const paths = {\n",
              "      \"vega\": \"https://cdn.jsdelivr.net/npm/vega@5?noext\",\n",
              "      \"vega-lib\": \"https://cdn.jsdelivr.net/npm/vega-lib?noext\",\n",
              "      \"vega-lite\": \"https://cdn.jsdelivr.net/npm/vega-lite@5.17.0?noext\",\n",
              "      \"vega-embed\": \"https://cdn.jsdelivr.net/npm/vega-embed@6?noext\",\n",
              "    };\n",
              "\n",
              "    function maybeLoadScript(lib, version) {\n",
              "      var key = `${lib.replace(\"-\", \"\")}_version`;\n",
              "      return (VEGA_DEBUG[key] == version) ?\n",
              "        Promise.resolve(paths[lib]) :\n",
              "        new Promise(function(resolve, reject) {\n",
              "          var s = document.createElement('script');\n",
              "          document.getElementsByTagName(\"head\")[0].appendChild(s);\n",
              "          s.async = true;\n",
              "          s.onload = () => {\n",
              "            VEGA_DEBUG[key] = version;\n",
              "            return resolve(paths[lib]);\n",
              "          };\n",
              "          s.onerror = () => reject(`Error loading script: ${paths[lib]}`);\n",
              "          s.src = paths[lib];\n",
              "        });\n",
              "    }\n",
              "\n",
              "    function showError(err) {\n",
              "      outputDiv.innerHTML = `<div class=\"error\" style=\"color:red;\">${err}</div>`;\n",
              "      throw err;\n",
              "    }\n",
              "\n",
              "    function displayChart(vegaEmbed) {\n",
              "      vegaEmbed(outputDiv, spec, embedOpt)\n",
              "        .catch(err => showError(`Javascript Error: ${err.message}<br>This usually means there's a typo in your chart specification. See the javascript console for the full traceback.`));\n",
              "    }\n",
              "\n",
              "    if(typeof define === \"function\" && define.amd) {\n",
              "      requirejs.config({paths});\n",
              "      require([\"vega-embed\"], displayChart, err => showError(`Error loading script: ${err.message}`));\n",
              "    } else {\n",
              "      maybeLoadScript(\"vega\", \"5\")\n",
              "        .then(() => maybeLoadScript(\"vega-lite\", \"5.17.0\"))\n",
              "        .then(() => maybeLoadScript(\"vega-embed\", \"6\"))\n",
              "        .catch(showError)\n",
              "        .then(() => displayChart(vegaEmbed));\n",
              "    }\n",
              "  })({\"config\": {\"view\": {\"continuousWidth\": 300, \"continuousHeight\": 300}}, \"layer\": [{\"layer\": [{\"mark\": \"point\", \"encoding\": {\"opacity\": {\"condition\": {\"param\": \"point_selection\", \"value\": 1, \"empty\": false}, \"value\": 0}, \"tooltip\": [{\"field\": \"truth_threshold\", \"format\": \".3f\", \"title\": \"Match weight\", \"type\": \"quantitative\"}, {\"field\": \"match_probability\", \"format\": \".3%\", \"title\": \"Match probability\", \"type\": \"quantitative\"}, {\"field\": \"tp\", \"format\": \",.0f\", \"title\": \"TP\", \"type\": \"quantitative\"}, {\"field\": \"tn\", \"format\": \",.0f\", \"title\": \"TN\", \"type\": \"quantitative\"}, {\"field\": \"fp\", \"format\": \",.0f\", \"title\": \"FP\", \"type\": \"quantitative\"}, {\"field\": \"fn\", \"format\": \",.0f\", \"title\": \"FN\", \"type\": \"quantitative\"}, {\"field\": \"precision\", \"format\": \".4f\", \"title\": \"Precision\", \"type\": \"quantitative\"}, {\"field\": \"recall\", \"format\": \".4f\", \"title\": \"Recall (TPR)\", \"type\": \"quantitative\"}, {\"field\": \"fp_rate\", \"format\": \".4f\", \"title\": \"FPR\", \"type\": \"quantitative\"}]}, \"params\": [{\"name\": \"metric\", \"select\": {\"type\": \"point\", \"fields\": [\"metric\"]}, \"bind\": \"legend\", \"value\": [{\"metric\": \"precision\"}, {\"metric\": \"recall\"}]}, {\"name\": \"point_selection\", \"select\": {\"type\": \"point\", \"encodings\": [\"x\"], \"fields\": [\"truth_threshold\"], \"nearest\": true, \"on\": \"mouseover\"}}], \"transform\": [{\"filter\": {\"param\": \"metric\", \"empty\": true}}]}, {\"mark\": \"line\", \"encoding\": {\"opacity\": {\"condition\": {\"param\": \"metric\", \"value\": 1}, \"value\": 0.1}}}], \"encoding\": {\"color\": {\"field\": \"metric\", \"type\": \"nominal\", \"sort\": [\"precision\", \"recall\"], \"title\": \"Metric\", \"legend\": {\"labelExpr\": \"{'precision': 'Precision (PPV)', 'recall': 'Recall (TPR)', 'specificity': 'Specificity (TNR)', 
'accuracy': 'Accuracy', 'npv': 'NPV', 'f1': 'F1', 'f2': 'F2', 'f0_5': 'F0.5', 'p4': 'P4', 'phi': '\\u03c6 (MCC)'}[datum.value]\"}}, \"x\": {\"type\": \"quantitative\", \"field\": \"truth_threshold\", \"axis\": {\"labelFontSize\": 12, \"title\": \"Match weight threshold\", \"titleFontSize\": 16, \"titlePadding\": 10}}, \"y\": {\"field\": \"value\", \"type\": \"quantitative\", \"axis\": {\"labelFontSize\": 12, \"title\": \"Score\", \"titleFontSize\": 18, \"titlePadding\": 10}}}}, {\"layer\": [{\"mark\": {\"type\": \"rule\", \"color\": \"gray\"}, \"encoding\": {\"x\": {\"field\": \"truth_threshold\", \"type\": \"quantitative\"}}}, {\"layer\": [{\"mark\": {\"type\": \"text\", \"align\": \"right\", \"baseline\": \"middle\", \"fontSize\": 14, \"x2\": 200, \"xOffset\": -10, \"y\": 100, \"y2\": 200}, \"transform\": [{\"filter\": {\"param\": \"metric\", \"empty\": true}}]}], \"encoding\": {\"color\": {\"field\": \"metric\", \"sort\": [\"precision\", \"recall\"]}, \"text\": {\"field\": \"y_text\"}, \"y\": {\"field\": \"score_index\", \"type\": \"quantitative\"}}}, {\"mark\": {\"type\": \"text\", \"color\": \"black\", \"fontSize\": 14, \"xOffset\": 25, \"y\": \"height\", \"yOffset\": -20}, \"encoding\": {\"text\": {\"condition\": {\"param\": \"point_selection\", \"aggregate\": \"min\", \"empty\": false, \"field\": \"truth_threshold\", \"format\": \"+.2f\", \"type\": \"nominal\"}, \"value\": \" \"}, \"x\": {\"field\": \"truth_threshold\", \"type\": \"quantitative\"}}}], \"transform\": [{\"filter\": {\"param\": \"point_selection\", \"empty\": false}}]}], \"data\": {\"name\": \"data-fe0ff28acc0c98bb1d7e463fcfe76e87\"}, \"height\": 400, \"title\": {\"text\": \"Link Quality Evaluation\", \"fontSize\": 20, \"subtitle\": [\"Click a legend value to show a specific evaluation metric\", \"Shift + Click to show multiple metrics\"]}, \"transform\": [{\"fold\": [\"precision\", \"recall\"], \"as\": [\"metric\", \"value\"]}, {\"calculate\": \"0.275 - 0.05*indexof(['precision', 'recall'], 
datum.metric)\", \"as\": \"score_index\"}, {\"calculate\": \"{'precision': 'Precision (PPV)', 'recall': 'Recall (TPR)', 'specificity': 'Specificity (TNR)', 'accuracy': 'Accuracy', 'npv': 'NPV', 'f1': 'F1', 'f2': 'F2', 'f0_5': 'F0.5', 'p4': 'P4', 'phi': '\\u03c6 (MCC)'}[datum.metric]\", \"as\": \"metric_text\"}, {\"calculate\": \"datum.metric_text + ' = ' + format(datum.value, ',.3f')\", \"as\": \"y_text\"}], \"width\": 400, \"$schema\": \"https://vega.github.io/schema/vega-lite/v5.14.1.json\", \"datasets\": {\"data-fe0ff28acc0c98bb1d7e463fcfe76e87\": [{\"truth_threshold\": -23.70000035315752, \"match_probability\": 7.33819020765569e-08, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12383243.0, \"fp\": 107719.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9913762446799534, \"fp_rate\": 0.008623755320046606, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.05716411378555799, \"recall\": 0.9989293361884368, \"specificity\": 0.9913762446799534, \"npv\": 0.9999994347202875, \"accuracy\": 0.9913801960392078, \"f1\": 0.10813988144517667, \"f2\": 0.2325821569493312, \"f0_5\": 0.0704472988190828, \"p4\": 0.19509089158934365, \"phi\": 0.23792726011867513}, {\"truth_threshold\": -22.500000335276127, \"match_probability\": 1.6858732644290343e-07, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12392909.0, \"fp\": 98053.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9921500841968777, \"fp_rate\": 0.00784991580312229, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.062447410693796376, \"recall\": 0.9989293361884368, \"specificity\": 0.9921500841968777, \"npv\": 0.9999994351611841, \"accuracy\": 0.9921536307261453, \"f1\": 0.1175464804449164, \"f2\": 0.2497781789254681, \"f0_5\": 0.07685808027791768, \"p4\": 
0.21027776082808605, \"phi\": 0.24877657404756795}, {\"truth_threshold\": -21.40000031888485, \"match_probability\": 3.613747796401302e-07, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12394654.0, \"fp\": 96308.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9922897852062955, \"fp_rate\": 0.00771021479370444, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.06350703526872101, \"recall\": 0.9989293361884368, \"specificity\": 0.9922897852062955, \"npv\": 0.9999994352407057, \"accuracy\": 0.9922932586517303, \"f1\": 0.1194218162867879, \"f2\": 0.25315719701374517, \"f0_5\": 0.07814182543898691, \"p4\": 0.213274987672736, \"phi\": 0.25089604999129494}, {\"truth_threshold\": -21.200000315904617, \"match_probability\": 4.151105934612292e-07, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12404658.0, \"fp\": 86304.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9930906842883679, \"fp_rate\": 0.006909315711632139, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.07035062207141703, \"recall\": 0.9989293361884368, \"specificity\": 0.9930906842883679, \"npv\": 0.9999994356961676, \"accuracy\": 0.9930937387477495, \"f1\": 0.13144415485091523, \"f2\": 0.27444174573692925, \"f0_5\": 0.08641678001894791, \"p4\": 0.2322537096051115, \"phi\": 0.2641754450117263}, {\"truth_threshold\": -20.90000031143427, \"match_probability\": 5.110610406071033e-07, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12404715.0, \"fp\": 86247.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9930952475878159, \"fp_rate\": 0.0069047524121841055, \"fn_rate\": 0.0010706638115631692, \"precision\": 
0.07039384336803983, \"recall\": 0.9989293361884368, \"specificity\": 0.9930952475878159, \"npv\": 0.9999994356987605, \"accuracy\": 0.993098299659932, \"f1\": 0.13151959402311814, \"f2\": 0.27457327839905826, \"f0_5\": 0.08646895273401298, \"p4\": 0.23237152741238035, \"phi\": 0.26425719170325}, {\"truth_threshold\": -20.500000305473804, \"match_probability\": 6.743489786418308e-07, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12410486.0, \"fp\": 80476.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.993557261642458, \"fp_rate\": 0.006442738357542038, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.07506292597147356, \"recall\": 0.9989293361884368, \"specificity\": 0.993557261642458, \"npv\": 0.9999994359611661, \"accuracy\": 0.9935600720144029, \"f1\": 0.13963333155165963, \"f2\": 0.2885762511156868, \"f0_5\": 0.09209850916331515, \"p4\": 0.24495225714369975, \"phi\": 0.2729439287100499}, {\"truth_threshold\": -20.300000302493572, \"match_probability\": 7.746234863849234e-07, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12412984.0, \"fp\": 77978.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9937572462393208, \"fp_rate\": 0.0062427537606791214, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.07728170964039333, \"recall\": 0.9989293361884368, \"specificity\": 0.9937572462393208, \"npv\": 0.9999994360746737, \"accuracy\": 0.9937599519903981, \"f1\": 0.14346436455896405, \"f2\": 0.29509041125599805, \"f0_5\": 0.09476919326472688, \"p4\": 0.2508304576657482, \"phi\": 0.2769764546418909}, {\"truth_threshold\": -20.200000301003456, \"match_probability\": 8.30220850864085e-07, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12415751.0, \"fp\": 
75211.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9939787664072631, \"fp_rate\": 0.0060212335927368925, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.07989772699469061, \"recall\": 0.9989293361884368, \"specificity\": 0.9939787664072631, \"npv\": 0.9999994362003511, \"accuracy\": 0.9939813562712543, \"f1\": 0.14796103307657454, \"f2\": 0.3026581644947819, \"f0_5\": 0.09791428040275138, \"p4\": 0.25767997952926913, \"phi\": 0.281656778208457}, {\"truth_threshold\": -20.10000029951334, \"match_probability\": 8.898086238977229e-07, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12421026.0, \"fp\": 69936.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9944010717509187, \"fp_rate\": 0.005598928249081215, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.08540939228686885, \"recall\": 0.9989293361884368, \"specificity\": 0.9944010717509187, \"npv\": 0.9999994364397873, \"accuracy\": 0.9944034406881376, \"f1\": 0.1573640142160111, \"f2\": 0.3182159249261833, \"f0_5\": 0.10452744185450984, \"p4\": 0.2718311500524357, \"phi\": 0.29127167160688977}, {\"truth_threshold\": -20.000000298023224, \"match_probability\": 9.53673209908534e-07, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12424758.0, \"fp\": 66204.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9946998477779374, \"fp_rate\": 0.00530015222206264, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.08979170963085172, \"recall\": 0.9989293361884368, \"specificity\": 0.9946998477779374, \"npv\": 0.9999994366090627, \"accuracy\": 0.9947020604120824, \"f1\": 0.16477236890240057, \"f2\": 0.3302254087999434, \"f0_5\": 0.10977282353653044, \"p4\": 
0.28281969900812987, \"phi\": 0.2986956701490189}, {\"truth_threshold\": -19.900000296533108, \"match_probability\": 1.0221215694048732e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12426446.0, \"fp\": 64516.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9948349854879072, \"fp_rate\": 0.005165014512092824, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.09192506369023322, \"recall\": 0.9989293361884368, \"specificity\": 0.9948349854879072, \"npv\": 0.9999994366855932, \"accuracy\": 0.9948371274254851, \"f1\": 0.16835728555777535, \"f2\": 0.33596024650459366, \"f0_5\": 0.11232225531944168, \"p4\": 0.2880870908900059, \"phi\": 0.302243757191807}, {\"truth_threshold\": -19.80000029504299, \"match_probability\": 1.095482694339651e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12427476.0, \"fp\": 63486.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.994917445109512, \"fp_rate\": 0.005082554890488019, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.09327734693003127, \"recall\": 0.9989293361884368, \"specificity\": 0.994917445109512, \"npv\": 0.9999994367322812, \"accuracy\": 0.9949195439087818, \"f1\": 0.17062242831950886, \"f2\": 0.33955848558267215, \"f0_5\": 0.11393690292596806, \"p4\": 0.29139869419914954, \"phi\": 0.3044713963784026}, {\"truth_threshold\": -19.700000293552876, \"match_probability\": 1.174109189357499e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12429264.0, \"fp\": 61698.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9950605886079871, \"fp_rate\": 0.004939411392012881, \"fn_rate\": 0.0010706638115631692, \"precision\": 
0.09572176054170514, \"recall\": 0.9989293361884368, \"specificity\": 0.9950605886079871, \"npv\": 0.9999994368133095, \"accuracy\": 0.9950626125225045, \"f1\": 0.17470274318884, \"f2\": 0.34599124823852256, \"f0_5\": 0.11685286308301188, \"p4\": 0.29733185596395184, \"phi\": 0.30845729973585423}, {\"truth_threshold\": -19.60000029206276, \"match_probability\": 1.2583789665296601e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12432140.0, \"fp\": 58822.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9952908350854002, \"fp_rate\": 0.0047091649145998525, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.09993420347956483, \"recall\": 0.9989293361884368, \"specificity\": 0.9952908350854002, \"npv\": 0.9999994369435947, \"accuracy\": 0.9952927385477095, \"f1\": 0.18169172775451725, \"f2\": 0.35686574504125457, \"f0_5\": 0.12186975181936928, \"p4\": 0.3073993843463305, \"phi\": 0.3152079331132573}, {\"truth_threshold\": -19.500000290572643, \"match_probability\": 1.3486970617214505e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12435202.0, \"fp\": 55760.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.995535972329433, \"fp_rate\": 0.004464027670566926, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.10484660705398854, \"recall\": 0.9989293361884368, \"specificity\": 0.995535972329433, \"npv\": 0.9999994370822396, \"accuracy\": 0.9955377475495099, \"f1\": 0.18977465893736653, \"f2\": 0.3692208541094264, \"f0_5\": 0.12770725297416524, \"p4\": 0.31889535947667214, \"phi\": 0.322902080558197}, {\"truth_threshold\": -19.400000289082527, \"match_probability\": 1.4454975813216156e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12436891.0, \"fp\": 
54071.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.995671190097288, \"fp_rate\": 0.004328809902712057, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.10776872050427379, \"recall\": 0.9989293361884368, \"specificity\": 0.995671190097288, \"npv\": 0.999999437158687, \"accuracy\": 0.9956728945789158, \"f1\": 0.19454870420017872, \"f2\": 0.3764091569264818, \"f0_5\": 0.1311730254754043, \"p4\": 0.3256122446833887, \"phi\": 0.3273931372653125}, {\"truth_threshold\": -19.30000028759241, \"match_probability\": 1.549245788689352e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12441420.0, \"fp\": 49542.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9960337722586939, \"fp_rate\": 0.003966227741306074, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.1164731689048205, \"recall\": 0.9989293361884368, \"specificity\": 0.9960337722586939, \"npv\": 0.9999994373635758, \"accuracy\": 0.9960352870574115, \"f1\": 0.20862148823689128, \"f2\": 0.39714198844633625, \"f0_5\": 0.141467746826669, \"p4\": 0.3451035873739845, \"phi\": 0.3404202496646495}, {\"truth_threshold\": -19.200000286102295, \"match_probability\": 1.66044034034615e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12443432.0, \"fp\": 47530.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.996194848723421, \"fp_rate\": 0.0038051512765790176, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.1208079761750615, \"recall\": 0.9989293361884368, \"specificity\": 0.996194848723421, \"npv\": 0.9999994374545493, \"accuracy\": 0.9961962792558512, \"f1\": 0.21554811135497284, \"f2\": 0.40710358670041014, \"f0_5\": 0.14657826933953372, \"p4\": 0.3545316303206286, 
\"phi\": 0.34672522120474036}, {\"truth_threshold\": -19.10000028461218, \"match_probability\": 1.7796156826591604e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12445374.0, \"fp\": 45588.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9963503211361943, \"fp_rate\": 0.0036496788638056862, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.1253093881310079, \"recall\": 0.9989293361884368, \"specificity\": 0.9963503211361943, \"npv\": 0.99999943754233, \"accuracy\": 0.9963516703340668, \"f1\": 0.22268441959186458, \"f2\": 0.41720432855080425, \"f0_5\": 0.15187383147143907, \"p4\": 0.3641334564718527, \"phi\": 0.3531534016201906}, {\"truth_threshold\": -19.000000283122063, \"match_probability\": 1.907344620533969e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12447381.0, \"fp\": 43581.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9965109973114961, \"fp_rate\": 0.003489002688503896, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.1303280651340996, \"recall\": 0.9989293361884368, \"specificity\": 0.9965109973114961, \"npv\": 0.9999994376330199, \"accuracy\": 0.9965122624524905, \"f1\": 0.23057369814651368, \"f2\": 0.4281836777509703, \"f0_5\": 0.15776429323722377, \"p4\": 0.3746188951253199, \"phi\": 0.36018502581133893}, {\"truth_threshold\": -18.900000281631947, \"match_probability\": 2.0442410704611823e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12449076.0, \"fp\": 41886.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9966466954266613, \"fp_rate\": 0.003353304573338707, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.13489063758597186, 
\"recall\": 0.9989293361884368, \"specificity\": 0.9966466954266613, \"npv\": 0.9999994377095888, \"accuracy\": 0.9966478895779156, \"f1\": 0.2376853789464107, \"f2\": 0.43791656050101246, \"f0_5\": 0.1631069997902161, \"p4\": 0.38395638571286755, \"phi\": 0.3664605379396477}, {\"truth_threshold\": -18.80000028014183, \"match_probability\": 2.1909630111470102e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12450614.0, \"fp\": 40348.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9967698244538731, \"fp_rate\": 0.0032301755461268717, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.1393161116918023, \"recall\": 0.9989293361884368, \"specificity\": 0.9967698244538731, \"npv\": 0.9999994377790473, \"accuracy\": 0.9967709541908382, \"f1\": 0.24452889529550517, \"f2\": 0.447138886226397, \"f0_5\": 0.16827790202727075, \"p4\": 0.392841110968318, \"phi\": 0.37244647356707133}, {\"truth_threshold\": -18.700000278651714, \"match_probability\": 2.348215645907411e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12452092.0, \"fp\": 38870.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9968881500079818, \"fp_rate\": 0.0031118499920182287, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.1438514570163653, \"recall\": 0.9989293361884368, \"specificity\": 0.9968881500079818, \"npv\": 0.99999943784578, \"accuracy\": 0.9968892178435688, \"f1\": 0.2514873216658003, \"f2\": 0.45637499475912957, \"f0_5\": 0.1735657110055171, \"p4\": 0.4017754735326811, \"phi\": 0.3784828087360466}, {\"truth_threshold\": -18.600000277161598, \"match_probability\": 2.516754792022793e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12456210.0, \"fp\": 34752.0, \"fn\": 7.0, 
\"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9972178283786309, \"fp_rate\": 0.0027821716213691145, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.1582007121575467, \"recall\": 0.9989293361884368, \"specificity\": 0.9972178283786309, \"npv\": 0.999999438031627, \"accuracy\": 0.997218723744749, \"f1\": 0.2731435980008783, \"f2\": 0.48424408689849485, \"f0_5\": 0.19021960738626434, \"p4\": 0.42895689071978743, \"phi\": 0.39697686462189424}, {\"truth_threshold\": -18.500000275671482, \"match_probability\": 2.6973905133407355e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12458109.0, \"fp\": 32853.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9973698583023469, \"fp_rate\": 0.002630141697653071, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.1658287629494211, \"recall\": 0.9989293361884368, \"specificity\": 0.9973698583023469, \"npv\": 0.9999994381172883, \"accuracy\": 0.9973706741348269, \"f1\": 0.2844388310613649, \"f2\": 0.4982757568359375, \"f0_5\": 0.19902604922169265, \"p4\": 0.4427704572197383, \"phi\": 0.4064658695962286}, {\"truth_threshold\": -18.400000274181366, \"match_probability\": 2.8909910135828424e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12459932.0, \"fp\": 31030.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9975158038267989, \"fp_rate\": 0.0024841961732010713, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.17387715982002608, \"recall\": 0.9989293361884368, \"specificity\": 0.9975158038267989, \"npv\": 0.9999994381994968, \"accuracy\": 0.9975165433086617, \"f1\": 0.29619719268010614, \"f2\": 0.512532764114074, \"f0_5\": 0.2082828385911648, \"p4\": 0.4568948596139775, \"phi\": 
0.41624330752450106}, {\"truth_threshold\": -18.30000027269125, \"match_probability\": 3.098486809064348e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12460632.0, \"fp\": 30330.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9975718443463362, \"fp_rate\": 0.0024281556536638253, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.17717913241637503, \"recall\": 0.9989293361884368, \"specificity\": 0.9975718443463362, \"npv\": 0.999999438231057, \"accuracy\": 0.9975725545109022, \"f1\": 0.3009746768358718, \"f2\": 0.518226397727453, \"f0_5\": 0.2120702419763349, \"p4\": 0.46256079465987976, \"phi\": 0.4201888323309292}, {\"truth_threshold\": -18.200000271201134, \"match_probability\": 3.3208752008774106e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12461750.0, \"fp\": 29212.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9976613490618257, \"fp_rate\": 0.0023386509381743375, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.18272109224183755, \"recall\": 0.9989293361884368, \"specificity\": 0.9976613490618257, \"npv\": 0.9999994382814558, \"accuracy\": 0.9976620124024805, \"f1\": 0.3089330905134694, \"f2\": 0.5275870425720979, \"f0_5\": 0.21841348404788977, \"p4\": 0.4719074424269901, \"phi\": 0.4267289288570678}, {\"truth_threshold\": -18.100000269711018, \"match_probability\": 3.5592250680276667e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12462992.0, \"fp\": 27970.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9977607809550617, \"fp_rate\": 0.002239219044938252, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.18929886090258252, \"recall\": 
0.9989293361884368, \"specificity\": 0.9977607809550617, \"npv\": 0.9999994383374339, \"accuracy\": 0.9977613922784557, \"f1\": 0.318282609225371, \"f2\": 0.538390516544936, \"f0_5\": 0.22592049369733364, \"p4\": 0.4827438046601937, \"phi\": 0.43436360810339186}, {\"truth_threshold\": -18.0000002682209, \"match_probability\": 3.8146820045553597e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12464246.0, \"fp\": 26716.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9978611735429185, \"fp_rate\": 0.0021388264570815284, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.19643877643095617, \"recall\": 0.9989293361884368, \"specificity\": 0.9978611735429185, \"npv\": 0.9999994383939415, \"accuracy\": 0.9978617323464692, \"f1\": 0.32831469146663317, \"f2\": 0.549756729911278, \"f0_5\": 0.2340424006995112, \"p4\": 0.4942017531113097, \"phi\": 0.4425016929753232}, {\"truth_threshold\": -17.900000266730785, \"match_probability\": 4.088473825324779e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12465293.0, \"fp\": 25669.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9979449941485692, \"fp_rate\": 0.0020550058514308186, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.20282608695652174, \"recall\": 0.9989293361884368, \"specificity\": 0.9979449941485692, \"npv\": 0.9999994384411125, \"accuracy\": 0.9979455091018203, \"f1\": 0.3371882905674015, \"f2\": 0.5596209213051824, \"f0_5\": 0.24128478328333505, \"p4\": 0.5041933844430416, \"phi\": 0.449657158072701}, {\"truth_threshold\": -17.80000026524067, \"match_probability\": 4.381916466936514e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12466882.0, \"fp\": 24080.0, \"fn\": 7.0, \"P_rate\": 
0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9980722061279187, \"fp_rate\": 0.0019277938720812697, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.2133546764235079, \"recall\": 0.9989293361884368, \"specificity\": 0.9980722061279187, \"npv\": 0.9999994385126875, \"accuracy\": 0.9980726545309062, \"f1\": 0.3516110797060486, \"f2\": 0.5752867184609693, \"f0_5\": 0.25317486160859654, \"p4\": 0.5201536812431785, \"phi\": 0.461209693795488}, {\"truth_threshold\": -17.700000263750553, \"match_probability\": 4.696420312114957e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12467920.0, \"fp\": 23042.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.998155306212604, \"fp_rate\": 0.001844693787396039, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.2208433368275116, \"recall\": 0.9989293361884368, \"specificity\": 0.998155306212604, \"npv\": 0.9999994385594333, \"accuracy\": 0.9981557111422285, \"f1\": 0.3617180360554956, \"f2\": 0.5860026917900404, \"f0_5\": 0.26159577024753666, \"p4\": 0.5311367468506168, \"phi\": 0.46925359851954823}, {\"truth_threshold\": -17.600000262260437, \"match_probability\": 5.03349696795731e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12468491.0, \"fp\": 22471.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9982010192649693, \"fp_rate\": 0.0017989807350306565, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.2251913661126819, \"recall\": 0.9989293361884368, \"specificity\": 0.9982010192649693, \"npv\": 0.9999994385851447, \"accuracy\": 0.998201400280056, \"f1\": 0.3675295441755768, \"f2\": 0.5920694781883453, \"f0_5\": 0.26647136585445463, \"p4\": 0.5373785628823172, \"phi\": 0.47386135855667993}, 
{\"truth_threshold\": -17.50000026077032, \"match_probability\": 5.394766530610173e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12469574.0, \"fp\": 21388.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.998287721954482, \"fp_rate\": 0.0017122780455180314, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.23392671657294314, \"recall\": 0.9989293361884368, \"specificity\": 0.998287721954482, \"npv\": 0.9999994386339044, \"accuracy\": 0.9982880576115223, \"f1\": 0.3790811736367066, \"f2\": 0.6039281685191692, \"f0_5\": 0.27623631718747355, \"p4\": 0.5496294323973907, \"phi\": 0.4829856647127907}, {\"truth_threshold\": -17.400000259280205, \"match_probability\": 5.781965371275756e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12470687.0, \"fp\": 20275.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9983768263805461, \"fp_rate\": 0.00162317361945381, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.24363948369767963, \"recall\": 0.9989293361884368, \"specificity\": 0.9983768263805461, \"npv\": 0.9999994386840059, \"accuracy\": 0.9983771154230846, \"f1\": 0.39173464491362764, \"f2\": 0.6166207183050719, \"f0_5\": 0.2870466412334523, \"p4\": 0.5628156242291301, \"phi\": 0.4929326513154927}, {\"truth_threshold\": -17.30000025779009, \"match_probability\": 6.196954480953251e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12471634.0, \"fp\": 19328.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9984526411976916, \"fp_rate\": 0.001547358802308421, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.2525619706871882, \"recall\": 0.9989293361884368, 
\"specificity\": 0.9984526411976916, \"npv\": 0.9999994387266279, \"accuracy\": 0.9984528905781156, \"f1\": 0.40318548013704975, \"f2\": 0.6278479552402376, \"f0_5\": 0.2969338207212614, \"p4\": 0.5745437134916774, \"phi\": 0.5018966008378414}, {\"truth_threshold\": -17.200000256299973, \"match_probability\": 6.6417284140038195e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12472792.0, \"fp\": 18170.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9985453482285832, \"fp_rate\": 0.0014546517714168052, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.2644022509210153, \"recall\": 0.9989293361884368, \"specificity\": 0.9985453482285832, \"npv\": 0.9999994387787376, \"accuracy\": 0.998545549109822, \"f1\": 0.418131182176126, \"f2\": 0.6421450061943248, \"f0_5\": 0.30999031725237797, \"p4\": 0.5895665696521564, \"phi\": 0.5135503792156988}, {\"truth_threshold\": -17.100000254809856, \"match_probability\": 7.118424873502875e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12473796.0, \"fp\": 17166.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9986257263451767, \"fp_rate\": 0.0013742736548233835, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.2756045068996075, \"recall\": 0.9989293361884368, \"specificity\": 0.9986257263451767, \"npv\": 0.9999994388239096, \"accuracy\": 0.9986258851770354, \"f1\": 0.4320158756408136, \"f2\": 0.6550783365764609, \"f0_5\": 0.32227661212324576, \"p4\": 0.6032421648075817, \"phi\": 0.5243377836001654}, {\"truth_threshold\": -17.00000025331974, \"match_probability\": 7.629334984424643e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12474933.0, \"fp\": 16029.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, 
\"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9987167521604822, \"fp_rate\": 0.001283247839517885, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.28949468085106383, \"recall\": 0.9989293361884368, \"specificity\": 0.9987167521604822, \"npv\": 0.9999994388750567, \"accuracy\": 0.9987168633726745, \"f1\": 0.4488968313973469, \"f2\": 0.6703686976515028, \"f0_5\": 0.3374217280786956, \"p4\": 0.6195161043881651, \"phi\": 0.5374129475038517}, {\"truth_threshold\": -16.900000251829624, \"match_probability\": 8.176914304005986e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12475017.0, \"fp\": 15945.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9987234770228266, \"fp_rate\": 0.0012765229771734155, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.2905766150560598, \"recall\": 0.9989293361884368, \"specificity\": 0.9987234770228266, \"npv\": 0.999999438878835, \"accuracy\": 0.9987235847169433, \"f1\": 0.4501964568828841, \"f2\": 0.6715266924405692, \"f0_5\": 0.338597291636424, \"p4\": 0.6207533013548003, \"phi\": 0.5384180695890283}, {\"truth_threshold\": -16.800000250339508, \"match_probability\": 8.76379462217525e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12476308.0, \"fp\": 14654.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9988268317524303, \"fp_rate\": 0.0011731682475697228, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.308284163323106, \"recall\": 0.9989293361884368, \"specificity\": 0.9988268317524303, \"npv\": 0.9999994389368977, \"accuracy\": 0.9988268853770754, \"f1\": 0.4711611297478628, \"f2\": 0.6898409278154509, \"f0_5\": 0.3577532373627818, \"p4\": 0.640409098934062, \"phi\": 0.5546096515838393}, {\"truth_threshold\": 
-16.700000248849392, \"match_probability\": 9.392796608724036e-06, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12477216.0, \"fp\": 13746.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9988995243120586, \"fp_rate\": 0.0011004756879414092, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.3220890664299453, \"recall\": 0.9989293361884368, \"specificity\": 0.9988995243120586, \"npv\": 0.9999994389777277, \"accuracy\": 0.9988995399079816, \"f1\": 0.4871154204736155, \"f2\": 0.7033319692433608, \"f0_5\": 0.3725783264495813, \"p4\": 0.6549962339302463, \"phi\": 0.5669119916982228}, {\"truth_threshold\": -16.600000247359276, \"match_probability\": 1.0066943367963594e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12477830.0, \"fp\": 13132.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9989486798534812, \"fp_rate\": 0.0010513201465187389, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.33214667141331433, \"recall\": 0.9989293361884368, \"specificity\": 0.9989486798534812, \"npv\": 0.9999994390053341, \"accuracy\": 0.9989486697339468, \"f1\": 0.49853059043547954, \"f2\": 0.7127578304048893, \"f0_5\": 0.38331963845521777, \"p4\": 0.6652427243109729, \"phi\": 0.5757093899708434}, {\"truth_threshold\": -16.50000024586916, \"match_probability\": 1.0789474965962542e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12478388.0, \"fp\": 12574.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.998993352153341, \"fp_rate\": 0.0010066478466590483, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.34184768385239467, \"recall\": 0.9989293361884368, \"specificity\": 
0.998993352153341, \"npv\": 0.9999994390304202, \"accuracy\": 0.9989933186637328, \"f1\": 0.5093787778341068, \"f2\": 0.7215458382128731, \"f0_5\": 0.3936329226837677, \"p4\": 0.6748367497401738, \"phi\": 0.5840693450859415}, {\"truth_threshold\": -16.400000244379044, \"match_probability\": 1.1563864000129272e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12479031.0, \"fp\": 11931.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9990448293734302, \"fp_rate\": 0.0009551706265698351, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.35375365615859605, \"recall\": 0.9989293361884368, \"specificity\": 0.9990448293734302, \"npv\": 0.999999439059325, \"accuracy\": 0.9990447689537908, \"f1\": 0.52248, \"f2\": 0.7319451293315999, \"f0_5\": 0.40622745254148734, \"p4\": 0.6862411977800164, \"phi\": 0.5941687050114911}, {\"truth_threshold\": -16.300000242888927, \"match_probability\": 1.239383228590334e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12479716.0, \"fp\": 11246.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9990996690246916, \"fp_rate\": 0.000900330975308387, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.36738482308601, \"recall\": 0.9989293361884368, \"specificity\": 0.9990996690246916, \"npv\": 0.9999994390901145, \"accuracy\": 0.9990995799159832, \"f1\": 0.5371992597162245, \"f2\": 0.7433586013794987, \"f0_5\": 0.42056255312572444, \"p4\": 0.6988223939421678, \"phi\": 0.6055246895962184}, {\"truth_threshold\": -16.20000024139881, \"match_probability\": 1.328336874067903e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12479912.0, \"fp\": 11050.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, 
\"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.999115360370162, \"fp_rate\": 0.000884639629837958, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.37148057562140946, \"recall\": 0.9989293361884368, \"specificity\": 0.999115360370162, \"npv\": 0.9999994390989236, \"accuracy\": 0.9991152630526106, \"f1\": 0.5415647414901116, \"f2\": 0.746690142455354, \"f0_5\": 0.42485233275220524, \"p4\": 0.7025075989870674, \"phi\": 0.6088954414093923}, {\"truth_threshold\": -16.100000239908695, \"match_probability\": 1.4236748550826774e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12480323.0, \"fp\": 10639.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.999148264160919, \"fp_rate\": 0.0008517358390810892, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.38037274315666864, \"recall\": 0.9989293361884368, \"specificity\": 0.999148264160919, \"npv\": 0.9999994391173951, \"accuracy\": 0.9991481496299259, \"f1\": 0.5509532647207693, \"f2\": 0.7537740639859656, \"f0_5\": 0.43413810524076685, \"p4\": 0.7103628638353155, \"phi\": 0.6161501074606295}, {\"truth_threshold\": -16.00000023841858, \"match_probability\": 1.5258553713831415e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12481202.0, \"fp\": 9760.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9992186350418807, \"fp_rate\": 0.0007813649581193186, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.4008962003560248, \"recall\": 0.9989293361884368, \"specificity\": 0.9992186350418807, \"npv\": 0.9999994391568958, \"accuracy\": 0.9992184836967394, \"f1\": 0.5721669805948574, \"f2\": 0.7693848219965601, \"f0_5\": 0.4554266268723327, \"p4\": 0.7277668654263086, \"phi\": 0.6325766113720159}, {\"truth_threshold\": -15.900000236928463, 
\"match_probability\": 1.6353695054159956e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12481421.0, \"fp\": 9541.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9992361677187074, \"fp_rate\": 0.0007638322812926658, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.4063588850174216, \"recall\": 0.9989293361884368, \"specificity\": 0.9992361677187074, \"npv\": 0.9999994391667364, \"accuracy\": 0.9992360072014402, \"f1\": 0.5777089783281734, \"f2\": 0.7733753315649867, \"f0_5\": 0.461059497924491, \"p4\": 0.7322365356252989, \"phi\": 0.636877430457496}, {\"truth_threshold\": -15.800000235438347, \"match_probability\": 1.7527435818536736e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12481884.0, \"fp\": 9078.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.999273234519487, \"fp_rate\": 0.0007267654805130302, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.41841245435325775, \"recall\": 0.9989293361884368, \"specificity\": 0.999273234519487, \"npv\": 0.9999994391875398, \"accuracy\": 0.9992730546109222, \"f1\": 0.5897864270555832, \"f2\": 0.781949665956275, \"f0_5\": 0.4734392669701627, \"p4\": 0.7418692219470949, \"phi\": 0.6462660657555789}, {\"truth_threshold\": -15.70000023394823, \"match_probability\": 1.8785416963874395e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12482361.0, \"fp\": 8601.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9993114221306574, \"fp_rate\": 0.0006885778693426495, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.4316019032513878, \"recall\": 0.9989293361884368, \"specificity\": 0.9993114221306574, \"npv\": 
0.9999994392089706, \"accuracy\": 0.9993112222444489, \"f1\": 0.6027688047992616, \"f2\": 0.7909844007363628, \"f0_5\": 0.48690841857274925, \"p4\": 0.7520618684823054, \"phi\": 0.6563855965735135}, {\"truth_threshold\": -15.600000232458115, \"match_probability\": 2.0133684259220603e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12482971.0, \"fp\": 7991.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9993602574405398, \"fp_rate\": 0.0006397425594601922, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.4497314419501446, \"recall\": 0.9989293361884368, \"specificity\": 0.9993602574405398, \"npv\": 0.9999994392363746, \"accuracy\": 0.9993600320064013, \"f1\": 0.6202279202279203, \"f2\": 0.8028470275851896, \"f0_5\": 0.5052919877448705, \"p4\": 0.7655118902413685, \"phi\": 0.6700460123171331}, {\"truth_threshold\": -15.500000230967999, \"match_probability\": 2.1578717331772276e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6531.0, \"tn\": 12483497.0, \"fp\": 7465.0, \"fn\": 7.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9989293361884368, \"tn_rate\": 0.9994023678880778, \"fp_rate\": 0.0005976321119222042, \"fn_rate\": 0.0010706638115631692, \"precision\": 0.46663332380680195, \"recall\": 0.9989293361884368, \"specificity\": 0.9994023678880778, \"npv\": 0.9999994392600027, \"accuracy\": 0.9994021204240848, \"f1\": 0.6361157105288789, \"f2\": 0.813365547474345, \"f0_5\": 0.5222961517545824, \"p4\": 0.7775021026907593, \"phi\": 0.6825351870273176}, {\"truth_threshold\": -15.400000229477882, \"match_probability\": 2.312746079632102e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6530.0, \"tn\": 12483885.0, \"fp\": 7077.0, \"fn\": 8.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 
0.9987763842153564, \"tn_rate\": 0.9994334303474784, \"fp_rate\": 0.0005665696525215592, \"fn_rate\": 0.001223615784643622, \"precision\": 0.47990005144410963, \"recall\": 0.9987763842153564, \"specificity\": 0.9994334303474784, \"npv\": 0.9999993591742575, \"accuracy\": 0.9994330866173234, \"f1\": 0.6482998262596178, \"f2\": 0.8211977162403481, \"f0_5\": 0.5355444017977233, \"p4\": 0.7865407346601536, \"phi\": 0.6921273738858867}, {\"truth_threshold\": -15.300000227987766, \"match_probability\": 2.478735761747151e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6530.0, \"tn\": 12484139.0, \"fp\": 6823.0, \"fn\": 8.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9987763842153564, \"tn_rate\": 0.9994537650502819, \"fp_rate\": 0.0005462349497180442, \"fn_rate\": 0.001223615784643622, \"precision\": 0.4890286826930278, \"recall\": 0.9987763842153564, \"specificity\": 0.9994537650502819, \"npv\": 0.9999993591872957, \"accuracy\": 0.9994534106821364, \"f1\": 0.6565783520185008, \"f2\": 0.8264776610555626, \"f0_5\": 0.5446205170975813, \"p4\": 0.792606232883693, \"phi\": 0.6986862924383719}, {\"truth_threshold\": -15.20000022649765, \"match_probability\": 2.6566384864664307e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6530.0, \"tn\": 12484554.0, \"fp\": 6408.0, \"fn\": 8.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9987763842153564, \"tn_rate\": 0.999486989072579, \"fp_rate\": 0.0005130109274209625, \"fn_rate\": 0.001223615784643622, \"precision\": 0.504714793631164, \"recall\": 0.9987763842153564, \"specificity\": 0.999486989072579, \"npv\": 0.999999359208597, \"accuracy\": 0.9994866173234647, \"f1\": 0.6705689053193674, \"f2\": 0.8352519826042466, \"f0_5\": 0.5601303825699091, \"p4\": 0.8027202538435725, \"phi\": 0.7098152243788396}, {\"truth_threshold\": -15.100000225007534, \"match_probability\": 
2.8473092031487608e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6530.0, \"tn\": 12484735.0, \"fp\": 6227.0, \"fn\": 8.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9987763842153564, \"tn_rate\": 0.9995014795497736, \"fp_rate\": 0.0004985204502263316, \"fn_rate\": 0.001223615784643622, \"precision\": 0.5118758328760681, \"recall\": 0.9987763842153564, \"specificity\": 0.9995014795497736, \"npv\": 0.999999359217887, \"accuracy\": 0.999501100220044, \"f1\": 0.6768592899714952, \"f2\": 0.8391374746202678, \"f0_5\": 0.5671750686168919, \"p4\": 0.8072127201685391, \"phi\": 0.7148382147576605}, {\"truth_threshold\": -15.000000223517418, \"match_probability\": 3.0516642103032495e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6530.0, \"tn\": 12485241.0, \"fp\": 5721.0, \"fn\": 8.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9987763842153564, \"tn_rate\": 0.9995419888396105, \"fp_rate\": 0.00045801116038940795, \"fn_rate\": 0.001223615784643622, \"precision\": 0.5330177128397682, \"recall\": 0.9987763842153564, \"specificity\": 0.9995419888396105, \"npv\": 0.9999993592438565, \"accuracy\": 0.9995415883176635, \"f1\": 0.6950875512267817, \"f2\": 0.8501939952607869, \"f0_5\": 0.5878434337978466, \"p4\": 0.8200427909967731, \"phi\": 0.7294660624134931}, {\"truth_threshold\": -14.900000222027302, \"match_probability\": 3.270685556819147e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6530.0, \"tn\": 12485467.0, \"fp\": 5495.0, \"fn\": 8.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9987763842153564, \"tn_rate\": 0.9995600819216326, \"fp_rate\": 0.00043991807836738276, \"fn_rate\": 0.001223615784643622, \"precision\": 0.5430353430353431, \"recall\": 0.9987763842153564, \"specificity\": 0.9995600819216326, \"npv\": 0.9999993592554548, \"accuracy\": 
0.9995596719343869, \"f1\": 0.7035500727253138, \"f2\": 0.8552269691175315, \"f0_5\": 0.5975694571543614, \"p4\": 0.8259059212943968, \"phi\": 0.7362956917399063}, {\"truth_threshold\": -14.800000220537186, \"match_probability\": 3.505425758788192e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6530.0, \"tn\": 12485690.0, \"fp\": 5272.0, \"fn\": 8.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9987763842153564, \"tn_rate\": 0.9995779348299995, \"fp_rate\": 0.0004220651700005172, \"fn_rate\": 0.001223615784643622, \"precision\": 0.553296051516692, \"recall\": 0.9987763842153564, \"specificity\": 0.9995779348299995, \"npv\": 0.9999993592668989, \"accuracy\": 0.9995775155031006, \"f1\": 0.7121046892039259, \"f2\": 0.8602518838594088, \"f0_5\": 0.6074870688051204, \"p4\": 0.8317739844468334, \"phi\": 0.7432259838459446}, {\"truth_threshold\": -14.70000021904707, \"match_probability\": 3.757012854526189e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6530.0, \"tn\": 12486013.0, \"fp\": 4949.0, \"fn\": 8.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9987763842153564, \"tn_rate\": 0.9996037935268717, \"fp_rate\": 0.0003962064731283307, \"fn_rate\": 0.001223615784643622, \"precision\": 0.5688648837006708, \"recall\": 0.9987763842153564, \"specificity\": 0.9996037935268717, \"npv\": 0.9999993592834739, \"accuracy\": 0.9996033606721344, \"f1\": 0.7248709552089693, \"f2\": 0.867635725864314, \"f0_5\": 0.6224501467952873, \"p4\": 0.8404228611560399, \"phi\": 0.7536197874403965}, {\"truth_threshold\": -14.600000217556953, \"match_probability\": 4.026655822016454e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6530.0, \"tn\": 12486340.0, \"fp\": 4622.0, \"fn\": 8.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9987763842153564, \"tn_rate\": 
0.9996299724552841, \"fp_rate\": 0.0003700275447159314, \"fn_rate\": 0.001223615784643622, \"precision\": 0.585545193687231, \"recall\": 0.9987763842153564, \"specificity\": 0.9996299724552841, \"npv\": 0.9999993593002534, \"accuracy\": 0.9996295259051811, \"f1\": 0.7382702091577162, \"f2\": 0.8752412609907785, \"f0_5\": 0.6383685918742423, \"p4\": 0.8493640125399118, \"phi\": 0.7645988515663446}, {\"truth_threshold\": -14.500000216066837, \"match_probability\": 4.315650384728788e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6530.0, \"tn\": 12486451.0, \"fp\": 4511.0, \"fn\": 8.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9987763842153564, \"tn_rate\": 0.999638858880525, \"fp_rate\": 0.00036114111947502524, \"fn_rate\": 0.001223615784643622, \"precision\": 0.5914319355130876, \"recall\": 0.9987763842153564, \"specificity\": 0.999638858880525, \"npv\": 0.999999359305949, \"accuracy\": 0.9996384076815363, \"f1\": 0.7429319073894989, \"f2\": 0.8778533595031323, \"f0_5\": 0.6439588181925763, \"p4\": 0.8524424894273256, \"phi\": 0.7684360871219286}, {\"truth_threshold\": -14.400000214576721, \"match_probability\": 4.625385233621647e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6530.0, \"tn\": 12486656.0, \"fp\": 4306.0, \"fn\": 8.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9987763842153564, \"tn_rate\": 0.9996552707469609, \"fp_rate\": 0.0003447292530391174, \"fn_rate\": 0.001223615784643622, \"precision\": 0.6026208933185677, \"recall\": 0.9987763842153564, \"specificity\": 0.9996552707469609, \"npv\": 0.9999993593164676, \"accuracy\": 0.9996548109621924, \"f1\": 0.7516979394497525, \"f2\": 0.8827187195847301, \"f0_5\": 0.6545447255523035, \"p4\": 0.8581870241852991, \"phi\": 0.7756772113142218}, {\"truth_threshold\": -14.300000213086605, \"match_probability\": 4.957348695121048e-05, \"total_clerical_labels\": 
12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6530.0, \"tn\": 12486872.0, \"fp\": 4090.0, \"fn\": 8.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9987763842153564, \"tn_rate\": 0.9996725632501324, \"fp_rate\": 0.0003274367498676243, \"fn_rate\": 0.001223615784643622, \"precision\": 0.6148775894538606, \"recall\": 0.9987763842153564, \"specificity\": 0.9996725632501324, \"npv\": 0.9999993593275502, \"accuracy\": 0.9996720944188838, \"f1\": 0.7611609744725493, \"f2\": 0.8879038398781681, \"f0_5\": 0.6660818474845975, \"p4\": 0.8643241681654743, \"phi\": 0.7835325406973158}, {\"truth_threshold\": -14.200000211596489, \"match_probability\": 5.313135876996633e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6530.0, \"tn\": 12486914.0, \"fp\": 4048.0, \"fn\": 8.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9987763842153564, \"tn_rate\": 0.9996759256813046, \"fp_rate\": 0.0003240743186953895, \"fn_rate\": 0.001223615784643622, \"precision\": 0.6173189638873133, \"recall\": 0.9987763842153564, \"specificity\": 0.9996759256813046, \"npv\": 0.9999993593297051, \"accuracy\": 0.9996754550910182, \"f1\": 0.7630287450338864, \"f2\": 0.8889191396678464, \"f0_5\": 0.6683725690890481, \"p4\": 0.8655277091361168, \"phi\": 0.7850878331483462}, {\"truth_threshold\": -14.100000210106373, \"match_probability\": 5.694456326333118e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6530.0, \"tn\": 12487103.0, \"fp\": 3859.0, \"fn\": 8.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9987763842153564, \"tn_rate\": 0.9996910566215796, \"fp_rate\": 0.00030894337842033305, \"fn_rate\": 0.001223615784643622, \"precision\": 0.6285494272788527, \"recall\": 0.9987763842153564, \"specificity\": 0.9996910566215796, \"npv\": 0.9999993593394021, \"accuracy\": 0.9996905781156231, \"f1\": 0.7715484137768063, 
\"f2\": 0.8935168714594565, \"f0_5\": 0.6788788622281365, \"p4\": 0.8709853832346881, \"phi\": 0.7922029401711976}, {\"truth_threshold\": -14.000000208616257, \"match_probability\": 6.103142236234761e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6530.0, \"tn\": 12487233.0, \"fp\": 3729.0, \"fn\": 8.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9987763842153564, \"tn_rate\": 0.9997014641466366, \"fp_rate\": 0.0002985358533634159, \"fn_rate\": 0.001223615784643622, \"precision\": 0.6365142801442636, \"recall\": 0.9987763842153564, \"specificity\": 0.9997014641466366, \"npv\": 0.9999993593460718, \"accuracy\": 0.9997009801960393, \"f1\": 0.777519795201524, \"f2\": 0.8967070390815962, \"f0_5\": 0.6862992390801699, \"p4\": 0.8747794655128212, \"phi\": 0.7972106178491172}, {\"truth_threshold\": -13.90000020712614, \"match_probability\": 6.541157240512605e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6530.0, \"tn\": 12487340.0, \"fp\": 3622.0, \"fn\": 8.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9987763842153564, \"tn_rate\": 0.9997100303403372, \"fp_rate\": 0.00028996965966272256, \"fn_rate\": 0.001223615784643622, \"precision\": 0.6432230102442869, \"recall\": 0.9987763842153564, \"specificity\": 0.9997100303403372, \"npv\": 0.9999993593515613, \"accuracy\": 0.9997095419083817, \"f1\": 0.7825044937088077, \"f2\": 0.8993499338915822, \"f0_5\": 0.6925295889364952, \"p4\": 0.8779271760779108, \"phi\": 0.8014042660104671}, {\"truth_threshold\": -13.800000205636024, \"match_probability\": 7.010605838401368e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6530.0, \"tn\": 12487482.0, \"fp\": 3480.0, \"fn\": 8.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9987763842153564, \"tn_rate\": 0.9997213985600149, \"fp_rate\": 0.00027860143998516686, 
\"fn_rate\": 0.001223615784643622, \"precision\": 0.6523476523476524, \"recall\": 0.9987763842153564, \"specificity\": 0.9997213985600149, \"npv\": 0.9999993593588463, \"accuracy\": 0.9997209041808361, \"f1\": 0.7892192409958907, \"f2\": 0.9028814777943698, \"f0_5\": 0.7009747090901284, \"p4\": 0.8821396586903103, \"phi\": 0.8070731347214316}, {\"truth_threshold\": -13.700000204145908, \"match_probability\": 7.51374349434771e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6530.0, \"tn\": 12487649.0, \"fp\": 3313.0, \"fn\": 8.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9987763842153564, \"tn_rate\": 0.9997347682268187, \"fp_rate\": 0.000265231773181281, \"fn_rate\": 0.001223615784643622, \"precision\": 0.6634156253174845, \"recall\": 0.9987763842153564, \"specificity\": 0.9997347682268187, \"npv\": 0.9999993593674138, \"accuracy\": 0.9997342668533706, \"f1\": 0.7972651242292901, \"f2\": 0.9070704264481178, \"f0_5\": 0.7111740361576998, \"p4\": 0.8871457958974402, \"phi\": 0.8138963453076354}, {\"truth_threshold\": -13.600000202655792, \"match_probability\": 8.052987461117984e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6530.0, \"tn\": 12487752.0, \"fp\": 3210.0, \"fn\": 8.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9987763842153564, \"tn_rate\": 0.9997430141889792, \"fp_rate\": 0.0002569858110208005, \"fn_rate\": 0.001223615784643622, \"precision\": 0.6704312114989733, \"recall\": 0.9987763842153564, \"specificity\": 0.9997430141889792, \"npv\": 0.9999993593726978, \"accuracy\": 0.9997425085017003, \"f1\": 0.8023098660769137, \"f2\": 0.9096734648389613, \"f0_5\": 0.7176139610532332, \"p4\": 0.8902618422490433, \"phi\": 0.8181918664339412}, {\"truth_threshold\": -13.500000201165676, \"match_probability\": 8.630928377906233e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 
6530.0, \"tn\": 12487794.0, \"fp\": 3168.0, \"fn\": 8.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9987763842153564, \"tn_rate\": 0.9997463766201514, \"fp_rate\": 0.0002536233798485657, \"fn_rate\": 0.001223615784643622, \"precision\": 0.673334708187255, \"recall\": 0.9987763842153564, \"specificity\": 0.9997463766201514, \"npv\": 0.9999993593748524, \"accuracy\": 0.9997458691738348, \"f1\": 0.8043853165804385, \"f2\": 0.9107391910739191, \"f0_5\": 0.7202735495257004, \"p4\": 0.8915387549303468, \"phi\": 0.8199630431346999}, {\"truth_threshold\": -13.40000019967556, \"match_probability\": 9.25034269879762e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6530.0, \"tn\": 12487961.0, \"fp\": 3001.0, \"fn\": 8.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9987763842153564, \"tn_rate\": 0.9997597462869553, \"fp_rate\": 0.00024025371304467984, \"fn_rate\": 0.001223615784643622, \"precision\": 0.6851327247927814, \"recall\": 0.9987763842153564, \"specificity\": 0.9997597462869553, \"npv\": 0.9999993593834193, \"accuracy\": 0.9997592318463693, \"f1\": 0.8127450370278175, \"f2\": 0.9150015413502228, \"f0_5\": 0.7310465272491156, \"p4\": 0.8966524491675966, \"phi\": 0.8271209949072068}, {\"truth_threshold\": -13.300000198185444, \"match_probability\": 9.914206010875549e-05, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6530.0, \"tn\": 12488085.0, \"fp\": 2877.0, \"fn\": 8.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9987763842153564, \"tn_rate\": 0.9997696734647019, \"fp_rate\": 0.00023032653529808192, \"fn_rate\": 0.001223615784643622, \"precision\": 0.6941639204847454, \"recall\": 0.9987763842153564, \"specificity\": 0.9997696734647019, \"npv\": 0.9999993593897804, \"accuracy\": 0.9997691538307661, \"f1\": 0.8190655377861399, \"f2\": 0.9181923001209258, \"f0_5\": 0.7392564416066658, 
\"p4\": 0.9004875558947281, \"phi\": 0.8325587105453685}, {\"truth_threshold\": -13.200000196695328, \"match_probability\": 0.00010625707305470121, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6530.0, \"tn\": 12488210.0, \"fp\": 2752.0, \"fn\": 8.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9987763842153564, \"tn_rate\": 0.9997796807003335, \"fp_rate\": 0.00022031929966643082, \"fn_rate\": 0.001223615784643622, \"precision\": 0.7035121741004094, \"recall\": 0.9987763842153564, \"specificity\": 0.9997796807003335, \"npv\": 0.9999993593961924, \"accuracy\": 0.9997791558311663, \"f1\": 0.8255372945638433, \"f2\": 0.9214313935767906, \"f0_5\": 0.747721339257088, \"p4\": 0.9043869391323841, \"phi\": 0.8381501706204977}, {\"truth_threshold\": -13.100000195205212, \"match_probability\": 0.00011388264270550263, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6530.0, \"tn\": 12488247.0, \"fp\": 2715.0, \"fn\": 8.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9987763842153564, \"tn_rate\": 0.9997826428420805, \"fp_rate\": 0.0002173571579194621, \"fn_rate\": 0.001223615784643622, \"precision\": 0.7063277447268794, \"recall\": 0.9987763842153564, \"specificity\": 0.9997826428420805, \"npv\": 0.9999993593980905, \"accuracy\": 0.9997821164232846, \"f1\": 0.8274725970981436, \"f2\": 0.922394553210724, \"f0_5\": 0.7502642584677605, \"p4\": 0.9055476424271861, \"phi\": 0.8398269499601253}, {\"truth_threshold\": -13.000000193715096, \"match_probability\": 0.00012205539677081966, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6530.0, \"tn\": 12488332.0, \"fp\": 2630.0, \"fn\": 8.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9987763842153564, \"tn_rate\": 0.99978944776231, \"fp_rate\": 0.00021055223768993934, \"fn_rate\": 0.001223615784643622, \"precision\": 
0.712882096069869, \"recall\": 0.9987763842153564, \"specificity\": 0.99978944776231, \"npv\": 0.9999993594024507, \"accuracy\": 0.9997889177835567, \"f1\": 0.8319531150465027, \"f2\": 0.924614861803353, \"f0_5\": 0.7561721246931308, \"p4\": 0.9082254403335304, \"phi\": 0.8437174074802337}, {\"truth_threshold\": -12.90000019222498, \"match_probability\": 0.00013081458937332365, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6530.0, \"tn\": 12488488.0, \"fp\": 2474.0, \"fn\": 8.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9987763842153564, \"tn_rate\": 0.9998019367923784, \"fp_rate\": 0.00019806320762163874, \"fn_rate\": 0.001223615784643622, \"precision\": 0.7252332296756997, \"recall\": 0.9987763842153564, \"specificity\": 0.9998019367923784, \"npv\": 0.9999993594104526, \"accuracy\": 0.999801400280056, \"f1\": 0.8403036932183761, \"f2\": 0.9287177153259757, \"f0_5\": 0.767260422051981, \"p4\": 0.9131814166998666, \"phi\": 0.8510003179492605}, {\"truth_threshold\": -12.800000190734863, \"match_probability\": 0.00014020228918616167, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6520.0, \"tn\": 12488754.0, \"fp\": 2208.0, \"fn\": 18.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9972468644845518, \"tn_rate\": 0.9998232321898025, \"fp_rate\": 0.0001767678101974852, \"fn_rate\": 0.0027531355154481493, \"precision\": 0.7470210815765352, \"recall\": 0.9972468644845518, \"specificity\": 0.9998232321898025, \"npv\": 0.9999985587053715, \"accuracy\": 0.9998218843768754, \"f1\": 0.8541857723044675, \"f2\": 0.9346330275229358, \"f0_5\": 0.7864897466827503, \"p4\": 0.921321603446535, \"phi\": 0.8630358885663575}, {\"truth_threshold\": -12.700000189244747, \"match_probability\": 0.00015026358101882152, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6520.0, \"tn\": 12488887.0, \"fp\": 2075.0, \"fn\": 
18.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9972468644845518, \"tn_rate\": 0.9998338798885146, \"fp_rate\": 0.0001661201114854084, \"fn_rate\": 0.0027531355154481493, \"precision\": 0.7585805700988947, \"recall\": 0.9972468644845518, \"specificity\": 0.9998338798885146, \"npv\": 0.9999985587207205, \"accuracy\": 0.9998325265053011, \"f1\": 0.8616929888323531, \"f2\": 0.9382104929922007, \"f0_5\": 0.7967153819834791, \"p4\": 0.9256731102575878, \"phi\": 0.869692268810833}, {\"truth_threshold\": -12.600000187754631, \"match_probability\": 0.0001610467818084837, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6520.0, \"tn\": 12489010.0, \"fp\": 1952.0, \"fn\": 18.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9972468644845518, \"tn_rate\": 0.9998437270083761, \"fp_rate\": 0.00015627299162386372, \"fn_rate\": 0.0027531355154481493, \"precision\": 0.7695939565627951, \"recall\": 0.9972468644845518, \"specificity\": 0.9998437270083761, \"npv\": 0.9999985587349152, \"accuracy\": 0.9998423684736948, \"f1\": 0.8687541638907396, \"f2\": 0.941543438077634, \"f0_5\": 0.806411715232771, \"p4\": 0.9297341821425537, \"phi\": 0.8759871324892415}, {\"truth_threshold\": -12.500000186264515, \"match_probability\": 0.00017260367204143044, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6520.0, \"tn\": 12489098.0, \"fp\": 1864.0, \"fn\": 18.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9972468644845518, \"tn_rate\": 0.9998507721022608, \"fp_rate\": 0.00014922789773918135, \"fn_rate\": 0.0027531355154481493, \"precision\": 0.7776717557251909, \"recall\": 0.9972468644845518, \"specificity\": 0.9998507721022608, \"npv\": 0.9999985587450705, \"accuracy\": 0.9998494098819763, \"f1\": 0.873877496314167, \"f2\": 0.9439425526986334, \"f0_5\": 0.8134950341867545, \"p4\": 0.9326615997575173, \"phi\": 
0.8805755121521498}, {\"truth_threshold\": -12.400000184774399, \"match_probability\": 0.00018498974370122882, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6520.0, \"tn\": 12489238.0, \"fp\": 1724.0, \"fn\": 18.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9972468644845518, \"tn_rate\": 0.9998619802061682, \"fp_rate\": 0.0001380197938317321, \"fn_rate\": 0.0027531355154481493, \"precision\": 0.7908782144590005, \"recall\": 0.9972468644845518, \"specificity\": 0.9998619802061682, \"npv\": 0.9999985587612265, \"accuracy\": 0.9998606121224245, \"f1\": 0.8821539710458666, \"f2\": 0.9477846261193162, \"f0_5\": 0.8250240421116566, \"p4\": 0.9373570396800788, \"phi\": 0.8880260275680804}, {\"truth_threshold\": -12.300000183284283, \"match_probability\": 0.00019826446591752426, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6520.0, \"tn\": 12489296.0, \"fp\": 1666.0, \"fn\": 18.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9972468644845518, \"tn_rate\": 0.9998666235635013, \"fp_rate\": 0.00013337643649864598, \"fn_rate\": 0.0027531355154481493, \"precision\": 0.7964817981920351, \"recall\": 0.9972468644845518, \"specificity\": 0.9998666235635013, \"npv\": 0.9999985587679195, \"accuracy\": 0.9998652530506101, \"f1\": 0.8856289051888074, \"f2\": 0.9493855204147009, \"f0_5\": 0.8298966447736877, \"p4\": 0.9393161727989389, \"phi\": 0.891168506450996}, {\"truth_threshold\": -12.200000181794167, \"match_probability\": 0.00021249156957169895, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6519.0, \"tn\": 12489339.0, \"fp\": 1623.0, \"fn\": 19.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9970939125114714, \"tn_rate\": 0.9998700660525587, \"fp_rate\": 0.000129933947441358, \"fn_rate\": 0.002906087488528602, \"precision\": 0.8006632277081798, \"recall\": 
0.9970939125114714, \"specificity\": 0.9998700660525587, \"npv\": 0.9999984787048302, \"accuracy\": 0.9998686137227446, \"f1\": 0.8881471389645776, \"f2\": 0.9504578060302094, \"f0_5\": 0.8335038101570091, \"p4\": 0.9407314283249447, \"phi\": 0.893437675671549}, {\"truth_threshold\": -12.10000018030405, \"match_probability\": 0.0002277393522037113, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6519.0, \"tn\": 12489402.0, \"fp\": 1560.0, \"fn\": 19.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9970939125114714, \"tn_rate\": 0.999875109699317, \"fp_rate\": 0.00012489030068300584, \"fn_rate\": 0.002906087488528602, \"precision\": 0.8069067953954697, \"recall\": 0.9970939125114714, \"specificity\": 0.999875109699317, \"npv\": 0.9999984787125039, \"accuracy\": 0.9998736547309461, \"f1\": 0.8919750974892249, \"f2\": 0.9522070637726038, \"f0_5\": 0.838909764760385, \"p4\": 0.9428755343310333, \"phi\": 0.8969166974385941}, {\"truth_threshold\": -12.000000178813934, \"match_probability\": 0.00024408100465850272, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6519.0, \"tn\": 12489442.0, \"fp\": 1520.0, \"fn\": 19.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9970939125114714, \"tn_rate\": 0.9998783120147191, \"fp_rate\": 0.00012168798528087749, \"fn_rate\": 0.002906087488528602, \"precision\": 0.8109217564373679, \"recall\": 0.9970939125114714, \"specificity\": 0.9998783120147191, \"npv\": 0.9999984787173762, \"accuracy\": 0.9998768553710742, \"f1\": 0.8944227207244289, \"f2\": 0.9533210493989647, \"f0_5\": 0.8423786633586603, \"p4\": 0.9442419515675881, \"phi\": 0.8991467904383169}, {\"truth_threshold\": -11.900000177323818, \"match_probability\": 0.0002615949610108224, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6519.0, \"tn\": 12489517.0, \"fp\": 1445.0, \"fn\": 19.0, \"P_rate\": 
0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9970939125114714, \"tn_rate\": 0.9998843163560981, \"fp_rate\": 0.00011568364390188682, \"fn_rate\": 0.002906087488528602, \"precision\": 0.8185585133098945, \"recall\": 0.9970939125114714, \"specificity\": 0.9998843163560981, \"npv\": 0.9999984787265115, \"accuracy\": 0.9998828565713143, \"f1\": 0.8990484071162598, \"f2\": 0.955416813225466, \"f0_5\": 0.8489607751211127, \"p4\": 0.9468146875536249, \"phi\": 0.9033734001771478}, {\"truth_threshold\": -11.800000175833702, \"match_probability\": 0.0002803652734145845, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6519.0, \"tn\": 12489549.0, \"fp\": 1413.0, \"fn\": 19.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9970939125114714, \"tn_rate\": 0.9998868782084198, \"fp_rate\": 0.00011312179158018414, \"fn_rate\": 0.002906087488528602, \"precision\": 0.8218608169440242, \"recall\": 0.9970939125114714, \"specificity\": 0.9998868782084198, \"npv\": 0.9999984787304093, \"accuracy\": 0.9998854170834167, \"f1\": 0.9010366275051831, \"f2\": 0.9563138129327544, \"f0_5\": 0.8518005540166205, \"p4\": 0.9479166601603857, \"phi\": 0.9051949682040822}, {\"truth_threshold\": -11.700000174343586, \"match_probability\": 0.0003004820136373637, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6519.0, \"tn\": 12489593.0, \"fp\": 1369.0, \"fn\": 19.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9970939125114714, \"tn_rate\": 0.9998904007553622, \"fp_rate\": 0.00010959924463784295, \"fn_rate\": 0.002906087488528602, \"precision\": 0.8264452332657201, \"recall\": 0.9970939125114714, \"specificity\": 0.9998904007553622, \"npv\": 0.9999984787357685, \"accuracy\": 0.9998889377875575, \"f1\": 0.903784832940524, \"f2\": 0.9575499412455935, \"f0_5\": 0.8557364137568916, \"p4\": 0.9494360675529046, \"phi\": 0.9077176965684303}, 
{\"truth_threshold\": -11.60000017285347, \"match_probability\": 0.0003220417031628006, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6518.0, \"tn\": 12489724.0, \"fp\": 1238.0, \"fn\": 20.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.996940960538391, \"tn_rate\": 0.9999008883383042, \"fp_rate\": 9.911166169587258e-05, \"fn_rate\": 0.0030590394616090547, \"precision\": 0.8403816400206292, \"recall\": 0.996940960538391, \"specificity\": 0.9999008883383042, \"npv\": 0.999998398686154, \"accuracy\": 0.9998993398679736, \"f1\": 0.9119910451937876, \"f2\": 0.961130116786599, \"f0_5\": 0.8676321814599861, \"p4\": 0.9539470809745161, \"phi\": 0.9152737249279793}, {\"truth_threshold\": -11.500000171363354, \"match_probability\": 0.00034514777387400505, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6518.0, \"tn\": 12489792.0, \"fp\": 1170.0, \"fn\": 20.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.996940960538391, \"tn_rate\": 0.9999063322744878, \"fp_rate\": 9.366772551225438e-05, \"fn_rate\": 0.0030590394616090547, \"precision\": 0.8478147762747138, \"recall\": 0.996940960538391, \"specificity\": 0.9999063322744878, \"npv\": 0.9999983986948723, \"accuracy\": 0.9999047809561913, \"f1\": 0.9163503444397582, \"f2\": 0.9630614657210402, \"f0_5\": 0.8739608474121748, \"p4\": 0.9563277125906504, \"phi\": 0.9193151082624081}, {\"truth_threshold\": -11.400000169873238, \"match_probability\": 0.0003699110614699968, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6518.0, \"tn\": 12489861.0, \"fp\": 1101.0, \"fn\": 20.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.996940960538391, \"tn_rate\": 0.9999118562685564, \"fp_rate\": 8.814373144358297e-05, \"fn_rate\": 0.0030590394616090547, \"precision\": 0.8554928468302927, \"recall\": 0.996940960538391, \"specificity\": 
0.9999118562685564, \"npv\": 0.9999983987037186, \"accuracy\": 0.9999103020604121, \"f1\": 0.9208165571801935, \"f2\": 0.9650291670368067, \"f0_5\": 0.8804776571027179, \"p4\": 0.9587555297180819, \"phi\": 0.923471092056629}, {\"truth_threshold\": -11.300000168383121, \"match_probability\": 0.00039645033391533577, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6518.0, \"tn\": 12489927.0, \"fp\": 1035.0, \"fn\": 20.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.996940960538391, \"tn_rate\": 0.9999171400889699, \"fp_rate\": 8.285991103007118e-05, \"fn_rate\": 0.0030590394616090547, \"precision\": 0.8629683569442606, \"recall\": 0.996940960538391, \"specificity\": 0.9999171400889699, \"npv\": 0.9999983987121803, \"accuracy\": 0.9999155831166233, \"f1\": 0.9251295152934497, \"f2\": 0.966918854769322, \"f0_5\": 0.8868027210884354, \"p4\": 0.9610893523018439, \"phi\": 0.9274995422900408}, {\"truth_threshold\": -11.200000166893005, \"match_probability\": 0.00042489285738089063, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6518.0, \"tn\": 12490004.0, \"fp\": 958.0, \"fn\": 20.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.996940960538391, \"tn_rate\": 0.999923304546119, \"fp_rate\": 7.66954538809741e-05, \"fn_rate\": 0.0030590394616090547, \"precision\": 0.871856607811664, \"recall\": 0.996940960538391, \"specificity\": 0.999923304546119, \"npv\": 0.9999983987220521, \"accuracy\": 0.9999217443488698, \"f1\": 0.9302126444983588, \"f2\": 0.9691328654692518, \"f0_5\": 0.8942977882662861, \"p4\": 0.9638265466033703, \"phi\": 0.9322666465923167}, {\"truth_threshold\": -11.10000016540289, \"match_probability\": 0.00045537500230174836, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6518.0, \"tn\": 12490056.0, \"fp\": 906.0, \"fn\": 20.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, 
\"tp_rate\": 0.996940960538391, \"tn_rate\": 0.9999274675561418, \"fp_rate\": 7.253244385820724e-05, \"fn_rate\": 0.0030590394616090547, \"precision\": 0.8779633620689655, \"recall\": 0.996940960538391, \"specificity\": 0.9999274675561418, \"npv\": 0.9999983987287187, \"accuracy\": 0.9999259051810362, \"f1\": 0.9336771236212577, \"f2\": 0.9706337860376459, \"f0_5\": 0.899431473201965, \"p4\": 0.9656838782496495, \"phi\": 0.9355278512227713}, {\"truth_threshold\": -11.000000163912773, \"match_probability\": 0.00048804289235713973, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6518.0, \"tn\": 12490079.0, \"fp\": 883.0, \"fn\": 20.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.996940960538391, \"tn_rate\": 0.999929308887498, \"fp_rate\": 7.069111250198343e-05, \"fn_rate\": 0.0030590394616090547, \"precision\": 0.8806917984056208, \"recall\": 0.996940960538391, \"specificity\": 0.999929308887498, \"npv\": 0.9999983987316674, \"accuracy\": 0.9999277455491098, \"f1\": 0.9352177344142334, \"f2\": 0.9712991386761244, \"f0_5\": 0.9017209894305794, \"p4\": 0.966507675566405, \"phi\": 0.9369812560030152}, {\"truth_threshold\": -10.900000162422657, \"match_probability\": 0.0005230530993675534, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6518.0, \"tn\": 12490132.0, \"fp\": 830.0, \"fn\": 20.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.996940960538391, \"tn_rate\": 0.9999335519554058, \"fp_rate\": 6.644804459416336e-05, \"fn_rate\": 0.0030590394616090547, \"precision\": 0.88704409363092, \"recall\": 0.996940960538391, \"specificity\": 0.9999335519554058, \"npv\": 0.9999983987384621, \"accuracy\": 0.9999319863972794, \"f1\": 0.9387872677516923, \"f2\": 0.9728358208955223, \"f0_5\": 0.9070414695240746, \"p4\": 0.9684113527127617, \"phi\": 0.9403563431710397}, {\"truth_threshold\": -10.800000160932541, \"match_probability\": 
0.0005605733873065377, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6518.0, \"tn\": 12490174.0, \"fp\": 788.0, \"fn\": 20.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.996940960538391, \"tn_rate\": 0.9999369143865781, \"fp_rate\": 6.308561342192859e-05, \"fn_rate\": 0.0030590394616090547, \"precision\": 0.8921434437448672, \"recall\": 0.996940960538391, \"specificity\": 0.9999369143865781, \"npv\": 0.9999983987438466, \"accuracy\": 0.9999353470694139, \"f1\": 0.9416353655013002, \"f2\": 0.974057026720067, \"f0_5\": 0.9113024998601867, \"p4\": 0.9699252613909449, \"phi\": 0.9430569781682311}, {\"truth_threshold\": -10.700000159442425, \"match_probability\": 0.0006007835088396779, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6518.0, \"tn\": 12490213.0, \"fp\": 749.0, \"fn\": 20.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.996940960538391, \"tn_rate\": 0.9999400366440951, \"fp_rate\": 5.996335590485345e-05, \"fn_rate\": 0.0030590394616090547, \"precision\": 0.8969313334250723, \"recall\": 0.996940960538391, \"specificity\": 0.9999400366440951, \"npv\": 0.9999983987488464, \"accuracy\": 0.9999384676935387, \"f1\": 0.9442955450923578, \"f2\": 0.9751937520572129, \"f0_5\": 0.9152951749705106, \"p4\": 0.9713352781801945, \"phi\": 0.9455856417121382}, {\"truth_threshold\": -10.600000157952309, \"match_probability\": 0.0006438760580315065, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6518.0, \"tn\": 12490256.0, \"fp\": 706.0, \"fn\": 20.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.996940960538391, \"tn_rate\": 0.9999434791331524, \"fp_rate\": 5.652086684756546e-05, \"fn_rate\": 0.0030590394616090547, \"precision\": 0.9022702104097453, \"recall\": 0.996940960538391, \"specificity\": 0.9999434791331524, \"npv\": 0.999998398754359, \"accuracy\": 
0.9999419083816763, \"f1\": 0.9472460398197936, \"f2\": 0.9764501438159157, \"f0_5\": 0.919738104645256, \"p4\": 0.9728946715566851, \"phi\": 0.9483973517886798}, {\"truth_threshold\": -10.500000156462193, \"match_probability\": 0.0006900573831033208, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6518.0, \"tn\": 12490283.0, \"fp\": 679.0, \"fn\": 20.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.996940960538391, \"tn_rate\": 0.9999456406960489, \"fp_rate\": 5.4359303951128825e-05, \"fn_rate\": 0.0030590394616090547, \"precision\": 0.905655134083646, \"recall\": 0.996940960538391, \"specificity\": 0.9999456406960489, \"npv\": 0.9999983987578204, \"accuracy\": 0.9999440688137627, \"f1\": 0.9491081179468511, \"f2\": 0.97724069687247, \"f0_5\": 0.9225499631999095, \"p4\": 0.9738763870978169, \"phi\": 0.9501757059882622}, {\"truth_threshold\": -10.400000154972076, \"match_probability\": 0.0007395485633816526, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6518.0, \"tn\": 12490317.0, \"fp\": 645.0, \"fn\": 20.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.996940960538391, \"tn_rate\": 0.9999483626641407, \"fp_rate\": 5.163733585931972e-05, \"fn_rate\": 0.0030590394616090547, \"precision\": 0.9099539299176322, \"recall\": 0.996940960538391, \"specificity\": 0.9999483626641407, \"npv\": 0.9999983987621791, \"accuracy\": 0.9999467893578716, \"f1\": 0.951463396832348, \"f2\": 0.9782380309170043, \"f0_5\": 0.9261153736857062, \"p4\": 0.9751154433623096, \"phi\": 0.952429399527432}, {\"truth_threshold\": -10.30000015348196, \"match_probability\": 0.0007925864548491303, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6518.0, \"tn\": 12490327.0, \"fp\": 635.0, \"fn\": 20.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.996940960538391, \"tn_rate\": 0.9999491632429912, 
\"fp_rate\": 5.083675700878763e-05, \"fn_rate\": 0.0030590394616090547, \"precision\": 0.9112260589962253, \"recall\": 0.996940960538391, \"specificity\": 0.9999491632429912, \"npv\": 0.9999983987634611, \"accuracy\": 0.9999475895179036, \"f1\": 0.9521583522021766, \"f2\": 0.9785317519891908, \"f0_5\": 0.9271692745376956, \"p4\": 0.975480471929477, \"phi\": 0.9530953060888908}, {\"truth_threshold\": -10.200000151991844, \"match_probability\": 0.0008494248089972806, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6518.0, \"tn\": 12490353.0, \"fp\": 609.0, \"fn\": 20.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.996940960538391, \"tn_rate\": 0.9999512447480026, \"fp_rate\": 4.87552519974042e-05, \"fn_rate\": 0.0030590394616090547, \"precision\": 0.9145503016697067, \"recall\": 0.996940960538391, \"specificity\": 0.9999512447480026, \"npv\": 0.9999983987667942, \"accuracy\": 0.9999496699339868, \"f1\": 0.9539699963410172, \"f2\": 0.9792962528922143, \"f0_5\": 0.9299206756833875, \"p4\": 0.9764308264575142, \"phi\": 0.9548332162320019}, {\"truth_threshold\": -10.100000150501728, \"match_probability\": 0.0009103354699850551, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6518.0, \"tn\": 12490391.0, \"fp\": 571.0, \"fn\": 20.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.996940960538391, \"tn_rate\": 0.9999542869476347, \"fp_rate\": 4.571305236538227e-05, \"fn_rate\": 0.0030590394616090547, \"precision\": 0.9194526731555932, \"recall\": 0.996940960538391, \"specificity\": 0.9999542869476347, \"npv\": 0.9999983987716657, \"accuracy\": 0.9999527105421084, \"f1\": 0.9566302194173333, \"f2\": 0.9804157516320207, \"f0_5\": 0.9339714564108442, \"p4\": 0.9778231418596417, \"phi\": 0.9573904135201147}, {\"truth_threshold\": -10.000000149011612, \"match_probability\": 0.0009756096554280922, \"total_clerical_labels\": 12497500.0, \"p\": 
6538.0, \"n\": 12490962.0, \"tp\": 6518.0, \"tn\": 12490437.0, \"fp\": 525.0, \"fn\": 20.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.996940960538391, \"tn_rate\": 0.9999579696103471, \"fp_rate\": 4.2030389652934656e-05, \"fn_rate\": 0.0030590394616090547, \"precision\": 0.925457901462445, \"recall\": 0.996940960538391, \"specificity\": 0.9999579696103471, \"npv\": 0.9999983987775627, \"accuracy\": 0.9999563912782556, \"f1\": 0.9598704071865106, \"f2\": 0.9817743636089773, \"f0_5\": 0.9389225007202535, \"p4\": 0.97951389803197, \"phi\": 0.9605136103193297}, {\"truth_threshold\": -9.900000147521496, \"match_probability\": 0.0010455593264824352, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6518.0, \"tn\": 12490503.0, \"fp\": 459.0, \"fn\": 20.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.996940960538391, \"tn_rate\": 0.9999632534307605, \"fp_rate\": 3.6746569239422875e-05, \"fn_rate\": 0.0030590394616090547, \"precision\": 0.9342124122115523, \"recall\": 0.996940960538391, \"specificity\": 0.9999632534307605, \"npv\": 0.9999983987860236, \"accuracy\": 0.9999616723344669, \"f1\": 0.9645578986311506, \"f2\": 0.9837302665338525, \"f0_5\": 0.9461185623875051, \"p4\": 0.981950003845494, \"phi\": 0.9650485450064271}, {\"truth_threshold\": -9.80000014603138, \"match_probability\": 0.0011205186532430977, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6518.0, \"tn\": 12490529.0, \"fp\": 433.0, \"fn\": 20.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.996940960538391, \"tn_rate\": 0.999965334935772, \"fp_rate\": 3.466506422803944e-05, \"fn_rate\": 0.0030590394616090547, \"precision\": 0.9377068047762912, \"recall\": 0.996940960538391, \"specificity\": 0.999965334935772, \"npv\": 0.9999983987893567, \"accuracy\": 0.9999637527505502, \"f1\": 0.9664170805841797, \"f2\": 0.9845029151436425, 
\"f0_5\": 0.9489837516743347, \"p4\": 0.9829130121692145, \"phi\": 0.9668527399420636}, {\"truth_threshold\": -9.700000144541264, \"match_probability\": 0.001200845581852835, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6518.0, \"tn\": 12490544.0, \"fp\": 418.0, \"fn\": 20.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.996940960538391, \"tn_rate\": 0.9999665358040477, \"fp_rate\": 3.346419595224131e-05, \"fn_rate\": 0.0030590394616090547, \"precision\": 0.9397347174163783, \"recall\": 0.996940960538391, \"specificity\": 0.9999665358040477, \"npv\": 0.9999983987912796, \"accuracy\": 0.9999649529905981, \"f1\": 0.9674929493839988, \"f2\": 0.9849492263056093, \"f0_5\": 0.9506446531707602, \"p4\": 0.9834694535895051, \"phi\": 0.9678982323099129}, {\"truth_threshold\": -9.600000143051147, \"match_probability\": 0.001286923510110021, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6518.0, \"tn\": 12490569.0, \"fp\": 393.0, \"fn\": 20.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.996940960538391, \"tn_rate\": 0.9999685372511741, \"fp_rate\": 3.146274882591109e-05, \"fn_rate\": 0.0030590394616090547, \"precision\": 0.9431341339892925, \"recall\": 0.996940960538391, \"specificity\": 0.9999685372511741, \"npv\": 0.9999983987944844, \"accuracy\": 0.9999669533906781, \"f1\": 0.9692913971298981, \"f2\": 0.9856939781629012, \"f0_5\": 0.9534257796501082, \"p4\": 0.9843982573200798, \"phi\": 0.9696482762354993}, {\"truth_threshold\": -9.500000141561031, \"match_probability\": 0.0013791630787767571, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6518.0, \"tn\": 12490587.0, \"fp\": 375.0, \"fn\": 20.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.996940960538391, \"tn_rate\": 0.9999699782931051, \"fp_rate\": 3.002170689495333e-05, \"fn_rate\": 0.0030590394616090547, 
\"precision\": 0.9455969824459597, \"recall\": 0.996940960538391, \"specificity\": 0.9999699782931051, \"npv\": 0.9999983987967919, \"accuracy\": 0.9999683936787358, \"f1\": 0.9705904251358797, \"f2\": 0.9862308972613103, \"f0_5\": 0.9554382878921137, \"p4\": 0.9850680830395252, \"phi\": 0.9709141976866122}, {\"truth_threshold\": -9.400000140070915, \"match_probability\": 0.001478004086219237, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6518.0, \"tn\": 12490596.0, \"fp\": 366.0, \"fn\": 20.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.996940960538391, \"tn_rate\": 0.9999706988140705, \"fp_rate\": 2.9301185929474446e-05, \"fn_rate\": 0.0030590394616090547, \"precision\": 0.9468332364904125, \"recall\": 0.996940960538391, \"specificity\": 0.9999706988140705, \"npv\": 0.9999983987979456, \"accuracy\": 0.9999691138227645, \"f1\": 0.9712412457159887, \"f2\": 0.9864995762198814, \"f0_5\": 0.9564477314081118, \"p4\": 0.9854033378482704, \"phi\": 0.9715490191764499}, {\"truth_threshold\": -9.200000137090683, \"match_probability\": 0.0016974078152024628, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6518.0, \"tn\": 12490614.0, \"fp\": 348.0, \"fn\": 20.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.996940960538391, \"tn_rate\": 0.9999721398560015, \"fp_rate\": 2.7860143998516688e-05, \"fn_rate\": 0.0030590394616090547, \"precision\": 0.9493154675211185, \"recall\": 0.996940960538391, \"specificity\": 0.9999721398560015, \"npv\": 0.9999983988002531, \"accuracy\": 0.9999705541108221, \"f1\": 0.9725455088033423, \"f2\": 0.9870373735538192, \"f0_5\": 0.9584730309981766, \"p4\": 0.9860745325283752, \"phi\": 0.9728224039987085}, {\"truth_threshold\": -9.100000135600567, \"match_probability\": 0.0018190150448253225, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6517.0, \"tn\": 12490619.0, \"fp\": 
343.0, \"fn\": 21.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9967880085653105, \"tn_rate\": 0.9999725401454268, \"fp_rate\": 2.7459854573250644e-05, \"fn_rate\": 0.0032119914346895075, \"precision\": 0.95, \"recall\": 0.9967880085653105, \"specificity\": 0.9999725401454268, \"npv\": 0.9999983187410734, \"accuracy\": 0.999970874174835, \"f1\": 0.9728317659352143, \"f2\": 0.9870653095843935, \"f0_5\": 0.9590028842192007, \"p4\": 0.9862217274200477, \"phi\": 0.9730985795193775}, {\"truth_threshold\": -9.00000013411045, \"match_probability\": 0.0019493175579394322, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6517.0, \"tn\": 12490633.0, \"fp\": 329.0, \"fn\": 21.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9967880085653105, \"tn_rate\": 0.9999736609558175, \"fp_rate\": 2.633904418250572e-05, \"fn_rate\": 0.0032119914346895075, \"precision\": 0.9519427402862985, \"recall\": 0.9967880085653105, \"specificity\": 0.9999736609558175, \"npv\": 0.9999983187429577, \"accuracy\": 0.9999719943988797, \"f1\": 0.9738493723849372, \"f2\": 0.9874840899448452, \"f0_5\": 0.9605860503508048, \"p4\": 0.9867446365264912, \"phi\": 0.9740936090970478}, {\"truth_threshold\": -8.900000132620335, \"match_probability\": 0.002088934569496736, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6517.0, \"tn\": 12490645.0, \"fp\": 317.0, \"fn\": 21.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9967880085653105, \"tn_rate\": 0.9999746216504382, \"fp_rate\": 2.537834956186721e-05, \"fn_rate\": 0.0032119914346895075, \"precision\": 0.9536142815335089, \"recall\": 0.9967880085653105, \"specificity\": 0.9999746216504382, \"npv\": 0.9999983187445729, \"accuracy\": 0.9999729545909182, \"f1\": 0.9747233024229733, \"f2\": 0.987843327472261, \"f0_5\": 0.9619472161539824, \"p4\": 0.9871932858753212, \"phi\": 
0.9749489236160325}, {\"truth_threshold\": -8.800000131130219, \"match_probability\": 0.0022385290160630528, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6517.0, \"tn\": 12490653.0, \"fp\": 309.0, \"fn\": 21.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9967880085653105, \"tn_rate\": 0.9999752621135185, \"fp_rate\": 2.473788648144154e-05, \"fn_rate\": 0.0032119914346895075, \"precision\": 0.9547319074128333, \"recall\": 0.9967880085653105, \"specificity\": 0.9999752621135185, \"npv\": 0.9999983187456497, \"accuracy\": 0.9999735947189438, \"f1\": 0.9753067943729422, \"f2\": 0.9880829644005095, \"f0_5\": 0.9628568051533597, \"p4\": 0.9874926121654626, \"phi\": 0.9755203856617595}, {\"truth_threshold\": -8.700000129640102, \"match_probability\": 0.002398810587356977, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6513.0, \"tn\": 12490670.0, \"fp\": 292.0, \"fn\": 25.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9961762006729887, \"tn_rate\": 0.9999766230975644, \"fp_rate\": 2.337690243553699e-05, \"fn_rate\": 0.0038237993270113183, \"precision\": 0.9570903747244673, \"recall\": 0.9961762006729887, \"specificity\": 0.9999766230975644, \"npv\": 0.9999979985100909, \"accuracy\": 0.9999746349269854, \"f1\": 0.9762422243873192, \"f2\": 0.9881057135054768, \"f0_5\": 0.9646602286865336, \"p4\": 0.9879721147965851, \"phi\": 0.976425258581306}, {\"truth_threshold\": -8.600000128149986, \"match_probability\": 0.0025705389597152823, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6513.0, \"tn\": 12490698.0, \"fp\": 264.0, \"fn\": 25.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9961762006729887, \"tn_rate\": 0.999978864718346, \"fp_rate\": 2.113528165404714e-05, \"fn_rate\": 0.0038237993270113183, \"precision\": 0.9610447100486941, \"recall\": 0.9961762006729887, 
\"specificity\": 0.999978864718346, \"npv\": 0.9999979985145776, \"accuracy\": 0.9999768753750751, \"f1\": 0.9782951558392791, \"f2\": 0.9889459139360443, \"f0_5\": 0.9678713665814659, \"p4\": 0.9890228526432743, \"phi\": 0.9784413943577362}, {\"truth_threshold\": -8.50000012665987, \"match_probability\": 0.0027545272436909716, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6513.0, \"tn\": 12490705.0, \"fp\": 257.0, \"fn\": 25.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9961762006729887, \"tn_rate\": 0.9999794251235413, \"fp_rate\": 2.057487645867468e-05, \"fn_rate\": 0.0038237993270113183, \"precision\": 0.9620384047267356, \"recall\": 0.9961762006729887, \"specificity\": 0.9999794251235413, \"npv\": 0.9999979985156993, \"accuracy\": 0.9999774354870974, \"f1\": 0.978809738503156, \"f2\": 0.9891561873519227, \"f0_5\": 0.968677494199536, \"p4\": 0.989285886413399, \"phi\": 0.9789473812779779}, {\"truth_threshold\": -8.400000125169754, \"match_probability\": 0.0029516456585356845, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6513.0, \"tn\": 12490710.0, \"fp\": 252.0, \"fn\": 25.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9961762006729887, \"tn_rate\": 0.9999798254129666, \"fp_rate\": 2.0174587033408636e-05, \"fn_rate\": 0.0038237993270113183, \"precision\": 0.962749445676275, \"recall\": 0.9961762006729887, \"specificity\": 0.9999798254129666, \"npv\": 0.9999979985165004, \"accuracy\": 0.9999778355671134, \"f1\": 0.9791776291062166, \"f2\": 0.9893064374031655, \"f0_5\": 0.9692541222691827, \"p4\": 0.9894738533527309, \"phi\": 0.9793092811423187}, {\"truth_threshold\": -8.300000123679638, \"match_probability\": 0.0031628254468557835, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6513.0, \"tn\": 12490726.0, \"fp\": 236.0, \"fn\": 25.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 
0.9994768553710742, \"tp_rate\": 0.9961762006729887, \"tn_rate\": 0.9999811063391274, \"fp_rate\": 1.8893660872557295e-05, \"fn_rate\": 0.0038237993270113183, \"precision\": 0.9650318565713439, \"recall\": 0.9961762006729887, \"specificity\": 0.9999811063391274, \"npv\": 0.9999979985190642, \"accuracy\": 0.9999791158231647, \"f1\": 0.9803567396703545, \"f2\": 0.9897875444515364, \"f0_5\": 0.9711039541957416, \"p4\": 0.9900758278491553, \"phi\": 0.980470061569018}, {\"truth_threshold\": -8.200000122189522, \"match_probability\": 0.0033890630432542824, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6513.0, \"tn\": 12490743.0, \"fp\": 219.0, \"fn\": 25.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9961762006729887, \"tn_rate\": 0.9999824673231733, \"fp_rate\": 1.753267682665274e-05, \"fn_rate\": 0.0038237993270113183, \"precision\": 0.9674688057040999, \"recall\": 0.9961762006729887, \"specificity\": 0.9999824673231733, \"npv\": 0.9999979985217883, \"accuracy\": 0.999980476095219, \"f1\": 0.9816126601356443, \"f2\": 0.9902992336698698, \"f0_5\": 0.9730771529313333, \"p4\": 0.9907162288247452, \"phi\": 0.981707921445154}, {\"truth_threshold\": -8.100000120699406, \"match_probability\": 0.003631424511270156, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6513.0, \"tn\": 12490751.0, \"fp\": 211.0, \"fn\": 25.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9961762006729887, \"tn_rate\": 0.9999831077862538, \"fp_rate\": 1.689221374622707e-05, \"fn_rate\": 0.0038237993270113183, \"precision\": 0.9686198691255206, \"recall\": 0.9961762006729887, \"specificity\": 0.9999831077862538, \"npv\": 0.9999979985230701, \"accuracy\": 0.9999811162232447, \"f1\": 0.9822047956567637, \"f2\": 0.990540211704587, \"f0_5\": 0.9740084943470718, \"p4\": 0.9910178807370712, \"phi\": 0.9822920671562435}, {\"truth_threshold\": -8.00000011920929, 
\"match_probability\": 0.0038910502633927486, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6513.0, \"tn\": 12490757.0, \"fp\": 205.0, \"fn\": 25.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9961762006729887, \"tn_rate\": 0.9999835881335641, \"fp_rate\": 1.641186643590782e-05, \"fn_rate\": 0.0038237993270113183, \"precision\": 0.9694849657636201, \"recall\": 0.9961762006729887, \"specificity\": 0.9999835881335641, \"npv\": 0.9999979985240316, \"accuracy\": 0.9999815963192639, \"f1\": 0.9826493663246831, \"f2\": 0.990721022208701, \"f0_5\": 0.9747081712062257, \"p4\": 0.9912442402473263, \"phi\": 0.982730860895463}, {\"truth_threshold\": -7.800000116229057, \"match_probability\": 0.004467058438231288, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6513.0, \"tn\": 12490767.0, \"fp\": 195.0, \"fn\": 25.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9961762006729887, \"tn_rate\": 0.9999843887124146, \"fp_rate\": 1.561128758537573e-05, \"fn_rate\": 0.0038237993270113183, \"precision\": 0.9709302325581395, \"recall\": 0.9961762006729887, \"specificity\": 0.9999843887124146, \"npv\": 0.9999979985256339, \"accuracy\": 0.9999823964792959, \"f1\": 0.9833912124414917, \"f2\": 0.9910225197808886, \"f0_5\": 0.9758765358106083, \"p4\": 0.9916217359762581, \"phi\": 0.9834634914054201}, {\"truth_threshold\": -7.700000114738941, \"match_probability\": 0.004786140180292905, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6513.0, \"tn\": 12490780.0, \"fp\": 182.0, \"fn\": 25.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9961762006729887, \"tn_rate\": 0.9999854294649203, \"fp_rate\": 1.4570535079684014e-05, \"fn_rate\": 0.0038237993270113183, \"precision\": 0.9728155339805825, \"recall\": 0.9961762006729887, \"specificity\": 0.9999854294649203, \"npv\": 
0.999997998527717, \"accuracy\": 0.9999834366873375, \"f1\": 0.9843572885966901, \"f2\": 0.991414741072244, \"f0_5\": 0.9773996038177561, \"p4\": 0.9921129104840011, \"phi\": 0.9844183634835777}, {\"truth_threshold\": -7.600000113248825, \"match_probability\": 0.0051278965144870335, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6513.0, \"tn\": 12490787.0, \"fp\": 175.0, \"fn\": 25.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9961762006729887, \"tn_rate\": 0.9999859898701157, \"fp_rate\": 1.4010129884311553e-05, \"fn_rate\": 0.0038237993270113183, \"precision\": 0.9738337320574163, \"recall\": 0.9961762006729887, \"specificity\": 0.9999859898701157, \"npv\": 0.9999979985288386, \"accuracy\": 0.9999839967993599, \"f1\": 0.9848782700740965, \"f2\": 0.991626065773447, \"f0_5\": 0.978221688194653, \"p4\": 0.9923775906612751, \"phi\": 0.984933677950544}, {\"truth_threshold\": -7.500000111758709, \"match_probability\": 0.005493921387833209, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6513.0, \"tn\": 12490789.0, \"fp\": 173.0, \"fn\": 25.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9961762006729887, \"tn_rate\": 0.9999861499858858, \"fp_rate\": 1.3850014114205135e-05, \"fn_rate\": 0.0038237993270113183, \"precision\": 0.9741250373915644, \"recall\": 0.9961762006729887, \"specificity\": 0.9999861499858858, \"npv\": 0.9999979985291592, \"accuracy\": 0.9999841568313663, \"f1\": 0.98502722323049, \"f2\": 0.9916864608076009, \"f0_5\": 0.9784568235082026, \"p4\": 0.9924532395103108, \"phi\": 0.9850810592503275}, {\"truth_threshold\": -7.400000110268593, \"match_probability\": 0.005885918232687788, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6513.0, \"tn\": 12490797.0, \"fp\": 165.0, \"fn\": 25.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 
0.9961762006729887, \"tn_rate\": 0.9999867904489662, \"fp_rate\": 1.3209551033779465e-05, \"fn_rate\": 0.0038237993270113183, \"precision\": 0.9752920035938903, \"recall\": 0.9961762006729887, \"specificity\": 0.9999867904489662, \"npv\": 0.999997998530441, \"accuracy\": 0.9999847969593919, \"f1\": 0.9856234866828087, \"f2\": 0.9919281145293939, \"f0_5\": 0.9793984962406015, \"p4\": 0.9927559502755972, \"phi\": 0.9856712462720578}, {\"truth_threshold\": -7.300000108778477, \"match_probability\": 0.006305707107734554, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6513.0, \"tn\": 12490806.0, \"fp\": 156.0, \"fn\": 25.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9961762006729887, \"tn_rate\": 0.9999875109699317, \"fp_rate\": 1.2489030068300584e-05, \"fn_rate\": 0.0038237993270113183, \"precision\": 0.9766081871345029, \"recall\": 0.9961762006729887, \"specificity\": 0.9999875109699317, \"npv\": 0.9999979985318831, \"accuracy\": 0.9999855171034207, \"f1\": 0.9862951465132127, \"f2\": 0.9922001157795314, \"f0_5\": 0.9804600469681459, \"p4\": 0.9930967206899199, \"phi\": 0.9863364754113385}, {\"truth_threshold\": -7.200000107288361, \"match_probability\": 0.006755232248084272, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6513.0, \"tn\": 12490807.0, \"fp\": 155.0, \"fn\": 25.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9961762006729887, \"tn_rate\": 0.9999875910278168, \"fp_rate\": 1.2408972183247375e-05, \"fn_rate\": 0.0038237993270113183, \"precision\": 0.9767546490701859, \"recall\": 0.9961762006729887, \"specificity\": 0.9999875910278168, \"npv\": 0.9999979985320433, \"accuracy\": 0.9999855971194239, \"f1\": 0.9863698318945934, \"f2\": 0.9922303473491774, \"f0_5\": 0.9805781391147245, \"p4\": 0.9931345985107902, \"phi\": 0.9864104728708214}, {\"truth_threshold\": -7.1000001057982445, \"match_probability\": 
0.007236570039195372, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6513.0, \"tn\": 12490812.0, \"fp\": 150.0, \"fn\": 25.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9961762006729887, \"tn_rate\": 0.999987991317242, \"fp_rate\": 1.200868275798133e-05, \"fn_rate\": 0.0038237993270113183, \"precision\": 0.9774876181900045, \"recall\": 0.9961762006729887, \"specificity\": 0.999987991317242, \"npv\": 0.9999979985328445, \"accuracy\": 0.9999859971994399, \"f1\": 0.9867434285281418, \"f2\": 0.9923815328355935, \"f0_5\": 0.9811690268153058, \"p4\": 0.9933240309644359, \"phi\": 0.9867807099679184}, {\"truth_threshold\": -7.000000104308128, \"match_probability\": 0.00775193742836891, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6513.0, \"tn\": 12490817.0, \"fp\": 145.0, \"fn\": 25.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9961762006729887, \"tn_rate\": 0.9999883916066673, \"fp_rate\": 1.1608393332715287e-05, \"fn_rate\": 0.0038237993270113183, \"precision\": 0.978221688194653, \"recall\": 0.9961762006729887, \"specificity\": 0.9999883916066673, \"npv\": 0.9999979985336457, \"accuracy\": 0.9999863972794559, \"f1\": 0.9871173082752349, \"f2\": 0.9925327644010973, \"f0_5\": 0.981760627072656, \"p4\": 0.9935135356972294, \"phi\": 0.9871513639709102}, {\"truth_threshold\": -6.900000102818012, \"match_probability\": 0.008303700786279804, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6513.0, \"tn\": 12490822.0, \"fp\": 140.0, \"fn\": 25.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9961762006729887, \"tn_rate\": 0.9999887918960926, \"fp_rate\": 1.1208103907449243e-05, \"fn_rate\": 0.0038237993270113183, \"precision\": 0.9789568615662108, \"recall\": 0.9961762006729887, \"specificity\": 0.9999887918960926, \"npv\": 0.9999979985344468, \"accuracy\": 
0.9999867973594719, \"f1\": 0.9874914714578121, \"f2\": 0.9926840420667581, \"f0_5\": 0.9823529411764705, \"p4\": 0.9937031127505465, \"phi\": 0.9875224356629259}, {\"truth_threshold\": -6.800000101327896, \"match_probability\": 0.00889438522932807, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6512.0, \"tn\": 12490828.0, \"fp\": 134.0, \"fn\": 26.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9960232486999082, \"tn_rate\": 0.9999892722434028, \"fp_rate\": 1.0727756597129988e-05, \"fn_rate\": 0.0039767513000917715, \"precision\": 0.9798374962383388, \"recall\": 0.9960232486999082, \"specificity\": 0.9999892722434028, \"npv\": 0.9999979184769913, \"accuracy\": 0.9999871974394879, \"f1\": 0.9878640776699029, \"f2\": 0.9927434599670711, \"f0_5\": 0.9830324255781656, \"p4\": 0.9938918305366806, \"phi\": 0.9878908550645412}, {\"truth_threshold\": -6.70000009983778, \"match_probability\": 0.009526684411466419, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6512.0, \"tn\": 12490839.0, \"fp\": 123.0, \"fn\": 26.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9960232486999082, \"tn_rate\": 0.9999901528801385, \"fp_rate\": 9.847119861544691e-06, \"fn_rate\": 0.0039767513000917715, \"precision\": 0.9814619442351168, \"recall\": 0.9960232486999082, \"specificity\": 0.9999901528801385, \"npv\": 0.9999979184788244, \"accuracy\": 0.9999880776155231, \"f1\": 0.9886889850451681, \"f2\": 0.9930765242321652, \"f0_5\": 0.9843400447427293, \"p4\": 0.994309377488922, \"phi\": 0.9887098544952397}, {\"truth_threshold\": -6.600000098347664, \"match_probability\": 0.010203470791514735, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6511.0, \"tn\": 12490841.0, \"fp\": 121.0, \"fn\": 27.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9958702967268278, \"tn_rate\": 
0.9999903129959086, \"fp_rate\": 9.687004091438274e-06, \"fn_rate\": 0.004129703273172224, \"precision\": 0.9817551266586249, \"recall\": 0.9958702967268278, \"specificity\": 0.9999903129959086, \"npv\": 0.9999978384208367, \"accuracy\": 0.9999881576315263, \"f1\": 0.9887623386484434, \"f2\": 0.9930148853099072, \"f0_5\": 0.9845460593963588, \"p4\": 0.9943464908896981, \"phi\": 0.9887816272570688}, {\"truth_threshold\": -6.500000096857548, \"match_probability\": 0.010927806378730125, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6511.0, \"tn\": 12490851.0, \"fp\": 111.0, \"fn\": 27.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9958702967268278, \"tn_rate\": 0.9999911135747591, \"fp_rate\": 8.886425240906184e-06, \"fn_rate\": 0.004129703273172224, \"precision\": 0.9832376925400181, \"recall\": 0.9958702967268278, \"specificity\": 0.9999911135747591, \"npv\": 0.9999978384225673, \"accuracy\": 0.9999889577915583, \"f1\": 0.9895136778115502, \"f2\": 0.9933178739244523, \"f0_5\": 0.985738509053473, \"p4\": 0.9947264702947277, \"phi\": 0.9895283333604096}, {\"truth_threshold\": -6.400000095367432, \"match_probability\": 0.011702953955477532, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6510.0, \"tn\": 12490852.0, \"fp\": 110.0, \"fn\": 28.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9957173447537473, \"tn_rate\": 0.9999911936326441, \"fp_rate\": 8.806367355852975e-06, \"fn_rate\": 0.004282655246252677, \"precision\": 0.9833836858006042, \"recall\": 0.9957173447537473, \"specificity\": 0.9999911936326441, \"npv\": 0.9999977583645028, \"accuracy\": 0.9999889577915583, \"f1\": 0.9895120839033288, \"f2\": 0.9932259245697547, \"f0_5\": 0.9858259131382882, \"p4\": 0.9947256649213094, \"phi\": 0.9895257957455872}, {\"truth_threshold\": -6.3000000938773155, \"match_probability\": 0.012532388771145032, \"total_clerical_labels\": 
12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6510.0, \"tn\": 12490853.0, \"fp\": 109.0, \"fn\": 28.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9957173447537473, \"tn_rate\": 0.9999912736905292, \"fp_rate\": 8.726309470799766e-06, \"fn_rate\": 0.004282655246252677, \"precision\": 0.9835322556277384, \"recall\": 0.9957173447537473, \"specificity\": 0.9999912736905292, \"npv\": 0.9999977583646822, \"accuracy\": 0.9999890378075615, \"f1\": 0.9895872919358516, \"f2\": 0.9932562326447164, \"f0_5\": 0.9859453565154177, \"p4\": 0.9947636846099088, \"phi\": 0.9896005817656298}, {\"truth_threshold\": -6.200000092387199, \"match_probability\": 0.013419810695865477, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6510.0, \"tn\": 12490860.0, \"fp\": 102.0, \"fn\": 28.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9957173447537473, \"tn_rate\": 0.9999918340957246, \"fp_rate\": 8.165904275427305e-06, \"fn_rate\": 0.004282655246252677, \"precision\": 0.984573502722323, \"recall\": 0.9957173447537473, \"specificity\": 0.9999918340957246, \"npv\": 0.9999977583659384, \"accuracy\": 0.9999895979195839, \"f1\": 0.9901140684410646, \"f2\": 0.9934684409717983, \"f0_5\": 0.9867822712665979, \"p4\": 0.9950299038288203, \"phi\": 0.9901245587788345}, {\"truth_threshold\": -6.100000090897083, \"match_probability\": 0.014369156816028038, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6506.0, \"tn\": 12490860.0, \"fp\": 102.0, \"fn\": 32.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9951055368614256, \"tn_rate\": 0.9999918340957246, \"fp_rate\": 8.165904275427305e-06, \"fn_rate\": 0.004894463138574488, \"precision\": 0.9845641646489104, \"recall\": 0.9951055368614256, \"specificity\": 0.9999918340957246, \"npv\": 0.9999974381333214, \"accuracy\": 0.9999892778555711, \"f1\": 0.9898067853339418, \"f2\": 
0.992979242979243, \"f0_5\": 0.9866545344252351, \"p4\": 0.9948746296495403, \"phi\": 0.9898154689001162}, {\"truth_threshold\": -6.000000089406967, \"match_probability\": 0.015384614445865122, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6506.0, \"tn\": 12490866.0, \"fp\": 96.0, \"fn\": 32.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9951055368614256, \"tn_rate\": 0.9999923144430349, \"fp_rate\": 7.685556965108052e-06, \"fn_rate\": 0.004894463138574488, \"precision\": 0.985458951832778, \"recall\": 0.9951055368614256, \"specificity\": 0.9999923144430349, \"npv\": 0.9999974381345521, \"accuracy\": 0.9999897579515903, \"f1\": 0.9902587519025875, \"f2\": 0.9931611406240459, \"f0_5\": 0.9873732774843683, \"p4\": 0.9951030000124863, \"phi\": 0.9902653867578834}, {\"truth_threshold\": -5.900000087916851, \"match_probability\": 0.016470634520449206, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6504.0, \"tn\": 12490885.0, \"fp\": 77.0, \"fn\": 34.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9947996329152646, \"tn_rate\": 0.9999938355428509, \"fp_rate\": 6.164457149097083e-06, \"fn_rate\": 0.005200367084735393, \"precision\": 0.9882996505090412, \"recall\": 0.9947996329152646, \"specificity\": 0.9999938355428509, \"npv\": 0.9999972780225378, \"accuracy\": 0.9999911182236447, \"f1\": 0.9915389892522296, \"f2\": 0.9934928054257172, \"f0_5\": 0.9895928427971518, \"p4\": 0.9957493186364069, \"phi\": 0.991539877866963}, {\"truth_threshold\": -5.800000086426735, \"match_probability\": 0.017631945325087592, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6504.0, \"tn\": 12490901.0, \"fp\": 61.0, \"fn\": 34.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9947996329152646, \"tn_rate\": 0.9999951164690117, \"fp_rate\": 4.883530988245741e-06, \"fn_rate\": 
0.005200367084735393, \"precision\": 0.9907083015993907, \"recall\": 0.9947996329152646, \"specificity\": 0.9999951164690117, \"npv\": 0.9999972780260244, \"accuracy\": 0.9999923984796959, \"f1\": 0.9927497519651988, \"f2\": 0.9939786655255678, \"f0_5\": 0.9915238734069151, \"p4\": 0.9963597990754883, \"phi\": 0.9927480590749445}, {\"truth_threshold\": -5.700000084936619, \"match_probability\": 0.01887356650421064, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6504.0, \"tn\": 12490903.0, \"fp\": 59.0, \"fn\": 34.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9947996329152646, \"tn_rate\": 0.9999952765847818, \"fp_rate\": 4.723415218139324e-06, \"fn_rate\": 0.005200367084735393, \"precision\": 0.9910102087460003, \"recall\": 0.9947996329152646, \"specificity\": 0.9999952765847818, \"npv\": 0.9999972780264603, \"accuracy\": 0.9999925585117023, \"f1\": 0.9929013052438745, \"f2\": 0.9940394314534617, \"f0_5\": 0.9917657822506862, \"p4\": 0.9964361617670954, \"phi\": 0.992899392243584}, {\"truth_threshold\": -5.600000083446503, \"match_probability\": 0.02020082327925431, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6501.0, \"tn\": 12490918.0, \"fp\": 44.0, \"fn\": 37.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9943407769960233, \"tn_rate\": 0.9999964774530576, \"fp_rate\": 3.5225469423411903e-06, \"fn_rate\": 0.005659223003976751, \"precision\": 0.9932773109243698, \"recall\": 0.9943407769960233, \"specificity\": 0.9999964774530576, \"npv\": 0.999997037856593, \"accuracy\": 0.9999935187037408, \"f1\": 0.9938087594588397, \"f2\": 0.9941279016423525, \"f0_5\": 0.9934898221162662, \"p4\": 0.9968931559848915, \"phi\": 0.993805659513751}, {\"truth_threshold\": -5.500000081956387, \"match_probability\": 0.02161936078957948, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6498.0, \"tn\": 
12490920.0, \"fp\": 42.0, \"fn\": 40.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9938819210767819, \"tn_rate\": 0.9999966375688277, \"fp_rate\": 3.3624311722347727e-06, \"fn_rate\": 0.0061180789232181095, \"precision\": 0.9935779816513761, \"recall\": 0.9938819210767819, \"specificity\": 0.9999966375688277, \"npv\": 0.9999967976840851, \"accuracy\": 0.9999934386877376, \"f1\": 0.9937299281235663, \"f2\": 0.9938211183164077, \"f0_5\": 0.9936387546638938, \"p4\": 0.9968534738190312, \"phi\": 0.9937266573824806}, {\"truth_threshold\": -5.300000078976154, \"match_probability\": 0.024754544222716376, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6498.0, \"tn\": 12490930.0, \"fp\": 32.0, \"fn\": 40.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9938819210767819, \"tn_rate\": 0.9999974381476783, \"fp_rate\": 2.561852321702684e-06, \"fn_rate\": 0.0061180789232181095, \"precision\": 0.9950995405819295, \"recall\": 0.9938819210767819, \"specificity\": 0.9999974381476783, \"npv\": 0.9999967976866488, \"accuracy\": 0.9999942388477695, \"f1\": 0.9944903581267218, \"f2\": 0.9941252065357077, \"f0_5\": 0.9948557780635678, \"p4\": 0.9972361359677916, \"phi\": 0.9944876625906333}, {\"truth_threshold\": -5.200000077486038, \"match_probability\": 0.02648420859582165, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6498.0, \"tn\": 12490934.0, \"fp\": 28.0, \"fn\": 40.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9938819210767819, \"tn_rate\": 0.9999977583792186, \"fp_rate\": 2.241620781489848e-06, \"fn_rate\": 0.0061180789232181095, \"precision\": 0.9957094698130555, \"recall\": 0.9938819210767819, \"specificity\": 0.9999977583792186, \"npv\": 0.9999967976876744, \"accuracy\": 0.9999945589117823, \"f1\": 0.9947948560930803, \"f2\": 0.9942468939347573, \"f0_5\": 0.9953434225844005, \"p4\": 
0.9973892830996511, \"phi\": 0.9947925542423184}, {\"truth_threshold\": -5.100000075995922, \"match_probability\": 0.02833121820332325, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6497.0, \"tn\": 12490936.0, \"fp\": 26.0, \"fn\": 41.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9937289691037015, \"tn_rate\": 0.9999979184949886, \"fp_rate\": 2.0815050113834306e-06, \"fn_rate\": 0.006271030896298563, \"precision\": 0.996014103939905, \"recall\": 0.9937289691037015, \"specificity\": 0.9999979184949886, \"npv\": 0.9999967176306546, \"accuracy\": 0.9999946389277855, \"f1\": 0.9948702243319807, \"f2\": 0.9941851568477429, \"f0_5\": 0.9955562365920931, \"p4\": 0.9974271825166287, \"phi\": 0.9948681991771203}, {\"truth_threshold\": -5.000000074505806, \"match_probability\": 0.030303028785498974, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6496.0, \"tn\": 12490940.0, \"fp\": 22.0, \"fn\": 42.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9935760171306209, \"tn_rate\": 0.9999982387265288, \"fp_rate\": 1.7612734711705952e-06, \"fn_rate\": 0.006423982869379015, \"precision\": 0.996624731512734, \"recall\": 0.9935760171306209, \"specificity\": 0.9999982387265288, \"npv\": 0.9999966375742115, \"accuracy\": 0.9999948789757952, \"f1\": 0.9950980392156863, \"f2\": 0.9941842669115396, \"f0_5\": 0.9960134927936216, \"p4\": 0.9975417229029651, \"phi\": 0.9950966461414167}, {\"truth_threshold\": -4.800000071525574, \"match_probability\": 0.03465289308554322, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6495.0, \"tn\": 12490941.0, \"fp\": 21.0, \"fn\": 43.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9934230651575405, \"tn_rate\": 0.9999983187844139, \"fp_rate\": 1.6812155861173864e-06, \"fn_rate\": 0.006576934842459468, \"precision\": 0.9967771639042358, 
\"recall\": 0.9934230651575405, \"specificity\": 0.9999983187844139, \"npv\": 0.9999965575170059, \"accuracy\": 0.9999948789757952, \"f1\": 0.9950972881875287, \"f2\": 0.9940920778743725, \"f0_5\": 0.9961045334642047, \"p4\": 0.9975413455422657, \"phi\": 0.9950961409890313}, {\"truth_threshold\": -4.700000070035458, \"match_probability\": 0.037047907242669466, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6493.0, \"tn\": 12490942.0, \"fp\": 20.0, \"fn\": 45.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9931171612113796, \"tn_rate\": 0.9999983988422989, \"fp_rate\": 1.6011577010641774e-06, \"fn_rate\": 0.0068828387886203735, \"precision\": 0.9969292184861047, \"recall\": 0.9931171612113796, \"specificity\": 0.9999983988422989, \"npv\": 0.999996397402383, \"accuracy\": 0.9999947989597919, \"f1\": 0.9950195387326641, \"f2\": 0.9938772386346242, \"f0_5\": 0.9961644676281067, \"p4\": 0.9975022581829593, \"phi\": 0.9950187643230421}, {\"truth_threshold\": -4.6000000685453415, \"match_probability\": 0.039601660807737325, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6492.0, \"tn\": 12490943.0, \"fp\": 19.0, \"fn\": 46.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9929642092382992, \"tn_rate\": 0.999998478900184, \"fp_rate\": 1.5210998160109686e-06, \"fn_rate\": 0.007035790761700826, \"precision\": 0.9970818614652127, \"recall\": 0.9929642092382992, \"specificity\": 0.999998478900184, \"npv\": 0.9999963173452479, \"accuracy\": 0.9999947989597919, \"f1\": 0.995018775385087, \"f2\": 0.9937850166855463, \"f0_5\": 0.9962556012522251, \"p4\": 0.9975018746020875, \"phi\": 0.9950183057189743}, {\"truth_threshold\": -4.500000067055225, \"match_probability\": 0.04232371044088178, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6492.0, \"tn\": 12490945.0, \"fp\": 17.0, \"fn\": 46.0, \"P_rate\": 
0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9929642092382992, \"tn_rate\": 0.9999986390159541, \"fp_rate\": 1.3609840459045507e-06, \"fn_rate\": 0.007035790761700826, \"precision\": 0.9973882316792134, \"recall\": 0.9929642092382992, \"specificity\": 0.9999986390159541, \"npv\": 0.9999963173458375, \"accuracy\": 0.9999949589917984, \"f1\": 0.995171303747988, \"f2\": 0.993845871222559, \"f0_5\": 0.9965002762939767, \"p4\": 0.9975785538737297, \"phi\": 0.9951712428545644}, {\"truth_threshold\": -4.400000065565109, \"match_probability\": 0.04522405175894309, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6492.0, \"tn\": 12490946.0, \"fp\": 16.0, \"fn\": 46.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9929642092382992, \"tn_rate\": 0.9999987190738392, \"fp_rate\": 1.280926160851342e-06, \"fn_rate\": 0.007035790761700826, \"precision\": 0.9975414874001229, \"recall\": 0.9929642092382992, \"specificity\": 0.9999987190738392, \"npv\": 0.9999963173461324, \"accuracy\": 0.9999950390078015, \"f1\": 0.9952475854668098, \"f2\": 0.9938763012859767, \"f0_5\": 0.9966226588885477, \"p4\": 0.9976168979305429, \"phi\": 0.9952477378536982}, {\"truth_threshold\": -4.300000064074993, \"match_probability\": 0.048313119674570026, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6491.0, \"tn\": 12490946.0, \"fp\": 16.0, \"fn\": 47.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9928112572652187, \"tn_rate\": 0.9999987190738392, \"fp_rate\": 1.280926160851342e-06, \"fn_rate\": 0.007188742734781279, \"precision\": 0.9975411095743046, \"recall\": 0.9928112572652187, \"specificity\": 0.9999987190738392, \"npv\": 0.9999962372887408, \"accuracy\": 0.9999949589917984, \"f1\": 0.995170563434266, \"f2\": 0.993753636057442, \"f0_5\": 0.9965915371860222, \"p4\": 0.9975781819238979, \"phi\": 0.995170854545302}, {\"truth_threshold\": 
-4.200000062584877, \"match_probability\": 0.05160178526561565, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6490.0, \"tn\": 12490950.0, \"fp\": 12.0, \"fn\": 48.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9926583052921383, \"tn_rate\": 0.9999990393053794, \"fp_rate\": 9.606946206385065e-07, \"fn_rate\": 0.007341694707861731, \"precision\": 0.9981544140264534, \"recall\": 0.9926583052921383, \"specificity\": 0.9999990393053794, \"npv\": 0.9999961572325926, \"accuracy\": 0.9999951990398079, \"f1\": 0.995398773006135, \"f2\": 0.9937526796104612, \"f0_5\": 0.9970503287654396, \"p4\": 0.9976928861396193, \"phi\": 0.9954001685626239}, {\"truth_threshold\": -4.100000061094761, \"match_probability\": 0.0551013486283602, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6488.0, \"tn\": 12490952.0, \"fp\": 10.0, \"fn\": 50.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9923524013459774, \"tn_rate\": 0.9999991994211495, \"fp_rate\": 8.005788505320887e-07, \"fn_rate\": 0.007647598654022637, \"precision\": 0.9984610649430594, \"recall\": 0.9923524013459774, \"specificity\": 0.9999991994211495, \"npv\": 0.9999959971185658, \"accuracy\": 0.9999951990398079, \"f1\": 0.9953973611537281, \"f2\": 0.9935681470137825, \"f0_5\": 0.9972333230863818, \"p4\": 0.9976921769554347, \"phi\": 0.9953996503084056}, {\"truth_threshold\": -3.7000000551342964, \"match_probability\": 0.07144878715678568, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6487.0, \"tn\": 12490952.0, \"fp\": 10.0, \"fn\": 51.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.992199449372897, \"tn_rate\": 0.9999991994211495, \"fp_rate\": 8.005788505320887e-07, \"fn_rate\": 0.00780055062710309, \"precision\": 0.9984608280744959, \"recall\": 0.992199449372897, \"specificity\": 0.9999991994211495, \"npv\": 
0.999995917061264, \"accuracy\": 0.9999951190238048, \"f1\": 0.9953202915228232, \"f2\": 0.9934454347759503, \"f0_5\": 0.9972022382094324, \"p4\": 0.9976534428408187, \"phi\": 0.9953227785010259}, {\"truth_threshold\": -3.6000000536441803, \"match_probability\": 0.0761862214703254, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6486.0, \"tn\": 12490953.0, \"fp\": 9.0, \"fn\": 52.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9920464973998164, \"tn_rate\": 0.9999992794790346, \"fp_rate\": 7.205209654788799e-07, \"fn_rate\": 0.007953502600183543, \"precision\": 0.9986143187066975, \"recall\": 0.9920464973998164, \"specificity\": 0.9999992794790346, \"npv\": 0.9999958370043083, \"accuracy\": 0.9999951190238048, \"f1\": 0.9953195733906238, \"f2\": 0.9933531411768309, \"f0_5\": 0.9972938065071653, \"p4\": 0.9976530820893246, \"phi\": 0.9953225546130436}, {\"truth_threshold\": -3.400000050663948, \"match_probability\": 0.08653465658300358, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6486.0, \"tn\": 12490954.0, \"fp\": 8.0, \"fn\": 52.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9920464973998164, \"tn_rate\": 0.9999993595369195, \"fp_rate\": 6.40463080425671e-07, \"fn_rate\": 0.007953502600183543, \"precision\": 0.9987680936248845, \"recall\": 0.9920464973998164, \"specificity\": 0.9999993595369195, \"npv\": 0.9999958370046416, \"accuracy\": 0.9999951990398079, \"f1\": 0.9953959484346224, \"f2\": 0.9933835691968388, \"f0_5\": 0.9974164975087655, \"p4\": 0.9976914673348959, \"phi\": 0.9953992261594695}, {\"truth_threshold\": -3.300000049173832, \"match_probability\": 0.09217307161544283, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6481.0, \"tn\": 12490954.0, \"fp\": 8.0, \"fn\": 57.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9912817375344142, 
\"tn_rate\": 0.9999993595369195, \"fp_rate\": 6.40463080425671e-07, \"fn_rate\": 0.008718262465585805, \"precision\": 0.9987671443982123, \"recall\": 0.9912817375344142, \"specificity\": 0.9999993595369195, \"npv\": 0.9999954367184529, \"accuracy\": 0.9999947989597919, \"f1\": 0.9950103630920396, \"f2\": 0.9927698293557182, \"f0_5\": 0.9972610328060565, \"p4\": 0.9974976474195548, \"phi\": 0.9950148074893785}, {\"truth_threshold\": -3.200000047683716, \"match_probability\": 0.09813940308831819, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6480.0, \"tn\": 12490954.0, \"fp\": 8.0, \"fn\": 58.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9911287855613338, \"tn_rate\": 0.9999993595369195, \"fp_rate\": 6.40463080425671e-07, \"fn_rate\": 0.008871214438666258, \"precision\": 0.998766954377312, \"recall\": 0.9911287855613338, \"specificity\": 0.9999993595369195, \"npv\": 0.9999953566612537, \"accuracy\": 0.9999947189437888, \"f1\": 0.9949332105020727, \"f2\": 0.9926470588235294, \"f0_5\": 0.997229916897507, \"p4\": 0.9974588566084989, \"phi\": 0.9949379059688591}, {\"truth_threshold\": -3.0000000447034836, \"match_probability\": 0.11111110805075623, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6480.0, \"tn\": 12490955.0, \"fp\": 7.0, \"fn\": 58.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9911287855613338, \"tn_rate\": 0.9999994395948046, \"fp_rate\": 5.60405195372462e-07, \"fn_rate\": 0.008871214438666258, \"precision\": 0.9989209187605981, \"recall\": 0.9911287855613338, \"specificity\": 0.9999994395948046, \"npv\": 0.9999953566616254, \"accuracy\": 0.9999947989597919, \"f1\": 0.9950095969289827, \"f2\": 0.9926774717362664, \"f0_5\": 0.9973527057809518, \"p4\": 0.9974972624203667, \"phi\": 0.9950146306154541}, {\"truth_threshold\": -2.8000000417232513, \"match_probability\": 0.1255586621587546, \"total_clerical_labels\": 
12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6479.0, \"tn\": 12490958.0, \"fp\": 4.0, \"fn\": 59.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9909758335882533, \"tn_rate\": 0.9999996797684598, \"fp_rate\": 3.202315402128355e-07, \"fn_rate\": 0.009024166411746712, \"precision\": 0.9993830016967453, \"recall\": 0.9909758335882533, \"specificity\": 0.9999996797684598, \"npv\": 0.9999952766055799, \"accuracy\": 0.9999949589917984, \"f1\": 0.9951616619307273, \"f2\": 0.992645932281293, \"f0_5\": 0.9976901755466585, \"p4\": 0.9975737095919017, \"phi\": 0.9951680272085688}, {\"truth_threshold\": -2.7000000402331352, \"match_probability\": 0.13336855415354743, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6478.0, \"tn\": 12490958.0, \"fp\": 4.0, \"fn\": 60.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9908228816151728, \"tn_rate\": 0.9999996797684598, \"fp_rate\": 3.202315402128355e-07, \"fn_rate\": 0.009177118384827165, \"precision\": 0.9993829065103363, \"recall\": 0.9908228816151728, \"specificity\": 0.9999996797684598, \"npv\": 0.9999951965484318, \"accuracy\": 0.9999948789757952, \"f1\": 0.9950844854070661, \"f2\": 0.9925231353802783, \"f0_5\": 0.9976590895090248, \"p4\": 0.9975349126289774, \"phi\": 0.9950911374660136}, {\"truth_threshold\": -2.500000037252903, \"match_probability\": 0.15022110152606716, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6474.0, \"tn\": 12490958.0, \"fp\": 4.0, \"fn\": 64.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.990211073722851, \"tn_rate\": 0.9999996797684598, \"fp_rate\": 3.202315402128355e-07, \"fn_rate\": 0.009788926277148976, \"precision\": 0.9993825254708243, \"recall\": 0.990211073722851, \"specificity\": 0.9999996797684598, \"npv\": 0.9999948763199681, \"accuracy\": 0.9999945589117823, \"f1\": 0.9947756607252612, \"f2\": 
0.9920318725099602, \"f0_5\": 0.9975346687211094, \"p4\": 0.9973796351986651, \"phi\": 0.994783519188697}, {\"truth_threshold\": -2.3000000342726707, \"match_probability\": 0.16878839957195682, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6473.0, \"tn\": 12490958.0, \"fp\": 4.0, \"fn\": 65.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9900581217497706, \"tn_rate\": 0.9999996797684598, \"fp_rate\": 3.202315402128355e-07, \"fn_rate\": 0.009941878250229429, \"precision\": 0.9993824301374092, \"recall\": 0.9900581217497706, \"specificity\": 0.9999996797684598, \"npv\": 0.9999947962628841, \"accuracy\": 0.9999944788957792, \"f1\": 0.9946984248943527, \"f2\": 0.9919090379723559, \"f0_5\": 0.9975035443506133, \"p4\": 0.9973407934309051, \"phi\": 0.9947065997857164}, {\"truth_threshold\": -2.2000000327825546, \"match_probability\": 0.1787376058900962, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6469.0, \"tn\": 12490958.0, \"fp\": 4.0, \"fn\": 69.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9894463138574487, \"tn_rate\": 0.9999996797684598, \"fp_rate\": 3.202315402128355e-07, \"fn_rate\": 0.01055368614255124, \"precision\": 0.999382048509192, \"recall\": 0.9894463138574487, \"specificity\": 0.9999996797684598, \"npv\": 0.9999944760346767, \"accuracy\": 0.9999941588317663, \"f1\": 0.9943893628468219, \"f2\": 0.9914176245210728, \"f0_5\": 0.9973789700894233, \"p4\": 0.9971853366258606, \"phi\": 0.9943988627978791}, {\"truth_threshold\": -1.9000000283122063, \"match_probability\": 0.2113212378007128, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6468.0, \"tn\": 12490958.0, \"fp\": 4.0, \"fn\": 70.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9892933618843683, \"tn_rate\": 0.9999996797684598, \"fp_rate\": 3.202315402128355e-07, \"fn_rate\": 
0.010706638115631691, \"precision\": 0.9993819530284301, \"recall\": 0.9892933618843683, \"specificity\": 0.9999996797684598, \"npv\": 0.9999943959776569, \"accuracy\": 0.9999940788157632, \"f1\": 0.9943120676402767, \"f2\": 0.9912947523295733, \"f0_5\": 0.9973478073151175, \"p4\": 0.9971464499755344, \"phi\": 0.9943219137000502}, {\"truth_threshold\": -1.700000025331974, \"match_probability\": 0.2353489599091234, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6467.0, \"tn\": 12490958.0, \"fp\": 4.0, \"fn\": 71.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9891404099112878, \"tn_rate\": 0.9999996797684598, \"fp_rate\": 3.202315402128355e-07, \"fn_rate\": 0.010859590088712144, \"precision\": 0.9993818575181579, \"recall\": 0.9891404099112878, \"specificity\": 0.9999996797684598, \"npv\": 0.9999943159206499, \"accuracy\": 0.9999939987997599, \"f1\": 0.9942347605503882, \"f2\": 0.9911718726052172, \"f0_5\": 0.9973166368515206, \"p4\": 0.9971075543393512, \"phi\": 0.9942449586591152}, {\"truth_threshold\": -1.600000023841858, \"match_probability\": 0.24805074388621665, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6466.0, \"tn\": 12490958.0, \"fp\": 4.0, \"fn\": 72.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9889874579382074, \"tn_rate\": 0.9999996797684598, \"fp_rate\": 3.202315402128355e-07, \"fn_rate\": 0.011012542061792597, \"precision\": 0.9993817619783617, \"recall\": 0.9889874579382074, \"specificity\": 0.9999996797684598, \"npv\": 0.9999942358636558, \"accuracy\": 0.9999939187837568, \"f1\": 0.9941574415744158, \"f2\": 0.9910489853473117, \"f0_5\": 0.9972854586957863, \"p4\": 0.9970686497141934, \"phi\": 0.9941679976736937}, {\"truth_threshold\": -1.5000000223517418, \"match_probability\": 0.2612038719739489, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6465.0, \"tn\": 
12490958.0, \"fp\": 4.0, \"fn\": 73.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9888345059651269, \"tn_rate\": 0.9999996797684598, \"fp_rate\": 3.202315402128355e-07, \"fn_rate\": 0.01116549403487305, \"precision\": 0.9993816664090277, \"recall\": 0.9888345059651269, \"specificity\": 0.9999996797684598, \"npv\": 0.9999941558066744, \"accuracy\": 0.9999938387677535, \"f1\": 0.9940801107096179, \"f2\": 0.9909260905551639, \"f0_5\": 0.997254272845067, \"p4\": 0.9970297360969416, \"phi\": 0.9940910307424051}, {\"truth_threshold\": -1.4000000208616257, \"match_probability\": 0.2747995717943022, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6462.0, \"tn\": 12490958.0, \"fp\": 4.0, \"fn\": 76.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9883756500458856, \"tn_rate\": 0.9999996797684598, \"fp_rate\": 3.202315402128355e-07, \"fn_rate\": 0.011624349954114408, \"precision\": 0.9993813795236622, \"recall\": 0.9883756500458856, \"specificity\": 0.9999996797684598, \"npv\": 0.9999939156358073, \"accuracy\": 0.9999935987197439, \"f1\": 0.9938480467548446, \"f2\": 0.9905573609663376, \"f0_5\": 0.9971606690945003, \"p4\": 0.9969129412614077, \"phi\": 0.9938600942595236}, {\"truth_threshold\": -1.2000000178813934, \"match_probability\": 0.3032695424040186, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6461.0, \"tn\": 12490958.0, \"fp\": 4.0, \"fn\": 77.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9882226980728052, \"tn_rate\": 0.9999996797684598, \"fp_rate\": 3.202315402128355e-07, \"fn_rate\": 0.011777301927194861, \"precision\": 0.9993812838360402, \"recall\": 0.9882226980728052, \"specificity\": 0.9999996797684598, \"npv\": 0.9999938355788772, \"accuracy\": 0.9999935187037408, \"f1\": 0.9937706683073136, \"f2\": 0.9904344360302909, \"f0_5\": 0.9971294524353355, \"p4\": 0.9968739916445581, 
\"phi\": 0.9937831035309505}, {\"truth_threshold\": -1.0000000149011612, \"match_probability\": 0.33333333103806717, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6459.0, \"tn\": 12490958.0, \"fp\": 4.0, \"fn\": 79.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9879167941266442, \"tn_rate\": 0.9999996797684598, \"fp_rate\": 3.202315402128355e-07, \"fn_rate\": 0.012083205873355766, \"precision\": 0.9993810923719635, \"recall\": 0.9879167941266442, \"specificity\": 0.9999996797684598, \"npv\": 0.9999936754650555, \"accuracy\": 0.9999933586717343, \"f1\": 0.993615875701869, \"f2\": 0.9901885635443815, \"f0_5\": 0.9970669959864156, \"p4\": 0.9967960653845943, \"phi\": 0.9936291042140856}, {\"truth_threshold\": -0.9000000134110451, \"match_probability\": 0.34891031813411577, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6458.0, \"tn\": 12490958.0, \"fp\": 4.0, \"fn\": 80.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9877638421535638, \"tn_rate\": 0.9999996797684598, \"fp_rate\": 3.202315402128355e-07, \"fn_rate\": 0.012236157846436219, \"precision\": 0.9993809965954813, \"recall\": 0.9877638421535638, \"specificity\": 0.9999996797684598, \"npv\": 0.9999935954081638, \"accuracy\": 0.9999932786557312, \"f1\": 0.9935384615384616, \"f2\": 0.9900656159931318, \"f0_5\": 0.9970357561909468, \"p4\": 0.9967570887352227, \"phi\": 0.9935520956230256}, {\"truth_threshold\": -0.800000011920929, \"match_probability\": 0.36481689239780585, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6455.0, \"tn\": 12490958.0, \"fp\": 4.0, \"fn\": 83.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9873049862343224, \"tn_rate\": 0.9999996797684598, \"fp_rate\": 3.202315402128355e-07, \"fn_rate\": 0.012695013765677577, \"precision\": 0.9993807090880942, \"recall\": 0.9873049862343224, 
\"specificity\": 0.9999996797684598, \"npv\": 0.9999933552375658, \"accuracy\": 0.9999930386077216, \"f1\": 0.9933061475725168, \"f2\": 0.9896967280978811, \"f0_5\": 0.9969419904861926, \"p4\": 0.9966401046719744, \"phi\": 0.9933210341027159}, {\"truth_threshold\": -0.6000000089406967, \"match_probability\": 0.3975010577814427, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6453.0, \"tn\": 12490958.0, \"fp\": 4.0, \"fn\": 85.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9869990822881615, \"tn_rate\": 0.9999996797684598, \"fp_rate\": 3.202315402128355e-07, \"fn_rate\": 0.013000917711838483, \"precision\": 0.9993805172680812, \"recall\": 0.9869990822881615, \"specificity\": 0.9999996797684598, \"npv\": 0.999993195123898, \"accuracy\": 0.9999928785757152, \"f1\": 0.9931512120046172, \"f2\": 0.9894507651261921, \"f0_5\": 0.996879441389112, \"p4\": 0.9965620701691893, \"phi\": 0.9931669632860396}, {\"truth_threshold\": -0.5000000074505806, \"match_probability\": 0.41421356112001384, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6453.0, \"tn\": 12490960.0, \"fp\": 2.0, \"fn\": 85.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9869990822881615, \"tn_rate\": 0.9999998398842299, \"fp_rate\": 1.6011577010641774e-07, \"fn_rate\": 0.013000917711838483, \"precision\": 0.9996901626646011, \"recall\": 0.9869990822881615, \"specificity\": 0.9999998398842299, \"npv\": 0.9999931951249875, \"accuracy\": 0.9999930386077216, \"f1\": 0.9933040868159778, \"f2\": 0.9895114545956389, \"f0_5\": 0.9971259039495642, \"p4\": 0.9966390673645279, \"phi\": 0.9933208930914527}, {\"truth_threshold\": -0.30000000447034836, \"match_probability\": 0.4482004805735527, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6452.0, \"tn\": 12490960.0, \"fp\": 2.0, \"fn\": 86.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 
0.9994768553710742, \"tp_rate\": 0.9868461303150811, \"tn_rate\": 0.9999998398842299, \"fp_rate\": 1.6011577010641774e-07, \"fn_rate\": 0.013153869684918936, \"precision\": 0.9996901146575767, \"recall\": 0.9868461303150811, \"specificity\": 0.9999998398842299, \"npv\": 0.9999931150681857, \"accuracy\": 0.9999929585917183, \"f1\": 0.9932266009852216, \"f2\": 0.9893884561123719, \"f0_5\": 0.997094640539037, \"p4\": 0.9966000424618431, \"phi\": 0.993243860655932}, {\"truth_threshold\": -0.20000000298023224, \"match_probability\": 0.4653980381052749, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6449.0, \"tn\": 12490960.0, \"fp\": 2.0, \"fn\": 89.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9863872743958397, \"tn_rate\": 0.9999998398842299, \"fp_rate\": 1.6011577010641774e-07, \"fn_rate\": 0.013612725604160294, \"precision\": 0.999689970547202, \"recall\": 0.9863872743958397, \"specificity\": 0.9999998398842299, \"npv\": 0.9999928748978568, \"accuracy\": 0.9999927185437087, \"f1\": 0.9929940719069982, \"f2\": 0.9890194153912216, \"f0_5\": 0.9970008039082308, \"p4\": 0.9964829135297378, \"phi\": 0.9930127275689071}, {\"truth_threshold\": -0.0, \"match_probability\": 0.5, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6448.0, \"tn\": 12490960.0, \"fp\": 2.0, \"fn\": 90.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9862343224227592, \"tn_rate\": 0.9999998398842299, \"fp_rate\": 1.6011577010641774e-07, \"fn_rate\": 0.013765677577240747, \"precision\": 0.9996899224806202, \"recall\": 0.9862343224227592, \"specificity\": 0.9999998398842299, \"npv\": 0.9999927948411063, \"accuracy\": 0.9999926385277056, \"f1\": 0.9929165383430859, \"f2\": 0.9888963867247408, \"f0_5\": 0.9969695095553219, \"p4\": 0.9964438524672108, \"phi\": 0.9929356712751172}, {\"truth_threshold\": 0.6000000089406967, \"match_probability\": 0.6024989422185573, 
\"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6447.0, \"tn\": 12490960.0, \"fp\": 2.0, \"fn\": 91.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9860813704496788, \"tn_rate\": 0.9999998398842299, \"fp_rate\": 1.6011577010641774e-07, \"fn_rate\": 0.013918629550321198, \"precision\": 0.9996898743991316, \"recall\": 0.9860813704496788, \"specificity\": 0.9999998398842299, \"npv\": 0.9999927147843685, \"accuracy\": 0.9999925585117023, \"f1\": 0.9928389928389928, \"f2\": 0.9887733505107206, \"f0_5\": 0.99693820745964, \"p4\": 0.9964047823568633, \"phi\": 0.992858609013288}, {\"truth_threshold\": 0.800000011920929, \"match_probability\": 0.6351831076021942, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6446.0, \"tn\": 12490960.0, \"fp\": 2.0, \"fn\": 92.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9859284184765984, \"tn_rate\": 0.9999998398842299, \"fp_rate\": 1.6011577010641774e-07, \"fn_rate\": 0.014071581523401652, \"precision\": 0.9996898263027295, \"recall\": 0.9859284184765984, \"specificity\": 0.9999998398842299, \"npv\": 0.9999926347276434, \"accuracy\": 0.9999924784956992, \"f1\": 0.9927614353919606, \"f2\": 0.9886503067484662, \"f0_5\": 0.9969068976183112, \"p4\": 0.9963657031955483, \"phi\": 0.9927815407820304}, {\"truth_threshold\": 1.0000000149011612, \"match_probability\": 0.6666666689619328, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6437.0, \"tn\": 12490960.0, \"fp\": 2.0, \"fn\": 101.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9845518507188743, \"tn_rate\": 0.9999998398842299, \"fp_rate\": 1.6011577010641774e-07, \"fn_rate\": 0.015448149281125726, \"precision\": 0.9996893927628514, \"recall\": 0.9845518507188743, \"specificity\": 0.9999998398842299, \"npv\": 0.9999919142176954, \"accuracy\": 0.9999917583516703, \"f1\": 
0.9920628804808508, \"f2\": 0.9875425731030039, \"f0_5\": 0.9966247600173407, \"p4\": 0.996013582930222, \"phi\": 0.9920876578468347}, {\"truth_threshold\": 1.3000000193715096, \"match_probability\": 0.7111737233641148, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6437.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 101.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9845518507188743, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.015448149281125726, \"precision\": 1.0, \"recall\": 0.9845518507188743, \"specificity\": 1.0, \"npv\": 0.99999191421899, \"accuracy\": 0.9999919183836767, \"f1\": 0.9922157996146436, \"f2\": 0.9876031789867746, \"f0_5\": 0.9968717090999195, \"p4\": 0.9960906864823031, \"phi\": 0.9922418504821375}, {\"truth_threshold\": 1.4000000208616257, \"match_probability\": 0.7252004282056979, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6435.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 103.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9842459467727134, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.01575405322728663, \"precision\": 1.0, \"recall\": 0.9842459467727134, \"specificity\": 1.0, \"npv\": 0.9999917541058349, \"accuracy\": 0.9999917583516703, \"f1\": 0.9920604332074309, \"f2\": 0.9873569214717526, \"f0_5\": 0.9968089720552699, \"p4\": 0.9960123495271161, \"phi\": 0.9920876124540634}, {\"truth_threshold\": 1.5000000223517418, \"match_probability\": 0.7387961280260511, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6434.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 104.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.984092994799633, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.015907005200367086, \"precision\": 1.0, \"recall\": 0.984092994799633, \"specificity\": 1.0, \"npv\": 0.9999916740492765, \"accuracy\": 
0.9999916783356672, \"f1\": 0.9919827320382362, \"f2\": 0.9872337813785061, \"f0_5\": 0.9967775918696165, \"p4\": 0.9959731674188425, \"phi\": 0.992010484465689}, {\"truth_threshold\": 1.700000025331974, \"match_probability\": 0.7646510400908766, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6433.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 105.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9839400428265525, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.016059957173447537, \"precision\": 1.0, \"recall\": 0.9839400428265525, \"specificity\": 1.0, \"npv\": 0.999991593992731, \"accuracy\": 0.9999915983196639, \"f1\": 0.9919050188882893, \"f2\": 0.9871106337271751, \"f0_5\": 0.9967462039045553, \"p4\": 0.995933976219228, \"phi\": 0.991933350492562}, {\"truth_threshold\": 1.8000000268220901, \"match_probability\": 0.7768953900182098, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6432.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 106.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.983787090853472, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.01621290914652799, \"precision\": 1.0, \"recall\": 0.983787090853472, \"specificity\": 1.0, \"npv\": 0.9999915139361982, \"accuracy\": 0.9999915183036607, \"f1\": 0.9918272937548188, \"f2\": 0.9869874785170636, \"f0_5\": 0.9967148081571934, \"p4\": 0.995894775925105, \"phi\": 0.9918562105332868}, {\"truth_threshold\": 1.9000000283122063, \"match_probability\": 0.7886787621992872, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6431.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 107.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9836341388803915, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.016365861119608444, \"precision\": 1.0, \"recall\": 0.9836341388803915, \"specificity\": 1.0, \"npv\": 0.9999914338796784, 
\"accuracy\": 0.9999914382876576, \"f1\": 0.9917495566350528, \"f2\": 0.9868643157474757, \"f0_5\": 0.9966834046246358, \"p4\": 0.995855566533305, \"phi\": 0.9917790645864659}, {\"truth_threshold\": 2.0000000298023224, \"match_probability\": 0.8000000033051833, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6430.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 108.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9834811869073111, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.016518813092688895, \"precision\": 1.0, \"recall\": 0.9834811869073111, \"specificity\": 1.0, \"npv\": 0.9999913538231713, \"accuracy\": 0.9999913582716543, \"f1\": 0.9916718075262184, \"f2\": 0.9867411454177153, \"f0_5\": 0.9966519933039866, \"p4\": 0.9958163480406573, \"phi\": 0.9917019126507023}, {\"truth_threshold\": 2.2000000327825546, \"match_probability\": 0.8212623941099038, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6400.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 138.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9788926277148975, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.02110737228510248, \"precision\": 1.0, \"recall\": 0.9788926277148975, \"specificity\": 1.0, \"npv\": 0.9999889521339194, \"accuracy\": 0.9999889577915583, \"f1\": 0.9893337455557273, \"f2\": 0.9830425165888425, \"f0_5\": 0.9957060177982451, \"p4\": 0.9946355455816717, \"phi\": 0.9893845627662881}, {\"truth_threshold\": 2.3000000342726707, \"match_probability\": 0.8312116004280432, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6398.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 140.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9785867237687366, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.021413276231263382, \"precision\": 1.0, \"recall\": 0.9785867237687366, \"specificity\": 1.0, \"npv\": 
0.9999887920217128, \"accuracy\": 0.999988797759552, \"f1\": 0.9891774891774892, \"f2\": 0.9827956989247312, \"f0_5\": 0.9956427015250545, \"p4\": 0.9945565319796087, \"phi\": 0.9892298801542462}, {\"truth_threshold\": 2.600000038743019, \"match_probability\": 0.8584144256340188, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6397.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 141.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9784337717956562, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.021566228204343837, \"precision\": 1.0, \"recall\": 0.9784337717956562, \"specificity\": 1.0, \"npv\": 0.9999887119656287, \"accuracy\": 0.9999887177435487, \"f1\": 0.9890993428681871, \"f2\": 0.9826722787182401, \"f0_5\": 0.9956110315632198, \"p4\": 0.9945170113707356, \"phi\": 0.9891525297958905}, {\"truth_threshold\": 2.8000000417232513, \"match_probability\": 0.8744413378412453, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6393.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 145.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9778219639033343, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.022178036096665646, \"precision\": 1.0, \"recall\": 0.9778219639033343, \"specificity\": 1.0, \"npv\": 0.9999883917414205, \"accuracy\": 0.999988397679536, \"f1\": 0.9887866367643647, \"f2\": 0.9821785220463973, \"f0_5\": 0.9954842728122081, \"p4\": 0.994358836807793, \"phi\": 0.9888430679805228}, {\"truth_threshold\": 2.9000000432133675, \"match_probability\": 0.8818562391739494, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6391.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 147.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9775160599571735, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.022483940042826552, \"precision\": 1.0, \"recall\": 0.9775160599571735, 
\"specificity\": 1.0, \"npv\": 0.9999882316293933, \"accuracy\": 0.9999882376475295, \"f1\": 0.9886302111532215, \"f2\": 0.9819315981931598, \"f0_5\": 0.9954208460532054, \"p4\": 0.9942796942047007, \"phi\": 0.9886883008238269}, {\"truth_threshold\": 3.200000047683716, \"match_probability\": 0.9018605969116819, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6389.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 149.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9772101560110126, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.02278984398898746, \"precision\": 1.0, \"recall\": 0.9772101560110126, \"specificity\": 1.0, \"npv\": 0.9999880715174174, \"accuracy\": 0.9999880776155231, \"f1\": 0.9884737371393209, \"f2\": 0.9816846439875849, \"f0_5\": 0.9953573876737085, \"p4\": 0.994200514686091, \"phi\": 0.9885335094859896}, {\"truth_threshold\": 3.300000049173832, \"match_probability\": 0.9078269283845571, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6377.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 161.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9753747323340471, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.02462526766595289, \"precision\": 1.0, \"recall\": 0.9753747323340471, \"specificity\": 1.0, \"npv\": 0.9999871108466388, \"accuracy\": 0.9999871174234847, \"f1\": 0.9875338753387534, \"f2\": 0.9802022810415322, \"f0_5\": 0.9949759720401922, \"p4\": 0.9937246608988384, \"phi\": 0.9876042530181497}, {\"truth_threshold\": 3.400000050663948, \"match_probability\": 0.9134653434169965, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6376.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 162.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9752217803609666, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.024778219639033344, \"precision\": 1.0, \"recall\": 
0.9752217803609666, \"specificity\": 1.0, \"npv\": 0.999987030790824, \"accuracy\": 0.9999870374074815, \"f1\": 0.9874554746786434, \"f2\": 0.9800787014264634, \"f0_5\": 0.994944135821734, \"p4\": 0.9936849462112912, \"phi\": 0.9875267755892515}, {\"truth_threshold\": 3.500000052154064, \"match_probability\": 0.9187896995557598, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6375.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 163.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9750688283878862, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.024931171612113796, \"precision\": 1.0, \"recall\": 0.9750688283878862, \"specificity\": 1.0, \"npv\": 0.9999869507350219, \"accuracy\": 0.9999869573914782, \"f1\": 0.9873770618756292, \"f2\": 0.9799551142128078, \"f0_5\": 0.9949122916536612, \"p4\": 0.9936452222462342, \"phi\": 0.9874492920937119}, {\"truth_threshold\": 3.6000000536441803, \"match_probability\": 0.9238137785296746, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6373.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 165.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9747629244417253, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.025237075558274702, \"precision\": 1.0, \"recall\": 0.9747629244417253, \"specificity\": 1.0, \"npv\": 0.9999867906234562, \"accuracy\": 0.9999867973594719, \"f1\": 0.9872201998296026, \"f2\": 0.9797079169869332, \"f0_5\": 0.9948485794567593, \"p4\": 0.9935657464705724, \"phi\": 0.9872943068969937}, {\"truth_threshold\": 3.7000000551342964, \"match_probability\": 0.9285512128432143, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6371.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 167.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9744570204955644, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.025542979504435608, 
\"precision\": 1.0, \"recall\": 0.9744570204955644, \"specificity\": 1.0, \"npv\": 0.9999866305119417, \"accuracy\": 0.9999866373274655, \"f1\": 0.9870632891780928, \"f2\": 0.9794606893583003, \"f0_5\": 0.9947848354256449, \"p4\": 0.9934862335458013, \"phi\": 0.987139297416563}, {\"truth_threshold\": 4.000000059604645, \"match_probability\": 0.9411764728755594, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6370.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 168.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.974304068522484, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.02569593147751606, \"precision\": 1.0, \"recall\": 0.974304068522484, \"specificity\": 1.0, \"npv\": 0.9999865504562038, \"accuracy\": 0.9999865573114622, \"f1\": 0.9869848156182213, \"f2\": 0.9793370641411967, \"f0_5\": 0.994752951464801, \"p4\": 0.9934464631443519, \"phi\": 0.9870617835663803}, {\"truth_threshold\": 4.100000061094761, \"match_probability\": 0.9448986513716398, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6365.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 173.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9735393086570817, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.026460691342918324, \"precision\": 1.0, \"recall\": 0.9735393086570817, \"specificity\": 1.0, \"npv\": 0.999986150177706, \"accuracy\": 0.9999861572314462, \"f1\": 0.9865922653646438, \"f2\": 0.978718823999754, \"f0_5\": 0.9945934120882555, \"p4\": 0.9932474716159481, \"phi\": 0.9866741231585332}, {\"truth_threshold\": 4.200000062584877, \"match_probability\": 0.9483982147343843, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6359.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 179.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9726215968185989, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 
0.02737840318140104, \"precision\": 1.0, \"recall\": 0.9726215968185989, \"specificity\": 1.0, \"npv\": 0.9999856698439318, \"accuracy\": 0.9999856771354271, \"f1\": 0.9861208032875862, \"f2\": 0.9779766848143705, \"f0_5\": 0.9944017013823732, \"p4\": 0.9930083744039019, \"phi\": 0.9862087299346529}, {\"truth_threshold\": 4.400000065565109, \"match_probability\": 0.9547759482410569, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6357.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 181.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.972315692872438, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.027684307127561945, \"precision\": 1.0, \"recall\": 0.972315692872438, \"specificity\": 1.0, \"npv\": 0.9999855097327762, \"accuracy\": 0.9999855171034207, \"f1\": 0.9859635517642497, \"f2\": 0.9777292442092959, \"f0_5\": 0.9943377338422074, \"p4\": 0.9929286007038632, \"phi\": 0.9860535501473652}, {\"truth_threshold\": 4.500000067055225, \"match_probability\": 0.9576762895591182, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6356.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 182.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9721627408993576, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.027837259100642397, \"precision\": 1.0, \"recall\": 0.9721627408993576, \"specificity\": 1.0, \"npv\": 0.9999854296772177, \"accuracy\": 0.9999854370874175, \"f1\": 0.9858849077090119, \"f2\": 0.9776055124892334, \"f0_5\": 0.994305738063951, \"p4\": 0.9928886998460839, \"phi\": 0.9859759511136291}, {\"truth_threshold\": 4.6000000685453415, \"match_probability\": 0.9603983391922627, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6355.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 183.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9720097889262771, \"tn_rate\": 1.0, \"fp_rate\": 
0.0, \"fn_rate\": 0.02799021107372285, \"precision\": 1.0, \"recall\": 0.9720097889262771, \"specificity\": 1.0, \"npv\": 0.999985349621672, \"accuracy\": 0.9999853570714143, \"f1\": 0.9858062514542775, \"f2\": 0.9774817731565509, \"f0_5\": 0.9942737342762376, \"p4\": 0.9928487896454207, \"phi\": 0.98589834598458}, {\"truth_threshold\": 4.800000071525574, \"match_probability\": 0.9653471069144568, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6353.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 185.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9717038849801163, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.028296115019883758, \"precision\": 1.0, \"recall\": 0.9717038849801163, \"specificity\": 1.0, \"npv\": 0.999985189510619, \"accuracy\": 0.9999851970394079, \"f1\": 0.9856489023349624, \"f2\": 0.9772342716505154, \"f0_5\": 0.9942097026604069, \"p4\": 0.9927689412023019, \"phi\": 0.9857431174347839}, {\"truth_threshold\": 4.90000007301569, \"match_probability\": 0.9675925026740654, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6350.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 188.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9712450290608748, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.028754970939125116, \"precision\": 1.0, \"recall\": 0.9712450290608748, \"specificity\": 1.0, \"npv\": 0.9999849493441356, \"accuracy\": 0.9999849569913983, \"f1\": 0.9854127870887648, \"f2\": 0.9768629622792444, \"f0_5\": 0.9941135950904878, \"p4\": 0.9926490983838245, \"phi\": 0.9855102288592354}, {\"truth_threshold\": 5.000000074505806, \"match_probability\": 0.969696971214501, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6349.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 189.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9710920770877944, \"tn_rate\": 1.0, 
\"fp_rate\": 0.0, \"fn_rate\": 0.028907922912205567, \"precision\": 1.0, \"recall\": 0.9710920770877944, \"specificity\": 1.0, \"npv\": 0.9999848692886668, \"accuracy\": 0.9999848769753951, \"f1\": 0.9853340575774036, \"f2\": 0.9767391772560844, \"f0_5\": 0.9940815431828146, \"p4\": 0.9926091320568233, \"phi\": 0.9854325871280583}, {\"truth_threshold\": 5.200000077486038, \"match_probability\": 0.9735157914041783, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6348.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 190.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.970939125114714, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.02906087488528602, \"precision\": 1.0, \"recall\": 0.970939125114714, \"specificity\": 1.0, \"npv\": 0.9999847892332109, \"accuracy\": 0.9999847969593919, \"f1\": 0.9852553158466553, \"f2\": 0.9766153846153847, \"f0_5\": 0.9940494832445975, \"p4\": 0.9925691563639123, \"phi\": 0.9853549392914795}, {\"truth_threshold\": 5.300000078976154, \"match_probability\": 0.9752454557772836, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6340.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 198.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9697155093300703, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.030284490669929644, \"precision\": 1.0, \"recall\": 0.9697155093300703, \"specificity\": 1.0, \"npv\": 0.9999841487900243, \"accuracy\": 0.9999841568313663, \"f1\": 0.984624941761143, \"f2\": 0.9756247691739505, \"f0_5\": 0.9937927142767572, \"p4\": 0.9922490132520717, \"phi\": 0.9847335366310601}, {\"truth_threshold\": 5.4000000804662704, \"match_probability\": 0.9768648415470134, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6339.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 199.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9695625573569899, 
\"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.030437442643010095, \"precision\": 1.0, \"recall\": 0.9695625573569899, \"specificity\": 1.0, \"npv\": 0.9999840687346837, \"accuracy\": 0.999984076815363, \"f1\": 0.9845460899277783, \"f2\": 0.9755009079437382, \"f0_5\": 0.9937605819276353, \"p4\": 0.9922089531174841, \"phi\": 0.9846558337808434}, {\"truth_threshold\": 5.500000081956387, \"match_probability\": 0.9783806392104205, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6336.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 202.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9691037014377486, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.030896298562251453, \"precision\": 1.0, \"recall\": 0.9691037014377486, \"specificity\": 1.0, \"npv\": 0.9999838285687387, \"accuracy\": 0.9999838367673535, \"f1\": 0.9843094609290042, \"f2\": 0.9751292785028318, \"f0_5\": 0.9936641365033562, \"p4\": 0.9920887163068618, \"phi\": 0.9844226885052252}, {\"truth_threshold\": 5.600000083446503, \"match_probability\": 0.9797991767207457, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6332.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 206.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9684918935454268, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.03150810645457326, \"precision\": 1.0, \"recall\": 0.9684918935454268, \"specificity\": 1.0, \"npv\": 0.9999835083476581, \"accuracy\": 0.9999835167033406, \"f1\": 0.983993783993784, \"f2\": 0.9746336658047039, \"f0_5\": 0.9935354296114981, \"p4\": 0.9919282688196436, \"phi\": 0.9841117423920022}, {\"truth_threshold\": 5.700000084936619, \"match_probability\": 0.9811264334957893, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6327.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 211.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 
0.9677271336800245, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.03227286631997553, \"precision\": 1.0, \"recall\": 0.9677271336800245, \"specificity\": 1.0, \"npv\": 0.9999831080715958, \"accuracy\": 0.9999831166233246, \"f1\": 0.9835989117761368, \"f2\": 0.9740139782628775, \"f0_5\": 0.9933743641273629, \"p4\": 0.9917274974373677, \"phi\": 0.9837229218141498}, {\"truth_threshold\": 5.800000086426735, \"match_probability\": 0.9823680546749124, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6322.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 216.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9669623738146222, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.03303762618537779, \"precision\": 1.0, \"recall\": 0.9669623738146222, \"specificity\": 1.0, \"npv\": 0.999982707795854, \"accuracy\": 0.9999827165433086, \"f1\": 0.983203732503888, \"f2\": 0.9733940998953009, \"f0_5\": 0.9932130962106454, \"p4\": 0.9915264900857204, \"phi\": 0.9833339478040268}, {\"truth_threshold\": 5.900000087916851, \"match_probability\": 0.9835293654795508, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6314.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 224.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9657387580299786, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.034261241970021415, \"precision\": 1.0, \"recall\": 0.9657387580299786, \"specificity\": 1.0, \"npv\": 0.9999820673553336, \"accuracy\": 0.999982076415283, \"f1\": 0.9825708061002179, \"f2\": 0.9724018973695558, \"f0_5\": 0.9929546455306033, \"p4\": 0.9912043864665385, \"phi\": 0.9827112697939261}, {\"truth_threshold\": 6.000000089406967, \"match_probability\": 0.9846153855541349, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6312.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 226.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 
0.9994768553710742, \"tp_rate\": 0.9654328540838176, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.03456714591618232, \"precision\": 1.0, \"recall\": 0.9654328540838176, \"specificity\": 1.0, \"npv\": 0.9999819072453316, \"accuracy\": 0.9999819163832766, \"f1\": 0.9824124513618677, \"f2\": 0.9721537703302119, \"f0_5\": 0.9928899515509972, \"p4\": 0.9911237658068457, \"phi\": 0.9825555387580083}, {\"truth_threshold\": 6.100000090897083, \"match_probability\": 0.985630843183972, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6310.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 228.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9651269501376568, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.03487304986234322, \"precision\": 1.0, \"recall\": 0.9651269501376568, \"specificity\": 1.0, \"npv\": 0.999981747135381, \"accuracy\": 0.9999817563512703, \"f1\": 0.9822540473225405, \"f2\": 0.9719056127164069, \"f0_5\": 0.9928252249984266, \"p4\": 0.9910431071916149, \"phi\": 0.9823997830853262}, {\"truth_threshold\": 6.200000092387199, \"match_probability\": 0.9865801893041345, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6307.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 231.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9646680942184154, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.035331905781584586, \"precision\": 1.0, \"recall\": 0.9646680942184154, \"specificity\": 1.0, \"npv\": 0.9999815069705512, \"accuracy\": 0.9999815163032606, \"f1\": 0.9820163487738419, \"f2\": 0.9715333189562217, \"f0_5\": 0.9927280740414279, \"p4\": 0.9909220480434291, \"phi\": 0.9821661033567289}, {\"truth_threshold\": 6.3000000938773155, \"match_probability\": 0.987467611228855, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6304.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 234.0, \"P_rate\": 
0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.964209238299174, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.03579076170082594, \"precision\": 1.0, \"recall\": 0.964209238299174, \"specificity\": 1.0, \"npv\": 0.9999812668058367, \"accuracy\": 0.999981276255251, \"f1\": 0.9817785391683539, \"f2\": 0.9711609563717032, \"f0_5\": 0.9926308496567362, \"p4\": 0.9908009033442303, \"phi\": 0.9819323681294444}, {\"truth_threshold\": 6.400000095367432, \"match_probability\": 0.9882970460445225, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6303.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 235.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9640562863260936, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.03594371367390639, \"precision\": 1.0, \"recall\": 0.9640562863260936, \"specificity\": 1.0, \"npv\": 0.9999811867509575, \"accuracy\": 0.9999811962392479, \"f1\": 0.9816992446071178, \"f2\": 0.9710368202126021, \"f0_5\": 0.9925984251968504, \"p4\": 0.9907605027508045, \"phi\": 0.9818544440471244}, {\"truth_threshold\": 6.500000096857548, \"match_probability\": 0.9890721936212699, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6302.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 236.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9639033343530131, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.036096665646986846, \"precision\": 1.0, \"recall\": 0.9639033343530131, \"specificity\": 1.0, \"npv\": 0.9999811066960911, \"accuracy\": 0.9999811162232447, \"f1\": 0.981619937694704, \"f2\": 0.970912676403525, \"f0_5\": 0.9925659925659925, \"p4\": 0.9907200926382568, \"phi\": 0.98177651379241}, {\"truth_threshold\": 6.600000098347664, \"match_probability\": 0.9897965292084853, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6298.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 240.0, 
\"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9632915264606914, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.03670847353930866, \"precision\": 1.0, \"recall\": 0.9632915264606914, \"specificity\": 1.0, \"npv\": 0.9999807864767538, \"accuracy\": 0.9999807961592319, \"f1\": 0.9813025864755376, \"f2\": 0.9704160246533128, \"f0_5\": 0.9924361802710369, \"p4\": 0.9905583569294693, \"phi\": 0.9814647310202006}, {\"truth_threshold\": 6.70000009983778, \"match_probability\": 0.9904733155885336, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6296.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 242.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9629856225145305, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.03701437748546956, \"precision\": 1.0, \"recall\": 0.9629856225145305, \"specificity\": 1.0, \"npv\": 0.999980626367162, \"accuracy\": 0.9999806361272254, \"f1\": 0.9811438366838087, \"f2\": 0.9701676528599605, \"f0_5\": 0.9923712250173381, \"p4\": 0.9904774318727086, \"phi\": 0.981308802561483}, {\"truth_threshold\": 6.800000101327896, \"match_probability\": 0.9911056147706719, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6293.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 245.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9625267665952891, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.03747323340471092, \"precision\": 1.0, \"recall\": 0.9625267665952891, \"specificity\": 1.0, \"npv\": 0.9999803862028706, \"accuracy\": 0.9999803960792158, \"f1\": 0.9809056192034915, \"f2\": 0.9697950377562028, \"f0_5\": 0.9922737306843267, \"p4\": 0.9903559727086484, \"phi\": 0.9810748634994975}, {\"truth_threshold\": 6.900000102818012, \"match_probability\": 0.9916962992137202, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6287.0, \"tn\": 12490962.0, \"fp\": 0.0, 
\"fn\": 251.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9616090547568064, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.038390945243193635, \"precision\": 1.0, \"recall\": 0.9616090547568064, \"specificity\": 1.0, \"npv\": 0.9999799058746336, \"accuracy\": 0.9999799159831967, \"f1\": 0.9804288499025341, \"f2\": 0.9690496007891736, \"f0_5\": 0.9920785204822319, \"p4\": 0.9901127963615985, \"phi\": 0.980606818283407}, {\"truth_threshold\": 7.000000104308128, \"match_probability\": 0.9922480625716311, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6286.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 252.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.961456102783726, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.03854389721627409, \"precision\": 1.0, \"recall\": 0.961456102783726, \"specificity\": 1.0, \"npv\": 0.9999798258199724, \"accuracy\": 0.9999798359671934, \"f1\": 0.980349344978166, \"f2\": 0.9689253344842469, \"f0_5\": 0.9920459566946531, \"p4\": 0.9900722334840125, \"phi\": 0.980528789070071}, {\"truth_threshold\": 7.1000001057982445, \"match_probability\": 0.9927634299608046, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6283.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 255.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9609972468644845, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.03900275313551545, \"precision\": 1.0, \"recall\": 0.9609972468644845, \"specificity\": 1.0, \"npv\": 0.9999795856560654, \"accuracy\": 0.9999795959191838, \"f1\": 0.9801107557912799, \"f2\": 0.9685524895945737, \"f0_5\": 0.9919482159772656, \"p4\": 0.9899504873781381, \"phi\": 0.9802946642393637}, {\"truth_threshold\": 7.200000107288361, \"match_probability\": 0.9932447677519157, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6280.0, \"tn\": 12490962.0, 
\"fp\": 0.0, \"fn\": 258.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9605383909452432, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.039461609054756805, \"precision\": 1.0, \"recall\": 0.9605383909452432, \"specificity\": 1.0, \"npv\": 0.9999793454922737, \"accuracy\": 0.9999793558711743, \"f1\": 0.9798720549227649, \"f2\": 0.9681795757276763, \"f0_5\": 0.9918504011624234, \"p4\": 0.9898286549912472, \"phi\": 0.9800604835915109}, {\"truth_threshold\": 7.300000108778477, \"match_probability\": 0.9936942928922654, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6278.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 260.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9602324869990823, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.039767513000917715, \"precision\": 1.0, \"recall\": 0.9602324869990823, \"specificity\": 1.0, \"npv\": 0.9999791853831435, \"accuracy\": 0.9999791958391678, \"f1\": 0.9797128589263421, \"f2\": 0.9679309281529448, \"f0_5\": 0.9917851500789889, \"p4\": 0.989747385420744, \"phi\": 0.9799043321303219}, {\"truth_threshold\": 7.400000110268593, \"match_probability\": 0.9941140817673122, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6276.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 262.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9599265830529213, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.04007341694707862, \"precision\": 1.0, \"recall\": 0.9599265830529213, \"specificity\": 1.0, \"npv\": 0.9999790252740645, \"accuracy\": 0.9999790358071614, \"f1\": 0.9795536132355237, \"f2\": 0.9676822499074873, \"f0_5\": 0.9917198660008849, \"p4\": 0.9896660774350681, \"phi\": 0.9797481558318564}, {\"truth_threshold\": 7.500000111758709, \"match_probability\": 0.9945060786121668, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6273.0, 
\"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 265.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.95946772713368, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.040532272866319975, \"precision\": 1.0, \"recall\": 0.95946772713368, \"specificity\": 1.0, \"npv\": 0.999978785110542, \"accuracy\": 0.9999787957591518, \"f1\": 0.9793146514713917, \"f2\": 0.9673091750192753, \"f0_5\": 0.9916218779639583, \"p4\": 0.9895440433684495, \"phi\": 0.9795138447882757}, {\"truth_threshold\": 7.600000113248825, \"match_probability\": 0.9948721034855129, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6269.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 269.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9588559192413583, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.04114408075864179, \"precision\": 1.0, \"recall\": 0.9588559192413583, \"specificity\": 1.0, \"npv\": 0.9999784648926915, \"accuracy\": 0.999978475695139, \"f1\": 0.9789958616381667, \"f2\": 0.9668116344344715, \"f0_5\": 0.9914911115328652, \"p4\": 0.9893811965718026, \"phi\": 0.9792013430220794}, {\"truth_threshold\": 7.700000114738941, \"match_probability\": 0.9952138598197071, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6267.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 271.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9585500152951973, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.04144998470480269, \"precision\": 1.0, \"recall\": 0.9585500152951973, \"specificity\": 1.0, \"npv\": 0.9999783047838432, \"accuracy\": 0.9999783156631327, \"f1\": 0.9788363920343616, \"f2\": 0.9665628181004966, \"f0_5\": 0.9914256786686072, \"p4\": 0.9892997153800354, \"phi\": 0.9790450548087244}, {\"truth_threshold\": 7.800000116229057, \"match_probability\": 0.9955329415617687, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, 
\"tp\": 6263.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 275.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9579382074028755, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.0420617925971245, \"precision\": 1.0, \"recall\": 0.9579382074028755, \"specificity\": 1.0, \"npv\": 0.9999779845663004, \"accuracy\": 0.9999779955991198, \"f1\": 0.9785173033356769, \"f2\": 0.9660650933209933, \"f0_5\": 0.9912947135169358, \"p4\": 0.9891366372726919, \"phi\": 0.9787324036618906}, {\"truth_threshold\": 7.900000117719173, \"match_probability\": 0.99583083992065, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6254.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 284.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9565616396451514, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.043438360354848576, \"precision\": 1.0, \"recall\": 0.9565616396451514, \"specificity\": 1.0, \"npv\": 0.9999772640775788, \"accuracy\": 0.999977275455091, \"f1\": 0.9777986241400876, \"f2\": 0.9649447633154354, \"f0_5\": 0.9909995563161564, \"p4\": 0.9887691463741032, \"phi\": 0.9780285738841793}, {\"truth_threshold\": 8.00000011920929, \"match_probability\": 0.9961089497366072, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6253.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 285.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.956408687672071, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.04359131232792903, \"precision\": 1.0, \"recall\": 0.956408687672071, \"specificity\": 1.0, \"npv\": 0.9999771840233405, \"accuracy\": 0.9999771954390878, \"f1\": 0.9777187084668908, \"f2\": 0.9648202437895387, \"f0_5\": 0.9909667194928684, \"p4\": 0.9887282656678122, \"phi\": 0.9779503393699376}, {\"truth_threshold\": 8.100000120699406, \"match_probability\": 0.9963685754887298, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 
12490962.0, \"tp\": 6241.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 297.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9545732639951056, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.04542673600489446, \"precision\": 1.0, \"recall\": 0.9545732639951056, \"specificity\": 1.0, \"npv\": 0.9999762233734806, \"accuracy\": 0.9999762352470494, \"f1\": 0.9767587448157132, \"f2\": 0.9633254098107615, \"f0_5\": 0.990572027172878, \"p4\": 0.9882369404262159, \"phi\": 0.9770110375339279}, {\"truth_threshold\": 8.300000123679638, \"match_probability\": 0.9968371745531442, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6240.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 298.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.954420312022025, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.04557968797797492, \"precision\": 1.0, \"recall\": 0.954420312022025, \"specificity\": 1.0, \"npv\": 0.999976143319409, \"accuracy\": 0.9999761552310462, \"f1\": 0.9766786664579746, \"f2\": 0.9632007903185972, \"f0_5\": 0.9905390818464664, \"p4\": 0.9881959334873694, \"phi\": 0.9769327216965821}, {\"truth_threshold\": 8.400000125169754, \"match_probability\": 0.9970483543414643, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6238.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 300.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9541144080758642, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.04588559192413582, \"precision\": 1.0, \"recall\": 0.9541144080758642, \"specificity\": 1.0, \"npv\": 0.999975983211304, \"accuracy\": 0.9999759951990398, \"f1\": 0.9765184721352536, \"f2\": 0.9629515282494597, \"f0_5\": 0.9904731660844712, \"p4\": 0.9881138904029276, \"phi\": 0.9767760712219222}, {\"truth_threshold\": 8.50000012665987, \"match_probability\": 0.997245472756309, \"total_clerical_labels\": 12497500.0, \"p\": 
6538.0, \"n\": 12490962.0, \"tp\": 6234.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 304.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9535026001835424, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.04649739981645763, \"precision\": 1.0, \"recall\": 0.9535026001835424, \"specificity\": 1.0, \"npv\": 0.9999756629952481, \"accuracy\": 0.999975675135027, \"f1\": 0.9761979329783902, \"f2\": 0.9624529117519915, \"f0_5\": 0.9903412340344411, \"p4\": 0.9879496873098409, \"phi\": 0.9764626950305016}, {\"truth_threshold\": 8.600000128149986, \"match_probability\": 0.9974294610402847, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6229.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 309.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9527378403181401, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.04726215968185989, \"precision\": 1.0, \"recall\": 0.9527378403181401, \"specificity\": 1.0, \"npv\": 0.9999752627254664, \"accuracy\": 0.999975275055011, \"f1\": 0.975796976580246, \"f2\": 0.9618294678978413, \"f0_5\": 0.9901761302219113, \"p4\": 0.9877442139237783, \"phi\": 0.9760708335877195}, {\"truth_threshold\": 8.700000129640102, \"match_probability\": 0.997601189412643, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6219.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 319.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9512083205873356, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.04879167941266442, \"precision\": 1.0, \"recall\": 0.9512083205873356, \"specificity\": 1.0, \"npv\": 0.9999744621868646, \"accuracy\": 0.999974474894979, \"f1\": 0.9749941208748139, \"f2\": 0.9605820024095642, \"f0_5\": 0.9898452919080665, \"p4\": 0.987332533763769, \"phi\": 0.9752866393050771}, {\"truth_threshold\": 8.800000131130219, \"match_probability\": 0.997761470983937, \"total_clerical_labels\": 
12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6207.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 331.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9493728969103702, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.05062710308962986, \"precision\": 1.0, \"recall\": 0.9493728969103702, \"specificity\": 1.0, \"npv\": 0.9999735015422343, \"accuracy\": 0.9999735147029406, \"f1\": 0.9740290309925461, \"f2\": 0.9590840260823882, \"f0_5\": 0.9894471720971753, \"p4\": 0.9868372226531879, \"phi\": 0.9743447747038815}, {\"truth_threshold\": 8.900000132620335, \"match_probability\": 0.9979110654305032, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6205.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 333.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9490669929642093, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.05093300703579076, \"precision\": 1.0, \"recall\": 0.9490669929642093, \"specificity\": 1.0, \"npv\": 0.9999733414349753, \"accuracy\": 0.9999733546709342, \"f1\": 0.9738680059640586, \"f2\": 0.9588342553388757, \"f0_5\": 0.989380700299764, \"p4\": 0.9867545330684194, \"phi\": 0.9741877089144907}, {\"truth_threshold\": 9.00000013411045, \"match_probability\": 0.9980506824420605, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6199.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 339.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9481492811257265, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.05185071887427348, \"precision\": 1.0, \"recall\": 0.9481492811257265, \"specificity\": 1.0, \"npv\": 0.9999728611135061, \"accuracy\": 0.999972874574915, \"f1\": 0.9733846274632959, \"f2\": 0.9580847578127415, \"f0_5\": 0.9891810812535904, \"p4\": 0.9865062276367278, \"phi\": 0.9737163598348376}, {\"truth_threshold\": 9.100000135600567, \"match_probability\": 0.9981809849551747, 
\"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6196.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 342.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9476904252064852, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.052309574793514835, \"precision\": 1.0, \"recall\": 0.9476904252064852, \"specificity\": 1.0, \"npv\": 0.9999726209529446, \"accuracy\": 0.9999726345269054, \"f1\": 0.9731427673943772, \"f2\": 0.9577099047854581, \"f0_5\": 0.9890811570142392, \"p4\": 0.9863819416149898, \"phi\": 0.9734805998815484}, {\"truth_threshold\": 9.200000137090683, \"match_probability\": 0.9983025921847976, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6191.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 347.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9469256653410829, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.0530743346589171, \"precision\": 1.0, \"recall\": 0.9469256653410829, \"specificity\": 1.0, \"npv\": 0.9999722206855983, \"accuracy\": 0.9999722344468894, \"f1\": 0.9727394139366801, \"f2\": 0.9570849952076184, \"f0_5\": 0.9889144463612549, \"p4\": 0.986174600424879, \"phi\": 0.9730875399445367}, {\"truth_threshold\": 9.300000138580799, \"match_probability\": 0.9984160824655384, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6183.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 355.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9457020495564393, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.05429795044356072, \"precision\": 1.0, \"recall\": 0.9457020495564393, \"specificity\": 1.0, \"npv\": 0.9999715802585107, \"accuracy\": 0.9999715943188637, \"f1\": 0.9720933888845217, \"f2\": 0.9560847379001083, \"f0_5\": 0.98864726574992, \"p4\": 0.9858423392031227, \"phi\": 0.9724583142472819}, {\"truth_threshold\": 9.400000140070915, \"match_probability\": 
0.9985219959137808, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6172.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 366.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9440195778525543, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.0559804221474457, \"precision\": 1.0, \"recall\": 0.9440195778525543, \"specificity\": 1.0, \"npv\": 0.9999706996726049, \"accuracy\": 0.9999707141428286, \"f1\": 0.9712037765538946, \"f2\": 0.9547085756713278, \"f0_5\": 0.9882789982706719, \"p4\": 0.9853844414568206, \"phi\": 0.9715924648585207}, {\"truth_threshold\": 9.500000141561031, \"match_probability\": 0.9986208369212233, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6163.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 375.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9426430100948302, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.057356989905169777, \"precision\": 1.0, \"recall\": 0.9426430100948302, \"specificity\": 1.0, \"npv\": 0.9999699791943809, \"accuracy\": 0.9999699939987997, \"f1\": 0.9704747657664751, \"f2\": 0.9535819278972614, \"f0_5\": 0.987976915678102, \"p4\": 0.9850089003847972, \"phi\": 0.9708834694196086}, {\"truth_threshold\": 9.600000143051147, \"match_probability\": 0.9987130764898899, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6162.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 376.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9424900581217498, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.05750994187825023, \"precision\": 1.0, \"recall\": 0.9424900581217498, \"specificity\": 1.0, \"npv\": 0.999969899141309, \"accuracy\": 0.9999699139827966, \"f1\": 0.9703937007874016, \"f2\": 0.9534567060716718, \"f0_5\": 0.9879433078945681, \"p4\": 0.9849671236145177, \"phi\": 0.9708046602492658}, {\"truth_threshold\": 9.700000144541264, 
\"match_probability\": 0.9987991544181472, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6157.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 381.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9417252982563475, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.05827470174365249, \"precision\": 1.0, \"recall\": 0.9417252982563475, \"specificity\": 1.0, \"npv\": 0.9999694988761416, \"accuracy\": 0.9999695139027805, \"f1\": 0.9699881843245373, \"f2\": 0.9528304806710205, \"f0_5\": 0.987775139575178, \"p4\": 0.9847580895397872, \"phi\": 0.9704105185829268}, {\"truth_threshold\": 9.80000014603138, \"match_probability\": 0.9988794813467569, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6154.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 384.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9412664423371061, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.05873355766289385, \"precision\": 1.0, \"recall\": 0.9412664423371061, \"specificity\": 1.0, \"npv\": 0.9999692587171951, \"accuracy\": 0.999969273854771, \"f1\": 0.9697447210841474, \"f2\": 0.9524546523865536, \"f0_5\": 0.9876741349425435, \"p4\": 0.9846325487864815, \"phi\": 0.9701739568753676}, {\"truth_threshold\": 9.900000147521496, \"match_probability\": 0.9989544406735176, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6145.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 393.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9398898745793821, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.06011012542061792, \"precision\": 1.0, \"recall\": 0.9398898745793821, \"specificity\": 1.0, \"npv\": 0.9999685382410475, \"accuracy\": 0.9999685537107421, \"f1\": 0.9690136403059213, \"f2\": 0.9513267486144223, \"f0_5\": 0.9873706536409795, \"p4\": 0.984255384032769, \"phi\": 0.9694639260904484}, {\"truth_threshold\": 
10.000000149011612, \"match_probability\": 0.9990243903445719, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6139.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 399.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9389721627408993, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.061027837259100645, \"precision\": 1.0, \"recall\": 0.9389721627408993, \"specificity\": 1.0, \"npv\": 0.9999680579241925, \"accuracy\": 0.999968073614723, \"f1\": 0.9685256764218664, \"f2\": 0.9505744634727943, \"f0_5\": 0.9871679423683025, \"p4\": 0.9840034878071545, \"phi\": 0.9689902837597992}, {\"truth_threshold\": 10.100000150501728, \"match_probability\": 0.9990896645300149, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6134.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 404.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9382074028754971, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.061792597124502906, \"precision\": 1.0, \"recall\": 0.9382074028754971, \"specificity\": 1.0, \"npv\": 0.9999676576604992, \"accuracy\": 0.999967673534707, \"f1\": 0.9681186868686869, \"f2\": 0.9499473455987115, \"f0_5\": 0.9869987771126987, \"p4\": 0.9837932967518425, \"phi\": 0.9685954052405737}, {\"truth_threshold\": 10.200000151991844, \"match_probability\": 0.9991505751910027, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6125.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 413.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.936830835117773, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.06316916488222699, \"precision\": 1.0, \"recall\": 0.936830835117773, \"specificity\": 1.0, \"npv\": 0.9999669371866587, \"accuracy\": 0.9999669533906781, \"f1\": 0.9673852957435047, \"f2\": 0.9488180438082845, \"f0_5\": 0.9866937302661254, \"p4\": 0.9834143155158388, \"phi\": 0.967884218723882}, 
{\"truth_threshold\": 10.30000015348196, \"match_probability\": 0.9992074135451509, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6117.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 421.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9356072193331294, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.0643927806668706, \"precision\": 1.0, \"recall\": 0.9356072193331294, \"specificity\": 1.0, \"npv\": 0.9999662967663389, \"accuracy\": 0.9999663132626525, \"f1\": 0.966732516791782, \"f2\": 0.9478136911586972, \"f0_5\": 0.9864219828420305, \"p4\": 0.9830767535630419, \"phi\": 0.9672516148057864}, {\"truth_threshold\": 10.400000154972076, \"match_probability\": 0.9992604514366183, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6112.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 426.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9348424594677271, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.06515754053227286, \"precision\": 1.0, \"recall\": 0.9348424594677271, \"specificity\": 1.0, \"npv\": 0.9999658965040554, \"accuracy\": 0.9999659131826365, \"f1\": 0.9663241106719368, \"f2\": 0.9471857178279197, \"f0_5\": 0.9862518556767572, \"p4\": 0.9828654468111998, \"phi\": 0.9668560275820294}, {\"truth_threshold\": 10.500000156462193, \"match_probability\": 0.9993099426168967, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6109.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 429.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9343836035484858, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.06561639645151422, \"precision\": 1.0, \"recall\": 0.9343836035484858, \"specificity\": 1.0, \"npv\": 0.9999656563468392, \"accuracy\": 0.9999656731346269, \"f1\": 0.9660789119949396, \"f2\": 0.946808840395524, \"f0_5\": 0.986149673920062, \"p4\": 0.9827385404821443, \"phi\": 
0.9666185976909849}, {\"truth_threshold\": 10.600000157952309, \"match_probability\": 0.9993561239419685, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6106.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 432.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9339247476292444, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.06607525237075558, \"precision\": 1.0, \"recall\": 0.9339247476292444, \"specificity\": 1.0, \"npv\": 0.9999654161897383, \"accuracy\": 0.9999654330866173, \"f1\": 0.9658335969629864, \"f2\": 0.9464318928637857, \"f0_5\": 0.9860474129578193, \"p4\": 0.9826115423228619, \"phi\": 0.966381109579949}, {\"truth_threshold\": 10.700000159442425, \"match_probability\": 0.9993992164911604, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6094.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 444.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.932089323952279, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.06791067604772101, \"precision\": 1.0, \"recall\": 0.932089323952279, \"specificity\": 1.0, \"npv\": 0.9999644555624884, \"accuracy\": 0.9999644728945789, \"f1\": 0.9648511716276124, \"f2\": 0.9449234013521057, \"f0_5\": 0.9856375752086434, \"p4\": 0.9821026293854247, \"phi\": 0.9654305740764317}, {\"truth_threshold\": 10.800000160932541, \"match_probability\": 0.9994394266126935, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6092.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 446.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.931783420006118, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.06821657999388192, \"precision\": 1.0, \"recall\": 0.931783420006118, \"specificity\": 1.0, \"npv\": 0.9999642954581262, \"accuracy\": 0.9999643128625725, \"f1\": 0.9646872525732383, \"f2\": 0.9446718769383451, \"f0_5\": 0.9855691451498091, \"p4\": 
0.9820176671104437, \"phi\": 0.9652720606678622}, {\"truth_threshold\": 10.900000162422657, \"match_probability\": 0.9994769469006325, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6083.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 455.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.930406852248394, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.069593147751606, \"precision\": 1.0, \"recall\": 0.930406852248394, \"specificity\": 1.0, \"npv\": 0.9999635749891306, \"accuracy\": 0.9999635927185437, \"f1\": 0.963948973932335, \"f2\": 0.9435396308360477, \"f0_5\": 0.9852607709750567, \"p4\": 0.9816348282554753, \"phi\": 0.9645584285924248}, {\"truth_threshold\": 11.000000163912773, \"match_probability\": 0.9995119571076428, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6078.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 460.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9296420923829918, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.07035790761700826, \"precision\": 1.0, \"recall\": 0.9296420923829918, \"specificity\": 1.0, \"npv\": 0.9999631747290261, \"accuracy\": 0.9999631926385277, \"f1\": 0.9635383639822448, \"f2\": 0.9429103319888302, \"f0_5\": 0.9850891410048622, \"p4\": 0.9814217796772566, \"phi\": 0.9641617385382139}, {\"truth_threshold\": 11.10000016540289, \"match_probability\": 0.9995446249976983, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6073.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 465.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9288773325175895, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.07112266748241053, \"precision\": 1.0, \"recall\": 0.9288773325175895, \"specificity\": 1.0, \"npv\": 0.999962774469242, \"accuracy\": 0.9999627925585117, \"f1\": 0.9631274284354928, \"f2\": 0.9422808378588052, \"f0_5\": 
0.9849172883554979, \"p4\": 0.9812084731310984, \"phi\": 0.9637648855223339}, {\"truth_threshold\": 11.200000166893005, \"match_probability\": 0.9995751071426191, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6064.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 474.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9275007647598654, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.0724992352401346, \"precision\": 1.0, \"recall\": 0.9275007647598654, \"specificity\": 1.0, \"npv\": 0.9999620540024382, \"accuracy\": 0.9999620724144829, \"f1\": 0.9623869227106808, \"f2\": 0.9411472560218525, \"f0_5\": 0.9846073910502046, \"p4\": 0.9808238697703506, \"phi\": 0.9630501387872321}, {\"truth_threshold\": 11.300000168383121, \"match_probability\": 0.9996035496660847, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6060.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 478.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9268889568675436, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.07311104313245641, \"precision\": 1.0, \"recall\": 0.9268889568675436, \"specificity\": 1.0, \"npv\": 0.9999617337953031, \"accuracy\": 0.9999617523504701, \"f1\": 0.9620574694395936, \"f2\": 0.9406432385446417, \"f0_5\": 0.9844694262135292, \"p4\": 0.9806526654507866, \"phi\": 0.9627323035740458}, {\"truth_threshold\": 11.400000169873238, \"match_probability\": 0.99963008893853, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6056.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 482.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9262771489752217, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.07372285102477821, \"precision\": 1.0, \"recall\": 0.9262771489752217, \"specificity\": 1.0, \"npv\": 0.999961413588373, \"accuracy\": 0.9999614322864573, \"f1\": 0.9617278068921709, \"f2\": 
0.9401390958768008, \"f0_5\": 0.9843313178596971, \"p4\": 0.9804812949472801, \"phi\": 0.9624143636001442}, {\"truth_threshold\": 11.500000171363354, \"match_probability\": 0.999654852226126, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6049.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 489.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9252064851636587, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.07479351483634139, \"precision\": 1.0, \"recall\": 0.9252064851636587, \"specificity\": 1.0, \"npv\": 0.9999608532267388, \"accuracy\": 0.9999608721744349, \"f1\": 0.9611503932628903, \"f2\": 0.9392565448278004, \"f0_5\": 0.9840892822281513, \"p4\": 0.9801809959570182, \"phi\": 0.9618577162528583}, {\"truth_threshold\": 11.60000017285347, \"match_probability\": 0.9996779582968373, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6041.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 497.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.923982869379015, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.07601713062098502, \"precision\": 1.0, \"recall\": 0.923982869379015, \"specificity\": 1.0, \"npv\": 0.9999602128142117, \"accuracy\": 0.9999602320464093, \"f1\": 0.9604897050639956, \"f2\": 0.9382474450967602, \"f0_5\": 0.9838121295029639, \"p4\": 0.9798371713419749, \"phi\": 0.9612211538979599}, {\"truth_threshold\": 11.700000174343586, \"match_probability\": 0.9996995179863626, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6038.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 500.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9235240134597736, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.07647598654022637, \"precision\": 1.0, \"recall\": 0.9235240134597736, \"specificity\": 1.0, \"npv\": 0.9999599726597255, \"accuracy\": 0.9999599919983997, \"f1\": 
0.9602417302798982, \"f2\": 0.9378689033861448, \"f0_5\": 0.9837080482241772, \"p4\": 0.9797080646478731, \"phi\": 0.9609823345149665}, {\"truth_threshold\": 11.800000175833702, \"match_probability\": 0.9997196347265854, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6035.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 503.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9230651575405323, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.07693484245946773, \"precision\": 1.0, \"recall\": 0.9230651575405323, \"specificity\": 1.0, \"npv\": 0.9999597325053546, \"accuracy\": 0.9999597519503901, \"f1\": 0.9599936371589914, \"f2\": 0.9374902911113182, \"f0_5\": 0.9836038855205684, \"p4\": 0.9795788637224542, \"phi\": 0.9607434558815604}, {\"truth_threshold\": 11.900000177323818, \"match_probability\": 0.9997384050389891, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6032.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 506.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9226063016212909, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.07739369837870909, \"precision\": 1.0, \"recall\": 0.9226063016212909, \"specificity\": 1.0, \"npv\": 0.9999594923510992, \"accuracy\": 0.9999595119023805, \"f1\": 0.9597454256165473, \"f2\": 0.9371116082525478, \"f0_5\": 0.9834996412965499, \"p4\": 0.9794495684624298, \"phi\": 0.9605045179535342}, {\"truth_threshold\": 12.000000178813934, \"match_probability\": 0.9997559189953416, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6018.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 520.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9204649739981646, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.07953502600183543, \"precision\": 1.0, \"recall\": 0.9204649739981646, \"specificity\": 1.0, \"npv\": 0.9999583716327655, \"accuracy\": 
0.9999583916783357, \"f1\": 0.9585855367951577, \"f2\": 0.9353434877214797, \"f0_5\": 0.9830120875530872, \"p4\": 0.978844940225795, \"phi\": 0.959388688980749}, {\"truth_threshold\": 12.10000018030405, \"match_probability\": 0.9997722606477963, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6014.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 524.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9198531661058428, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.08014683389415724, \"precision\": 1.0, \"recall\": 0.9198531661058428, \"specificity\": 1.0, \"npv\": 0.9999580514279887, \"accuracy\": 0.9999580716143228, \"f1\": 0.9582536647546208, \"f2\": 0.9348380277311447, \"f0_5\": 0.9828724586520232, \"p4\": 0.9786718100723659, \"phi\": 0.9590696427158273}, {\"truth_threshold\": 12.200000181794167, \"match_probability\": 0.9997875084304283, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 6003.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 535.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9181706944019578, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.08182930559804222, \"precision\": 1.0, \"recall\": 0.9181706944019578, \"specificity\": 1.0, \"npv\": 0.9999571708659099, \"accuracy\": 0.9999571914382877, \"f1\": 0.9573399250458496, \"f2\": 0.9334473643290313, \"f0_5\": 0.9824877250409165, \"p4\": 0.9781948300043662, \"phi\": 0.9581917187839654}, {\"truth_threshold\": 12.300000183284283, \"match_probability\": 0.9998017355340825, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5979.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 559.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9144998470480269, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.08550015295197308, \"precision\": 1.0, \"recall\": 0.9144998470480269, \"specificity\": 1.0, \"npv\": 
0.9999552496449392, \"accuracy\": 0.9999552710542109, \"f1\": 0.9553407365982264, \"f2\": 0.9304098845351841, \"f0_5\": 0.9816444473632363, \"p4\": 0.9771496849769628, \"phi\": 0.9562734561071788}, {\"truth_threshold\": 12.400000184774399, \"match_probability\": 0.9998150102562988, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5974.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 564.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9137350871826246, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.08626491281737535, \"precision\": 1.0, \"recall\": 0.9137350871826246, \"specificity\": 1.0, \"npv\": 0.9999548493914995, \"accuracy\": 0.9999548709741949, \"f1\": 0.954923273657289, \"f2\": 0.9297765050115171, \"f0_5\": 0.9814680948938687, \"p4\": 0.976931172555825, \"phi\": 0.9558733344368541}, {\"truth_threshold\": 12.500000186264515, \"match_probability\": 0.9998273963279586, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5969.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 569.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9129703273172224, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.08702967268277761, \"precision\": 1.0, \"recall\": 0.9129703273172224, \"specificity\": 1.0, \"npv\": 0.9999544491383803, \"accuracy\": 0.9999544708941789, \"f1\": 0.9545054769329175, \"f2\": 0.9291429283023567, \"f0_5\": 0.9812915104885908, \"p4\": 0.9767123921681168, \"phi\": 0.9554730455288521}, {\"truth_threshold\": 12.700000189244747, \"match_probability\": 0.9998497364189812, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5963.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 575.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9120526154787397, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.08794738452126033, \"precision\": 1.0, \"recall\": 0.9120526154787397, 
\"specificity\": 1.0, \"npv\": 0.99995396883506, \"accuracy\": 0.9999539907981596, \"f1\": 0.9540036797056235, \"f2\": 0.9283823758368364, \"f0_5\": 0.981079302402106, \"p4\": 0.9764495012918851, \"phi\": 0.9549924777894132}, {\"truth_threshold\": 12.800000190734863, \"match_probability\": 0.9998597977108138, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5961.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 577.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9117467115325788, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.08825328846742123, \"precision\": 1.0, \"recall\": 0.9117467115325788, \"specificity\": 1.0, \"npv\": 0.9999538087340559, \"accuracy\": 0.9999538307661532, \"f1\": 0.9538363069045523, \"f2\": 0.9281287951919783, \"f0_5\": 0.9810084918701863, \"p4\": 0.9763617849447761, \"phi\": 0.9548322348966611}, {\"truth_threshold\": 12.90000019222498, \"match_probability\": 0.9998691854106266, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5957.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 581.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.911134903640257, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.08886509635974305, \"precision\": 1.0, \"recall\": 0.911134903640257, \"specificity\": 1.0, \"npv\": 0.9999534885322013, \"accuracy\": 0.9999535107021404, \"f1\": 0.9535014005602241, \"f2\": 0.9276215391323305, \"f0_5\": 0.9808667588750576, \"p4\": 0.9761862229776187, \"phi\": 0.9545116685606971}, {\"truth_threshold\": 13.000000193715096, \"match_probability\": 0.9998779446032292, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5951.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 587.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9102171918017743, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.08978280819822576, \"precision\": 1.0, \"recall\": 
0.9102171918017743, \"specificity\": 1.0, \"npv\": 0.999953008229804, \"accuracy\": 0.9999530306061212, \"f1\": 0.9529986388021459, \"f2\": 0.9268604180294676, \"f0_5\": 0.9806538791114626, \"p4\": 0.9759225563675026, \"phi\": 0.9540306174775884}, {\"truth_threshold\": 13.100000195205212, \"match_probability\": 0.9998861173572945, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5941.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 597.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9086876720709697, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.09131232792903028, \"precision\": 1.0, \"recall\": 0.9086876720709697, \"specificity\": 1.0, \"npv\": 0.9999522077268338, \"accuracy\": 0.9999522304460892, \"f1\": 0.9521596281753346, \"f2\": 0.9255912504284424, \"f0_5\": 0.9802983301432249, \"p4\": 0.9754822467997415, \"phi\": 0.9532283272236108}, {\"truth_threshold\": 13.200000196695328, \"match_probability\": 0.9998937429269453, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5933.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 605.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9074640562863261, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.09253594371367391, \"precision\": 1.0, \"recall\": 0.9074640562863261, \"specificity\": 1.0, \"npv\": 0.9999515673253804, \"accuracy\": 0.9999515903180636, \"f1\": 0.9514874508860557, \"f2\": 0.9245753467352346, \"f0_5\": 0.9800132144037, \"p4\": 0.9751292181458383, \"phi\": 0.9525860094369215}, {\"truth_threshold\": 13.300000198185444, \"match_probability\": 0.9999008579398913, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5925.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 613.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9062404405016825, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.09375955949831753, \"precision\": 
1.0, \"recall\": 0.9062404405016825, \"specificity\": 1.0, \"npv\": 0.9999509269247473, \"accuracy\": 0.999950950190038, \"f1\": 0.9508144106555404, \"f2\": 0.9235589363095053, \"f0_5\": 0.9797274952047093, \"p4\": 0.9747754930426697, \"phi\": 0.9519432590739579}, {\"truth_threshold\": 13.40000019967556, \"match_probability\": 0.999907496573012, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5918.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 620.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9051697766901193, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.0948302233098807, \"precision\": 1.0, \"recall\": 0.9051697766901193, \"specificity\": 1.0, \"npv\": 0.9999503665748661, \"accuracy\": 0.9999503900780156, \"f1\": 0.9502247912652537, \"f2\": 0.9226691612098534, \"f0_5\": 0.9794769943727243, \"p4\": 0.9744654106484961, \"phi\": 0.951380496969417}, {\"truth_threshold\": 13.500000201165676, \"match_probability\": 0.9999136907162209, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5899.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 639.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9022636892015907, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.0977363107984093, \"precision\": 1.0, \"recall\": 0.9022636892015907, \"specificity\": 1.0, \"npv\": 0.9999488456283546, \"accuracy\": 0.9999488697739548, \"f1\": 0.948621050092466, \"f2\": 0.9202520982184643, \"f0_5\": 0.9787947169310414, \"p4\": 0.9736210515961913, \"phi\": 0.94985132229708}, {\"truth_threshold\": 13.600000202655792, \"match_probability\": 0.9999194701253888, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5895.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 643.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.9016518813092689, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.09834811869073111, 
\"precision\": 1.0, \"recall\": 0.9016518813092689, \"specificity\": 1.0, \"npv\": 0.9999485254296786, \"accuracy\": 0.999948549709942, \"f1\": 0.9482827957854097, \"f2\": 0.9197428776484539, \"f0_5\": 0.978650640812803, \"p4\": 0.9734427858148172, \"phi\": 0.949529077577985}, {\"truth_threshold\": 13.700000204145908, \"match_probability\": 0.9999248625650565, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5891.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 647.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.901040073416947, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.09895992658305292, \"precision\": 1.0, \"recall\": 0.901040073416947, \"specificity\": 1.0, \"npv\": 0.9999482052312076, \"accuracy\": 0.9999482296459292, \"f1\": 0.9479443237589509, \"f2\": 0.9192335299441375, \"f0_5\": 0.9785064115341173, \"p4\": 0.9732643434512526, \"phi\": 0.9492067236670164}, {\"truth_threshold\": 13.800000205636024, \"match_probability\": 0.999929893941616, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5868.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 670.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8975221780360967, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.10247782196390333, \"precision\": 1.0, \"recall\": 0.8975221780360967, \"specificity\": 1.0, \"npv\": 0.9999463640939791, \"accuracy\": 0.9999463892778556, \"f1\": 0.9459938739319684, \"f2\": 0.9163023110555902, \"f0_5\": 0.9776741086304566, \"p4\": 0.9722348598610446, \"phi\": 0.9473510640839033}, {\"truth_threshold\": 13.90000020712614, \"match_probability\": 0.9999345884275949, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5857.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 681.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8958397063322117, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 
0.10416029366778831, \"precision\": 1.0, \"recall\": 0.8958397063322117, \"specificity\": 1.0, \"npv\": 0.9999454835524838, \"accuracy\": 0.9999455091018203, \"f1\": 0.945058491327148, \"f2\": 0.9148989346746228, \"f0_5\": 0.9772742441433625, \"p4\": 0.9717404165919651, \"phi\": 0.9464622910258381}, {\"truth_threshold\": 14.000000208616257, \"match_probability\": 0.9999389685776376, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5847.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 691.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8943101866014072, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.10568981339859285, \"precision\": 1.0, \"recall\": 0.8943101866014072, \"specificity\": 1.0, \"npv\": 0.9999446830615611, \"accuracy\": 0.9999447089417883, \"f1\": 0.9442067016552281, \"f2\": 0.9136223006968968, \"f0_5\": 0.9769097106195281, \"p4\": 0.9712897475174503, \"phi\": 0.9456535920197574}, {\"truth_threshold\": 14.100000210106373, \"match_probability\": 0.9999430554367367, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5833.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 705.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8921688589782808, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.10783114102171917, \"precision\": 1.0, \"recall\": 0.8921688589782808, \"specificity\": 1.0, \"npv\": 0.9999435623764226, \"accuracy\": 0.9999435887177436, \"f1\": 0.9430118826287285, \"f2\": 0.9118336720337659, \"f0_5\": 0.976397723468363, \"p4\": 0.9706569222892635, \"phi\": 0.9445202523440408}, {\"truth_threshold\": 14.200000211596489, \"match_probability\": 0.9999468686412301, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5829.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 709.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8915570510859591, \"tn_rate\": 1.0, \"fp_rate\": 
0.0, \"fn_rate\": 0.10844294891404099, \"precision\": 1.0, \"recall\": 0.8915570510859591, \"specificity\": 1.0, \"npv\": 0.9999432421811302, \"accuracy\": 0.9999432686537307, \"f1\": 0.9426700088946389, \"f2\": 0.9113223476439136, \"f0_5\": 0.9762510886313391, \"p4\": 0.9704757089825157, \"phi\": 0.9441961916108014}, {\"truth_threshold\": 14.300000213086605, \"match_probability\": 0.9999504265130488, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5825.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 713.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8909452431936372, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.1090547568063628, \"precision\": 1.0, \"recall\": 0.8909452431936372, \"specificity\": 1.0, \"npv\": 0.9999429219860427, \"accuracy\": 0.999942948589718, \"f1\": 0.9423279139367468, \"f2\": 0.9108108953310192, \"f0_5\": 0.9761042965346203, \"p4\": 0.9702943146913535, \"phi\": 0.9438720198250454}, {\"truth_threshold\": 14.400000214576721, \"match_probability\": 0.9999537461476637, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5818.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 720.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.889874579382074, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.11012542061792598, \"precision\": 1.0, \"recall\": 0.889874579382074, \"specificity\": 1.0, \"npv\": 0.9999423616451332, \"accuracy\": 0.9999423884776956, \"f1\": 0.9417287147944319, \"f2\": 0.9099155458242102, \"f0_5\": 0.9758470311975848, \"p4\": 0.9699764383711981, \"phi\": 0.9433044516354625}, {\"truth_threshold\": 14.500000216066837, \"match_probability\": 0.9999568434961527, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5809.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 729.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8884980116243499, \"tn_rate\": 
1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.11150198837565005, \"precision\": 1.0, \"recall\": 0.8884980116243499, \"specificity\": 1.0, \"npv\": 0.9999416412077436, \"accuracy\": 0.9999416683336667, \"f1\": 0.9409573175670203, \"f2\": 0.9087638058884265, \"f0_5\": 0.9755155504802848, \"p4\": 0.9695669222447544, \"phi\": 0.9425742198646583}, {\"truth_threshold\": 14.600000217556953, \"match_probability\": 0.9999597334417798, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5799.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 739.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8869684918935454, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.11303150810645457, \"precision\": 1.0, \"recall\": 0.8869684918935454, \"specificity\": 1.0, \"npv\": 0.9999408407229728, \"accuracy\": 0.9999408681736347, \"f1\": 0.9400988895193321, \"f2\": 0.9074833338549654, \"f0_5\": 0.9751462971682249, \"p4\": 0.9691108215027341, \"phi\": 0.9417621883887773}, {\"truth_threshold\": 14.70000021904707, \"match_probability\": 0.9999624298714548, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5790.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 748.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8855919241358213, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.11440807586417864, \"precision\": 1.0, \"recall\": 0.8855919241358213, \"specificity\": 1.0, \"npv\": 0.9999401202877749, \"accuracy\": 0.999940148029606, \"f1\": 0.9393251135626217, \"f2\": 0.9063302235301484, \"f0_5\": 0.9748131187285339, \"p4\": 0.9686993528693028, \"phi\": 0.9410307620616105}, {\"truth_threshold\": 14.800000220537186, \"match_probability\": 0.9999649457424121, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5784.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 754.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 
0.8846742122973387, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.11532578770266136, \"precision\": 1.0, \"recall\": 0.8846742122973387, \"specificity\": 1.0, \"npv\": 0.9999396399982197, \"accuracy\": 0.9999396679335867, \"f1\": 0.9388086349618568, \"f2\": 0.9055611222444889, \"f0_5\": 0.974590550650401, \"p4\": 0.9684245241122661, \"phi\": 0.9405428290940873}, {\"truth_threshold\": 14.900000222027302, \"match_probability\": 0.9999672931444318, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5782.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 756.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8843683083511777, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.11563169164882227, \"precision\": 1.0, \"recall\": 0.8843683083511777, \"specificity\": 1.0, \"npv\": 0.9999394799018038, \"accuracy\": 0.9999395079015804, \"f1\": 0.9386363636363636, \"f2\": 0.9053046909250329, \"f0_5\": 0.9745162812647475, \"p4\": 0.9683328225377232, \"phi\": 0.9403801286151864}, {\"truth_threshold\": 15.000000223517418, \"match_probability\": 0.9999694833578969, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5778.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 760.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8837565004588559, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.11624349954114407, \"precision\": 1.0, \"recall\": 0.8837565004588559, \"specificity\": 1.0, \"npv\": 0.9999391597091257, \"accuracy\": 0.9999391878375675, \"f1\": 0.9382916531341344, \"f2\": 0.9047917319135609, \"f0_5\": 0.9743676222596964, \"p4\": 0.9681492811972435, \"phi\": 0.9400546433353254}, {\"truth_threshold\": 15.100000225007534, \"match_probability\": 0.9999715269079685, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5773.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 765.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 
0.9994768553710742, \"tp_rate\": 0.8829917405934536, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.11700825940654634, \"precision\": 1.0, \"recall\": 0.8829917405934536, \"specificity\": 1.0, \"npv\": 0.9999387594685667, \"accuracy\": 0.9999387877575515, \"f1\": 0.9378604500040614, \"f2\": 0.9041503523884103, \"f0_5\": 0.9741815727303409, \"p4\": 0.9679195950543471, \"phi\": 0.9396476284810218}, {\"truth_threshold\": 15.20000022649765, \"match_probability\": 0.9999734336151354, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5768.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 770.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8822269807280514, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.11777301927194861, \"precision\": 1.0, \"recall\": 0.8822269807280514, \"specificity\": 1.0, \"npv\": 0.999938359228328, \"accuracy\": 0.9999383876775355, \"f1\": 0.9374288964732651, \"f2\": 0.9035087719298246, \"f0_5\": 0.9739952718676123, \"p4\": 0.9676896201065586, \"phi\": 0.939240437575049}, {\"truth_threshold\": 15.300000227987766, \"match_probability\": 0.9999752126423825, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5766.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 772.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8819210767818905, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.11807892321810952, \"precision\": 1.0, \"recall\": 0.8819210767818905, \"specificity\": 1.0, \"npv\": 0.9999381991323222, \"accuracy\": 0.9999382276455291, \"f1\": 0.937256176853056, \"f2\": 0.9032520834638762, \"f0_5\": 0.9739206810350652, \"p4\": 0.967597549139933, \"phi\": 0.9390775118668969}, {\"truth_threshold\": 15.400000229477882, \"match_probability\": 0.9999768725392036, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5762.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 776.0, \"P_rate\": 
0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8813092688895686, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.11869073111043132, \"precision\": 1.0, \"recall\": 0.8813092688895686, \"specificity\": 1.0, \"npv\": 0.9999378789404645, \"accuracy\": 0.9999379075815163, \"f1\": 0.936910569105691, \"f2\": 0.9027386100144137, \"f0_5\": 0.9737713783546272, \"p4\": 0.9674132681786282, \"phi\": 0.9387515757771099}, {\"truth_threshold\": 15.500000230967999, \"match_probability\": 0.9999784212826682, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5757.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 781.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8805445090241664, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.11945549097583359, \"precision\": 1.0, \"recall\": 0.8805445090241664, \"specificity\": 1.0, \"npv\": 0.9999374787009306, \"accuracy\": 0.9999375075015003, \"f1\": 0.9364782431882879, \"f2\": 0.902096587169764, \"f0_5\": 0.9735845227626327, \"p4\": 0.9671826559380665, \"phi\": 0.9383439967504315}, {\"truth_threshold\": 15.600000232458115, \"match_probability\": 0.9999798663157408, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5750.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 788.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8794738452126032, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.12052615478739676, \"precision\": 1.0, \"recall\": 0.8794738452126032, \"specificity\": 1.0, \"npv\": 0.9999369183661216, \"accuracy\": 0.9999369473894779, \"f1\": 0.9358723958333334, \"f2\": 0.9011974170898376, \"f0_5\": 0.9733224998307265, \"p4\": 0.9668593105453163, \"phi\": 0.9377730890601915}, {\"truth_threshold\": 15.70000023394823, \"match_probability\": 0.9999812145830361, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5745.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 
793.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8787090853472009, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.12129091465279902, \"precision\": 1.0, \"recall\": 0.8787090853472009, \"specificity\": 1.0, \"npv\": 0.9999365181273568, \"accuracy\": 0.9999365473094619, \"f1\": 0.9354392249450459, \"f2\": 0.9005549111201681, \"f0_5\": 0.9731350362490684, \"p4\": 0.9666280000480074, \"phi\": 0.9373650853583969}, {\"truth_threshold\": 15.800000235438347, \"match_probability\": 0.9999824725641815, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5737.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 801.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8774854695625574, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.12251453043744265, \"precision\": 1.0, \"recall\": 0.8774854695625574, \"specificity\": 1.0, \"npv\": 0.9999358777459995, \"accuracy\": 0.9999359071814363, \"f1\": 0.934745417515275, \"f2\": 0.8995264824861238, \"f0_5\": 0.9728345655565354, \"p4\": 0.9662572959173861, \"phi\": 0.9367119104700208}, {\"truth_threshold\": 15.900000236928463, \"match_probability\": 0.9999836463049459, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5734.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 804.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.877026613643316, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.122973386356684, \"precision\": 1.0, \"recall\": 0.877026613643316, \"specificity\": 1.0, \"npv\": 0.999935637603202, \"accuracy\": 0.9999356671334266, \"f1\": 0.9344850065189049, \"f2\": 0.8991406887035063, \"f0_5\": 0.9727217208387053, \"p4\": 0.9661180887666886, \"phi\": 0.9364668526479762}, {\"truth_threshold\": 16.00000023841858, \"match_probability\": 0.9999847414462861, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5730.0, \"tn\": 12490962.0, \"fp\": 
0.0, \"fn\": 808.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8764148057509942, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.12358519424900581, \"precision\": 1.0, \"recall\": 0.8764148057509942, \"specificity\": 1.0, \"npv\": 0.9999353174129847, \"accuracy\": 0.9999353470694139, \"f1\": 0.9341375937398109, \"f2\": 0.898626184053698, \"f0_5\": 0.9725711182021861, \"p4\": 0.9659323150777976, \"phi\": 0.9361400092796267}, {\"truth_threshold\": 16.100000239908695, \"match_probability\": 0.9999857632514492, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5727.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 811.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8759559498317528, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.12404405016824717, \"precision\": 1.0, \"recall\": 0.8759559498317528, \"specificity\": 1.0, \"npv\": 0.9999350772704563, \"accuracy\": 0.9999351070214043, \"f1\": 0.9338768854463921, \"f2\": 0.8982402208350325, \"f0_5\": 0.9724580588195341, \"p4\": 0.9657928615394306, \"phi\": 0.9358948019839248}, {\"truth_threshold\": 16.20000024139881, \"match_probability\": 0.9999867166312594, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5721.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 817.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8750382379932701, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.12496176200672988, \"precision\": 1.0, \"recall\": 0.8750382379932701, \"specificity\": 1.0, \"npv\": 0.9999345969857456, \"accuracy\": 0.999934626925385, \"f1\": 0.9333550860592218, \"f2\": 0.8974680764283249, \"f0_5\": 0.9722316633811433, \"p4\": 0.965513636956696, \"phi\": 0.9354041949098355}, {\"truth_threshold\": 16.300000242888927, \"match_probability\": 0.9999876061677141, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5714.0, \"tn\": 
12490962.0, \"fp\": 0.0, \"fn\": 824.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8739675741817069, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.12603242581829305, \"precision\": 1.0, \"recall\": 0.8739675741817069, \"specificity\": 1.0, \"npv\": 0.9999340366541661, \"accuracy\": 0.9999340668133627, \"f1\": 0.9327456741756448, \"f2\": 0.8965668737839704, \"f0_5\": 0.9719670681091379, \"p4\": 0.9651873388024544, \"phi\": 0.9348314951671043}, {\"truth_threshold\": 16.400000244379044, \"match_probability\": 0.9999884361359999, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5712.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 826.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8736616702355461, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.12633832976445397, \"precision\": 1.0, \"recall\": 0.8736616702355461, \"specificity\": 1.0, \"npv\": 0.9999338765595446, \"accuracy\": 0.9999339067813563, \"f1\": 0.9325714285714286, \"f2\": 0.8963093145869947, \"f0_5\": 0.9718913768461171, \"p4\": 0.9650940045069124, \"phi\": 0.9346678023341318}, {\"truth_threshold\": 16.50000024586916, \"match_probability\": 0.9999892105250341, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5708.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 830.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8730498623432242, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.12695013765677576, \"precision\": 1.0, \"recall\": 0.8730498623432242, \"specificity\": 1.0, \"npv\": 0.9999335563704551, \"accuracy\": 0.9999335867173434, \"f1\": 0.9322227666176711, \"f2\": 0.8957940991839297, \"f0_5\": 0.9717398706162751, \"p4\": 0.9649071940136829, \"phi\": 0.934340330790444}, {\"truth_threshold\": 16.600000247359276, \"match_probability\": 0.9999899330566321, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, 
\"tp\": 5702.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 836.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8721321505047415, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.12786784949525848, \"precision\": 1.0, \"recall\": 0.8721321505047415, \"specificity\": 1.0, \"npv\": 0.9999330760872054, \"accuracy\": 0.9999331066213243, \"f1\": 0.9316993464052288, \"f2\": 0.8950210334651849, \"f0_5\": 0.9715123015061677, \"p4\": 0.9646266229782946, \"phi\": 0.9338489085546738}, {\"truth_threshold\": 16.700000248849392, \"match_probability\": 0.9999906072033913, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5696.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 842.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8712144386662588, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.1287855613337412, \"precision\": 1.0, \"recall\": 0.8712144386662588, \"specificity\": 1.0, \"npv\": 0.9999325958044171, \"accuracy\": 0.999932626525305, \"f1\": 0.9311754127840445, \"f2\": 0.8942476764632002, \"f0_5\": 0.971284359866312, \"p4\": 0.9643456247219898, \"phi\": 0.9333572280524967}, {\"truth_threshold\": 16.800000250339508, \"match_probability\": 0.9999912362053778, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5692.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 846.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.870602630773937, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.129397369226063, \"precision\": 1.0, \"recall\": 0.870602630773937, \"specificity\": 1.0, \"npv\": 0.9999322756161478, \"accuracy\": 0.9999323064612923, \"f1\": 0.9308258381030253, \"f2\": 0.8937319432232131, \"f0_5\": 0.9711321913601311, \"p4\": 0.9641580547236859, \"phi\": 0.9330292973680878}, {\"truth_threshold\": 16.900000251829624, \"match_probability\": 0.999991823085696, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 
12490962.0, \"tp\": 5682.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 856.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8690731110431325, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.13092688895686755, \"precision\": 1.0, \"recall\": 0.8690731110431325, \"specificity\": 1.0, \"npv\": 0.9999314751463718, \"accuracy\": 0.9999315063012603, \"f1\": 0.9299509001636661, \"f2\": 0.8924420430985739, \"f0_5\": 0.9707510421649695, \"p4\": 0.9636882952140735, \"phi\": 0.9322089668821074}, {\"truth_threshold\": 17.00000025331974, \"match_probability\": 0.9999923706650156, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5669.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 869.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8670847353930866, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.13291526460691344, \"precision\": 1.0, \"recall\": 0.8670847353930866, \"specificity\": 1.0, \"npv\": 0.999930434537579, \"accuracy\": 0.9999304660932187, \"f1\": 0.9288113377570246, \"f2\": 0.8907639609063197, \"f0_5\": 0.9702539878140618, \"p4\": 0.9630758194365757, \"phi\": 0.9311414587711745}, {\"truth_threshold\": 17.100000254809856, \"match_probability\": 0.9999928815751264, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5663.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 875.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8661670235546038, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.13383297644539616, \"precision\": 1.0, \"recall\": 0.8661670235546038, \"specificity\": 1.0, \"npv\": 0.9999299542573282, \"accuracy\": 0.9999299859971994, \"f1\": 0.9282845668387837, \"f2\": 0.88998899889989, \"f0_5\": 0.9700239808153477, \"p4\": 0.9627924539162589, \"phi\": 0.9306483504751734}, {\"truth_threshold\": 17.200000256299973, \"match_probability\": 0.999993358271586, \"total_clerical_labels\": 12497500.0, \"p\": 
6538.0, \"n\": 12490962.0, \"tp\": 5650.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 888.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.864178647904558, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.13582135209544202, \"precision\": 1.0, \"recall\": 0.864178647904558, \"specificity\": 1.0, \"npv\": 0.9999289136517009, \"accuracy\": 0.9999289457891578, \"f1\": 0.9271414506071546, \"f2\": 0.8883089113892209, \"f0_5\": 0.969524332486787, \"p4\": 0.9621770063779627, \"phi\": 0.9295790534431165}, {\"truth_threshold\": 17.30000025779009, \"match_probability\": 0.999993803045519, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5646.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 892.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8635668400122362, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.13643315998776384, \"precision\": 1.0, \"recall\": 0.8635668400122362, \"specificity\": 1.0, \"npv\": 0.9999285934657898, \"accuracy\": 0.9999286257251451, \"f1\": 0.9267892317793828, \"f2\": 0.887791685011636, \"f0_5\": 0.9693702355607444, \"p4\": 0.9619872267971805, \"phi\": 0.9292497918736017}, {\"truth_threshold\": 17.400000259280205, \"match_probability\": 0.9999942180346287, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5639.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 899.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.862496176200673, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.13750382379932702, \"precision\": 1.0, \"recall\": 0.862496176200673, \"specificity\": 1.0, \"npv\": 0.9999280331409387, \"accuracy\": 0.9999280656131226, \"f1\": 0.9261722920259505, \"f2\": 0.8868862256613507, \"f0_5\": 0.969100158108201, \"p4\": 0.9616546455829704, \"phi\": 0.9286733037295297}, {\"truth_threshold\": 17.50000026077032, \"match_probability\": 0.9999946052334694, \"total_clerical_labels\": 
12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5637.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 901.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8621902722545121, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.13780972774548791, \"precision\": 1.0, \"recall\": 0.8621902722545121, \"specificity\": 1.0, \"npv\": 0.9999278730482395, \"accuracy\": 0.9999279055811162, \"f1\": 0.9259958932238193, \"f2\": 0.8866274497467678, \"f0_5\": 0.9690228976139723, \"p4\": 0.9615595130407129, \"phi\": 0.9285085272081979}, {\"truth_threshold\": 17.600000262260437, \"match_probability\": 0.999994966503032, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5627.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 911.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8606607525237076, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.13933924747629245, \"precision\": 1.0, \"recall\": 0.8606607525237076, \"specificity\": 1.0, \"npv\": 0.9999270725855122, \"accuracy\": 0.9999271054210842, \"f1\": 0.9251130291820797, \"f2\": 0.8853330815947639, \"f0_5\": 0.9686359567582455, \"p4\": 0.9610831196338553, \"phi\": 0.9276842063764343}, {\"truth_threshold\": 17.700000263750553, \"match_probability\": 0.9999953035796879, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5621.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 917.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8597430406852249, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.14025695931477516, \"precision\": 1.0, \"recall\": 0.8597430406852249, \"specificity\": 1.0, \"npv\": 0.999926592308491, \"accuracy\": 0.999926625325065, \"f1\": 0.9245826137017847, \"f2\": 0.8845560696188588, \"f0_5\": 0.9684032802701399, \"p4\": 0.9607966976842784, \"phi\": 0.9271892627362103}, {\"truth_threshold\": 17.80000026524067, \"match_probability\": 0.9999956180835331, 
\"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5613.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 925.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8585194249005812, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.14148057509941878, \"precision\": 1.0, \"recall\": 0.8585194249005812, \"specificity\": 1.0, \"npv\": 0.999925951939847, \"accuracy\": 0.9999259851970395, \"f1\": 0.9238745782240145, \"f2\": 0.8835195970407681, \"f0_5\": 0.968092445670921, \"p4\": 0.9604141160896794, \"phi\": 0.9265289273425649}, {\"truth_threshold\": 17.900000266730785, \"match_probability\": 0.9999959115261747, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5610.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 928.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8580605689813399, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.14193943101866013, \"precision\": 1.0, \"recall\": 0.8580605689813399, \"specificity\": 1.0, \"npv\": 0.999925711801817, \"accuracy\": 0.9999257451490298, \"f1\": 0.9236088244978597, \"f2\": 0.8831307852150369, \"f0_5\": 0.9679757057077784, \"p4\": 0.9602704455254009, \"phi\": 0.9262811804240322}, {\"truth_threshold\": 18.0000002682209, \"match_probability\": 0.9999961853179954, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5601.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 937.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8566840012236158, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.1433159987763842, \"precision\": 1.0, \"recall\": 0.8566840012236158, \"specificity\": 1.0, \"npv\": 0.9999249913884191, \"accuracy\": 0.999925025005001, \"f1\": 0.9228107751874125, \"f2\": 0.8819639089219916, \"f0_5\": 0.9676249049823785, \"p4\": 0.9598387695121465, \"phi\": 0.9255375424833509}, {\"truth_threshold\": 18.100000269711018, \"match_probability\": 
0.999996440774932, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5585.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 953.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8542367696543285, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.14576323034567146, \"precision\": 1.0, \"recall\": 0.8542367696543285, \"specificity\": 1.0, \"npv\": 0.9999237106560523, \"accuracy\": 0.9999237447489497, \"f1\": 0.9213890951084714, \"f2\": 0.8798878280870908, \"f0_5\": 0.9669990996606413, \"p4\": 0.9590688761690648, \"phi\": 0.9242140447383365}, {\"truth_threshold\": 18.200000271201134, \"match_probability\": 0.9999966791247992, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5573.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 965.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8524013459773631, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.1475986540226369, \"precision\": 1.0, \"recall\": 0.8524013459773631, \"specificity\": 1.0, \"npv\": 0.9999227501089304, \"accuracy\": 0.9999227845569114, \"f1\": 0.9203203699116506, \"f2\": 0.8783293932230103, \"f0_5\": 0.9665279223031564, \"p4\": 0.958489372032922, \"phi\": 0.9232201785415214}, {\"truth_threshold\": 18.30000027269125, \"match_probability\": 0.999996901513191, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5553.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 985.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.849342306515754, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.15065769348424596, \"precision\": 1.0, \"recall\": 0.849342306515754, \"specificity\": 1.0, \"npv\": 0.9999211492011614, \"accuracy\": 0.9999211842368474, \"f1\": 0.9185344471094202, \"f2\": 0.8757293802239394, \"f0_5\": 0.9657391304347827, \"p4\": 0.9575195373920614, \"phi\": 0.9215613572608162}, {\"truth_threshold\": 18.400000274181366, 
\"match_probability\": 0.9999971090089864, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5546.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 992.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8482716427041909, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.1517283572958091, \"precision\": 1.0, \"recall\": 0.8482716427041909, \"specificity\": 1.0, \"npv\": 0.9999205888846533, \"accuracy\": 0.999920624124825, \"f1\": 0.917907977490897, \"f2\": 0.874818600542621, \"f0_5\": 0.9654620151800014, \"p4\": 0.9571789097119904, \"phi\": 0.9209800652060427}, {\"truth_threshold\": 18.500000275671482, \"match_probability\": 0.9999973026094866, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5533.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1005.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.846283267054145, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.153716732945855, \"precision\": 1.0, \"recall\": 0.846283267054145, \"specificity\": 1.0, \"npv\": 0.9999195482985186, \"accuracy\": 0.9999195839167834, \"f1\": 0.9167426062463756, \"f2\": 0.8731260848982169, \"f0_5\": 0.9649459365190094, \"p4\": 0.9565446760663825, \"phi\": 0.9198995500191177}, {\"truth_threshold\": 18.600000277161598, \"match_probability\": 0.999997483245208, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5516.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1022.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8436830835117773, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.15631691648822268, \"precision\": 1.0, \"recall\": 0.8436830835117773, \"specificity\": 1.0, \"npv\": 0.9999181875353026, \"accuracy\": 0.9999182236447289, \"f1\": 0.9152148664343787, \"f2\": 0.8709106984969054, \"f0_5\": 0.9642682329907, \"p4\": 0.9557120624321677, \"phi\": 0.918484654046703}, {\"truth_threshold\": 
18.700000278651714, \"match_probability\": 0.9999976517843541, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5508.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1030.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8424594677271336, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.15754053227286632, \"precision\": 1.0, \"recall\": 0.8424594677271336, \"specificity\": 1.0, \"npv\": 0.9999175471774238, \"accuracy\": 0.9999175835167033, \"f1\": 0.9144944379877138, \"f2\": 0.8698673404927353, \"f0_5\": 0.9639481974098705, \"p4\": 0.9553189708600036, \"phi\": 0.9178180672475964}, {\"truth_threshold\": 18.80000028014183, \"match_probability\": 0.9999978090369889, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5500.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1038.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8412358519424901, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.15876414805750994, \"precision\": 1.0, \"recall\": 0.8412358519424901, \"specificity\": 1.0, \"npv\": 0.999916906820365, \"accuracy\": 0.9999169433886778, \"f1\": 0.9137730520019937, \"f2\": 0.868823455073929, \"f0_5\": 0.9636274441096082, \"p4\": 0.9549250608150092, \"phi\": 0.9171509968269834}, {\"truth_threshold\": 18.900000281631947, \"match_probability\": 0.9999979557589296, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5492.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1046.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8400122361578465, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.15998776384215357, \"precision\": 1.0, \"recall\": 0.8400122361578465, \"specificity\": 1.0, \"npv\": 0.9999162664641265, \"accuracy\": 0.9999163032606522, \"f1\": 0.913050706566916, \"f2\": 0.8677790418404753, \"f0_5\": 0.9633059706728408, \"p4\": 0.9545303297363974, \"phi\": 0.9164834417288378}, 
{\"truth_threshold\": 19.000000283122063, \"match_probability\": 0.9999980926553794, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5475.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1063.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8374120526154787, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.16258794738452126, \"precision\": 1.0, \"recall\": 0.8374120526154787, \"specificity\": 1.0, \"npv\": 0.9999149057098429, \"accuracy\": 0.9999149429885977, \"f1\": 0.9115125280945642, \"f2\": 0.8655579093812249, \"f0_5\": 0.9626204374428582, \"p4\": 0.9536887883524469, \"phi\": 0.9150632730206653}, {\"truth_threshold\": 19.10000028461218, \"match_probability\": 0.9999982203843173, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5465.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1073.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8358825328846742, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.1641174671153258, \"precision\": 1.0, \"recall\": 0.8358825328846742, \"specificity\": 1.0, \"npv\": 0.9999141052678767, \"accuracy\": 0.9999141428285657, \"f1\": 0.9106056819128551, \"f2\": 0.8642502451212956, \"f0_5\": 0.9622156489893654, \"p4\": 0.9531920167099226, \"phi\": 0.91422685093932}, {\"truth_threshold\": 19.200000286102295, \"match_probability\": 0.9999983395596597, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5456.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1082.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8345059651269502, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.16549403487304987, \"precision\": 1.0, \"recall\": 0.8345059651269502, \"specificity\": 1.0, \"npv\": 0.9999133848712028, \"accuracy\": 0.9999134226845369, \"f1\": 0.9097882274470569, \"f2\": 0.8630726398380157, \"f0_5\": 0.9618503631619773, \"p4\": 0.9527438103909488, \"phi\": 
0.9134734173939046}, {\"truth_threshold\": 19.30000028759241, \"match_probability\": 0.9999984507542113, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5439.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1099.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8319057815845824, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.16809421841541755, \"precision\": 1.0, \"recall\": 0.8319057815845824, \"specificity\": 1.0, \"npv\": 0.9999120241247621, \"accuracy\": 0.9999120624124825, \"f1\": 0.9082407948568089, \"f2\": 0.8608464436073565, \"f0_5\": 0.9611578426521524, \"p4\": 0.9518943109932, \"phi\": 0.912048569948625}, {\"truth_threshold\": 19.400000289082527, \"match_probability\": 0.9999985545024187, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5427.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1111.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.830070357907617, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.16992964209238298, \"precision\": 1.0, \"recall\": 0.830070357907617, \"specificity\": 1.0, \"npv\": 0.9999110636000926, \"accuracy\": 0.9999111022204441, \"f1\": 0.9071458420392813, \"f2\": 0.8592735678773868, \"f0_5\": 0.960666997096934, \"p4\": 0.9512923796149697, \"phi\": 0.9110414559383754}, {\"truth_threshold\": 19.500000290572643, \"match_probability\": 0.9999986513029383, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5425.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1113.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8297644539614561, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.1702355460385439, \"precision\": 1.0, \"recall\": 0.8297644539614561, \"specificity\": 1.0, \"npv\": 0.9999109035128272, \"accuracy\": 0.9999109421884377, \"f1\": 0.9069631363370392, \"f2\": 0.8590113056971846, \"f0_5\": 0.9605850272682201, \"p4\": 
0.951191873139123, \"phi\": 0.9108734955324078}, {\"truth_threshold\": 19.60000029206276, \"match_probability\": 0.9999987416210334, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5405.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1133.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.826705414499847, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.17329458550015295, \"precision\": 1.0, \"recall\": 0.826705414499847, \"specificity\": 1.0, \"npv\": 0.9999093026429914, \"accuracy\": 0.9999093418683737, \"f1\": 0.90513271372352, \"f2\": 0.8563868555312609, \"f0_5\": 0.9597627672419916, \"p4\": 0.9501838948422977, \"phi\": 0.9091921878809383}, {\"truth_threshold\": 19.700000293552876, \"match_probability\": 0.9999988258908107, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5393.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1145.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8248699908228816, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.17513000917711838, \"precision\": 1.0, \"recall\": 0.8248699908228816, \"specificity\": 1.0, \"npv\": 0.9999083421235505, \"accuracy\": 0.9999083816763352, \"f1\": 0.9040315145419495, \"f2\": 0.8548105880488192, \"f0_5\": 0.9592671647100676, \"p4\": 0.9495765537221438, \"phi\": 0.9081819118387989}, {\"truth_threshold\": 19.80000029504299, \"match_probability\": 0.9999989045173057, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5380.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1158.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8228816151728358, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.17711838482716427, \"precision\": 1.0, \"recall\": 0.8228816151728358, \"specificity\": 1.0, \"npv\": 0.9999073015629053, \"accuracy\": 0.9999073414682936, \"f1\": 0.902836046316496, \"f2\": 0.8531016110617785, \"f0_5\": 
0.9587283484211276, \"p4\": 0.94891642626389, \"phi\": 0.9070861785592345}, {\"truth_threshold\": 19.900000296533108, \"match_probability\": 0.9999989778784306, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5365.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1173.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8205873355766289, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.17941266442337106, \"precision\": 1.0, \"recall\": 0.8205873355766289, \"specificity\": 1.0, \"npv\": 0.999906100918698, \"accuracy\": 0.9999061412282456, \"f1\": 0.9014534151054356, \"f2\": 0.8511279626868039, \"f0_5\": 0.9581041502964498, \"p4\": 0.9481519156103556, \"phi\": 0.9058202267446286}, {\"truth_threshold\": 20.000000298023224, \"match_probability\": 0.99999904632679, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5350.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1188.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8182930559804221, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.18170694401957785, \"precision\": 1.0, \"recall\": 0.8182930559804221, \"specificity\": 1.0, \"npv\": 0.9999049002773742, \"accuracy\": 0.9999049409881976, \"f1\": 0.9000672947510094, \"f2\": 0.8491524347660466, \"f0_5\": 0.9574772711002935, \"p4\": 0.9473843607495207, \"phi\": 0.9045525062359684}, {\"truth_threshold\": 20.10000029951334, \"match_probability\": 0.9999991101913761, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5334.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1204.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.815845824411135, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.1841541755888651, \"precision\": 1.0, \"recall\": 0.815845824411135, \"specificity\": 1.0, \"npv\": 0.9999036195964734, \"accuracy\": 0.9999036607321464, \"f1\": 0.8985849056603774, \"f2\": 
0.8470431302801245, \"f0_5\": 0.9568056253139126, \"p4\": 0.9465622596270835, \"phi\": 0.9031983131413405}, {\"truth_threshold\": 20.200000301003456, \"match_probability\": 0.9999991697791492, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5315.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1223.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8129397369226063, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.1870602630773937, \"precision\": 1.0, \"recall\": 0.8129397369226063, \"specificity\": 1.0, \"npv\": 0.9999020987921649, \"accuracy\": 0.9999021404280856, \"f1\": 0.8968193706234708, \"f2\": 0.8445355451743096, \"f0_5\": 0.9560040290668393, \"p4\": 0.9455814581380264, \"phi\": 0.9015875715317201}, {\"truth_threshold\": 20.300000302493572, \"match_probability\": 0.9999992253765136, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5295.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1243.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8098806974609972, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.19011930253900275, \"precision\": 1.0, \"recall\": 0.8098806974609972, \"specificity\": 1.0, \"npv\": 0.9999004979505219, \"accuracy\": 0.9999005401080217, \"f1\": 0.8949547874588016, \"f2\": 0.8418927083664578, \"f0_5\": 0.9551554946244317, \"p4\": 0.9445436518326642, \"phi\": 0.8998889446324847}, {\"truth_threshold\": 20.40000030398369, \"match_probability\": 0.9999992772506945, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5282.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1256.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8078923218109514, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.19210767818904864, \"precision\": 1.0, \"recall\": 0.8078923218109514, \"specificity\": 1.0, \"npv\": 0.9998994574062028, \"accuracy\": 0.99989949989998, \"f1\": 
0.8937394247038917, \"f2\": 0.8401730610167335, \"f0_5\": 0.9546013156943541, \"p4\": 0.9438660960696804, \"phi\": 0.8987831185672145}, {\"truth_threshold\": 20.500000305473804, \"match_probability\": 0.9999993256510213, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5256.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1282.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8039155705108596, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.1960844294891404, \"precision\": 1.0, \"recall\": 0.8039155705108596, \"specificity\": 1.0, \"npv\": 0.9998973763240615, \"accuracy\": 0.9998974194838968, \"f1\": 0.8913006613532305, \"f2\": 0.8367294956698931, \"f0_5\": 0.9534866845657064, \"p4\": 0.9425038810461902, \"phi\": 0.8965673815948635}, {\"truth_threshold\": 20.60000030696392, \"match_probability\": 0.9999993708101274, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5246.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1292.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.8023860507800551, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.19761394921994493, \"precision\": 1.0, \"recall\": 0.8023860507800551, \"specificity\": 1.0, \"npv\": 0.99989657591016, \"accuracy\": 0.9998966193238648, \"f1\": 0.8903598099117448, \"f2\": 0.8354035288871903, \"f0_5\": 0.9530557372283991, \"p4\": 0.9419774135956439, \"phi\": 0.8957137180668011}, {\"truth_threshold\": 20.700000308454037, \"match_probability\": 0.9999994129450668, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5237.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1301.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.801009483022331, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.198990516977669, \"precision\": 1.0, \"recall\": 0.801009483022331, \"specificity\": 1.0, \"npv\": 0.9998958555387443, \"accuracy\": 
0.999895899179836, \"f1\": 0.8895116772823779, \"f2\": 0.8342094364267737, \"f0_5\": 0.9526668121952995, \"p4\": 0.9415023797402384, \"phi\": 0.8949447258469435}, {\"truth_threshold\": 20.800000309944153, \"match_probability\": 0.9999994522583585, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5220.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1318.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.7984092994799633, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.20159070052003672, \"precision\": 1.0, \"recall\": 0.7984092994799633, \"specificity\": 1.0, \"npv\": 0.9998944948400131, \"accuracy\": 0.9998945389077816, \"f1\": 0.887906106480694, \"f2\": 0.8319520591610353, \"f0_5\": 0.9519293894521847, \"p4\": 0.9406019425673022, \"phi\": 0.8934903822532655}, {\"truth_threshold\": 20.90000031143427, \"match_probability\": 0.9999994889389594, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5182.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1356.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.7925971245029061, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2074028754970939, \"precision\": 1.0, \"recall\": 0.7925971245029061, \"specificity\": 1.0, \"npv\": 0.9998914532915348, \"accuracy\": 0.99989149829966, \"f1\": 0.8843003412969284, \"f2\": 0.8268973000574456, \"f0_5\": 0.9502677327074012, \"p4\": 0.9385741715721825, \"phi\": 0.8902309198707391}, {\"truth_threshold\": 21.000000312924385, \"match_probability\": 0.9999995231631726, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5156.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1382.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.7886203732028143, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2113796267971857, \"precision\": 1.0, \"recall\": 0.7886203732028143, \"specificity\": 1.0, \"npv\": 
0.9998893722427112, \"accuracy\": 0.9998894178835768, \"f1\": 0.8818197366170686, \"f2\": 0.8234317107448575, \"f0_5\": 0.9491200942493189, \"p4\": 0.9371746514365215, \"phi\": 0.8879938794268655}, {\"truth_threshold\": 21.1000003144145, \"match_probability\": 0.9999995550954947, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5137.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1401.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.7857142857142857, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.21428571428571427, \"precision\": 1.0, \"recall\": 0.7857142857142857, \"specificity\": 1.0, \"npv\": 0.9998878514817413, \"accuracy\": 0.9998878975795159, \"f1\": 0.88, \"f2\": 0.8208955223880597, \"f0_5\": 0.9482758620689655, \"p4\": 0.9361456385034252, \"phi\": 0.8863555545160013}, {\"truth_threshold\": 21.200000315904617, \"match_probability\": 0.9999995848894065, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5117.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1421.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.7826552462526767, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.21734475374732334, \"precision\": 1.0, \"recall\": 0.7826552462526767, \"specificity\": 1.0, \"npv\": 0.9998862506857179, \"accuracy\": 0.9998862972594519, \"f1\": 0.878078078078078, \"f2\": 0.8182225207074099, \"f0_5\": 0.9473820632452048, \"p4\": 0.9350566806817384, \"phi\": 0.884627729474436}, {\"truth_threshold\": 21.300000317394733, \"match_probability\": 0.9999996126881108, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5100.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1438.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.780055062710309, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.21994493728969103, \"precision\": 1.0, \"recall\": 0.780055062710309, \"specificity\": 1.0, 
\"npv\": 0.999884890013128, \"accuracy\": 0.9998849369873974, \"f1\": 0.876439250730366, \"f2\": 0.8159477793421221, \"f0_5\": 0.9466181602197639, \"p4\": 0.9341263644918284, \"phi\": 0.8831564247528751}, {\"truth_threshold\": 21.40000031888485, \"match_probability\": 0.9999996386252203, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5081.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1457.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.7771489752217804, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.22285102477821964, \"precision\": 1.0, \"recall\": 0.7771489752217804, \"specificity\": 1.0, \"npv\": 0.9998833692657924, \"accuracy\": 0.9998834166833367, \"f1\": 0.8746019450899389, \"f2\": 0.8134024909550795, \"f0_5\": 0.945759809396173, \"p4\": 0.9330814467070927, \"phi\": 0.8815091240402514}, {\"truth_threshold\": 21.500000320374966, \"match_probability\": 0.9999996628254004, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5053.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1485.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.7728663199755277, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.22713368002447232, \"precision\": 1.0, \"recall\": 0.7728663199755277, \"specificity\": 1.0, \"npv\": 0.9998811281728872, \"accuracy\": 0.9998811762352471, \"f1\": 0.8718833577775861, \"f2\": 0.8096458900817177, \"f0_5\": 0.9444859813084112, \"p4\": 0.9315315665917477, \"phi\": 0.879075905678206}, {\"truth_threshold\": 21.600000321865082, \"match_probability\": 0.999999685404968, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5035.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1503.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.7701131844600795, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.22988681553992046, \"precision\": 1.0, \"recall\": 0.7701131844600795, 
\"specificity\": 1.0, \"npv\": 0.9998796874756103, \"accuracy\": 0.9998797359471895, \"f1\": 0.8701287479478096, \"f2\": 0.8072273703786834, \"f0_5\": 0.9436614438863483, \"p4\": 0.9305288657944325, \"phi\": 0.8775081368276828}, {\"truth_threshold\": 21.700000323355198, \"match_probability\": 0.9999997064724503, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5014.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1524.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.7669011930253901, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.23309880697460997, \"precision\": 1.0, \"recall\": 0.7669011930253901, \"specificity\": 1.0, \"npv\": 0.9998780066673679, \"accuracy\": 0.9998780556111222, \"f1\": 0.8680747922437673, \"f2\": 0.8044022332028492, \"f0_5\": 0.942693840715951, \"p4\": 0.9293527094172518, \"phi\": 0.8756755313431188}, {\"truth_threshold\": 21.800000324845314, \"match_probability\": 0.999999726129107, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4995.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1543.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.7639951055368615, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.23600489446313858, \"precision\": 1.0, \"recall\": 0.7639951055368615, \"specificity\": 1.0, \"npv\": 0.9998764859409702, \"accuracy\": 0.9998765353070614, \"f1\": 0.8662100060695396, \"f2\": 0.8018428741130768, \"f0_5\": 0.9418131080775323, \"p4\": 0.9282826382358809, \"phi\": 0.8740141540045548}, {\"truth_threshold\": 21.90000032633543, \"match_probability\": 0.9999997444694171, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4968.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1570.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.7598654022636891, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2401345977363108, \"precision\": 1.0, \"recall\": 
0.7598654022636891, \"specificity\": 1.0, \"npv\": 0.9998743249166783, \"accuracy\": 0.999874374874975, \"f1\": 0.8635494524595863, \"f2\": 0.7982005141388174, \"f0_5\": 0.9405528209011738, \"p4\": 0.926752229548712, \"phi\": 0.8716478108249607}, {\"truth_threshold\": 22.000000327825546, \"match_probability\": 0.9999997615815319, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4944.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1594.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.7561945549097583, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.24380544509024166, \"precision\": 1.0, \"recall\": 0.7561945549097583, \"specificity\": 1.0, \"npv\": 0.9998724040140384, \"accuracy\": 0.9998724544908982, \"f1\": 0.861174011496255, \"f2\": 0.7949575508103937, \"f0_5\": 0.939423880823896, \"p4\": 0.9253821320767224, \"phi\": 0.8695389971243072}, {\"truth_threshold\": 22.100000329315662, \"match_probability\": 0.9999997775477002, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4926.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1612.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.7534414193943102, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2465585806056898, \"precision\": 1.0, \"recall\": 0.7534414193943102, \"specificity\": 1.0, \"npv\": 0.9998709633419022, \"accuracy\": 0.9998710142028405, \"f1\": 0.8593859036985345, \"f2\": 0.7925220413153999, \"f0_5\": 0.9385717552015852, \"p4\": 0.9243484882864478, \"phi\": 0.8679540297916007}, {\"truth_threshold\": 22.20000033080578, \"match_probability\": 0.9999997924446623, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4896.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1642.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.7488528602018966, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2511471397981034, \"precision\": 
1.0, \"recall\": 0.7488528602018966, \"specificity\": 1.0, \"npv\": 0.9998685622309008, \"accuracy\": 0.9998686137227446, \"f1\": 0.8563932132237188, \"f2\": 0.7884565833548055, \"f0_5\": 0.9371411071127785, \"p4\": 0.9226140677460337, \"phi\": 0.8653059763185321}, {\"truth_threshold\": 22.300000332295895, \"match_probability\": 0.9999998063440199, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4866.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1672.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.744264301009483, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.25573569899051696, \"precision\": 1.0, \"recall\": 0.744264301009483, \"specificity\": 1.0, \"npv\": 0.9998661611314316, \"accuracy\": 0.9998662132426486, \"f1\": 0.8533847772711329, \"f2\": 0.7843832613321298, \"f0_5\": 0.9356972540573802, \"p4\": 0.9208648844302996, \"phi\": 0.8626498070002218}, {\"truth_threshold\": 22.40000033378601, \"match_probability\": 0.9999998193125794, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4850.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1688.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.7418170694401958, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.25818293055980424, \"precision\": 1.0, \"recall\": 0.7418170694401958, \"specificity\": 1.0, \"npv\": 0.9998648805497633, \"accuracy\": 0.9998649329865973, \"f1\": 0.8517737969792765, \"f2\": 0.7822075995097091, \"f0_5\": 0.934921736448454, \"p4\": 0.9199258850585345, \"phi\": 0.8612298389661128}, {\"truth_threshold\": 22.500000335276127, \"match_probability\": 0.9999998314126736, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4839.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1699.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.7401345977363109, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 
0.2598654022636892, \"precision\": 1.0, \"recall\": 0.7401345977363109, \"specificity\": 1.0, \"npv\": 0.9998640001517691, \"accuracy\": 0.9998640528105621, \"f1\": 0.85066361958337, \"f2\": 0.7807105288632183, \"f0_5\": 0.9343863443268711, \"p4\": 0.919277840796136, \"phi\": 0.8602522534369487}, {\"truth_threshold\": 22.600000336766243, \"match_probability\": 0.9999998427024609, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4817.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1721.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.7367696543285408, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.26323034567145914, \"precision\": 1.0, \"recall\": 0.7367696543285408, \"specificity\": 1.0, \"npv\": 0.9998622393604321, \"accuracy\": 0.9998622924584917, \"f1\": 0.8484368119771026, \"f2\": 0.7777131970680358, \"f0_5\": 0.9333100829264512, \"p4\": 0.9179756421868027, \"phi\": 0.8582937471925018}, {\"truth_threshold\": 22.70000033825636, \"match_probability\": 0.9999998532362051, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4801.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1737.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.7343224227592536, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2656775772407464, \"precision\": 1.0, \"recall\": 0.7343224227592536, \"specificity\": 1.0, \"npv\": 0.9998609587888093, \"accuracy\": 0.9998610122024405, \"f1\": 0.8468118881735602, \"f2\": 0.7755306432332891, \"f0_5\": 0.9325227255069536, \"p4\": 0.9170234357449177, \"phi\": 0.856866571690242}, {\"truth_threshold\": 22.800000339746475, \"match_probability\": 0.9999998630645361, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4793.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1745.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.7330988069746099, \"tn_rate\": 1.0, \"fp_rate\": 
0.0, \"fn_rate\": 0.26690119302539, \"precision\": 1.0, \"recall\": 0.7330988069746099, \"specificity\": 1.0, \"npv\": 0.9998603185042281, \"accuracy\": 0.9998603720744149, \"f1\": 0.8459977054099374, \"f2\": 0.7744385199547584, \"f0_5\": 0.9321275768183586, \"p4\": 0.9165456945534598, \"phi\": 0.8561520931684411}, {\"truth_threshold\": 22.90000034123659, \"match_probability\": 0.9999998722346936, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4780.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1758.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.7311104313245641, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2688895686754359, \"precision\": 1.0, \"recall\": 0.7311104313245641, \"specificity\": 1.0, \"npv\": 0.9998592780435326, \"accuracy\": 0.9998593318663733, \"f1\": 0.8446722035695352, \"f2\": 0.7726626147678779, \"f0_5\": 0.9314833580169928, \"p4\": 0.9157670237448036, \"phi\": 0.8549897941111778}, {\"truth_threshold\": 23.000000342726707, \"match_probability\": 0.999999880790753, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4755.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1783.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.7272866319975527, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2727133680024472, \"precision\": 1.0, \"recall\": 0.7272866319975527, \"specificity\": 1.0, \"npv\": 0.9998572771636658, \"accuracy\": 0.9998573314662933, \"f1\": 0.8421145842557336, \"f2\": 0.7692432135115023, \"f0_5\": 0.9302371077549104, \"p4\": 0.914261378196057, \"phi\": 0.8527501577757733}, {\"truth_threshold\": 23.100000344216824, \"match_probability\": 0.9999998887738388, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4736.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1802.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.7243805445090241, \"tn_rate\": 
1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.27561945549097583, \"precision\": 1.0, \"recall\": 0.7243805445090241, \"specificity\": 1.0, \"npv\": 0.9998557565003229, \"accuracy\": 0.9998558111622324, \"f1\": 0.840163207379812, \"f2\": 0.7666407666407666, \"f0_5\": 0.9292834157444471, \"p4\": 0.9131098106005202, \"phi\": 0.8510440983428451}, {\"truth_threshold\": 23.20000034570694, \"match_probability\": 0.9999998962223214, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4719.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1819.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.7217803609666564, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2782196390333435, \"precision\": 1.0, \"recall\": 0.7217803609666564, \"specificity\": 1.0, \"npv\": 0.9998543959107263, \"accuracy\": 0.999854450890178, \"f1\": 0.8384116549702407, \"f2\": 0.7643095461760228, \"f0_5\": 0.9284252774061541, \"p4\": 0.9120740863704666, \"phi\": 0.8495147242953134}, {\"truth_threshold\": 23.300000347197056, \"match_probability\": 0.9999999031720016, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4709.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1829.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.7202508412358519, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2797491587641481, \"precision\": 1.0, \"recall\": 0.7202508412358519, \"specificity\": 1.0, \"npv\": 0.9998535955656346, \"accuracy\": 0.999853650730146, \"f1\": 0.8373788565839779, \"f2\": 0.7629370402773727, \"f0_5\": 0.9279183416095216, \"p4\": 0.9114624499845586, \"phi\": 0.848613806933896}, {\"truth_threshold\": 23.400000348687172, \"match_probability\": 0.9999999096562825, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4697.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1841.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 
0.7184154175588865, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2815845824411135, \"precision\": 1.0, \"recall\": 0.7184154175588865, \"specificity\": 1.0, \"npv\": 0.9998526351532158, \"accuracy\": 0.9998526905381077, \"f1\": 0.8361370716510903, \"f2\": 0.7612888586339914, \"f0_5\": 0.9273079049198453, \"p4\": 0.9107261392861045, \"phi\": 0.8475314438892226}, {\"truth_threshold\": 23.500000350177288, \"match_probability\": 0.9999999157063305, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4683.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1855.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.7162740899357601, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2837259100642398, \"precision\": 1.0, \"recall\": 0.7162740899357601, \"specificity\": 1.0, \"npv\": 0.9998515146743925, \"accuracy\": 0.9998515703140628, \"f1\": 0.8346849656893325, \"f2\": 0.7593643586833144, \"f0_5\": 0.9265927977839336, \"p4\": 0.9098638570962553, \"phi\": 0.8462669400043297}, {\"truth_threshold\": 23.600000351667404, \"match_probability\": 0.9999999213512251, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4654.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1884.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.711838482716427, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.288161517283573, \"precision\": 1.0, \"recall\": 0.711838482716427, \"specificity\": 1.0, \"npv\": 0.9998491936905329, \"accuracy\": 0.99984924984997, \"f1\": 0.8316654753395283, \"f2\": 0.7553723300655717, \"f0_5\": 0.9251013755267552, \"p4\": 0.9080664670777495, \"phi\": 0.8436415903580808}, {\"truth_threshold\": 23.70000035315752, \"match_probability\": 0.9999999266180979, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4632.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1906.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, 
\"tp_rate\": 0.7084735393086571, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2915264606913429, \"precision\": 1.0, \"recall\": 0.7084735393086571, \"specificity\": 1.0, \"npv\": 0.9998474329513447, \"accuracy\": 0.9998474894978996, \"f1\": 0.8293643688451209, \"f2\": 0.7523388773388774, \"f0_5\": 0.9239607436367988, \"p4\": 0.9066927254100616, \"phi\": 0.8416444912144999}, {\"truth_threshold\": 23.800000354647636, \"match_probability\": 0.9999999315322641, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4619.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1919.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.7064851636586112, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2935148363413888, \"precision\": 1.0, \"recall\": 0.7064851636586112, \"specificity\": 1.0, \"npv\": 0.9998463925174665, \"accuracy\": 0.999846449289858, \"f1\": 0.8280003585193152, \"f2\": 0.7505443437002373, \"f0_5\": 0.9232829615415368, \"p4\": 0.9058767918573599, \"phi\": 0.8404621599163012}, {\"truth_threshold\": 23.900000356137753, \"match_probability\": 0.9999999361173434, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4606.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1932.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.7044967880085653, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2955032119914347, \"precision\": 1.0, \"recall\": 0.7044967880085653, \"specificity\": 1.0, \"npv\": 0.9998453520857538, \"accuracy\": 0.9998454090818164, \"f1\": 0.8266331658291457, \"f2\": 0.7487482931269913, \"f0_5\": 0.9226023555804823, \"p4\": 0.9050577333541469, \"phi\": 0.839278165478947}, {\"truth_threshold\": 24.00000035762787, \"match_probability\": 0.9999999403953735, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4585.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1953.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 
0.9994768553710742, \"tp_rate\": 0.7012847965738758, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2987152034261242, \"precision\": 1.0, \"recall\": 0.7012847965738758, \"specificity\": 1.0, \"npv\": 0.9998436713929455, \"accuracy\": 0.9998437287457491, \"f1\": 0.8244178728760226, \"f2\": 0.7458437713504896, \"f0_5\": 0.921496904895892, \"p4\": 0.9037279917519934, \"phi\": 0.8373620278580102}, {\"truth_threshold\": 24.100000359117985, \"match_probability\": 0.9999999443869169, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4572.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1966.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6992964209238299, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.30070357907617007, \"precision\": 1.0, \"recall\": 0.6992964209238299, \"specificity\": 1.0, \"npv\": 0.9998426309668959, \"accuracy\": 0.9998426885377075, \"f1\": 0.823042304230423, \"f2\": 0.7440437443041271, \"f0_5\": 0.9208088294529928, \"p4\": 0.9029006748615036, \"phi\": 0.8361736502199862}, {\"truth_threshold\": 24.2000003606081, \"match_probability\": 0.9999999481111586, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4553.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1985.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6963903334353013, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3036096665646987, \"precision\": 1.0, \"recall\": 0.6963903334353013, \"specificity\": 1.0, \"npv\": 0.9998411103481029, \"accuracy\": 0.9998411682336468, \"f1\": 0.8210260571634659, \"f2\": 0.7414101937795148, \"f0_5\": 0.9197979797979798, \"p4\": 0.9016857748518093, \"phi\": 0.8344337506462913}, {\"truth_threshold\": 24.300000362098217, \"match_probability\": 0.999999951585999, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4539.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 1999.0, \"P_rate\": 
0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.694249005812175, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.30575099418782503, \"precision\": 1.0, \"recall\": 0.694249005812175, \"specificity\": 1.0, \"npv\": 0.9998399898951097, \"accuracy\": 0.9998400480096019, \"f1\": 0.819535975444615, \"f2\": 0.7394675963637548, \"f0_5\": 0.9190491617396939, \"p4\": 0.9007861903756719, \"phi\": 0.8331493977408464}, {\"truth_threshold\": 24.400000363588333, \"match_probability\": 0.9999999548281396, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4516.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2022.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6907311104313245, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.30926888956867543, \"precision\": 1.0, \"recall\": 0.6907311104313245, \"specificity\": 1.0, \"npv\": 0.9998381491563585, \"accuracy\": 0.9998382076415283, \"f1\": 0.8170797901212231, \"f2\": 0.736272335985392, \"f0_5\": 0.9178115600357695, \"p4\": 0.8993001383335989, \"phi\": 0.8310350865146259}, {\"truth_threshold\": 24.50000036507845, \"match_probability\": 0.999999957853164, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4502.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2036.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6885897828081983, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3114102171918018, \"precision\": 1.0, \"recall\": 0.6885897828081983, \"specificity\": 1.0, \"npv\": 0.9998370287100022, \"accuracy\": 0.9998370874174834, \"f1\": 0.8155797101449276, \"f2\": 0.7343250473021465, \"f0_5\": 0.9170536951030718, \"p4\": 0.898390578044074, \"phi\": 0.8297454805197885}, {\"truth_threshold\": 24.600000366568565, \"match_probability\": 0.9999999606756114, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4487.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 
2051.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6862955032119914, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.31370449678800855, \"precision\": 1.0, \"recall\": 0.6862955032119914, \"specificity\": 1.0, \"npv\": 0.99983582823455, \"accuracy\": 0.9998358871774354, \"f1\": 0.813968253968254, \"f2\": 0.7322366917980352, \"f0_5\": 0.9162378502001144, \"p4\": 0.8974118117492211, \"phi\": 0.828361535120752}, {\"truth_threshold\": 24.70000036805868, \"match_probability\": 0.999999963309048, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4475.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2063.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.684460079535026, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.315539920464974, \"precision\": 1.0, \"recall\": 0.684460079535026, \"specificity\": 1.0, \"npv\": 0.9998348678562637, \"accuracy\": 0.9998349269853971, \"f1\": 0.8126759284481976, \"f2\": 0.7305645345610082, \"f0_5\": 0.915582289876422, \"p4\": 0.8966256231233458, \"phi\": 0.827252714214218}, {\"truth_threshold\": 24.800000369548798, \"match_probability\": 0.9999999657661313, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4455.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2083.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.681401040073417, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.31859895992658305, \"precision\": 1.0, \"recall\": 0.681401040073417, \"specificity\": 1.0, \"npv\": 0.9998332672298867, \"accuracy\": 0.9998333266653331, \"f1\": 0.8105157827708542, \"f2\": 0.727774692063907, \"f0_5\": 0.9144839477789638, \"p4\": 0.8953089915345723, \"phi\": 0.8254013739935544}, {\"truth_threshold\": 24.900000371038914, \"match_probability\": 0.999999968058671, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4433.0, \"tn\": 12490962.0, \"fp\": 0.0, 
\"fn\": 2105.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.678036096665647, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.321963903334353, \"precision\": 1.0, \"recall\": 0.678036096665647, \"specificity\": 1.0, \"npv\": 0.9998315065467911, \"accuracy\": 0.9998315663132626, \"f1\": 0.8081305259320025, \"f2\": 0.7247016511361779, \"f0_5\": 0.9132674083230325, \"p4\": 0.8938515018230405, \"phi\": 0.8233600986338356}, {\"truth_threshold\": 25.00000037252903, \"match_probability\": 0.9999999701976862, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4419.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2119.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6758947690425207, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.32410523095747934, \"precision\": 1.0, \"recall\": 0.6758947690425207, \"specificity\": 1.0, \"npv\": 0.9998303861153226, \"accuracy\": 0.9998304460892179, \"f1\": 0.8066076480788537, \"f2\": 0.7227437767819175, \"f0_5\": 0.9124886429338399, \"p4\": 0.8929189503661759, \"phi\": 0.8220584698822282}, {\"truth_threshold\": 25.100000374019146, \"match_probability\": 0.9999999721934579, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4407.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2131.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6740593453655552, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.32594065463444477, \"precision\": 1.0, \"recall\": 0.6740593453655552, \"specificity\": 1.0, \"npv\": 0.999829425747491, \"accuracy\": 0.9998294858971795, \"f1\": 0.8052992233896756, \"f2\": 0.7210641709480022, \"f0_5\": 0.911818257055367, \"p4\": 0.8921164665796518, \"phi\": 0.8209411478276459}, {\"truth_threshold\": 25.200000375509262, \"match_probability\": 0.9999999740555788, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4385.0, \"tn\": 
12490962.0, \"fp\": 0.0, \"fn\": 2153.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6706944019577853, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.32930559804221476, \"precision\": 1.0, \"recall\": 0.6706944019577853, \"specificity\": 1.0, \"npv\": 0.999827665077925, \"accuracy\": 0.999827725545109, \"f1\": 0.8028929781195642, \"f2\": 0.7179814651079018, \"f0_5\": 0.910582274275272, \"p4\": 0.8906376295768972, \"phi\": 0.8188887701576373}, {\"truth_threshold\": 25.300000376999378, \"match_probability\": 0.9999999757929992, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4381.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2157.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6700825940654634, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.32991740593453656, \"precision\": 1.0, \"recall\": 0.6700825940654634, \"specificity\": 1.0, \"npv\": 0.9998273449568519, \"accuracy\": 0.9998274054810962, \"f1\": 0.8024544372195256, \"f2\": 0.7174204958569417, \"f0_5\": 0.9103565788380018, \"p4\": 0.8903676849769424, \"phi\": 0.8185150584603024}, {\"truth_threshold\": 25.400000378489494, \"match_probability\": 0.9999999774140695, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4372.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2166.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6687060263077393, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.33129397369226066, \"precision\": 1.0, \"recall\": 0.6687060263077393, \"specificity\": 1.0, \"npv\": 0.9998266246851869, \"accuracy\": 0.9998266853370674, \"f1\": 0.8014665444546287, \"f2\": 0.716157777486568, \"f0_5\": 0.9098476650295513, \"p4\": 0.8897591049367636, \"phi\": 0.8176735835221234}, {\"truth_threshold\": 25.50000037997961, \"match_probability\": 0.9999999789265818, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, 
\"tp\": 4350.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2188.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6653410828999694, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3346589171000306, \"precision\": 1.0, \"recall\": 0.6653410828999694, \"specificity\": 1.0, \"npv\": 0.999824864025486, \"accuracy\": 0.999824924984997, \"f1\": 0.799044819985305, \"f2\": 0.713067995541276, \"f0_5\": 0.9085972094577659, \"p4\": 0.8882644049818648, \"phi\": 0.8156129950785675}, {\"truth_threshold\": 25.600000381469727, \"match_probability\": 0.9999999803378055, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4336.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2202.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.663199755276843, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.33680024472315695, \"precision\": 1.0, \"recall\": 0.663199755276843, \"specificity\": 1.0, \"npv\": 0.9998237436089048, \"accuracy\": 0.9998238047609522, \"f1\": 0.7974986205628104, \"f2\": 0.7110994489635266, \"f0_5\": 0.907796666945817, \"p4\": 0.8873079789912414, \"phi\": 0.8142990004177844}, {\"truth_threshold\": 25.700000382959843, \"match_probability\": 0.9999999816545239, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4318.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2220.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.660446619761395, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3395533802386051, \"precision\": 1.0, \"recall\": 0.660446619761395, \"specificity\": 1.0, \"npv\": 0.9998223030769903, \"accuracy\": 0.9998223644728946, \"f1\": 0.7955047899778924, \"f2\": 0.7085658024286183, \"f0_5\": 0.9067618647627047, \"p4\": 0.8860722347677458, \"phi\": 0.8126064609817295}, {\"truth_threshold\": 25.80000038444996, \"match_probability\": 0.9999999828830655, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 
12490962.0, \"tp\": 4312.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2226.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6595289079229122, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3404710920770878, \"precision\": 1.0, \"recall\": 0.6595289079229122, \"specificity\": 1.0, \"npv\": 0.999821822900608, \"accuracy\": 0.9998218843768754, \"f1\": 0.7948387096774193, \"f2\": 0.7077205882352942, \"f0_5\": 0.9064155385520895, \"p4\": 0.8856587977503816, \"phi\": 0.8120414983085121}, {\"truth_threshold\": 25.900000385940075, \"match_probability\": 0.9999999840293354, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4304.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2234.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6583052921382686, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3416947078617314, \"precision\": 1.0, \"recall\": 0.6583052921382686, \"specificity\": 1.0, \"npv\": 0.9998211826661488, \"accuracy\": 0.9998212442488498, \"f1\": 0.7939494558199595, \"f2\": 0.7065931179406356, \"f0_5\": 0.9059526816536162, \"p4\": 0.8851063585569687, \"phi\": 0.8112876035913948}, {\"truth_threshold\": 26.00000038743019, \"match_probability\": 0.999999985098843, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4289.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2249.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6560110125420618, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3439889874579382, \"precision\": 1.0, \"recall\": 0.6560110125420618, \"specificity\": 1.0, \"npv\": 0.9998199822287481, \"accuracy\": 0.9998200440088018, \"f1\": 0.7922785628521289, \"f2\": 0.7044775138793075, \"f0_5\": 0.905081455220731, \"p4\": 0.8840668535132122, \"phi\": 0.8098721620735382}, {\"truth_threshold\": 26.100000388920307, \"match_probability\": 0.9999999860967289, \"total_clerical_labels\": 12497500.0, \"p\": 
6538.0, \"n\": 12490962.0, \"tp\": 4272.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2266.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.653410828999694, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3465891710003059, \"precision\": 1.0, \"recall\": 0.653410828999694, \"specificity\": 1.0, \"npv\": 0.9998186217365119, \"accuracy\": 0.9998186837367473, \"f1\": 0.7903792784458834, \"f2\": 0.7020773073889035, \"f0_5\": 0.9040887158215525, \"p4\": 0.8828829066804633, \"phi\": 0.8082650026310589}, {\"truth_threshold\": 26.200000390410423, \"match_probability\": 0.9999999870277894, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4260.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2278.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6515754053227286, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3484245946772713, \"precision\": 1.0, \"recall\": 0.6515754053227286, \"specificity\": 1.0, \"npv\": 0.9998176613912804, \"accuracy\": 0.999817723544709, \"f1\": 0.789035006482682, \"f2\": 0.7003814283835328, \"f0_5\": 0.9033845109848163, \"p4\": 0.882043417373629, \"phi\": 0.8071286130288322}, {\"truth_threshold\": 26.30000039190054, \"match_probability\": 0.9999999878964996, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4251.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2287.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6501988375650046, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3498011624349954, \"precision\": 1.0, \"recall\": 0.6501988375650046, \"specificity\": 1.0, \"npv\": 0.9998169411335674, \"accuracy\": 0.9998170034006801, \"f1\": 0.7880248401149319, \"f2\": 0.6991086405946781, \"f0_5\": 0.9028544728570215, \"p4\": 0.8814117450919253, \"phi\": 0.8062752711716044}, {\"truth_threshold\": 26.400000393390656, \"match_probability\": 0.9999999887070348, \"total_clerical_labels\": 
12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4242.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2296.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6488222698072805, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3511777301927195, \"precision\": 1.0, \"recall\": 0.6488222698072805, \"specificity\": 1.0, \"npv\": 0.9998162208768921, \"accuracy\": 0.9998162832566513, \"f1\": 0.787012987012987, \"f2\": 0.6978350990327038, \"f0_5\": 0.9023228111971412, \"p4\": 0.8807783028920311, \"phi\": 0.8054210264324384}, {\"truth_threshold\": 26.50000039488077, \"match_probability\": 0.9999999894632908, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4227.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2311.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6465279902110738, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3534720097889263, \"precision\": 1.0, \"recall\": 0.6465279902110738, \"specificity\": 1.0, \"npv\": 0.9998150204514061, \"accuracy\": 0.9998150830166033, \"f1\": 0.785322805387831, \"f2\": 0.6957108528918002, \"f0_5\": 0.9014330802695556, \"p4\": 0.8797186124788048, \"phi\": 0.8039952709781888}, {\"truth_threshold\": 26.600000396370888, \"match_probability\": 0.9999999901689027, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4217.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2321.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6449984704802691, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3550015295197308, \"precision\": 1.0, \"recall\": 0.6449984704802691, \"specificity\": 1.0, \"npv\": 0.9998142201693502, \"accuracy\": 0.9998142828565714, \"f1\": 0.7841933984193399, \"f2\": 0.694293523000428, \"f0_5\": 0.9008373921216782, \"p4\": 0.8790093921827526, \"phi\": 0.8030433629472657}, {\"truth_threshold\": 26.700000397861004, \"match_probability\": 0.999999990827262, 
\"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4207.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2331.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6434689507494646, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.35653104925053536, \"precision\": 1.0, \"recall\": 0.6434689507494646, \"specificity\": 1.0, \"npv\": 0.9998134198885754, \"accuracy\": 0.9998134826965394, \"f1\": 0.7830618892508143, \"f2\": 0.6928752593958958, \"f0_5\": 0.9002396644697424, \"p4\": 0.878297951768354, \"phi\": 0.8020903267344244}, {\"truth_threshold\": 26.80000039935112, \"match_probability\": 0.9999999914415327, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4193.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2345.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6413276231263383, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.35867237687366166, \"precision\": 1.0, \"recall\": 0.6413276231263383, \"specificity\": 1.0, \"npv\": 0.999812299497643, \"accuracy\": 0.9998123624724945, \"f1\": 0.781474233529028, \"f2\": 0.6908881199538639, \"f0_5\": 0.8993993993993994, \"p4\": 0.8772981854872046, \"phi\": 0.8007541730202234}, {\"truth_threshold\": 26.900000400841236, \"match_probability\": 0.9999999920146677, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4174.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2364.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6384215356378097, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.36157846436219027, \"precision\": 1.0, \"recall\": 0.6384215356378097, \"specificity\": 1.0, \"npv\": 0.9998107789711083, \"accuracy\": 0.9998108421684336, \"f1\": 0.7793129200896192, \"f2\": 0.688188353228253, \"f0_5\": 0.8982525609021262, \"p4\": 0.875934315248441, \"phi\": 0.7989372521405982}, {\"truth_threshold\": 27.000000402331352, \"match_probability\": 
0.9999999925494215, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4157.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2381.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.635821352095442, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.36417864790455795, \"precision\": 1.0, \"recall\": 0.635821352095442, \"specificity\": 1.0, \"npv\": 0.9998094185039185, \"accuracy\": 0.9998094818963793, \"f1\": 0.7773726040205704, \"f2\": 0.6857699033290442, \"f0_5\": 0.8972200638867306, \"p4\": 0.8747070799937192, \"phi\": 0.7973080811774825}, {\"truth_threshold\": 27.10000040382147, \"match_probability\": 0.9999999930483645, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4141.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2397.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6333741205261548, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.36662587947384523, \"precision\": 1.0, \"recall\": 0.6333741205261548, \"specificity\": 1.0, \"npv\": 0.9998081380675925, \"accuracy\": 0.999808201640328, \"f1\": 0.7755407809720011, \"f2\": 0.6834912355989833, \"f0_5\": 0.8962427495454939, \"p4\": 0.8735460067657135, \"phi\": 0.7957717010194908}, {\"truth_threshold\": 27.200000405311584, \"match_probability\": 0.9999999935138947, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4120.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2418.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6301621290914653, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3698378709085347, \"precision\": 1.0, \"recall\": 0.6301621290914653, \"specificity\": 1.0, \"npv\": 0.9998064574998919, \"accuracy\": 0.9998065213042608, \"f1\": 0.7731281666353913, \"f2\": 0.6804968287526427, \"f0_5\": 0.8949517768702754, \"p4\": 0.8720131509356661, \"phi\": 0.7937506950784532}, {\"truth_threshold\": 27.3000004068017, 
\"match_probability\": 0.9999999939482498, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4106.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2432.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6280208014683389, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.37197919853166106, \"precision\": 1.0, \"recall\": 0.6280208014683389, \"specificity\": 1.0, \"npv\": 0.9998053371245635, \"accuracy\": 0.9998054010802161, \"f1\": 0.7715144682450207, \"f2\": 0.6784982483971181, \"f0_5\": 0.894085881020817, \"p4\": 0.8709855588722847, \"phi\": 0.7924004979385685}, {\"truth_threshold\": 27.400000408291817, \"match_probability\": 0.9999999943535174, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4083.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2455.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6245029060874885, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.37549709391251146, \"precision\": 1.0, \"recall\": 0.6245029060874885, \"specificity\": 1.0, \"npv\": 0.9998034965134038, \"accuracy\": 0.9998035607121424, \"f1\": 0.7688541568590528, \"f2\": 0.675210848354556, \"f0_5\": 0.8926541320507214, \"p4\": 0.8692874015545988, \"phi\": 0.7901773149673766}, {\"truth_threshold\": 27.500000409781933, \"match_probability\": 0.9999999947316455, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4072.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2466.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6228204343836036, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.37717956561639643, \"precision\": 1.0, \"recall\": 0.6228204343836036, \"specificity\": 1.0, \"npv\": 0.9998026162235056, \"accuracy\": 0.9998026805361072, \"f1\": 0.7675777568331762, \"f2\": 0.673636844891477, \"f0_5\": 0.8919653027249628, \"p4\": 0.8684708239096309, \"phi\": 0.7891118423482105}, {\"truth_threshold\": 
27.60000041127205, \"match_probability\": 0.9999999950844514, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4067.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2471.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6220556745182013, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.37794432548179874, \"precision\": 1.0, \"recall\": 0.6220556745182013, \"specificity\": 1.0, \"npv\": 0.9998022160922462, \"accuracy\": 0.9998022804560912, \"f1\": 0.766996699669967, \"f2\": 0.6729210099606208, \"f0_5\": 0.8916513198281154, \"p4\": 0.8680987018741384, \"phi\": 0.7886270613642766}, {\"truth_threshold\": 27.700000412762165, \"match_probability\": 0.999999995413631, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4055.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2483.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6202202508412359, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.37977974915876417, \"precision\": 1.0, \"recall\": 0.6202202508412359, \"specificity\": 1.0, \"npv\": 0.9998012557785303, \"accuracy\": 0.9998013202640528, \"f1\": 0.7655999244784292, \"f2\": 0.6712020392624226, \"f0_5\": 0.890895509271465, \"p4\": 0.8672031746380038, \"phi\": 0.7874623709424742}, {\"truth_threshold\": 27.80000041425228, \"match_probability\": 0.9999999957207664, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4041.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2497.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6180789232181095, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.38192107678189047, \"precision\": 1.0, \"recall\": 0.6180789232181095, \"specificity\": 1.0, \"npv\": 0.9998001354148599, \"accuracy\": 0.999800200040008, \"f1\": 0.7639663484261272, \"f2\": 0.6691948464875964, \"f0_5\": 0.8900096907761431, \"p4\": 0.866154027567695, \"phi\": 0.7861013873098919}, 
{\"truth_threshold\": 27.900000415742397, \"match_probability\": 0.9999999960073339, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4023.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2515.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6153257877026613, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3846742122973386, \"precision\": 1.0, \"recall\": 0.6153257877026613, \"specificity\": 1.0, \"npv\": 0.9997986949509732, \"accuracy\": 0.9997987597519504, \"f1\": 0.7618596723795095, \"f2\": 0.6666114333057167, \"f0_5\": 0.8888643393725144, \"p4\": 0.8647981674642208, \"phi\": 0.7843480856831363}, {\"truth_threshold\": 28.000000417232513, \"match_probability\": 0.9999999962747108, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4008.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2530.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6130315081064546, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.38696849189354543, \"precision\": 1.0, \"recall\": 0.6130315081064546, \"specificity\": 1.0, \"npv\": 0.9997974945675716, \"accuracy\": 0.9997975595119024, \"f1\": 0.7600986155888488, \"f2\": 0.6644562334217506, \"f0_5\": 0.8879042977403633, \"p4\": 0.8636622601901639, \"phi\": 0.7828840053902067}, {\"truth_threshold\": 28.10000041872263, \"match_probability\": 0.9999999965241823, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3988.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2550.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6099724686448456, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3900275313551545, \"precision\": 1.0, \"recall\": 0.6099724686448456, \"specificity\": 1.0, \"npv\": 0.9997958940608533, \"accuracy\": 0.9997959591918384, \"f1\": 0.7577427322819684, \"f2\": 0.661579296615793, \"f0_5\": 0.8866162738995109, \"p4\": 0.862139125364106, \"phi\": 
0.7809276340617478}, {\"truth_threshold\": 28.200000420212746, \"match_probability\": 0.9999999967569474, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3969.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2569.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.607066381156317, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3929336188436831, \"precision\": 1.0, \"recall\": 0.607066381156317, \"specificity\": 1.0, \"npv\": 0.9997943735842173, \"accuracy\": 0.9997944388877775, \"f1\": 0.7554963357761493, \"f2\": 0.658842667906112, \"f0_5\": 0.8853841349156777, \"p4\": 0.8606829723647027, \"phi\": 0.7790645366541963}, {\"truth_threshold\": 28.300000421702862, \"match_probability\": 0.9999999969741249, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3955.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2583.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6049250535331906, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3950749464668094, \"precision\": 1.0, \"recall\": 0.6049250535331906, \"specificity\": 1.0, \"npv\": 0.999793253235971, \"accuracy\": 0.9997933186637328, \"f1\": 0.7538358905937291, \"f2\": 0.6568239944199024, \"f0_5\": 0.8844708829054477, \"p4\": 0.8596042477032296, \"phi\": 0.7776888756025077}, {\"truth_threshold\": 28.400000423192978, \"match_probability\": 0.9999999971767587, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3941.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2597.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.6027837259100642, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.39721627408993576, \"precision\": 1.0, \"recall\": 0.6027837259100642, \"specificity\": 1.0, \"npv\": 0.9997921328902357, \"accuracy\": 0.9997921984396879, \"f1\": 0.7521710086840347, \"f2\": 0.6548034426610839, \"f0_5\": 0.8835530445699937, \"p4\": 
0.8585205898390385, \"phi\": 0.7763107799065696}, {\"truth_threshold\": 28.500000424683094, \"match_probability\": 0.9999999973658228, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3909.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2629.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.5978892627714898, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.40211073722851026, \"precision\": 1.0, \"recall\": 0.5978892627714898, \"specificity\": 1.0, \"npv\": 0.9997895721094119, \"accuracy\": 0.9997896379275855, \"f1\": 0.7483488082703168, \"f2\": 0.6501779714580354, \"f0_5\": 0.881437719852079, \"p4\": 0.8560249503241031, \"phi\": 0.7731516346714398}, {\"truth_threshold\": 28.60000042617321, \"match_probability\": 0.9999999975422257, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3886.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2652.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.5943713673906393, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.40562863260936066, \"precision\": 1.0, \"recall\": 0.5943713673906393, \"specificity\": 1.0, \"npv\": 0.9997877315562975, \"accuracy\": 0.9997877975595119, \"f1\": 0.7455871066768994, \"f2\": 0.6468473267194886, \"f0_5\": 0.8799021827732996, \"p4\": 0.8542149499722755, \"phi\": 0.7708730123084488}, {\"truth_threshold\": 28.700000427663326, \"match_probability\": 0.9999999977068155, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3867.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2671.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.5914652799021107, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.40853472009788927, \"precision\": 1.0, \"recall\": 0.5914652799021107, \"specificity\": 1.0, \"npv\": 0.9997862111044882, \"accuracy\": 0.9997862772554511, \"f1\": 0.7432964920711197, \"f2\": 0.6440920750191546, \"f0_5\": 
0.8786240116331909, \"p4\": 0.85270934979703, \"phi\": 0.7689855858162667}, {\"truth_threshold\": 28.800000429153442, \"match_probability\": 0.9999999978603832, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3842.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2696.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.5876414805750995, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4123585194249006, \"precision\": 1.0, \"recall\": 0.5876414805750995, \"specificity\": 1.0, \"npv\": 0.9997842105170479, \"accuracy\": 0.9997842768553711, \"f1\": 0.7402697495183044, \"f2\": 0.640461425618457, \"f0_5\": 0.8769286953346115, \"p4\": 0.8507138259660365, \"phi\": 0.7664950578600261}, {\"truth_threshold\": 28.90000043064356, \"match_probability\": 0.999999998003667, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3819.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2719.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.584123585194249, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.415876414805751, \"precision\": 1.0, \"recall\": 0.584123585194249, \"specificity\": 1.0, \"npv\": 0.9997823699836742, \"accuracy\": 0.9997824364872975, \"f1\": 0.7374722409964275, \"f2\": 0.6371158786827267, \"f0_5\": 0.875355276427982, \"p4\": 0.8488632582121125, \"phi\": 0.7641966123772512}, {\"truth_threshold\": 29.000000432133675, \"match_probability\": 0.9999999981373554, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3802.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2736.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.5815234016518813, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4184765983481187, \"precision\": 1.0, \"recall\": 0.5815234016518813, \"specificity\": 1.0, \"npv\": 0.9997810095937968, \"accuracy\": 0.999781076215243, \"f1\": 0.7353965183752418, \"f2\": 0.6346397809975295, 
\"f0_5\": 0.8741837579324934, \"p4\": 0.8474863033356244, \"phi\": 0.7624933138106438}, {\"truth_threshold\": 29.10000043362379, \"match_probability\": 0.9999999982620912, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3793.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2745.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.5801468338941572, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4198531661058428, \"precision\": 1.0, \"recall\": 0.5801468338941572, \"specificity\": 1.0, \"npv\": 0.99978028938889, \"accuracy\": 0.9997803560712143, \"f1\": 0.7342948407704966, \"f2\": 0.6333277675738854, \"f0_5\": 0.8735605711653616, \"p4\": 0.8467541548192153, \"phi\": 0.7615900271660263}, {\"truth_threshold\": 29.200000435113907, \"match_probability\": 0.9999999983784738, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3768.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2770.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.576323034567146, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4236769654328541, \"precision\": 1.0, \"recall\": 0.576323034567146, \"specificity\": 1.0, \"npv\": 0.9997782888251485, \"accuracy\": 0.9997783556711343, \"f1\": 0.7312245294003493, \"f2\": 0.6296791443850267, \"f0_5\": 0.8718186024988431, \"p4\": 0.844708786418495, \"phi\": 0.7590752645884714}, {\"truth_threshold\": 29.300000436604023, \"match_probability\": 0.9999999984870624, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3743.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2795.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.5724992352401346, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4275007647598654, \"precision\": 1.0, \"recall\": 0.5724992352401346, \"specificity\": 1.0, \"npv\": 0.9997762882694132, \"accuracy\": 0.9997763552710542, \"f1\": 0.7281392860616671, \"f2\": 
0.6260244187991303, \"f0_5\": 0.8700604370060437, \"p4\": 0.8426461557844616, \"phi\": 0.7565521531563171}, {\"truth_threshold\": 29.40000043809414, \"match_probability\": 0.9999999985883794, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3707.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2831.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.5669929642092383, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4330070357907617, \"precision\": 1.0, \"recall\": 0.5669929642092383, \"specificity\": 1.0, \"npv\": 0.9997734074832199, \"accuracy\": 0.9997734746949389, \"f1\": 0.7236700829673012, \"f2\": 0.6207508623865501, \"f0_5\": 0.867499765983338, \"p4\": 0.8396451982712152, \"phi\": 0.7529040362798445}, {\"truth_threshold\": 29.500000439584255, \"match_probability\": 0.9999999986829113, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3680.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2858.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.5628632609360661, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4371367390639339, \"precision\": 1.0, \"recall\": 0.5628632609360661, \"specificity\": 1.0, \"npv\": 0.9997712469044696, \"accuracy\": 0.9997713142628526, \"f1\": 0.7202975141906439, \"f2\": 0.6167873424510593, \"f0_5\": 0.8655564963778343, \"p4\": 0.8373702884779534, \"phi\": 0.7501563198579124}, {\"truth_threshold\": 29.60000044107437, \"match_probability\": 0.9999999987711129, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3650.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2888.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.5582747017436525, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4417252982563475, \"precision\": 1.0, \"recall\": 0.5582747017436525, \"specificity\": 1.0, \"npv\": 0.999768846272366, \"accuracy\": 0.9997689137827566, \"f1\": 
0.7165292500981547, \"f2\": 0.6123750083886987, \"f0_5\": 0.8633740183555682, \"p4\": 0.8348179032487424, \"phi\": 0.7470914632528608}, {\"truth_threshold\": 29.700000442564487, \"match_probability\": 0.9999999988534077, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3619.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2919.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.5535331905781584, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4464668094218415, \"precision\": 1.0, \"recall\": 0.5535331905781584, \"specificity\": 1.0, \"npv\": 0.9997663656313038, \"accuracy\": 0.9997664332866574, \"f1\": 0.7126119917298415, \"f2\": 0.6078062544086528, \"f0_5\": 0.8610926049300467, \"p4\": 0.832152700862904, \"phi\": 0.7439111951037068}, {\"truth_threshold\": 29.800000444054604, \"match_probability\": 0.9999999989301916, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3581.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2957.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.5477210156011012, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.45227898439889874, \"precision\": 1.0, \"recall\": 0.5477210156011012, \"specificity\": 1.0, \"npv\": 0.999763324862279, \"accuracy\": 0.9997633926785358, \"f1\": 0.7077774483644629, \"f2\": 0.6021928496956244, \"f0_5\": 0.8582590355670597, \"p4\": 0.8288465573947712, \"phi\": 0.7399941781218966}, {\"truth_threshold\": 29.90000044554472, \"match_probability\": 0.9999999990018335, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3552.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 2986.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.5432854083817681, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.45671459161823186, \"precision\": 1.0, \"recall\": 0.5432854083817681, \"specificity\": 1.0, \"npv\": 0.999761004287836, \"accuracy\": 
0.9997610722144429, \"f1\": 0.7040634291377602, \"f2\": 0.5978992728252087, \"f0_5\": 0.8560686397377808, \"p4\": 0.8262939618948658, \"phi\": 0.7369908856279592}, {\"truth_threshold\": 30.000000447034836, \"match_probability\": 0.9999999990686778, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3523.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3015.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.538849801162435, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.46115019883756503, \"precision\": 1.0, \"recall\": 0.538849801162435, \"specificity\": 1.0, \"npv\": 0.9997586837241657, \"accuracy\": 0.9997587517503501, \"f1\": 0.7003279992048504, \"f2\": 0.5935973041280539, \"f0_5\": 0.8538536112457586, \"p4\": 0.823715411430869, \"phi\": 0.733975318341962}, {\"truth_threshold\": 30.100000448524952, \"match_probability\": 0.9999999991310455, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3495.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3043.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.5345671459161824, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4654328540838177, \"precision\": 1.0, \"recall\": 0.5345671459161824, \"specificity\": 1.0, \"npv\": 0.999756443190154, \"accuracy\": 0.9997565113022604, \"f1\": 0.6967008870726602, \"f2\": 0.5894356933247883, \"f0_5\": 0.8516911979725119, \"p4\": 0.8212007753102303, \"phi\": 0.7310519464767702}, {\"truth_threshold\": 30.200000450015068, \"match_probability\": 0.9999999991892369, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3461.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3077.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.5293667788314469, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.47063322116855305, \"precision\": 1.0, \"recall\": 0.5293667788314469, \"specificity\": 1.0, \"npv\": 
0.9997537225552121, \"accuracy\": 0.9997537907581516, \"f1\": 0.6922692269226922, \"f2\": 0.5843717286326952, \"f0_5\": 0.8490334608968698, \"p4\": 0.8181137341584943, \"phi\": 0.7274863625758223}, {\"truth_threshold\": 30.300000451505184, \"match_probability\": 0.9999999992435312, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3436.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3102.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.5255429795044356, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4744570204955644, \"precision\": 1.0, \"recall\": 0.5255429795044356, \"specificity\": 1.0, \"npv\": 0.9997517220977898, \"accuracy\": 0.9997517903580716, \"f1\": 0.6889913775817125, \"f2\": 0.5806408003244559, \"f0_5\": 0.8470565033034217, \"p4\": 0.815820008612674, \"phi\": 0.7248534326303235}, {\"truth_threshold\": 30.4000004529953, \"match_probability\": 0.9999999992941897, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3412.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3126.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.5218721321505048, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.47812786784949524, \"precision\": 1.0, \"recall\": 0.5218721321505048, \"specificity\": 1.0, \"npv\": 0.9997498016661961, \"accuracy\": 0.9997498699739948, \"f1\": 0.6858291457286432, \"f2\": 0.5770531727777026, \"f0_5\": 0.8451401961755672, \"p4\": 0.8135987406629889, \"phi\": 0.7223168007270646}, {\"truth_threshold\": 30.500000454485416, \"match_probability\": 0.9999999993414557, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3377.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3161.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.5165188130926889, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4834811869073111, \"precision\": 1.0, \"recall\": 0.5165188130926889, 
\"specificity\": 1.0, \"npv\": 0.9997470010500137, \"accuracy\": 0.9997470694138828, \"f1\": 0.68119011598588, \"f2\": 0.5718107623014663, \"f0_5\": 0.8423126808340816, \"p4\": 0.8103250045824084, \"phi\": 0.7186015129230722}, {\"truth_threshold\": 30.600000455975533, \"match_probability\": 0.9999999993855564, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3342.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3196.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.5111654940348731, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.48883450596512695, \"precision\": 1.0, \"recall\": 0.5111654940348731, \"specificity\": 1.0, \"npv\": 0.9997442004495221, \"accuracy\": 0.9997442688537708, \"f1\": 0.6765182186234818, \"f2\": 0.5665559096765443, \"f0_5\": 0.839445393348739, \"p4\": 0.8070097757346808, \"phi\": 0.7148669373605685}, {\"truth_threshold\": 30.70000045746565, \"match_probability\": 0.9999999994267039, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3300.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3238.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.504741511165494, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.49525848883450596, \"precision\": 1.0, \"recall\": 0.504741511165494, \"specificity\": 1.0, \"npv\": 0.9997408397496438, \"accuracy\": 0.9997409081816363, \"f1\": 0.6708680626143525, \"f2\": 0.5602336004346055, \"f0_5\": 0.8359509575438241, \"p4\": 0.8029756110288372, \"phi\": 0.7103595584132695}, {\"truth_threshold\": 30.800000458955765, \"match_probability\": 0.9999999994650958, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3274.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3264.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.5007647598654023, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.49923524013459775, \"precision\": 1.0, \"recall\": 
0.5007647598654023, \"specificity\": 1.0, \"npv\": 0.9997387593277086, \"accuracy\": 0.9997388277655531, \"f1\": 0.6673461068079902, \"f2\": 0.5563107455991301, \"f0_5\": 0.833757767138637, \"p4\": 0.8004471391866589, \"phi\": 0.7075549022817065}, {\"truth_threshold\": 30.90000046044588, \"match_probability\": 0.9999999995009168, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3244.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3294.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.4961762006729887, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5038237993270113, \"precision\": 1.0, \"recall\": 0.4961762006729887, \"specificity\": 1.0, \"npv\": 0.9997363588516195, \"accuracy\": 0.9997364272854571, \"f1\": 0.6632590472296054, \"f2\": 0.5517757518029663, \"f0_5\": 0.8311981141744389, \"p4\": 0.7974995548331207, \"phi\": 0.7043048971927174}, {\"truth_threshold\": 31.000000461935997, \"match_probability\": 0.9999999995343388, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3216.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3322.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.491893545426736, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.508106454573264, \"precision\": 1.0, \"recall\": 0.491893545426736, \"specificity\": 1.0, \"npv\": 0.99973411841767, \"accuracy\": 0.9997341868373675, \"f1\": 0.6594217756817716, \"f2\": 0.547534731680741, \"f0_5\": 0.8287805380888568, \"p4\": 0.7947189098521955, \"phi\": 0.7012579839064508}, {\"truth_threshold\": 31.100000463426113, \"match_probability\": 0.9999999995655228, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3187.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3351.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.48745793820740285, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5125420617925971, \"precision\": 1.0, 
\"recall\": 0.48745793820740285, \"specificity\": 1.0, \"npv\": 0.9997317979788084, \"accuracy\": 0.9997318663732746, \"f1\": 0.6554241645244216, \"f2\": 0.5431337128054807, \"f0_5\": 0.826247018562688, \"p4\": 0.7918083739629983, \"phi\": 0.6980882472890729}, {\"truth_threshold\": 31.20000046491623, \"match_probability\": 0.9999999995946184, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3159.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3379.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.4831752829611502, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5168247170388498, \"precision\": 1.0, \"recall\": 0.4831752829611502, \"specificity\": 1.0, \"npv\": 0.9997295575653009, \"accuracy\": 0.999729625925185, \"f1\": 0.651541713932144, \"f2\": 0.5388761898263451, \"f0_5\": 0.8237717742776677, \"p4\": 0.7889682035785288, \"phi\": 0.6950141091094769}, {\"truth_threshold\": 31.300000466406345, \"match_probability\": 0.9999999996217657, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3122.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3416.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.47751605995717344, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5224839400428265, \"precision\": 1.0, \"recall\": 0.47751605995717344, \"specificity\": 1.0, \"npv\": 0.9997265970342821, \"accuracy\": 0.9997266653330666, \"f1\": 0.6463768115942029, \"f2\": 0.5332376853180296, \"f0_5\": 0.8204562178072112, \"p4\": 0.7851691162336231, \"phi\": 0.6909308978835751}, {\"truth_threshold\": 31.40000046789646, \"match_probability\": 0.9999999996470949, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3086.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3452.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.47200978892627715, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5279902110737229, 
\"precision\": 1.0, \"recall\": 0.47200978892627715, \"specificity\": 1.0, \"npv\": 0.9997237165344449, \"accuracy\": 0.9997237847569513, \"f1\": 0.6413133832086451, \"f2\": 0.5277378753676722, \"f0_5\": 0.8171803834339583, \"p4\": 0.7814214717339414, \"phi\": 0.6869347715948121}, {\"truth_threshold\": 31.500000469386578, \"match_probability\": 0.9999999996707278, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3058.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3480.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.4677271336800245, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5322728663199755, \"precision\": 1.0, \"recall\": 0.4677271336800245, \"specificity\": 1.0, \"npv\": 0.9997214761571586, \"accuracy\": 0.9997215443088617, \"f1\": 0.6373488953730722, \"f2\": 0.5234508729887025, \"f0_5\": 0.8145977623867874, \"p4\": 0.7784710266885368, \"phi\": 0.6838105443186371}, {\"truth_threshold\": 31.600000470876694, \"match_probability\": 0.9999999996927782, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3030.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3508.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.4634444784337718, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5365555215662282, \"precision\": 1.0, \"recall\": 0.4634444784337718, \"specificity\": 1.0, \"npv\": 0.9997192357899135, \"accuracy\": 0.9997193038607721, \"f1\": 0.6333612040133779, \"f2\": 0.5191556438900692, \"f0_5\": 0.8119841354914782, \"p4\": 0.7754888721701174, \"phi\": 0.6806719913518297}, {\"truth_threshold\": 31.70000047236681, \"match_probability\": 0.999999999713352, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3012.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3526.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.46069134291832364, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 
0.5393086570816763, \"precision\": 1.0, \"recall\": 0.46069134291832364, \"specificity\": 1.0, \"npv\": 0.9997177955591298, \"accuracy\": 0.9997178635727145, \"f1\": 0.6307853403141361, \"f2\": 0.5163900699492525, \"f0_5\": 0.8102873130313138, \"p4\": 0.7735547895848445, \"phi\": 0.6786466928936452}, {\"truth_threshold\": 31.800000473856926, \"match_probability\": 0.9999999997325479, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2964.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3574.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.4533496482104619, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5466503517895381, \"precision\": 1.0, \"recall\": 0.4533496482104619, \"specificity\": 1.0, \"npv\": 0.9997139549639938, \"accuracy\": 0.9997140228045609, \"f1\": 0.6238686592296359, \"f2\": 0.508998488803407, \"f0_5\": 0.8056975100576275, \"p4\": 0.768331068939002, \"phi\": 0.6732161389880789}, {\"truth_threshold\": 31.900000475347042, \"match_probability\": 0.9999999997504584, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2930.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3608.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.4481492811257265, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5518507188742735, \"precision\": 1.0, \"recall\": 0.4481492811257265, \"specificity\": 1.0, \"npv\": 0.999711234560293, \"accuracy\": 0.9997113022604521, \"f1\": 0.6189269117025771, \"f2\": 0.5037480228319923, \"f0_5\": 0.8023879943038668, \"p4\": 0.7645715633053495, \"phi\": 0.6693428651307997}, {\"truth_threshold\": 32.00000047683716, \"match_probability\": 0.9999999997671695, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2895.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3643.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.44279596206791066, \"tn_rate\": 1.0, \"fp_rate\": 
0.0, \"fn_rate\": 0.5572040379320893, \"precision\": 1.0, \"recall\": 0.44279596206791066, \"specificity\": 1.0, \"npv\": 0.9997084341601835, \"accuracy\": 0.9997085017003401, \"f1\": 0.6138026078660024, \"f2\": 0.49833029228491754, \"f0_5\": 0.7989292416381499, \"p4\": 0.7606488709113556, \"phi\": 0.6653321410328551}, {\"truth_threshold\": 32.100000478327274, \"match_probability\": 0.9999999997827614, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2873.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3665.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.4394310186601407, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5605689813398593, \"precision\": 1.0, \"recall\": 0.4394310186601407, \"specificity\": 1.0, \"npv\": 0.9997066739167164, \"accuracy\": 0.9997067413482696, \"f1\": 0.6105621081712889, \"f2\": 0.4949181739879414, \"f0_5\": 0.7967276760953965, \"p4\": 0.7581553668547628, \"phi\": 0.6627987040426103}, {\"truth_threshold\": 32.20000047981739, \"match_probability\": 0.9999999997973092, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2834.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3704.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.43346589171000305, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.566534108289997, \"precision\": 1.0, \"recall\": 0.43346589171000305, \"specificity\": 1.0, \"npv\": 0.9997035535003497, \"accuracy\": 0.9997036207241449, \"f1\": 0.6047801963294921, \"f2\": 0.4888566894362796, \"f0_5\": 0.7927716235873336, \"p4\": 0.7536812913857303, \"phi\": 0.6582836715760826}, {\"truth_threshold\": 32.30000048130751, \"match_probability\": 0.9999999998108828, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2807.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3731.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.42933618843683086, 
\"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5706638115631691, \"precision\": 1.0, \"recall\": 0.42933618843683086, \"specificity\": 1.0, \"npv\": 0.9997013932235069, \"accuracy\": 0.9997014602920584, \"f1\": 0.600749063670412, \"f2\": 0.484650713077109, \"f0_5\": 0.7899921197793538, \"p4\": 0.7505428648406718, \"phi\": 0.6551396688810486}, {\"truth_threshold\": 32.40000048279762, \"match_probability\": 0.9999999998235475, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2778.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3760.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.4249005812174977, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5750994187825023, \"precision\": 1.0, \"recall\": 0.4249005812174977, \"specificity\": 1.0, \"npv\": 0.9996990729365567, \"accuracy\": 0.9996991398279655, \"f1\": 0.5963933018462859, \"f2\": 0.48012443829934326, \"f0_5\": 0.7869688385269121, \"p4\": 0.7471338909661539, \"phi\": 0.6517458992071501}, {\"truth_threshold\": 32.50000048428774, \"match_probability\": 0.9999999998353639, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2741.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3797.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.41924135821352093, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5807586417864791, \"precision\": 1.0, \"recall\": 0.41924135821352093, \"specificity\": 1.0, \"npv\": 0.999696112586085, \"accuracy\": 0.9996961792358472, \"f1\": 0.5907964220282358, \"f2\": 0.47433634444329076, \"f0_5\": 0.7830533653296766, \"p4\": 0.742726180757158, \"phi\": 0.6473901111705115}, {\"truth_threshold\": 32.600000485777855, \"match_probability\": 0.9999999998463891, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2698.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3840.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 
0.41266442337106146, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5873355766289385, \"precision\": 1.0, \"recall\": 0.41266442337106146, \"specificity\": 1.0, \"npv\": 0.9996926722008079, \"accuracy\": 0.9996927385477096, \"f1\": 0.5842355998267649, \"f2\": 0.46759098786828424, \"f0_5\": 0.7784189267166762, \"p4\": 0.737519702193418, \"phi\": 0.6422908999215402}, {\"truth_threshold\": 32.70000048726797, \"match_probability\": 0.999999999856676, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2662.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3876.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.40715815234016517, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5928418476598348, \"precision\": 1.0, \"recall\": 0.40715815234016517, \"specificity\": 1.0, \"npv\": 0.9996897918964616, \"accuracy\": 0.9996898579715943, \"f1\": 0.578695652173913, \"f2\": 0.4619282293329631, \"f0_5\": 0.7744675898987547, \"p4\": 0.7330896754185727, \"phi\": 0.6379904768739794}, {\"truth_threshold\": 32.80000048875809, \"match_probability\": 0.999999999866274, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2642.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3896.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.40409911287855615, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5959008871214438, \"precision\": 1.0, \"recall\": 0.40409911287855615, \"specificity\": 1.0, \"npv\": 0.9996881917345519, \"accuracy\": 0.9996882576515304, \"f1\": 0.575599128540305, \"f2\": 0.45877613391678823, \"f0_5\": 0.7722436571963054, \"p4\": 0.7305999686043083, \"phi\": 0.6355887911496712}, {\"truth_threshold\": 32.9000004902482, \"match_probability\": 0.9999999998752291, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2612.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3926.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 
0.9994768553710742, \"tp_rate\": 0.3995105536861426, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6004894463138575, \"precision\": 1.0, \"recall\": 0.3995105536861426, \"specificity\": 1.0, \"npv\": 0.9996857915012923, \"accuracy\": 0.9996858571714343, \"f1\": 0.5709289617486338, \"f2\": 0.4540397719371437, \"f0_5\": 0.7688684799246438, \"p4\": 0.7268264433624413, \"phi\": 0.6319691638639111}, {\"truth_threshold\": 33.00000049173832, \"match_probability\": 0.9999999998835847, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2588.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3950.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.39583970633221166, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6041602936677883, \"precision\": 1.0, \"recall\": 0.39583970633221166, \"specificity\": 1.0, \"npv\": 0.9996838713229833, \"accuracy\": 0.9996839367873575, \"f1\": 0.5671707210168748, \"f2\": 0.45024356297842727, \"f0_5\": 0.7661338069863824, \"p4\": 0.723773434765225, \"phi\": 0.6290584790379494}, {\"truth_threshold\": 33.100000493228436, \"match_probability\": 0.9999999998913807, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2566.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3972.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.3924747629244417, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6075252370755583, \"precision\": 1.0, \"recall\": 0.3924747629244417, \"specificity\": 1.0, \"npv\": 0.9996821111660134, \"accuracy\": 0.999682176435287, \"f1\": 0.5637082601054482, \"f2\": 0.4467581307890522, \"f0_5\": 0.7635995714795858, \"p4\": 0.7209477194409156, \"phi\": 0.6263784794991655}, {\"truth_threshold\": 33.20000049471855, \"match_probability\": 0.9999999998986546, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2552.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 3986.0, \"P_rate\": 
0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.39033343530131537, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6096665646986846, \"precision\": 1.0, \"recall\": 0.39033343530131537, \"specificity\": 1.0, \"npv\": 0.9996809910693506, \"accuracy\": 0.9996810562112423, \"f1\": 0.5614961496149615, \"f2\": 0.4445373467112598, \"f0_5\": 0.7619730084796369, \"p4\": 0.7191358571870566, \"phi\": 0.6246670436716852}, {\"truth_threshold\": 33.30000049620867, \"match_probability\": 0.9999999999054414, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2526.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4012.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.38635668400122364, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6136433159987764, \"precision\": 1.0, \"recall\": 0.38635668400122364, \"specificity\": 1.0, \"npv\": 0.9996789108964933, \"accuracy\": 0.9996789757951591, \"f1\": 0.557369814651368, \"f2\": 0.4404072808424576, \"f0_5\": 0.7589232063453912, \"p4\": 0.7157423700487617, \"phi\": 0.6214761693580244}, {\"truth_threshold\": 33.400000497698784, \"match_probability\": 0.9999999999117737, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2508.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4030.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.3836035484857755, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6163964515142245, \"precision\": 1.0, \"recall\": 0.3836035484857755, \"specificity\": 1.0, \"npv\": 0.9996774707818941, \"accuracy\": 0.9996775355071014, \"f1\": 0.554499226177316, \"f2\": 0.43754361479413817, \"f0_5\": 0.7567893783946892, \"p4\": 0.7133709831956773, \"phi\": 0.6192574788674091}, {\"truth_threshold\": 33.5000004991889, \"match_probability\": 0.999999999917682, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2487.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 
4051.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.38039155705108596, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.619608442948914, \"precision\": 1.0, \"recall\": 0.38039155705108596, \"specificity\": 1.0, \"npv\": 0.9996757906534391, \"accuracy\": 0.9996758551710342, \"f1\": 0.5511357340720222, \"f2\": 0.4341981214427878, \"f0_5\": 0.7542763556957418, \"p4\": 0.7105812462592302, \"phi\": 0.6166589256249658}, {\"truth_threshold\": 33.600000500679016, \"match_probability\": 0.9999999999231945, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2453.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4085.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.3751911899663506, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6248088100336494, \"precision\": 1.0, \"recall\": 0.3751911899663506, \"specificity\": 1.0, \"npv\": 0.999673070457438, \"accuracy\": 0.9996731346269254, \"f1\": 0.5456567678789901, \"f2\": 0.4287711938472295, \"f0_5\": 0.7501529051987768, \"p4\": 0.7060109105287832, \"phi\": 0.6124283867377814}, {\"truth_threshold\": 33.70000050216913, \"match_probability\": 0.999999999928338, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2440.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4098.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.3732028143163047, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6267971856836954, \"precision\": 1.0, \"recall\": 0.3732028143163047, \"specificity\": 1.0, \"npv\": 0.9996720303864087, \"accuracy\": 0.9996720944188838, \"f1\": 0.5435509022053909, \"f2\": 0.4266927811975378, \"f0_5\": 0.7485581052889925, \"p4\": 0.7042456520017459, \"phi\": 0.6108030903110283}, {\"truth_threshold\": 33.80000050365925, \"match_probability\": 0.9999999999331369, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2416.0, \"tn\": 12490962.0, 
\"fp\": 0.0, \"fn\": 4122.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.36953196696237384, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6304680330376262, \"precision\": 1.0, \"recall\": 0.36953196696237384, \"specificity\": 1.0, \"npv\": 0.9996701102609634, \"accuracy\": 0.999670174034807, \"f1\": 0.5396470851016305, \"f2\": 0.4228507420890507, \"f0_5\": 0.745586964572275, \"p4\": 0.7009604773391737, \"phi\": 0.6077911336620722}, {\"truth_threshold\": 33.900000505149364, \"match_probability\": 0.9999999999376146, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2380.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4158.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.3640256959314775, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6359743040685225, \"precision\": 1.0, \"recall\": 0.3640256959314775, \"specificity\": 1.0, \"npv\": 0.9996672300866258, \"accuracy\": 0.9996672934586918, \"f1\": 0.533751962323391, \"f2\": 0.4170755642787046, \"f0_5\": 0.7410636442894507, \"p4\": 0.6959678765353984, \"phi\": 0.6032450241255011}, {\"truth_threshold\": 34.00000050663948, \"match_probability\": 0.9999999999417923, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2359.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4179.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.360813704496788, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.639186295503212, \"precision\": 1.0, \"recall\": 0.360813704496788, \"specificity\": 1.0, \"npv\": 0.9996655499925932, \"accuracy\": 0.9996656131226245, \"f1\": 0.5302911093627065, \"f2\": 0.4136999754480727, \"f0_5\": 0.738387379491674, \"p4\": 0.6930189531835518, \"phi\": 0.6005772476131996}, {\"truth_threshold\": 34.1000005081296, \"match_probability\": 0.9999999999456903, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2347.0, \"tn\": 
12490962.0, \"fp\": 0.0, \"fn\": 4191.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.3589782808198226, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6410217191801775, \"precision\": 1.0, \"recall\": 0.3589782808198226, \"specificity\": 1.0, \"npv\": 0.9996645899413956, \"accuracy\": 0.9996646529305862, \"f1\": 0.5283061339335959, \"f2\": 0.41176883399417524, \"f0_5\": 0.7368454100213487, \"p4\": 0.6913215703618015, \"phi\": 0.5990474738229142}, {\"truth_threshold\": 34.20000050961971, \"match_probability\": 0.9999999999493273, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2331.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4207.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.35653104925053536, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6434689507494646, \"precision\": 1.0, \"recall\": 0.35653104925053536, \"specificity\": 1.0, \"npv\": 0.9996633098760008, \"accuracy\": 0.9996633726745349, \"f1\": 0.5256511444356748, \"f2\": 0.40919144753010567, \"f0_5\": 0.73477493380406, \"p4\": 0.6890443454923086, \"phi\": 0.5970016823823477}, {\"truth_threshold\": 34.30000051110983, \"match_probability\": 0.9999999999527207, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2310.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4228.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.3533190578158458, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6466809421841542, \"precision\": 1.0, \"recall\": 0.3533190578158458, \"specificity\": 1.0, \"npv\": 0.9996616297951452, \"accuracy\": 0.9996616923384677, \"f1\": 0.5221518987341772, \"f2\": 0.4058042302016724, \"f0_5\": 0.7320319432120674, \"p4\": 0.6860308577849257, \"phi\": 0.5943059020183743}, {\"truth_threshold\": 34.400000512599945, \"match_probability\": 0.9999999999558868, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, 
\"tp\": 2282.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4256.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.3490364025695932, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6509635974304069, \"precision\": 1.0, \"recall\": 0.3490364025695932, \"specificity\": 1.0, \"npv\": 0.9996593896961221, \"accuracy\": 0.9996594518903781, \"f1\": 0.5174603174603175, \"f2\": 0.4012801575578533, \"f0_5\": 0.7283288650580876, \"p4\": 0.6819687495284521, \"phi\": 0.5906924048728318}, {\"truth_threshold\": 34.50000051409006, \"match_probability\": 0.999999999958841, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2262.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4276.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.3459773631079841, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6540226368920159, \"precision\": 1.0, \"recall\": 0.3459773631079841, \"specificity\": 1.0, \"npv\": 0.9996577896315381, \"accuracy\": 0.9996578515703141, \"f1\": 0.514090909090909, \"f2\": 0.3980432181319068, \"f0_5\": 0.725651225458745, \"p4\": 0.6790358895272599, \"phi\": 0.5880977521357104}, {\"truth_threshold\": 34.60000051558018, \"match_probability\": 0.9999999999615973, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2234.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4304.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.3416947078617314, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6583052921382686, \"precision\": 1.0, \"recall\": 0.3416947078617314, \"specificity\": 1.0, \"npv\": 0.9996555495497255, \"accuracy\": 0.9996556111222245, \"f1\": 0.5093479252165982, \"f2\": 0.39350383992108784, \"f0_5\": 0.7218560165438801, \"p4\": 0.674885233784951, \"phi\": 0.5844459008032754}, {\"truth_threshold\": 34.70000051707029, \"match_probability\": 0.999999999964169, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 
12490962.0, \"tp\": 2226.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4312.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.3404710920770878, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6595289079229122, \"precision\": 1.0, \"recall\": 0.3404710920770878, \"specificity\": 1.0, \"npv\": 0.9996549095281945, \"accuracy\": 0.9996549709941989, \"f1\": 0.5079872204472844, \"f2\": 0.3922052294030587, \"f0_5\": 0.7207615593834995, \"p4\": 0.6736896422701116, \"phi\": 0.5833983191159251}, {\"truth_threshold\": 34.80000051856041, \"match_probability\": 0.9999999999665685, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2207.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4331.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.3375650045885592, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6624349954114408, \"precision\": 1.0, \"recall\": 0.3375650045885592, \"specificity\": 1.0, \"npv\": 0.9996533894803428, \"accuracy\": 0.999653450690138, \"f1\": 0.5047455688965123, \"f2\": 0.3891180930216157, \"f0_5\": 0.7181439541845633, \"p4\": 0.67083263588564, \"phi\": 0.5809027465995497}, {\"truth_threshold\": 34.900000520050526, \"match_probability\": 0.9999999999688073, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2198.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4340.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.3361884368308351, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6638115631691649, \"precision\": 1.0, \"recall\": 0.3361884368308351, \"specificity\": 1.0, \"npv\": 0.9996526694592895, \"accuracy\": 0.9996527305461093, \"f1\": 0.5032051282051282, \"f2\": 0.38765432098765434, \"f0_5\": 0.7168949771689498, \"p4\": 0.6694706614940096, \"phi\": 0.579716886349958}, {\"truth_threshold\": 35.00000052154064, \"match_probability\": 0.9999999999708962, \"total_clerical_labels\": 12497500.0, \"p\": 
6538.0, \"n\": 12490962.0, \"tp\": 2178.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4360.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.3331293973692261, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6668706026307739, \"precision\": 1.0, \"recall\": 0.3331293973692261, \"specificity\": 1.0, \"npv\": 0.9996510694162183, \"accuracy\": 0.9996511302260452, \"f1\": 0.4997705369435521, \"f2\": 0.38439816448994, \"f0_5\": 0.7140983606557377, \"p4\": 0.6664239106867955, \"phi\": 0.5770729228911431}, {\"truth_threshold\": 35.10000052303076, \"match_probability\": 0.9999999999728452, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2167.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4371.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.33144692566534106, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6685530743346589, \"precision\": 1.0, \"recall\": 0.33144692566534106, \"specificity\": 1.0, \"npv\": 0.9996501893947124, \"accuracy\": 0.99965025005001, \"f1\": 0.49787478460654794, \"f2\": 0.3826053179843921, \"f0_5\": 0.7125476785479417, \"p4\": 0.6647362472032575, \"phi\": 0.5756135700412677}, {\"truth_threshold\": 35.200000524520874, \"match_probability\": 0.9999999999746636, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2148.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4390.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.32854083817681246, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6714591618231875, \"precision\": 1.0, \"recall\": 0.32854083817681246, \"specificity\": 1.0, \"npv\": 0.9996486693612153, \"accuracy\": 0.9996487297459492, \"f1\": 0.49458899378309923, \"f2\": 0.3795053003533569, \"f0_5\": 0.7098479841374752, \"p4\": 0.6618009852814435, \"phi\": 0.5730841227204511}, {\"truth_threshold\": 35.30000052601099, \"match_probability\": 0.9999999999763604, \"total_clerical_labels\": 
12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2135.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4403.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.3265524625267666, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6734475374732334, \"precision\": 1.0, \"recall\": 0.3265524625267666, \"specificity\": 1.0, \"npv\": 0.9996476293409596, \"accuracy\": 0.9996476895379076, \"f1\": 0.4923325262308313, \"f2\": 0.3773818361791636, \"f0_5\": 0.7079851439182916, \"p4\": 0.6597777537463474, \"phi\": 0.5713470005349943}, {\"truth_threshold\": 35.400000527501106, \"match_probability\": 0.9999999999779434, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2126.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4412.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.3251758947690425, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6748241052309575, \"precision\": 1.0, \"recall\": 0.3251758947690425, \"specificity\": 1.0, \"npv\": 0.9996469093282042, \"accuracy\": 0.9996469693938788, \"f1\": 0.4907663896583564, \"f2\": 0.3759106018813212, \"f0_5\": 0.706687940433453, \"p4\": 0.6583698982964457, \"phi\": 0.570141279152726}, {\"truth_threshold\": 35.50000052899122, \"match_probability\": 0.9999999999794205, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2117.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4421.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.32379932701131847, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6762006729886816, \"precision\": 1.0, \"recall\": 0.32379932701131847, \"specificity\": 1.0, \"npv\": 0.999646189316486, \"accuracy\": 0.99964624924985, \"f1\": 0.4891969959560947, \"f2\": 0.37443843078991124, \"f0_5\": 0.705384512861522, \"p4\": 0.6569561451140302, \"phi\": 0.5689330042721262}, {\"truth_threshold\": 35.60000053048134, \"match_probability\": 0.9999999999807987, 
\"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2111.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4427.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.32288161517283576, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6771183848271642, \"precision\": 1.0, \"recall\": 0.32288161517283576, \"specificity\": 1.0, \"npv\": 0.99964570930925, \"accuracy\": 0.9996457691538307, \"f1\": 0.48814891895016765, \"f2\": 0.37345646251282594, \"f0_5\": 0.7045120811640635, \"p4\": 0.6560103481434439, \"phi\": 0.5681260610307942}, {\"truth_threshold\": 35.700000531971455, \"match_probability\": 0.9999999999820844, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2087.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4451.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.31921076781890484, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6807892321810951, \"precision\": 1.0, \"recall\": 0.31921076781890484, \"specificity\": 1.0, \"npv\": 0.999643789284916, \"accuracy\": 0.999643848769754, \"f1\": 0.48394202898550726, \"f2\": 0.36952441658698965, \"f0_5\": 0.70099422275964, \"p4\": 0.6522005604860008, \"phi\": 0.564886768762588}, {\"truth_threshold\": 35.80000053346157, \"match_probability\": 0.9999999999832843, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2080.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4458.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.3181401040073417, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6818598959926583, \"precision\": 1.0, \"recall\": 0.3181401040073417, \"specificity\": 1.0, \"npv\": 0.9996432292792079, \"accuracy\": 0.9996432886577316, \"f1\": 0.4827106057089812, \"f2\": 0.36837631056956643, \"f0_5\": 0.6999596177143627, \"p4\": 0.6510812858932477, \"phi\": 0.5639384726484993}, {\"truth_threshold\": 35.90000053495169, \"match_probability\": 
0.9999999999844037, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2068.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4470.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.3163046803303763, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6836953196696237, \"precision\": 1.0, \"recall\": 0.3163046803303763, \"specificity\": 1.0, \"npv\": 0.9996422692708824, \"accuracy\": 0.9996423284656931, \"f1\": 0.4805949337671392, \"f2\": 0.3664068036853296, \"f0_5\": 0.6981769074949359, \"p4\": 0.6491539469782224, \"phi\": 0.5623091039868183}, {\"truth_threshold\": 36.0000005364418, \"match_probability\": 0.9999999999854481, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2063.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4475.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.315539920464974, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.684460079535026, \"precision\": 1.0, \"recall\": 0.315539920464974, \"specificity\": 1.0, \"npv\": 0.9996418692679576, \"accuracy\": 0.9996419283856771, \"f1\": 0.4797116614347169, \"f2\": 0.36558568137515507, \"f0_5\": 0.6974306964164977, \"p4\": 0.6483476713886296, \"phi\": 0.5616288061720742}, {\"truth_threshold\": 36.10000053793192, \"match_probability\": 0.9999999999864226, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2050.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4488.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.3135515448149281, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6864484551850719, \"precision\": 1.0, \"recall\": 0.3135515448149281, \"specificity\": 1.0, \"npv\": 0.9996408292618513, \"accuracy\": 0.9996408881776355, \"f1\": 0.4774103400093153, \"f2\": 0.3634494007517197, \"f0_5\": 0.6954810693445515, \"p4\": 0.6462424338526489, \"phi\": 0.5598561657025216}, {\"truth_threshold\": 36.200000539422035, 
\"match_probability\": 0.9999999999873318, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2040.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4498.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.3120220250841236, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6879779749158764, \"precision\": 1.0, \"recall\": 0.3120220250841236, \"specificity\": 1.0, \"npv\": 0.9996400292586267, \"accuracy\": 0.9996400880176035, \"f1\": 0.4756353462345535, \"f2\": 0.36180476730987515, \"f0_5\": 0.6939719689753708, \"p4\": 0.6446141936603185, \"phi\": 0.5584887700611618}, {\"truth_threshold\": 36.30000054091215, \"match_probability\": 0.9999999999881801, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2028.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4510.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.31018660140715815, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6898133985928419, \"precision\": 1.0, \"recall\": 0.31018660140715815, \"specificity\": 1.0, \"npv\": 0.9996390692564474, \"accuracy\": 0.9996391278255651, \"f1\": 0.4734998832593976, \"f2\": 0.3598296664300923, \"f0_5\": 0.6921501706484642, \"p4\": 0.6426500896170942, \"phi\": 0.5568434659098301}, {\"truth_threshold\": 36.40000054240227, \"match_probability\": 0.9999999999889717, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2009.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4529.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.30728051391862954, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6927194860813705, \"precision\": 1.0, \"recall\": 0.30728051391862954, \"specificity\": 1.0, \"npv\": 0.999637549256768, \"accuracy\": 0.9996376075215043, \"f1\": 0.4701064701064701, \"f2\": 0.3566989808600547, \"f0_5\": 0.6892411143131604, \"p4\": 0.6395172429776101, \"phi\": 0.5542284184954602}, 
{\"truth_threshold\": 36.500000543892384, \"match_probability\": 0.9999999999897102, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1995.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4543.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.30513918629550324, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6948608137044968, \"precision\": 1.0, \"recall\": 0.30513918629550324, \"specificity\": 1.0, \"npv\": 0.9996364292599619, \"accuracy\": 0.9996364872974595, \"f1\": 0.46759639048400325, \"f2\": 0.35438945535936334, \"f0_5\": 0.6870781099324976, \"p4\": 0.6371905823304123, \"phi\": 0.5522936235515735}, {\"truth_threshold\": 36.6000005453825, \"match_probability\": 0.9999999999903993, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1989.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4549.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.30422147445702047, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6957785255429795, \"precision\": 1.0, \"recall\": 0.30422147445702047, \"specificity\": 1.0, \"npv\": 0.999635949262099, \"accuracy\": 0.9996360072014403, \"f1\": 0.46651811891638323, \"f2\": 0.35339895526100706, \"f0_5\": 0.6861459914447358, \"p4\": 0.6361886581357914, \"phi\": 0.5514623490364134}, {\"truth_threshold\": 36.700000546872616, \"match_probability\": 0.9999999999910423, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1979.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4559.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.30269195472621596, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.697308045273784, \"precision\": 1.0, \"recall\": 0.30269195472621596, \"specificity\": 1.0, \"npv\": 0.9996351492666853, \"accuracy\": 0.9996352070414083, \"f1\": 0.46471762357637664, \"f2\": 0.35174718282321993, \"f0_5\": 0.6845855818458558, \"p4\": 0.6345123598985047, 
\"phi\": 0.5500741016850054}, {\"truth_threshold\": 36.80000054836273, \"match_probability\": 0.9999999999916421, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1957.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4581.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.299327011318446, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.700672988681554, \"precision\": 1.0, \"recall\": 0.299327011318446, \"specificity\": 1.0, \"npv\": 0.9996333892812821, \"accuracy\": 0.9996334466893378, \"f1\": 0.4607416127133608, \"f2\": 0.34810914653669645, \"f0_5\": 0.6811220938326604, \"p4\": 0.6307959758983506, \"phi\": 0.5470075637755797}, {\"truth_threshold\": 36.90000054985285, \"match_probability\": 0.9999999999922018, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1953.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4585.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.2987152034261242, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7012847965738758, \"precision\": 1.0, \"recall\": 0.2987152034261242, \"specificity\": 1.0, \"npv\": 0.9996330692846019, \"accuracy\": 0.9996331266253251, \"f1\": 0.4600164880461665, \"f2\": 0.34744707347447074, \"f0_5\": 0.6804878048780488, \"p4\": 0.6301160186518946, \"phi\": 0.5464481637290318}, {\"truth_threshold\": 37.000000551342964, \"match_probability\": 0.999999999992724, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1942.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4596.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.2970327317222392, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7029672682777608, \"precision\": 1.0, \"recall\": 0.2970327317222392, \"specificity\": 1.0, \"npv\": 0.9996321892947878, \"accuracy\": 0.9996322464492898, \"f1\": 0.4580188679245283, \"f2\": 0.3456254004413754, \"f0_5\": 0.6787361946036627, \"p4\": 
0.6282393311894444, \"phi\": 0.5449068543372466}, {\"truth_threshold\": 37.10000055283308, \"match_probability\": 0.9999999999932113, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1923.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4615.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.2941266442337106, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7058733557662894, \"precision\": 1.0, \"recall\": 0.2941266442337106, \"specificity\": 1.0, \"npv\": 0.9996306693160307, \"accuracy\": 0.9996307261452291, \"f1\": 0.45455619903084743, \"f2\": 0.34247551202137133, \"f0_5\": 0.6756851721714687, \"p4\": 0.6249740769561738, \"phi\": 0.5422342798449967}, {\"truth_threshold\": 37.200000554323196, \"match_probability\": 0.999999999993666, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1912.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4626.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.29244417252982563, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7075558274701743, \"precision\": 1.0, \"recall\": 0.29244417252982563, \"specificity\": 1.0, \"npv\": 0.9996297893304421, \"accuracy\": 0.9996298459691938, \"f1\": 0.45254437869822484, \"f2\": 0.34064994298745727, \"f0_5\": 0.6739038488650783, \"p4\": 0.6230698061909045, \"phi\": 0.5406809656136463}, {\"truth_threshold\": 37.30000055581331, \"match_probability\": 0.9999999999940901, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1900.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4638.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.2906087488528602, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7093912511471397, \"precision\": 1.0, \"recall\": 0.2906087488528602, \"specificity\": 1.0, \"npv\": 0.9996288293479305, \"accuracy\": 0.9996288857771555, \"f1\": 0.4503436833372837, \"f2\": 0.3386567802652217, \"f0_5\": 
0.6719479417173575, \"p4\": 0.6209807081831444, \"phi\": 0.5389813386510255}, {\"truth_threshold\": 37.40000055730343, \"match_probability\": 0.9999999999944859, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1887.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4651.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.2886203732028143, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7113796267971857, \"precision\": 1.0, \"recall\": 0.2886203732028143, \"specificity\": 1.0, \"npv\": 0.9996277893689569, \"accuracy\": 0.9996278455691138, \"f1\": 0.4479525222551929, \"f2\": 0.3364955954206641, \"f0_5\": 0.6698139997160301, \"p4\": 0.6187036055927619, \"phi\": 0.5371340108683983}, {\"truth_threshold\": 37.500000558793545, \"match_probability\": 0.9999999999948551, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1872.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4666.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.28632609360660755, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7136739063933925, \"precision\": 1.0, \"recall\": 0.28632609360660755, \"specificity\": 1.0, \"npv\": 0.9996265893959071, \"accuracy\": 0.9996266453290659, \"f1\": 0.4451843043995244, \"f2\": 0.33399942906080504, \"f0_5\": 0.6673320975331527, \"p4\": 0.6160580220002259, \"phi\": 0.5349945573620598}, {\"truth_threshold\": 37.60000056028366, \"match_probability\": 0.9999999999951996, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1865.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4673.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.28525542979504437, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7147445702049556, \"precision\": 1.0, \"recall\": 0.28525542979504437, \"specificity\": 1.0, \"npv\": 0.9996260294094698, \"accuracy\": 0.9996260852170434, \"f1\": 0.44388908723075093, \"f2\": 
0.33283363672056254, \"f0_5\": 0.6661665952278897, \"p4\": 0.6148167009398807, \"phi\": 0.5339932140519316}, {\"truth_threshold\": 37.70000056177378, \"match_probability\": 0.9999999999955211, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1857.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4681.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.28403181401040073, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7159681859895992, \"precision\": 1.0, \"recall\": 0.28403181401040073, \"specificity\": 1.0, \"npv\": 0.9996253894257382, \"accuracy\": 0.9996254450890179, \"f1\": 0.4424061941631924, \"f2\": 0.3315005890963619, \"f0_5\": 0.6648288701131319, \"p4\": 0.6133927766355054, \"phi\": 0.5328465188864855}, {\"truth_threshold\": 37.80000056326389, \"match_probability\": 0.999999999995821, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1835.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4703.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.2806668706026308, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7193331293973693, \"precision\": 1.0, \"recall\": 0.2806668706026308, \"specificity\": 1.0, \"npv\": 0.9996236294747018, \"accuracy\": 0.9996236847369474, \"f1\": 0.4383136271348382, \"f2\": 0.3278307785757673, \"f0_5\": 0.66111831676034, \"p4\": 0.6094477233745271, \"phi\": 0.529680314779687}, {\"truth_threshold\": 37.90000056475401, \"match_probability\": 0.9999999999961009, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1815.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4723.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.2776078311410217, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7223921688589783, \"precision\": 1.0, \"recall\": 0.2776078311410217, \"specificity\": 1.0, \"npv\": 0.9996220295245919, \"accuracy\": 0.9996220844168834, \"f1\": 
0.4345744044056028, \"f2\": 0.324489577001466, \"f0_5\": 0.6577040150746485, \"p4\": 0.6058236032645471, \"phi\": 0.5267854435888565}, {\"truth_threshold\": 38.000000566244125, \"match_probability\": 0.999999999996362, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1799.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4739.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.2751605995717345, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7248394004282656, \"precision\": 1.0, \"recall\": 0.2751605995717345, \"specificity\": 1.0, \"npv\": 0.9996207495681915, \"accuracy\": 0.9996208041608322, \"f1\": 0.43157010915197314, \"f2\": 0.32181317305284246, \"f0_5\": 0.6549439347604485, \"p4\": 0.602898073209128, \"phi\": 0.5244580486516821}, {\"truth_threshold\": 38.10000056773424, \"match_probability\": 0.9999999999966056, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1778.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4760.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.27194860813704497, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.728051391862955, \"precision\": 1.0, \"recall\": 0.27194860813704497, \"specificity\": 1.0, \"npv\": 0.9996190696303903, \"accuracy\": 0.9996191238247649, \"f1\": 0.4276094276094276, \"f2\": 0.3182957393483709, \"f0_5\": 0.6512820512820513, \"p4\": 0.5990224166935114, \"phi\": 0.5213875858257775}, {\"truth_threshold\": 38.20000056922436, \"match_probability\": 0.999999999996833, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1767.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4771.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.27026613643316, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.72973386356684, \"precision\": 1.0, \"recall\": 0.27026613643316, \"specificity\": 1.0, \"npv\": 0.9996181896652241, \"accuracy\": 
0.9996182436487298, \"f1\": 0.425526791089705, \"f2\": 0.316451162290913, \"f0_5\": 0.6493458768190504, \"p4\": 0.5969758502506735, \"phi\": 0.5197720135108563}, {\"truth_threshold\": 38.300000570714474, \"match_probability\": 0.999999999997045, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1748.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4790.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.2673600489446314, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7326399510553686, \"precision\": 1.0, \"recall\": 0.2673600489446314, \"specificity\": 1.0, \"npv\": 0.9996166697290407, \"accuracy\": 0.9996167233446689, \"f1\": 0.42191648563842626, \"f2\": 0.3132616487455197, \"f0_5\": 0.6459719142645972, \"p4\": 0.5934138706367907, \"phi\": 0.5169695946036147}, {\"truth_threshold\": 38.40000057220459, \"match_probability\": 0.999999999997243, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1723.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4815.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.26353624961762007, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7364637503823799, \"precision\": 1.0, \"recall\": 0.26353624961762007, \"specificity\": 1.0, \"npv\": 0.999614669820052, \"accuracy\": 0.9996147229445889, \"f1\": 0.4171407819876528, \"f2\": 0.30905829596412554, \"f0_5\": 0.6414743112434848, \"p4\": 0.5886742132452341, \"phi\": 0.5132589026477106}, {\"truth_threshold\": 38.500000573694706, \"match_probability\": 0.9999999999974276, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1695.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4843.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.2592535943713674, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7407464056286326, \"precision\": 1.0, \"recall\": 0.2592535943713674, \"specificity\": 1.0, \"npv\": 
0.999612429931485, \"accuracy\": 0.9996124824964993, \"f1\": 0.4117575610348597, \"f2\": 0.3043415807806945, \"f0_5\": 0.6363568103318816, \"p4\": 0.5832931831629193, \"phi\": 0.50907083538348}, {\"truth_threshold\": 38.60000057518482, \"match_probability\": 0.9999999999975998, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1673.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4865.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.2558886509635974, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7441113490364025, \"precision\": 1.0, \"recall\": 0.2558886509635974, \"specificity\": 1.0, \"npv\": 0.9996106700260815, \"accuracy\": 0.9996107221444289, \"f1\": 0.4075021312872975, \"f2\": 0.30062893081761005, \"f0_5\": 0.6322751322751323, \"p4\": 0.579010358680175, \"phi\": 0.5057558955086848}, {\"truth_threshold\": 38.70000057667494, \"match_probability\": 0.9999999999977606, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1660.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4878.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.2539002753135515, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7460997246864485, \"precision\": 1.0, \"recall\": 0.2539002753135515, \"specificity\": 1.0, \"npv\": 0.9996096300848922, \"accuracy\": 0.9996096819363873, \"f1\": 0.40497682361551596, \"f2\": 0.29843233136775493, \"f0_5\": 0.6298376081347701, \"p4\": 0.5764565291009125, \"phi\": 0.5037868202768226}, {\"truth_threshold\": 38.800000578165054, \"match_probability\": 0.9999999999979106, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1643.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4895.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.25130009177118384, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7486999082288162, \"precision\": 1.0, \"recall\": 0.25130009177118384, 
\"specificity\": 1.0, \"npv\": 0.9996082701650635, \"accuracy\": 0.9996083216643329, \"f1\": 0.40166238846106833, \"f2\": 0.29555675481201654, \"f0_5\": 0.6266209000762777, \"p4\": 0.5730906956778281, \"phi\": 0.5012002095248114}, {\"truth_threshold\": 38.90000057965517, \"match_probability\": 0.9999999999980504, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1627.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4911.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.2488528602018966, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7511471397981034, \"precision\": 1.0, \"recall\": 0.2488528602018966, \"specificity\": 1.0, \"npv\": 0.9996069902438989, \"accuracy\": 0.9996070414082816, \"f1\": 0.3985303123086344, \"f2\": 0.292847114726952, \"f0_5\": 0.6235627778629464, \"p4\": 0.5698953892899203, \"phi\": 0.49875350484984426}, {\"truth_threshold\": 39.00000058114529, \"match_probability\": 0.999999999998181, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1605.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4933.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.24548791679412665, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7545120832058734, \"precision\": 1.0, \"recall\": 0.24548791679412665, \"specificity\": 1.0, \"npv\": 0.9996052303576495, \"accuracy\": 0.9996052810562113, \"f1\": 0.3942036104629743, \"f2\": 0.28911625896170334, \"f0_5\": 0.6193085352677883, \"p4\": 0.5654577276057174, \"phi\": 0.4953695646858136}, {\"truth_threshold\": 39.1000005826354, \"match_probability\": 0.9999999999983028, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1577.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4961.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.24120526154787397, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7587947384521261, \"precision\": 1.0, \"recall\": 
0.24120526154787397, \"specificity\": 1.0, \"npv\": 0.9996029905113852, \"accuracy\": 0.9996030406081217, \"f1\": 0.3886629698089957, \"f2\": 0.28435933499224636, \"f0_5\": 0.6138097462245057, \"p4\": 0.5597346188612727, \"phi\": 0.49102902232997964}, {\"truth_threshold\": 39.20000058412552, \"match_probability\": 0.9999999999984165, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1552.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 4986.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.23738146222086265, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7626185377791374, \"precision\": 1.0, \"recall\": 0.23738146222086265, \"specificity\": 1.0, \"npv\": 0.9996009906571314, \"accuracy\": 0.9996010402080416, \"f1\": 0.38368355995055625, \"f2\": 0.28010395610742134, \"f0_5\": 0.6088184528479523, \"p4\": 0.5545521216354188, \"phi\": 0.4871208728843517}, {\"truth_threshold\": 39.300000585615635, \"match_probability\": 0.9999999999985225, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1530.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5008.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.23401651881309268, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7659834811869073, \"precision\": 1.0, \"recall\": 0.23401651881309268, \"specificity\": 1.0, \"npv\": 0.9995992307920073, \"accuracy\": 0.9995992798559712, \"f1\": 0.37927615270203274, \"f2\": 0.2763528646774077, \"f0_5\": 0.6043608784958129, \"p4\": 0.5499337399482727, \"phi\": 0.48365559254307267}, {\"truth_threshold\": 39.40000058710575, \"match_probability\": 0.9999999999986214, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1508.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5030.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.23065157540532272, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7693484245946772, 
\"precision\": 1.0, \"recall\": 0.23065157540532272, \"specificity\": 1.0, \"npv\": 0.9995974709330799, \"accuracy\": 0.9995975195039007, \"f1\": 0.37484464330101913, \"f2\": 0.2725958062183659, \"f0_5\": 0.5998408910103421, \"p4\": 0.5452602500635279, \"phi\": 0.4801653167835961}, {\"truth_threshold\": 39.50000058859587, \"match_probability\": 0.9999999999987138, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1488.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5050.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.22759253594371368, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7724074640562864, \"precision\": 1.0, \"recall\": 0.22759253594371368, \"specificity\": 1.0, \"npv\": 0.9995958710667051, \"accuracy\": 0.9995959191838367, \"f1\": 0.37079491652130575, \"f2\": 0.2691751085383502, \"f0_5\": 0.5956765412329864, \"p4\": 0.5409629670720756, \"phi\": 0.4769701869246514}, {\"truth_threshold\": 39.60000059008598, \"match_probability\": 0.9999999999988, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1454.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5084.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.22239216885897828, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7776078311410217, \"precision\": 1.0, \"recall\": 0.22239216885897828, \"specificity\": 1.0, \"npv\": 0.999593151305621, \"accuracy\": 0.999593198639728, \"f1\": 0.3638638638638639, \"f2\": 0.26334854741722813, \"f0_5\": 0.5884733689493281, \"p4\": 0.5335490134264116, \"phi\": 0.4714888003923719}, {\"truth_threshold\": 39.7000005915761, \"match_probability\": 0.9999999999988802, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1425.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5113.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.21795656163964516, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 
0.7820434383603548, \"precision\": 1.0, \"recall\": 0.21795656163964516, \"specificity\": 1.0, \"npv\": 0.9995908315210976, \"accuracy\": 0.9995908781756351, \"f1\": 0.35790531206831594, \"f2\": 0.2583674801464989, \"f0_5\": 0.5822029743422128, \"p4\": 0.5271148184545168, \"phi\": 0.4667626599084938}, {\"truth_threshold\": 39.800000593066216, \"match_probability\": 0.9999999999989553, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1386.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5152.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.21199143468950749, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7880085653104925, \"precision\": 1.0, \"recall\": 0.21199143468950749, \"specificity\": 1.0, \"npv\": 0.999587711827853, \"accuracy\": 0.9995877575515103, \"f1\": 0.3498233215547703, \"f2\": 0.2516522623284189, \"f0_5\": 0.5735805330243338, \"p4\": 0.5182969060126912, \"phi\": 0.4603303521693834}, {\"truth_threshold\": 39.90000059455633, \"match_probability\": 0.9999999999990252, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1363.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5175.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.20847353930865709, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7915264606913429, \"precision\": 1.0, \"recall\": 0.20847353930865709, \"specificity\": 1.0, \"npv\": 0.9995858720178884, \"accuracy\": 0.9995859171834367, \"f1\": 0.34501961776990253, \"f2\": 0.24768308195529712, \"f0_5\": 0.5683903252710593, \"p4\": 0.513005581135573, \"phi\": 0.45649447376994556}, {\"truth_threshold\": 40.00000059604645, \"match_probability\": 0.9999999999990905, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1339.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5199.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.20480269195472622, \"tn_rate\": 1.0, 
\"fp_rate\": 0.0, \"fn_rate\": 0.7951973080452738, \"precision\": 1.0, \"recall\": 0.20480269195472622, \"specificity\": 1.0, \"npv\": 0.9995839522234069, \"accuracy\": 0.9995839967993598, \"f1\": 0.3399771486606576, \"f2\": 0.24353424757193262, \"f0_5\": 0.5628888515217757, \"p4\": 0.5074104415056013, \"phi\": 0.45245716288959126}, {\"truth_threshold\": 40.100000597536564, \"match_probability\": 0.9999999999991515, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1307.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5231.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.19990822881615172, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8000917711838482, \"precision\": 1.0, \"recall\": 0.19990822881615172, \"specificity\": 1.0, \"npv\": 0.9995813925089025, \"accuracy\": 0.9995814362872575, \"f1\": 0.33320586360739324, \"f2\": 0.23799118686041007, \"f0_5\": 0.5554139044705082, \"p4\": 0.49983042530441796, \"phi\": 0.4470173886260323}, {\"truth_threshold\": 40.20000059902668, \"match_probability\": 0.9999999999992082, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1273.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5265.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.19470786173141633, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8052921382685837, \"precision\": 1.0, \"recall\": 0.19470786173141633, \"specificity\": 1.0, \"npv\": 0.999578672826606, \"accuracy\": 0.9995787157431486, \"f1\": 0.3259505825118423, \"f2\": 0.23208751139471284, \"f0_5\": 0.5472914875322442, \"p4\": 0.49162269067072445, \"phi\": 0.44116417127685637}, {\"truth_threshold\": 40.300000600516796, \"match_probability\": 0.9999999999992613, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1256.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5282.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 
0.19210767818904864, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8078923218109514, \"precision\": 1.0, \"recall\": 0.19210767818904864, \"specificity\": 1.0, \"npv\": 0.9995773129910075, \"accuracy\": 0.9995773554710942, \"f1\": 0.32229920451629457, \"f2\": 0.22913018096906013, \"f0_5\": 0.5431586230755925, \"p4\": 0.48745789847301724, \"phi\": 0.43820825730370533}, {\"truth_threshold\": 40.40000060200691, \"match_probability\": 0.9999999999993108, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1238.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5300.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.1893545426736005, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8106454573263995, \"precision\": 1.0, \"recall\": 0.1893545426736005, \"specificity\": 1.0, \"npv\": 0.9995758731691124, \"accuracy\": 0.9995759151830366, \"f1\": 0.3184156378600823, \"f2\": 0.22599488864549105, \"f0_5\": 0.5387293298520452, \"p4\": 0.4830029538144372, \"phi\": 0.43505658520645585}, {\"truth_threshold\": 40.50000060349703, \"match_probability\": 0.9999999999993568, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1202.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5336.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.1838482716427042, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8161517283572958, \"precision\": 1.0, \"recall\": 0.1838482716427042, \"specificity\": 1.0, \"npv\": 0.9995729935377662, \"accuracy\": 0.9995730346069214, \"f1\": 0.31059431524547804, \"f2\": 0.2197119251297799, \"f0_5\": 0.5297020976555614, \"p4\": 0.47395077240442485, \"phi\": 0.4286837613470357}, {\"truth_threshold\": 40.600000604987144, \"match_probability\": 0.9999999999993999, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1165.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5373.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 
0.9994768553710742, \"tp_rate\": 0.17818904863872745, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8218109513612726, \"precision\": 1.0, \"recall\": 0.17818904863872745, \"specificity\": 1.0, \"npv\": 0.9995700339339494, \"accuracy\": 0.9995700740148029, \"f1\": 0.3024795534207452, \"f2\": 0.21323717831387048, \"f0_5\": 0.5201821753884622, \"p4\": 0.464444060105307, \"phi\": 0.4220336875114011}, {\"truth_threshold\": 40.70000060647726, \"match_probability\": 0.9999999999994401, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1139.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5399.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.17421229733863566, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8257877026613644, \"precision\": 1.0, \"recall\": 0.17421229733863566, \"specificity\": 1.0, \"npv\": 0.9995679542228334, \"accuracy\": 0.9995679935987197, \"f1\": 0.29673049368242804, \"f2\": 0.2086768531750394, \"f0_5\": 0.5133405444384352, \"p4\": 0.4576368357115885, \"phi\": 0.41729729168931834}, {\"truth_threshold\": 40.80000060796738, \"match_probability\": 0.9999999999994776, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1101.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5437.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.16840012236157847, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8315998776384216, \"precision\": 1.0, \"recall\": 0.16840012236157847, \"specificity\": 1.0, \"npv\": 0.9995649146606155, \"accuracy\": 0.9995649529905981, \"f1\": 0.28825762534363136, \"f2\": 0.20199611051994276, \"f0_5\": 0.5031072929994517, \"p4\": 0.4474937043237737, \"phi\": 0.4102765578694308}, {\"truth_threshold\": 40.90000060945749, \"match_probability\": 0.9999999999995126, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1062.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5476.0, \"P_rate\": 
0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.1624349954114408, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8375650045885592, \"precision\": 1.0, \"recall\": 0.1624349954114408, \"specificity\": 1.0, \"npv\": 0.99956179512914, \"accuracy\": 0.9995618323664733, \"f1\": 0.2794736842105263, \"f2\": 0.19512015874182406, \"f0_5\": 0.4923048396068978, \"p4\": 0.4368363450006928, \"phi\": 0.40294393605718076}, {\"truth_threshold\": 41.00000061094761, \"match_probability\": 0.9999999999995453, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1049.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5489.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.16044661976139493, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.839553380238605, \"precision\": 1.0, \"recall\": 0.16044661976139493, \"specificity\": 1.0, \"npv\": 0.9995607552896418, \"accuracy\": 0.9995607921584316, \"f1\": 0.2765256359562409, \"f2\": 0.19282379324289547, \"f0_5\": 0.4886342463201043, \"p4\": 0.43322666963594936, \"phi\": 0.400469904527631}, {\"truth_threshold\": 41.100000612437725, \"match_probability\": 0.9999999999995757, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1006.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5532.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.15386968491893546, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8461303150810645, \"precision\": 1.0, \"recall\": 0.15386968491893546, \"specificity\": 1.0, \"npv\": 0.9995573158359456, \"accuracy\": 0.999557351470294, \"f1\": 0.2667020148462354, \"f2\": 0.1852124604168201, \"f0_5\": 0.4762355614466957, \"p4\": 0.4210770609437375, \"phi\": 0.39217543172168984}, {\"truth_threshold\": 41.20000061392784, \"match_probability\": 0.9999999999996041, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 974.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 
5564.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.14897522178036096, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8510247782196391, \"precision\": 1.0, \"recall\": 0.14897522178036096, \"specificity\": 1.0, \"npv\": 0.9995547562578592, \"accuracy\": 0.9995547909581917, \"f1\": 0.25931842385516507, \"f2\": 0.1795325517953255, \"f0_5\": 0.46674333908376464, \"p4\": 0.4118204362946352, \"phi\": 0.3858871486524645}, {\"truth_threshold\": 41.30000061541796, \"match_probability\": 0.9999999999996306, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 944.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5594.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.14438666258794738, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8556133374120526, \"precision\": 1.0, \"recall\": 0.14438666258794738, \"specificity\": 1.0, \"npv\": 0.9995523566653084, \"accuracy\": 0.9995523904780956, \"f1\": 0.25233894680566693, \"f2\": 0.17419545320342486, \"f0_5\": 0.4576304052743843, \"p4\": 0.402970078818962, \"phi\": 0.3798973925428043}, {\"truth_threshold\": 41.40000061690807, \"match_probability\": 0.9999999999996554, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 911.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5627.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.13933924747629245, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8606607525237076, \"precision\": 1.0, \"recall\": 0.13933924747629245, \"specificity\": 1.0, \"npv\": 0.9995497171268095, \"accuracy\": 0.99954974994999, \"f1\": 0.24459659014632837, \"f2\": 0.16831097808816464, \"f0_5\": 0.44735808289137696, \"p4\": 0.39303620978223863, \"phi\": 0.3731976760372318}, {\"truth_threshold\": 41.50000061839819, \"match_probability\": 0.9999999999996785, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 876.0, \"tn\": 
12490962.0, \"fp\": 0.0, \"fn\": 5662.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.1339859284184766, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8660140715815234, \"precision\": 1.0, \"recall\": 0.1339859284184766, \"specificity\": 1.0, \"npv\": 0.9995469176315139, \"accuracy\": 0.999546949389878, \"f1\": 0.236309684380901, \"f2\": 0.1620541660500222, \"f0_5\": 0.43616809400517825, \"p4\": 0.3822657876801515, \"phi\": 0.36595795080403015}, {\"truth_threshold\": 41.600000619888306, \"match_probability\": 0.9999999999997, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 856.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5682.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.13092688895686755, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8690731110431325, \"precision\": 1.0, \"recall\": 0.13092688895686755, \"specificity\": 1.0, \"npv\": 0.999545317926957, \"accuracy\": 0.999545349069814, \"f1\": 0.2315390857451988, \"f2\": 0.15847156398104265, \"f0_5\": 0.42963260389480024, \"p4\": 0.3759997354909151, \"phi\": 0.36175593823402485}, {\"truth_threshold\": 41.70000062137842, \"match_probability\": 0.9999999999997201, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 838.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5700.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.1281737534414194, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8718262465585807, \"precision\": 1.0, \"recall\": 0.1281737534414194, \"specificity\": 1.0, \"npv\": 0.9995438781972338, \"accuracy\": 0.9995439087817564, \"f1\": 0.2272234273318872, \"f2\": 0.15524268247499073, \"f0_5\": 0.4236602628918099, \"p4\": 0.37028926201085083, \"phi\": 0.35793196364383606}, {\"truth_threshold\": 41.80000062286854, \"match_probability\": 0.9999999999997388, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, 
\"tp\": 794.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5744.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.12144386662587947, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8785561333741205, \"precision\": 1.0, \"recall\": 0.12144386662587947, \"specificity\": 1.0, \"npv\": 0.9995403588753708, \"accuracy\": 0.9995403880776155, \"f1\": 0.2165848336061102, \"f2\": 0.14733170043791285, \"f0_5\": 0.4086884908379658, \"p4\": 0.35603923788437486, \"phi\": 0.34840787308906246}, {\"truth_threshold\": 41.900000624358654, \"match_probability\": 0.9999999999997563, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 777.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5761.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.11884368308351177, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8811563169164882, \"precision\": 1.0, \"recall\": 0.11884368308351177, \"specificity\": 1.0, \"npv\": 0.9995389991440156, \"accuracy\": 0.9995390278055611, \"f1\": 0.21244019138755982, \"f2\": 0.14426826098258383, \"f0_5\": 0.4027576197387518, \"p4\": 0.3504199371412086, \"phi\": 0.3446576504937645}, {\"truth_threshold\": 42.00000062584877, \"match_probability\": 0.9999999999997726, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 747.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5791.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.1142551238910982, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8857448761089018, \"precision\": 1.0, \"recall\": 0.1142551238910982, \"specificity\": 1.0, \"npv\": 0.9995365996271192, \"accuracy\": 0.9995366273254651, \"f1\": 0.20507892930679478, \"f2\": 0.1388527454552214, \"f0_5\": 0.392084820491287, \"p4\": 0.3403442455620289, \"phi\": 0.33793812750869573}, {\"truth_threshold\": 42.100000627338886, \"match_probability\": 0.9999999999997878, \"total_clerical_labels\": 12497500.0, \"p\": 
6538.0, \"n\": 12490962.0, \"tp\": 726.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5812.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.11104313245640869, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8889568675435913, \"precision\": 1.0, \"recall\": 0.11104313245640869, \"specificity\": 1.0, \"npv\": 0.9995349199721464, \"accuracy\": 0.9995349469893979, \"f1\": 0.1998898678414097, \"f2\": 0.13505469156931318, \"f0_5\": 0.38445244651556876, \"p4\": 0.3331674454224692, \"phi\": 0.3331538511157764}, {\"truth_threshold\": 42.200000628829, \"match_probability\": 0.999999999999802, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 697.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5841.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.10660752523707556, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8933924747629245, \"precision\": 1.0, \"recall\": 0.10660752523707556, \"specificity\": 1.0, \"npv\": 0.9995326004578932, \"accuracy\": 0.9995326265253051, \"f1\": 0.1926744989633725, \"f2\": 0.129799992550933, \"f0_5\": 0.373686467939095, \"p4\": 0.32308433157745603, \"phi\": 0.3264317645827296}, {\"truth_threshold\": 42.30000063031912, \"match_probability\": 0.9999999999998154, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 677.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5861.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.1035484857754665, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8964515142245335, \"precision\": 1.0, \"recall\": 0.1035484857754665, \"specificity\": 1.0, \"npv\": 0.9995310007991631, \"accuracy\": 0.999531026205241, \"f1\": 0.18766458766458766, \"f2\": 0.12616944351261694, \"f0_5\": 0.3661042613021847, \"p4\": 0.3160111582961594, \"phi\": 0.3217140370241714}, {\"truth_threshold\": 42.400000631809235, \"match_probability\": 0.9999999999998277, \"total_clerical_labels\": 
12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 656.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5882.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.100336494340777, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.899663505659223, \"precision\": 1.0, \"recall\": 0.100336494340777, \"specificity\": 1.0, \"npv\": 0.9995293211630072, \"accuracy\": 0.9995293458691739, \"f1\": 0.18237420072282456, \"f2\": 0.12235153685467025, \"f0_5\": 0.35800043658589825, \"p4\": 0.30847692313345576, \"phi\": 0.3166848087236152}, {\"truth_threshold\": 42.50000063329935, \"match_probability\": 0.9999999999998392, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 633.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5905.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.09681859895992659, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9031814010400734, \"precision\": 1.0, \"recall\": 0.09681859895992659, \"specificity\": 1.0, \"npv\": 0.9995274815679802, \"accuracy\": 0.9995275055011003, \"f1\": 0.17654441500488077, \"f2\": 0.11816315101736047, \"f0_5\": 0.34895259095920617, \"p4\": 0.30009602910913363, \"phi\": 0.31108334958231965}, {\"truth_threshold\": 42.60000063478947, \"match_probability\": 0.99999999999985, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 603.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5935.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.092230039767513, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.907769960232487, \"precision\": 1.0, \"recall\": 0.092230039767513, \"specificity\": 1.0, \"npv\": 0.9995250821063821, \"accuracy\": 0.9995251050210042, \"f1\": 0.1688839098165523, \"f2\": 0.11268921696879088, \"f0_5\": 0.3368715083798883, \"p4\": 0.2889561771552784, \"phi\": 0.3036218669188672}, {\"truth_threshold\": 42.70000063627958, \"match_probability\": 0.99999999999986, 
\"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 574.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5964.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.08779443254817987, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9122055674518201, \"precision\": 1.0, \"recall\": 0.08779443254817987, \"specificity\": 1.0, \"npv\": 0.9995227626377878, \"accuracy\": 0.9995227845569113, \"f1\": 0.16141732283464566, \"f2\": 0.107386066003143, \"f0_5\": 0.3248811410459588, \"p4\": 0.27795687914489137, \"phi\": 0.2962305415799891}, {\"truth_threshold\": 42.8000006377697, \"match_probability\": 0.9999999999998694, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 561.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5977.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.08580605689813399, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.914193943101866, \"precision\": 1.0, \"recall\": 0.08580605689813399, \"specificity\": 1.0, \"npv\": 0.9995217228794987, \"accuracy\": 0.9995217443488698, \"f1\": 0.1580504296379772, \"f2\": 0.10500505371916295, \"f0_5\": 0.3194033249829196, \"p4\": 0.2729505812723341, \"phi\": 0.29285665064041005}, {\"truth_threshold\": 42.900000639259815, \"match_probability\": 0.9999999999998782, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 548.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 5990.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.0838176812480881, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9161823187519119, \"precision\": 1.0, \"recall\": 0.0838176812480881, \"specificity\": 1.0, \"npv\": 0.9995206831233728, \"accuracy\": 0.9995207041408282, \"f1\": 0.1546711826136043, \"f2\": 0.10262172284644194, \"f0_5\": 0.3138602520045819, \"p4\": 0.26789655329183043, \"phi\": 0.28944344183088017}, {\"truth_threshold\": 43.00000064074993, 
\"match_probability\": 0.9999999999998863, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 514.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6024.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.07861731416335271, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9213826858366473, \"precision\": 1.0, \"recall\": 0.07861731416335271, \"specificity\": 1.0, \"npv\": 0.9995179637714245, \"accuracy\": 0.9995179835967194, \"f1\": 0.14577424844015882, \"f2\": 0.09637740943523589, \"f0_5\": 0.2990458459390272, \"p4\": 0.25444763935199033, \"phi\": 0.2803202057821246}, {\"truth_threshold\": 43.10000064224005, \"match_probability\": 0.999999999999894, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 488.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6050.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.07464056286326094, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9253594371367391, \"precision\": 1.0, \"recall\": 0.07464056286326094, \"specificity\": 1.0, \"npv\": 0.9995158842769776, \"accuracy\": 0.9995159031806361, \"f1\": 0.138912610304583, \"f2\": 0.0915915915915916, \"f0_5\": 0.287396937573616, \"p4\": 0.24393180999375516, \"phi\": 0.27313811193827126}, {\"truth_threshold\": 43.200000643730164, \"match_probability\": 0.9999999999999011, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 482.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6056.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.07372285102477821, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9262771489752217, \"precision\": 1.0, \"recall\": 0.07372285102477821, \"specificity\": 1.0, \"npv\": 0.9995154043948724, \"accuracy\": 0.9995154230846169, \"f1\": 0.13732193732193732, \"f2\": 0.09048584516032139, \"f0_5\": 0.28466808410111033, \"p4\": 0.2414758980337697, \"phi\": 0.2714537258082382}, 
{\"truth_threshold\": 43.30000064522028, \"match_probability\": 0.9999999999999076, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 462.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6076.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.07066381156316917, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9293361884368309, \"precision\": 1.0, \"recall\": 0.07066381156316917, \"specificity\": 1.0, \"npv\": 0.9995138047911833, \"accuracy\": 0.9995138227645529, \"f1\": 0.132, \"f2\": 0.08679642293529721, \"f0_5\": 0.27545909849749584, \"p4\": 0.2332089336974025, \"phi\": 0.2657620273036583}, {\"truth_threshold\": 43.400000646710396, \"match_probability\": 0.9999999999999138, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 438.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6100.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.0669929642092383, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9330070357907617, \"precision\": 1.0, \"recall\": 0.0669929642092383, \"specificity\": 1.0, \"npv\": 0.9995118852735146, \"accuracy\": 0.999511902380476, \"f1\": 0.12557339449541285, \"f2\": 0.08236179014667168, \"f0_5\": 0.26417370325693607, \"p4\": 0.2231217873852862, \"phi\": 0.25876681386305483}, {\"truth_threshold\": 43.50000064820051, \"match_probability\": 0.9999999999999196, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 421.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6117.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.0643927806668706, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9356072193331294, \"precision\": 1.0, \"recall\": 0.0643927806668706, \"specificity\": 1.0, \"npv\": 0.9995105256196268, \"accuracy\": 0.9995105421084217, \"f1\": 0.1209943957465153, \"f2\": 0.07921574530538517, \"f0_5\": 0.25602043298467525, \"p4\": 0.21586405529904226, \"phi\": 
0.25369521487496205}, {\"truth_threshold\": 43.60000064969063, \"match_probability\": 0.999999999999925, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 406.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6132.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.06209850107066381, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9379014989293362, \"precision\": 1.0, \"recall\": 0.06209850107066381, \"specificity\": 1.0, \"npv\": 0.9995093259280917, \"accuracy\": 0.9995093418683737, \"f1\": 0.11693548387096774, \"f2\": 0.0764364786505008, \"f0_5\": 0.24871355060034306, \"p4\": 0.20938090097792697, \"phi\": 0.24913456393339736}, {\"truth_threshold\": 43.700000651180744, \"match_probability\": 0.9999999999999301, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 377.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6161.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.05766289385133068, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9423371061486693, \"precision\": 1.0, \"recall\": 0.05766289385133068, \"specificity\": 1.0, \"npv\": 0.9995070065326236, \"accuracy\": 0.9995070214042808, \"f1\": 0.10903832248734635, \"f2\": 0.0710543179162426, \"f0_5\": 0.23427790206313695, \"p4\": 0.196631038781739, \"phi\": 0.24007179430610326}, {\"truth_threshold\": 43.80000065267086, \"match_probability\": 0.9999999999999347, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 356.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6182.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.054450902416641175, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9455490975833588, \"precision\": 1.0, \"recall\": 0.054450902416641175, \"specificity\": 1.0, \"npv\": 0.9995053269771077, \"accuracy\": 0.9995053410682136, \"f1\": 0.10327821293878735, \"f2\": 0.0671495397615814, \"f0_5\": 0.22356191911580006, 
\"p4\": 0.1872162784936794, \"phi\": 0.23328944902018933}, {\"truth_threshold\": 43.900000654160976, \"match_probability\": 0.999999999999939, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 333.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6205.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.05093300703579076, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9490669929642093, \"precision\": 1.0, \"recall\": 0.05093300703579076, \"specificity\": 1.0, \"npv\": 0.9995034874704003, \"accuracy\": 0.99950350070014, \"f1\": 0.09692912239848639, \"f2\": 0.06286577307910138, \"f0_5\": 0.21156289707750953, \"p4\": 0.1767242624558962, \"phi\": 0.22562738787573486}, {\"truth_threshold\": 44.00000065565109, \"match_probability\": 0.9999999999999432, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 314.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6224.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.04802691954726216, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9519730804527379, \"precision\": 1.0, \"recall\": 0.04802691954726216, \"specificity\": 1.0, \"npv\": 0.9995019678830098, \"accuracy\": 0.9995019803960792, \"f1\": 0.09165207238762405, \"f2\": 0.05932139348598201, \"f0_5\": 0.20143700282268412, \"p4\": 0.16791092629081864, \"phi\": 0.21909587079369508}, {\"truth_threshold\": 44.10000065714121, \"match_probability\": 0.9999999999999469, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 301.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6237.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.046038543897216275, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9539614561027837, \"precision\": 1.0, \"recall\": 0.046038543897216275, \"specificity\": 1.0, \"npv\": 0.9995009281679839, \"accuracy\": 0.9995009401880376, \"f1\": 0.08802456499488229, \"f2\": 
0.056893358031225194, \"f0_5\": 0.19439421338155516, \"p4\": 0.1618029406968327, \"phi\": 0.21451239441293396}, {\"truth_threshold\": 44.200000658631325, \"match_probability\": 0.9999999999999505, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 272.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6266.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.041602936677883146, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9583970633221168, \"precision\": 1.0, \"recall\": 0.041602936677883146, \"specificity\": 1.0, \"npv\": 0.99949860881149, \"accuracy\": 0.9994986197239448, \"f1\": 0.07988252569750368, \"f2\": 0.05146836209506509, \"f0_5\": 0.17833726724364019, \"p4\": 0.1479439507096688, \"phi\": 0.20391683925565518}, {\"truth_threshold\": 44.30000066012144, \"match_probability\": 0.9999999999999538, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 262.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6276.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.04007341694707862, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9599265830529213, \"precision\": 1.0, \"recall\": 0.04007341694707862, \"specificity\": 1.0, \"npv\": 0.9994978090358846, \"accuracy\": 0.9994978195639128, \"f1\": 0.07705882352941176, \"f2\": 0.049594911789202695, \"f0_5\": 0.17268652781439495, \"p4\": 0.1430886351436212, \"phi\": 0.20013318675118968}, {\"truth_threshold\": 44.40000066161156, \"match_probability\": 0.9999999999999569, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 245.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6293.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.03747323340471092, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9625267665952891, \"precision\": 1.0, \"recall\": 0.03747323340471092, \"specificity\": 1.0, \"npv\": 0.9994964494202927, \"accuracy\": 0.9994964592918584, 
\"f1\": 0.07223942208462332, \"f2\": 0.046406788650225406, \"f0_5\": 0.16294227188081936, \"p4\": 0.1347426603104239, \"phi\": 0.19353129911284755}, {\"truth_threshold\": 44.50000066310167, \"match_probability\": 0.9999999999999598, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 237.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6301.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.0362496176200673, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9637503823799327, \"precision\": 1.0, \"recall\": 0.0362496176200673, \"specificity\": 1.0, \"npv\": 0.9994958096024705, \"accuracy\": 0.9994958191638328, \"f1\": 0.06996309963099631, \"f2\": 0.04490507408389859, \"f0_5\": 0.1582954849051563, \"p4\": 0.13077450206420163, \"phi\": 0.1903453201708651}, {\"truth_threshold\": 44.60000066459179, \"match_probability\": 0.9999999999999625, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 217.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6321.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.033190578158458245, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9668094218415417, \"precision\": 1.0, \"recall\": 0.033190578158458245, \"specificity\": 1.0, \"npv\": 0.999494210061499, \"accuracy\": 0.9994942188437688, \"f1\": 0.06424870466321243, \"f2\": 0.041146801168038226, \"f0_5\": 0.14650283553875237, \"p4\": 0.12073817519884868, \"phi\": 0.1821367362669395}, {\"truth_threshold\": 44.700000666081905, \"match_probability\": 0.999999999999965, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 202.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6336.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.030896298562251453, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9691037014377486, \"precision\": 1.0, \"recall\": 0.030896298562251453, \"specificity\": 1.0, \"npv\": 0.9994930104091301, 
\"accuracy\": 0.9994930186037208, \"f1\": 0.0599406528189911, \"f2\": 0.03832435303938681, \"f0_5\": 0.13748979036210182, \"p4\": 0.11310028153838621, \"phi\": 0.17572886632674775}, {\"truth_threshold\": 44.80000066757202, \"match_probability\": 0.9999999999999674, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 194.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6344.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.02967268277760783, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9703273172223922, \"precision\": 1.0, \"recall\": 0.02967268277760783, \"specificity\": 1.0, \"npv\": 0.9994923705957108, \"accuracy\": 0.9994923784756952, \"f1\": 0.05763517528223411, \"f2\": 0.03681773324223791, \"f0_5\": 0.13262236806125238, \"p4\": 0.10898725582478543, \"phi\": 0.17221387879995553}, {\"truth_threshold\": 44.90000066906214, \"match_probability\": 0.9999999999999696, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 185.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6353.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.028296115019883758, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9717038849801163, \"precision\": 1.0, \"recall\": 0.028296115019883758, \"specificity\": 1.0, \"npv\": 0.9994916508065933, \"accuracy\": 0.9994916583316663, \"f1\": 0.055034954633348204, \"f2\": 0.03512169191631545, \"f0_5\": 0.12709535586699644, \"p4\": 0.10432682696109778, \"phi\": 0.168171729825904}, {\"truth_threshold\": 45.000000670552254, \"match_probability\": 0.9999999999999716, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 180.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6358.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.027531355154481494, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9724686448455185, \"precision\": 1.0, \"recall\": 0.027531355154481494, 
\"specificity\": 1.0, \"npv\": 0.9994912509241981, \"accuracy\": 0.9994912582516503, \"f1\": 0.05358737719559393, \"f2\": 0.03417894576940605, \"f0_5\": 0.12400110223201984, \"p4\": 0.10172233400009487, \"phi\": 0.1658835392767802}, {\"truth_threshold\": 45.10000067204237, \"match_probability\": 0.9999999999999735, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 169.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6369.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.02584888345059651, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9741511165494035, \"precision\": 1.0, \"recall\": 0.02584888345059651, \"specificity\": 1.0, \"npv\": 0.9994903711840553, \"accuracy\": 0.9994903780756151, \"f1\": 0.05039510958699866, \"f2\": 0.03210364347859124, \"f0_5\": 0.1171333518159135, \"p4\": 0.09595340405698327, \"phi\": 0.1607349063356497}, {\"truth_threshold\": 45.200000673532486, \"match_probability\": 0.9999999999999752, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 160.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6378.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.024472315692872438, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9755276843071276, \"precision\": 1.0, \"recall\": 0.024472315692872438, \"specificity\": 1.0, \"npv\": 0.9994896513978174, \"accuracy\": 0.9994896579315863, \"f1\": 0.0477754553598089, \"f2\": 0.030404378230465188, \"f0_5\": 0.11145165784341042, \"p4\": 0.09119301079533873, \"phi\": 0.15639637553590047}, {\"truth_threshold\": 45.3000006750226, \"match_probability\": 0.9999999999999769, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 150.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6388.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.02294279596206791, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9770572040379321, \"precision\": 1.0, 
\"recall\": 0.02294279596206791, \"specificity\": 1.0, \"npv\": 0.999488851636547, \"accuracy\": 0.9994888577715543, \"f1\": 0.04485645933014354, \"f2\": 0.028514941829518668, \"f0_5\": 0.10507144858503782, \"p4\": 0.08586053427484126, \"phi\": 0.151430078879524}, {\"truth_threshold\": 45.40000067651272, \"match_probability\": 0.9999999999999785, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 141.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6397.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.021566228204343837, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9784337717956562, \"precision\": 1.0, \"recall\": 0.021566228204343837, \"specificity\": 1.0, \"npv\": 0.9994881318524977, \"accuracy\": 0.9994881376275255, \"f1\": 0.042221889504416826, \"f2\": 0.026813220248735406, \"f0_5\": 0.09926781188397635, \"p4\": 0.08102200105893484, \"phi\": 0.14681685577298087}, {\"truth_threshold\": 45.500000678002834, \"match_probability\": 0.9999999999999799, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 133.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6405.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.020342612419700215, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9796573875802997, \"precision\": 1.0, \"recall\": 0.020342612419700215, \"specificity\": 1.0, \"npv\": 0.9994874920453244, \"accuracy\": 0.9994874974994999, \"f1\": 0.03987408184679958, \"f2\": 0.02529960053262317, \"f0_5\": 0.09405940594059406, \"p4\": 0.07668945796355989, \"phi\": 0.14259097681486102}, {\"truth_threshold\": 45.60000067949295, \"match_probability\": 0.9999999999999812, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 122.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6416.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.018660140715815236, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 
0.9813398592841848, \"precision\": 1.0, \"recall\": 0.018660140715815236, \"specificity\": 1.0, \"npv\": 0.9994866123117985, \"accuracy\": 0.9994866173234647, \"f1\": 0.03663663663663664, \"f2\": 0.023216868386998553, \"f0_5\": 0.08682038144036436, \"p4\": 0.07068302007754218, \"phi\": 0.13656705616403844}, {\"truth_threshold\": 45.70000068098307, \"match_probability\": 0.9999999999999825, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 111.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6427.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.016977669011930253, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9830223309880698, \"precision\": 1.0, \"recall\": 0.016977669011930253, \"specificity\": 1.0, \"npv\": 0.9994857325798213, \"accuracy\": 0.9994857371474295, \"f1\": 0.03338847947059708, \"f2\": 0.021132391577504475, \"f0_5\": 0.0794901174448582, \"p4\": 0.06461887780795754, \"phi\": 0.13026487611742024}, {\"truth_threshold\": 45.80000068247318, \"match_probability\": 0.9999999999999837, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 105.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6433.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.016059957173447537, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9839400428265525, \"precision\": 1.0, \"recall\": 0.016059957173447537, \"specificity\": 1.0, \"npv\": 0.9994852527266682, \"accuracy\": 0.9994852570514103, \"f1\": 0.03161222339304531, \"f2\": 0.01999466808850973, \"f0_5\": 0.07545271629778671, \"p4\": 0.06128654397351574, \"phi\": 0.1266952657137696}, {\"truth_threshold\": 45.9000006839633, \"match_probability\": 0.9999999999999848, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 101.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6437.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.015448149281125726, 
\"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9845518507188743, \"precision\": 1.0, \"recall\": 0.015448149281125726, \"specificity\": 1.0, \"npv\": 0.9994849328248222, \"accuracy\": 0.9994849369873975, \"f1\": 0.030426269016418137, \"f2\": 0.019235896849883822, \"f0_5\": 0.07274560645347163, \"p4\": 0.059055244300191816, \"phi\": 0.12425857091771889}, {\"truth_threshold\": 46.000000685453415, \"match_probability\": 0.9999999999999858, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 91.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6447.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.013918629550321198, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9860813704496788, \"precision\": 1.0, \"recall\": 0.013918629550321198, \"specificity\": 1.0, \"npv\": 0.999484133071103, \"accuracy\": 0.9994841368273655, \"f1\": 0.027455121436114043, \"f2\": 0.017337956788476928, \"f0_5\": 0.06592292089249494, \"p4\": 0.05344259138187696, \"phi\": 0.11794680745844975}, {\"truth_threshold\": 46.20000068843365, \"match_probability\": 0.9999999999999877, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 87.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6451.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.013306821657999388, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9866931783420007, \"precision\": 1.0, \"recall\": 0.013306821657999388, \"specificity\": 1.0, \"npv\": 0.9994838131699737, \"accuracy\": 0.9994838167633526, \"f1\": 0.026264150943396226, \"f2\": 0.016578375700293458, \"f0_5\": 0.06317165262852163, \"p4\": 0.05118365939757443, \"phi\": 0.11532542153363246}, {\"truth_threshold\": 46.30000068992376, \"match_probability\": 0.9999999999999885, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 66.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6472.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 
0.9994768553710742, \"tp_rate\": 0.01009483022330988, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9899051697766901, \"precision\": 1.0, \"recall\": 0.01009483022330988, \"specificity\": 1.0, \"npv\": 0.9994821336924044, \"accuracy\": 0.9994821364272855, \"f1\": 0.019987886129618413, \"f2\": 0.012586772446410863, \"f0_5\": 0.04851514260511614, \"p4\": 0.039192200081474594, \"phi\": 0.10044701315049806}, {\"truth_threshold\": 46.40000069141388, \"match_probability\": 0.9999999999999892, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 63.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6475.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.009635974304068522, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9903640256959315, \"precision\": 1.0, \"recall\": 0.009635974304068522, \"specificity\": 1.0, \"npv\": 0.9994818937674981, \"accuracy\": 0.9994818963792759, \"f1\": 0.019088016967126194, \"f2\": 0.012016021361815754, \"f0_5\": 0.04639175257731959, \"p4\": 0.03746079628657963, \"phi\": 0.09813756592521213}, {\"truth_threshold\": 46.500000692903996, \"match_probability\": 0.9999999999999899, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 59.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6479.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.009024166411746712, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9909758335882533, \"precision\": 1.0, \"recall\": 0.009024166411746712, \"specificity\": 1.0, \"npv\": 0.9994815738678022, \"accuracy\": 0.999481576315263, \"f1\": 0.01788691829619524, \"f2\": 0.011254816680019838, \"f0_5\": 0.043548863300856215, \"p4\": 0.03514503714909533, \"phi\": 0.09497098529581316}, {\"truth_threshold\": 46.60000069439411, \"match_probability\": 0.9999999999999907, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 54.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6484.0, 
\"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.008259406546344448, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9917405934536555, \"precision\": 1.0, \"recall\": 0.008259406546344448, \"specificity\": 1.0, \"npv\": 0.9994811739934704, \"accuracy\": 0.999481176235247, \"f1\": 0.016383495145631068, \"f2\": 0.010302984049454324, \"f0_5\": 0.03997631033461652, \"p4\": 0.03223867109150013, \"phi\": 0.09085769836084173}, {\"truth_threshold\": 46.70000069588423, \"match_probability\": 0.9999999999999912, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 49.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6489.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.007494646680942184, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9925053533190579, \"precision\": 1.0, \"recall\": 0.007494646680942184, \"specificity\": 1.0, \"npv\": 0.9994807741194585, \"accuracy\": 0.999480776155231, \"f1\": 0.01487778958554729, \"f2\": 0.009350788137857334, \"f0_5\": 0.036382536382536385, \"p4\": 0.02931926008524652, \"phi\": 0.08654914942632264}, {\"truth_threshold\": 46.800000697374344, \"match_probability\": 0.9999999999999918, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 48.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6490.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.007341694707861731, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9926583052921383, \"precision\": 1.0, \"recall\": 0.007341694707861731, \"specificity\": 1.0, \"npv\": 0.9994806941446944, \"accuracy\": 0.9994806961392279, \"f1\": 0.014576374126935925, \"f2\": 0.00916030534351145, \"f0_5\": 0.03566121842496285, \"p4\": 0.02873380475747879, \"phi\": 0.08566143894899311}, {\"truth_threshold\": 46.90000069886446, \"match_probability\": 0.9999999999999923, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 44.0, \"tn\": 
12490962.0, \"fp\": 0.0, \"fn\": 6494.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.00672988681553992, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9932701131844601, \"precision\": 1.0, \"recall\": 0.00672988681553992, \"specificity\": 1.0, \"npv\": 0.9994803742457665, \"accuracy\": 0.999480376075215, \"f1\": 0.013369796414463689, \"f2\": 0.008398228737211789, \"f0_5\": 0.03276735180220435, \"p4\": 0.0263867161008682, \"phi\": 0.08201457061417496}, {\"truth_threshold\": 47.000000700354576, \"match_probability\": 0.9999999999999929, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 42.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6496.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.006423982869379015, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9935760171306209, \"precision\": 1.0, \"recall\": 0.006423982869379015, \"specificity\": 1.0, \"npv\": 0.9994802142963793, \"accuracy\": 0.9994802160432087, \"f1\": 0.01276595744680851, \"f2\": 0.008017103153393906, \"f0_5\": 0.031315240083507306, \"p4\": 0.02521000140369941, \"phi\": 0.08012891971643701}, {\"truth_threshold\": 47.20000070333481, \"match_probability\": 0.9999999999999938, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 41.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6497.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.006271030896298563, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9937289691037015, \"precision\": 1.0, \"recall\": 0.006271030896298563, \"specificity\": 1.0, \"npv\": 0.9994801343217049, \"accuracy\": 0.9994801360272054, \"f1\": 0.012463900288797689, \"f2\": 0.00782651853548658, \"f0_5\": 0.030587884213667562, \"p4\": 0.024620848963539155, \"phi\": 0.07916925414937322}, {\"truth_threshold\": 47.300000704824924, \"match_probability\": 0.9999999999999942, \"total_clerical_labels\": 12497500.0, \"p\": 
6538.0, \"n\": 12490962.0, \"tp\": 38.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6500.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.005812174977057204, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9941878250229428, \"precision\": 1.0, \"recall\": 0.005812174977057204, \"specificity\": 1.0, \"npv\": 0.9994798943977585, \"accuracy\": 0.9994798959791958, \"f1\": 0.011557177615571776, \"f2\": 0.007254677357770141, \"f0_5\": 0.028400597907324365, \"p4\": 0.022850202668865307, \"phi\": 0.07621779341000649}, {\"truth_threshold\": 47.40000070631504, \"match_probability\": 0.9999999999999946, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 36.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6502.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.005506271030896299, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9944937289691037, \"precision\": 1.0, \"recall\": 0.005506271030896299, \"specificity\": 1.0, \"npv\": 0.9994797344485249, \"accuracy\": 0.9994797359471894, \"f1\": 0.010952236081533314, \"f2\": 0.006873377119291278, \"f0_5\": 0.02693804250224484, \"p4\": 0.021667107128103904, \"phi\": 0.07418494663853198}, {\"truth_threshold\": 47.50000070780516, \"match_probability\": 0.999999999999995, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 32.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6506.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.004894463138574488, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9951055368614256, \"precision\": 1.0, \"recall\": 0.004894463138574488, \"specificity\": 1.0, \"npv\": 0.9994794145502113, \"accuracy\": 0.9994794158831767, \"f1\": 0.009741248097412482, \"f2\": 0.006110601894286587, \"f0_5\": 0.024002400240024, \"p4\": 0.019294494786059904, \"phi\": 0.06994222724706455}, {\"truth_threshold\": 47.70000071078539, \"match_probability\": 0.9999999999999957, 
\"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 29.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6509.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.004435607219333129, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9955643927806669, \"precision\": 1.0, \"recall\": 0.004435607219333129, \"specificity\": 1.0, \"npv\": 0.9994791746266104, \"accuracy\": 0.999479175835167, \"f1\": 0.008832038982792752, \"f2\": 0.0055383675184293955, \"f0_5\": 0.02179140366696724, \"p4\": 0.01750939402285685, \"phi\": 0.06658300866247267}, {\"truth_threshold\": 47.90000071376562, \"match_probability\": 0.9999999999999962, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 27.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6511.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.004129703273172224, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9958702967268278, \"precision\": 1.0, \"recall\": 0.004129703273172224, \"specificity\": 1.0, \"npv\": 0.9994790146776072, \"accuracy\": 0.9994790158031607, \"f1\": 0.008225437928408226, \"f2\": 0.005156805072768249, \"f0_5\": 0.020312970207643697, \"p4\": 0.01631662945472141, \"phi\": 0.0642460252341035}, {\"truth_threshold\": 48.00000071525574, \"match_probability\": 0.9999999999999964, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 24.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6514.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.0036708473539308656, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9963291526460691, \"precision\": 1.0, \"recall\": 0.0036708473539308656, \"specificity\": 1.0, \"npv\": 0.9994787747541983, \"accuracy\": 0.999478775755151, \"f1\": 0.00731484303565986, \"f2\": 0.004584352078239609, \"f0_5\": 0.01808863430810974, \"p4\": 0.014523421819345864, \"phi\": 0.06057172620634575}, {\"truth_threshold\": 
48.20000071823597, \"match_probability\": 0.9999999999999969, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 22.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6516.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.00336494340776996, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9966350565922301, \"precision\": 1.0, \"recall\": 0.00336494340776996, \"specificity\": 1.0, \"npv\": 0.9994786148053231, \"accuracy\": 0.9994786157231447, \"f1\": 0.006707317073170732, \"f2\": 0.004202643845037059, \"f0_5\": 0.016601267733172352, \"p4\": 0.013325234263099556, \"phi\": 0.05799300799317297}, {\"truth_threshold\": 48.300000719726086, \"match_probability\": 0.9999999999999971, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 21.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6517.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.0032119914346895075, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9967880085653105, \"precision\": 1.0, \"recall\": 0.0032119914346895075, \"specificity\": 1.0, \"npv\": 0.9994785348309047, \"accuracy\": 0.9994785357071414, \"f1\": 0.0064034151547491995, \"f2\": 0.004011767852366943, \"f0_5\": 0.015856236786469344, \"p4\": 0.012725323522984729, \"phi\": 0.05665965489687423}, {\"truth_threshold\": 48.600000724196434, \"match_probability\": 0.9999999999999977, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 19.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6519.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.002906087488528602, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9970939125114714, \"precision\": 1.0, \"recall\": 0.002906087488528602, \"specificity\": 1.0, \"npv\": 0.9994783748821062, \"accuracy\": 0.999478375675135, \"f1\": 0.0057953332316608205, \"f2\": 0.003629972106530129, \"f0_5\": 0.014363471424251588, \"p4\": 0.011523864401635763, 
\"phi\": 0.053894077599489436}, {\"truth_threshold\": 48.70000072568655, \"match_probability\": 0.9999999999999978, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 17.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6521.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.0026001835423676966, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9973998164576323, \"precision\": 1.0, \"recall\": 0.0026001835423676966, \"specificity\": 1.0, \"npv\": 0.999478214933359, \"accuracy\": 0.9994782156431287, \"f1\": 0.005186880244088482, \"f2\": 0.003248118002216363, \"f0_5\": 0.012867090523766273, \"p4\": 0.010320216786813441, \"phi\": 0.0509786897185948}, {\"truth_threshold\": 48.800000727176666, \"match_probability\": 0.999999999999998, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 16.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6522.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.002447231569287244, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9975527684307127, \"precision\": 1.0, \"recall\": 0.002447231569287244, \"specificity\": 1.0, \"npv\": 0.9994781349590045, \"accuracy\": 0.9994781356271254, \"f1\": 0.004882514494964907, \"f2\": 0.003057169061449098, \"f0_5\": 0.012117540139351712, \"p4\": 0.009717570424759955, \"phi\": 0.04945659151906864}, {\"truth_threshold\": 48.90000072866678, \"match_probability\": 0.9999999999999981, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 15.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6523.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.002294279596206791, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9977057204037932, \"precision\": 1.0, \"recall\": 0.002294279596206791, \"specificity\": 1.0, \"npv\": 0.9994780549846629, \"accuracy\": 0.9994780556111222, \"f1\": 0.0045780558522813975, \"f2\": 0.0028662055260442543, \"f0_5\": 
0.011367080933616247, \"p4\": 0.009114374693103295, \"phi\": 0.0478861369125529}, {\"truth_threshold\": 49.20000073313713, \"match_probability\": 0.9999999999999984, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 14.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6524.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.0021413276231263384, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9978586723768736, \"precision\": 1.0, \"recall\": 0.0021413276231263384, \"specificity\": 1.0, \"npv\": 0.9994779750103341, \"accuracy\": 0.999477975595119, \"f1\": 0.004273504273504274, \"f2\": 0.002675227394328518, \"f0_5\": 0.010615711252653927, \"p4\": 0.008510628840252391, \"phi\": 0.0462624015437591}, {\"truth_threshold\": 49.30000073462725, \"match_probability\": 0.9999999999999986, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 11.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6527.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.00168247170388498, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.998317528296115, \"precision\": 1.0, \"recall\": 0.00168247170388498, \"specificity\": 1.0, \"npv\": 0.9994777350874243, \"accuracy\": 0.9994777355471094, \"f1\": 0.003359291494884715, \"f2\": 0.0021022054045789856, \"f0_5\": 0.008356122759039806, \"p4\": 0.006696083018030355, \"phi\": 0.04100723116655939}, {\"truth_threshold\": 49.40000073611736, \"match_probability\": 0.9999999999999987, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 10.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6528.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.0015295197308045274, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9984704802691955, \"precision\": 1.0, \"recall\": 0.0015295197308045274, \"specificity\": 1.0, \"npv\": 0.9994776551131467, \"accuracy\": 0.9994776555311062, \"f1\": 
0.0030543677458766036, \"f2\": 0.0019111688708814312, \"f0_5\": 0.007601094557616297, \"p4\": 0.006090129137012272, \"phi\": 0.0390988592415917}, {\"truth_threshold\": 49.600000739097595, \"match_probability\": 0.9999999999999988, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 9.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6529.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.0013765677577240747, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9986234322422759, \"precision\": 1.0, \"recall\": 0.0013765677577240747, \"specificity\": 1.0, \"npv\": 0.9994775751388819, \"accuracy\": 0.999477575515103, \"f1\": 0.0027493508477165114, \"f2\": 0.0017201177325025802, \"f0_5\": 0.006845147550958321, \"p4\": 0.005483621356207485, \"phi\": 0.037092433251330735}, {\"truth_threshold\": 49.80000074207783, \"match_probability\": 0.999999999999999, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 7.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6531.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.0010706638115631692, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9989293361884368, \"precision\": 1.0, \"recall\": 0.0010706638115631692, \"specificity\": 1.0, \"npv\": 0.9994774151903906, \"accuracy\": 0.9994774154830967, \"f1\": 0.0021390374331550803, \"f2\": 0.001337971635001338, \"f0_5\": 0.005330490405117271, \"p4\": 0.004268941054375471, \"phi\": 0.03271244868424019}, {\"truth_threshold\": 50.30000074952841, \"match_probability\": 0.9999999999999993, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 5.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6533.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.0007647598654022637, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9992352401345977, \"precision\": 1.0, \"recall\": 0.0007647598654022637, \"specificity\": 1.0, \"npv\": 
0.9994772552419505, \"accuracy\": 0.9994772554510902, \"f1\": 0.0015283509093687911, \"f2\": 0.0009557670986733952, \"f0_5\": 0.003812137846904544, \"p4\": 0.003052036016885189, \"phi\": 0.027647062975865232}, {\"truth_threshold\": 50.50000075250864, \"match_probability\": 0.9999999999999993, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 4.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6534.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.000611807892321811, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9993881921076782, \"precision\": 1.0, \"recall\": 0.000611807892321811, \"specificity\": 1.0, \"npv\": 0.9994771752677496, \"accuracy\": 0.999477175435087, \"f1\": 0.0012228676245796392, \"f2\": 0.000764642911760208, \"f0_5\": 0.003051571559353067, \"p4\": 0.0024427473112691466, \"phi\": 0.024728283887571315}, {\"truth_threshold\": 50.600000753998756, \"match_probability\": 0.9999999999999994, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 3.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6535.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.0004588559192413582, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9995411440807587, \"precision\": 1.0, \"recall\": 0.0004588559192413582, \"specificity\": 1.0, \"npv\": 0.9994770952935615, \"accuracy\": 0.9994770954190838, \"f1\": 0.0009172909341079345, \"f2\": 0.0005735041101127892, \"f0_5\": 0.0022900763358778627, \"p4\": 0.0018329001257368872, \"phi\": 0.021415321181845713}, {\"truth_threshold\": 51.20000076293945, \"match_probability\": 0.9999999999999997, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 2.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6536.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.0003059039461609055, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9996940960538391, \"precision\": 1.0, 
\"recall\": 0.0003059039461609055, \"specificity\": 1.0, \"npv\": 0.9994770153193864, \"accuracy\": 0.9994770154030806, \"f1\": 0.0006116207951070336, \"f2\": 0.00038235069205475264, \"f0_5\": 0.0015276504735716467, \"p4\": 0.001222493692029335, \"phi\": 0.017485535824884636}, {\"truth_threshold\": 51.60000076889992, \"match_probability\": 0.9999999999999997, \"total_clerical_labels\": 12497500.0, \"p\": 6538.0, \"n\": 12490962.0, \"tp\": 1.0, \"tn\": 12490962.0, \"fp\": 0.0, \"fn\": 6537.0, \"P_rate\": 0.0005231446289257852, \"N_rate\": 0.9994768553710742, \"tp_rate\": 0.00015295197308045274, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9998470480269196, \"precision\": 1.0, \"recall\": 0.00015295197308045274, \"specificity\": 1.0, \"npv\": 0.9994769353452239, \"accuracy\": 0.9994769353870774, \"f1\": 0.0003058571647040832, \"f2\": 0.0001911826559094559, \"f0_5\": 0.0007642922653622745, \"p4\": 0.0006115272404776511, \"phi\": 0.012364140459791619}]}}, {\"mode\": \"vega-lite\"});\n",
              "</script>"
            ],
            "text/plain": [
              "alt.LayerChart(...)"
            ]
          },
          "execution_count": 45,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "# Accuracy analysis against the ground-truth `cluster` labels, with scores\n",
        "# rounded to the nearest 0.1 match weight before metrics are computed\n",
        "accuracy_chart = linker.evaluation.accuracy_analysis_from_labels_column(\n",
        "    \"cluster\", match_weight_round_to_nearest=0.1, output_type=\"accuracy\"\n",
        ")\n",
        "accuracy_chart"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 46,
      "metadata": {
        "execution": {
          "iopub.execute_input": "2024-06-07T09:11:47.319625Z",
          "iopub.status.busy": "2024-06-07T09:11:47.319347Z",
          "iopub.status.idle": "2024-06-07T09:11:47.588558Z",
          "shell.execute_reply": "2024-06-07T09:11:47.587940Z"
        }
      },
      "outputs": [
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\n",
            " -- WARNING --\n",
            "You have called predict(), but there are some parameter estimates which have neither been estimated or specified in your settings dictionary.  To produce predictions the following untrained trained parameters will use default values.\n",
            "Comparison: 'date_of_birth':\n",
            "    m values not fully trained\n",
            "Comparison: 'date_of_birth':\n",
            "    u values not fully trained\n"
          ]
        },
        {
          "data": {
            "text/html": [
              "<div>\n",
              "<style scoped>\n",
              "    .dataframe tbody tr th:only-of-type {\n",
              "        vertical-align: middle;\n",
              "    }\n",
              "\n",
              "    .dataframe tbody tr th {\n",
              "        vertical-align: top;\n",
              "    }\n",
              "\n",
              "    .dataframe thead th {\n",
              "        text-align: right;\n",
              "    }\n",
              "</style>\n",
              "<table border=\"1\" class=\"dataframe\">\n",
              "  <thead>\n",
              "    <tr style=\"text-align: right;\">\n",
              "      <th></th>\n",
              "      <th>clerical_match_score</th>\n",
              "      <th>found_by_blocking_rules</th>\n",
              "      <th>match_weight</th>\n",
              "      <th>match_probability</th>\n",
              "      <th>rec_id_l</th>\n",
              "      <th>rec_id_r</th>\n",
              "      <th>given_name_l</th>\n",
              "      <th>given_name_r</th>\n",
              "      <th>gamma_given_name</th>\n",
              "      <th>tf_given_name_l</th>\n",
              "      <th>...</th>\n",
              "      <th>postcode_l</th>\n",
              "      <th>postcode_r</th>\n",
              "      <th>gamma_postcode</th>\n",
              "      <th>tf_postcode_l</th>\n",
              "      <th>tf_postcode_r</th>\n",
              "      <th>bf_postcode</th>\n",
              "      <th>bf_tf_adj_postcode</th>\n",
              "      <th>cluster_l</th>\n",
              "      <th>cluster_r</th>\n",
              "      <th>match_key</th>\n",
              "    </tr>\n",
              "  </thead>\n",
              "  <tbody>\n",
              "    <tr>\n",
              "      <th>0</th>\n",
              "      <td>1.0</td>\n",
              "      <td>False</td>\n",
              "      <td>-27.805731</td>\n",
              "      <td>4.262268e-09</td>\n",
              "      <td>rec-993-dup-1</td>\n",
              "      <td>rec-993-dup-3</td>\n",
              "      <td>westbrook</td>\n",
              "      <td>jake</td>\n",
              "      <td>0</td>\n",
              "      <td>0.0004</td>\n",
              "      <td>...</td>\n",
              "      <td>2704</td>\n",
              "      <td>2074</td>\n",
              "      <td>0</td>\n",
              "      <td>0.0002</td>\n",
              "      <td>0.0014</td>\n",
              "      <td>0.230173</td>\n",
              "      <td>1.0</td>\n",
              "      <td>rec-993</td>\n",
              "      <td>rec-993</td>\n",
              "      <td>5</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1</th>\n",
              "      <td>1.0</td>\n",
              "      <td>False</td>\n",
              "      <td>-27.805731</td>\n",
              "      <td>4.262268e-09</td>\n",
              "      <td>rec-829-dup-0</td>\n",
              "      <td>rec-829-dup-2</td>\n",
              "      <td>wilde</td>\n",
              "      <td>kyra</td>\n",
              "      <td>0</td>\n",
              "      <td>0.0002</td>\n",
              "      <td>...</td>\n",
              "      <td>3859</td>\n",
              "      <td>3595</td>\n",
              "      <td>0</td>\n",
              "      <td>0.0004</td>\n",
              "      <td>0.0006</td>\n",
              "      <td>0.230173</td>\n",
              "      <td>1.0</td>\n",
              "      <td>rec-829</td>\n",
              "      <td>rec-829</td>\n",
              "      <td>5</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>2</th>\n",
              "      <td>1.0</td>\n",
              "      <td>False</td>\n",
              "      <td>-19.717877</td>\n",
              "      <td>1.159651e-06</td>\n",
              "      <td>rec-829-dup-0</td>\n",
              "      <td>rec-829-dup-1</td>\n",
              "      <td>wilde</td>\n",
              "      <td>kyra</td>\n",
              "      <td>0</td>\n",
              "      <td>0.0002</td>\n",
              "      <td>...</td>\n",
              "      <td>3859</td>\n",
              "      <td>3889</td>\n",
              "      <td>0</td>\n",
              "      <td>0.0004</td>\n",
              "      <td>0.0002</td>\n",
              "      <td>0.230173</td>\n",
              "      <td>1.0</td>\n",
              "      <td>rec-829</td>\n",
              "      <td>rec-829</td>\n",
              "      <td>5</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>3</th>\n",
              "      <td>1.0</td>\n",
              "      <td>True</td>\n",
              "      <td>-15.453190</td>\n",
              "      <td>2.229034e-05</td>\n",
              "      <td>rec-721-dup-0</td>\n",
              "      <td>rec-721-dup-1</td>\n",
              "      <td>mikhaili</td>\n",
              "      <td>elly</td>\n",
              "      <td>0</td>\n",
              "      <td>0.0008</td>\n",
              "      <td>...</td>\n",
              "      <td>4806</td>\n",
              "      <td>4860</td>\n",
              "      <td>0</td>\n",
              "      <td>0.0008</td>\n",
              "      <td>0.0014</td>\n",
              "      <td>0.230173</td>\n",
              "      <td>1.0</td>\n",
              "      <td>rec-721</td>\n",
              "      <td>rec-721</td>\n",
              "      <td>2</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>4</th>\n",
              "      <td>1.0</td>\n",
              "      <td>True</td>\n",
              "      <td>-12.931781</td>\n",
              "      <td>1.279648e-04</td>\n",
              "      <td>rec-401-dup-1</td>\n",
              "      <td>rec-401-dup-3</td>\n",
              "      <td>whitbe</td>\n",
              "      <td>alexa-ose</td>\n",
              "      <td>0</td>\n",
              "      <td>0.0002</td>\n",
              "      <td>...</td>\n",
              "      <td>3040</td>\n",
              "      <td>3041</td>\n",
              "      <td>0</td>\n",
              "      <td>0.0020</td>\n",
              "      <td>0.0004</td>\n",
              "      <td>0.230173</td>\n",
              "      <td>1.0</td>\n",
              "      <td>rec-401</td>\n",
              "      <td>rec-401</td>\n",
              "      <td>0</td>\n",
              "    </tr>\n",
              "  </tbody>\n",
              "</table>\n",
              "<p>5 rows × 45 columns</p>\n",
              "</div>"
            ],
            "text/plain": [
              "   clerical_match_score  found_by_blocking_rules  match_weight  \\\n",
              "0                   1.0                    False    -27.805731   \n",
              "1                   1.0                    False    -27.805731   \n",
              "2                   1.0                    False    -19.717877   \n",
              "3                   1.0                     True    -15.453190   \n",
              "4                   1.0                     True    -12.931781   \n",
              "\n",
              "   match_probability       rec_id_l       rec_id_r given_name_l given_name_r  \\\n",
              "0       4.262268e-09  rec-993-dup-1  rec-993-dup-3    westbrook         jake   \n",
              "1       4.262268e-09  rec-829-dup-0  rec-829-dup-2        wilde         kyra   \n",
              "2       1.159651e-06  rec-829-dup-0  rec-829-dup-1        wilde         kyra   \n",
              "3       2.229034e-05  rec-721-dup-0  rec-721-dup-1     mikhaili         elly   \n",
              "4       1.279648e-04  rec-401-dup-1  rec-401-dup-3       whitbe    alexa-ose   \n",
              "\n",
              "   gamma_given_name  tf_given_name_l  ...  postcode_l  postcode_r  \\\n",
              "0                 0           0.0004  ...        2704        2074   \n",
              "1                 0           0.0002  ...        3859        3595   \n",
              "2                 0           0.0002  ...        3859        3889   \n",
              "3                 0           0.0008  ...        4806        4860   \n",
              "4                 0           0.0002  ...        3040        3041   \n",
              "\n",
              "   gamma_postcode tf_postcode_l tf_postcode_r  bf_postcode  \\\n",
              "0               0        0.0002        0.0014     0.230173   \n",
              "1               0        0.0004        0.0006     0.230173   \n",
              "2               0        0.0004        0.0002     0.230173   \n",
              "3               0        0.0008        0.0014     0.230173   \n",
              "4               0        0.0020        0.0004     0.230173   \n",
              "\n",
              "   bf_tf_adj_postcode  cluster_l  cluster_r  match_key  \n",
              "0                 1.0    rec-993    rec-993          5  \n",
              "1                 1.0    rec-829    rec-829          5  \n",
              "2                 1.0    rec-829    rec-829          5  \n",
              "3                 1.0    rec-721    rec-721          2  \n",
              "4                 1.0    rec-401    rec-401          0  \n",
              "\n",
              "[5 rows x 45 columns]"
            ]
          },
          "execution_count": 46,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "# Pull out the pairs the model got wrong relative to the ground-truth\n",
        "# `cluster` labels, so we can inspect them as a pandas DataFrame\n",
        "pred_errors_df = linker.evaluation.prediction_errors_from_labels_column(\n",
        "    \"cluster\"\n",
        ").as_pandas_dataframe()\n",
        "# A bare `len(pred_errors_df)` mid-cell is silently discarded (only the\n",
        "# last expression in a cell is displayed), so print it explicitly\n",
        "print(f\"Number of prediction errors: {len(pred_errors_df)}\")\n",
        "pred_errors_df.head()"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "The following chart suggests that, where the model makes errors, it is because the data is corrupted beyond recognition: no reasonable linkage model could have found these matches."
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 47,
      "metadata": {
        "execution": {
          "iopub.execute_input": "2024-06-07T09:11:47.591674Z",
          "iopub.status.busy": "2024-06-07T09:11:47.591437Z",
          "iopub.status.idle": "2024-06-07T09:11:48.630581Z",
          "shell.execute_reply": "2024-06-07T09:11:48.629955Z"
        }
      },
      "outputs": [
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\n",
            " -- WARNING --\n",
            "You have called predict(), but there are some parameter estimates which have neither been estimated or specified in your settings dictionary.  To produce predictions the following untrained trained parameters will use default values.\n",
            "Comparison: 'date_of_birth':\n",
            "    m values not fully trained\n",
            "Comparison: 'date_of_birth':\n",
            "    u values not fully trained\n"
          ]
        },
        {
          "data": {
            "text/html": [
              "\n",
              "<style>\n",
              "  #altair-viz-cf815c299d78435388bbbbcfeca1e42e.vega-embed {\n",
              "    width: 100%;\n",
              "    display: flex;\n",
              "  }\n",
              "\n",
              "  #altair-viz-cf815c299d78435388bbbbcfeca1e42e.vega-embed details,\n",
              "  #altair-viz-cf815c299d78435388bbbbcfeca1e42e.vega-embed details summary {\n",
              "    position: relative;\n",
              "  }\n",
              "</style>\n",
              "<div id=\"altair-viz-cf815c299d78435388bbbbcfeca1e42e\"></div>\n",
              "<script type=\"text/javascript\">\n",
              "  var VEGA_DEBUG = (typeof VEGA_DEBUG == \"undefined\") ? {} : VEGA_DEBUG;\n",
              "  (function(spec, embedOpt){\n",
              "    let outputDiv = document.currentScript.previousElementSibling;\n",
              "    if (outputDiv.id !== \"altair-viz-cf815c299d78435388bbbbcfeca1e42e\") {\n",
              "      outputDiv = document.getElementById(\"altair-viz-cf815c299d78435388bbbbcfeca1e42e\");\n",
              "    }\n",
              "    const paths = {\n",
              "      \"vega\": \"https://cdn.jsdelivr.net/npm/vega@5?noext\",\n",
              "      \"vega-lib\": \"https://cdn.jsdelivr.net/npm/vega-lib?noext\",\n",
              "      \"vega-lite\": \"https://cdn.jsdelivr.net/npm/vega-lite@5.17.0?noext\",\n",
              "      \"vega-embed\": \"https://cdn.jsdelivr.net/npm/vega-embed@6?noext\",\n",
              "    };\n",
              "\n",
              "    function maybeLoadScript(lib, version) {\n",
              "      var key = `${lib.replace(\"-\", \"\")}_version`;\n",
              "      return (VEGA_DEBUG[key] == version) ?\n",
              "        Promise.resolve(paths[lib]) :\n",
              "        new Promise(function(resolve, reject) {\n",
              "          var s = document.createElement('script');\n",
              "          document.getElementsByTagName(\"head\")[0].appendChild(s);\n",
              "          s.async = true;\n",
              "          s.onload = () => {\n",
              "            VEGA_DEBUG[key] = version;\n",
              "            return resolve(paths[lib]);\n",
              "          };\n",
              "          s.onerror = () => reject(`Error loading script: ${paths[lib]}`);\n",
              "          s.src = paths[lib];\n",
              "        });\n",
              "    }\n",
              "\n",
              "    function showError(err) {\n",
              "      outputDiv.innerHTML = `<div class=\"error\" style=\"color:red;\">${err}</div>`;\n",
              "      throw err;\n",
              "    }\n",
              "\n",
              "    function displayChart(vegaEmbed) {\n",
              "      vegaEmbed(outputDiv, spec, embedOpt)\n",
              "        .catch(err => showError(`Javascript Error: ${err.message}<br>This usually means there's a typo in your chart specification. See the javascript console for the full traceback.`));\n",
              "    }\n",
              "\n",
              "    if(typeof define === \"function\" && define.amd) {\n",
              "      requirejs.config({paths});\n",
              "      require([\"vega-embed\"], displayChart, err => showError(`Error loading script: ${err.message}`));\n",
              "    } else {\n",
              "      maybeLoadScript(\"vega\", \"5\")\n",
              "        .then(() => maybeLoadScript(\"vega-lite\", \"5.17.0\"))\n",
              "        .then(() => maybeLoadScript(\"vega-embed\", \"6\"))\n",
              "        .catch(showError)\n",
              "        .then(() => displayChart(vegaEmbed));\n",
              "    }\n",
              "  })({\"config\": {\"view\": {\"continuousWidth\": 400, \"continuousHeight\": 300}}, \"layer\": [{\"layer\": [{\"mark\": \"rule\", \"encoding\": {\"color\": {\"value\": \"black\"}, \"size\": {\"value\": 0.5}, \"y\": {\"field\": \"zero\", \"type\": \"quantitative\"}}}, {\"mark\": {\"type\": \"bar\", \"width\": 60}, \"encoding\": {\"color\": {\"condition\": {\"test\": \"(datum.log2_bayes_factor < 0)\", \"value\": \"red\"}, \"value\": \"green\"}, \"opacity\": {\"condition\": {\"test\": \"datum.column_name == 'Prior match weight' || datum.column_name == 'Final score'\", \"value\": 1}, \"value\": 0.5}, \"tooltip\": [{\"field\": \"column_name\", \"title\": \"Comparison column\", \"type\": \"nominal\"}, {\"field\": \"value_l\", \"title\": \"Value (L)\", \"type\": \"nominal\"}, {\"field\": \"value_r\", \"title\": \"Value (R)\", \"type\": \"nominal\"}, {\"field\": \"label_for_charts\", \"title\": \"Label\", \"type\": \"ordinal\"}, {\"field\": \"sql_condition\", \"title\": \"SQL condition\", \"type\": \"nominal\"}, {\"field\": \"comparison_vector_value\", \"title\": \"Comparison vector value\", \"type\": \"nominal\"}, {\"field\": \"bayes_factor\", \"format\": \",.4f\", \"title\": \"Bayes factor = m/u\", \"type\": \"quantitative\"}, {\"field\": \"log2_bayes_factor\", \"format\": \",.4f\", \"title\": \"Match weight = log2(m/u)\", \"type\": \"quantitative\"}, {\"field\": \"prob\", \"format\": \".4f\", \"title\": \"Cumulative match probability\", \"type\": \"quantitative\"}, {\"field\": \"bayes_factor_description\", \"title\": \"Match weight description\", \"type\": \"nominal\"}], \"x\": {\"axis\": {\"grid\": true, \"labelAlign\": \"center\", \"labelAngle\": -20, \"labelExpr\": \"datum.value == 'Prior' || datum.value == 'Final score' ? 
'' : datum.value\", \"labelPadding\": 10, \"tickBand\": \"extent\", \"title\": \"Column\"}, \"field\": \"column_name\", \"sort\": {\"field\": \"bar_sort_order\", \"order\": \"ascending\"}, \"type\": \"nominal\"}, \"y\": {\"axis\": {\"grid\": false, \"orient\": \"left\", \"title\": \"Match Weight\"}, \"field\": \"previous_sum\", \"type\": \"quantitative\"}, \"y2\": {\"field\": \"sum\"}}}, {\"mark\": {\"type\": \"text\", \"fontWeight\": \"bold\"}, \"encoding\": {\"color\": {\"value\": \"white\"}, \"text\": {\"condition\": {\"test\": \"abs(datum.log2_bayes_factor) > 1\", \"field\": \"log2_bayes_factor\", \"format\": \".2f\", \"type\": \"nominal\"}, \"value\": \"\"}, \"x\": {\"axis\": {\"labelAngle\": -20, \"title\": \"Column\"}, \"field\": \"column_name\", \"sort\": {\"field\": \"bar_sort_order\", \"order\": \"ascending\"}, \"type\": \"nominal\"}, \"y\": {\"axis\": {\"orient\": \"left\"}, \"field\": \"center\", \"type\": \"quantitative\"}}}, {\"mark\": {\"type\": \"text\", \"baseline\": \"bottom\", \"dy\": -25, \"fontWeight\": \"bold\"}, \"encoding\": {\"color\": {\"value\": \"black\"}, \"text\": {\"field\": \"column_name\", \"type\": \"nominal\"}, \"x\": {\"axis\": {\"labelAngle\": -20, \"title\": \"Column\"}, \"field\": \"column_name\", \"sort\": {\"field\": \"bar_sort_order\", \"order\": \"ascending\"}, \"type\": \"nominal\"}, \"y\": {\"field\": \"sum_top\", \"type\": \"quantitative\"}}}, {\"mark\": {\"type\": \"text\", \"baseline\": \"bottom\", \"dy\": -13, \"fontSize\": 8}, \"encoding\": {\"color\": {\"value\": \"grey\"}, \"text\": {\"field\": \"value_l\", \"type\": \"nominal\"}, \"x\": {\"axis\": {\"labelAngle\": -20, \"title\": \"Column\"}, \"field\": \"column_name\", \"sort\": {\"field\": \"bar_sort_order\", \"order\": \"ascending\"}, \"type\": \"nominal\"}, \"y\": {\"field\": \"sum_top\", \"type\": \"quantitative\"}}}, {\"mark\": {\"type\": \"text\", \"baseline\": \"bottom\", \"dy\": -5, \"fontSize\": 8}, \"encoding\": {\"color\": {\"value\": \"grey\"}, 
\"text\": {\"field\": \"value_r\", \"type\": \"nominal\"}, \"x\": {\"axis\": {\"labelAngle\": -20, \"title\": \"Column\"}, \"field\": \"column_name\", \"sort\": {\"field\": \"bar_sort_order\", \"order\": \"ascending\"}, \"type\": \"nominal\"}, \"y\": {\"field\": \"sum_top\", \"type\": \"quantitative\"}}}]}, {\"mark\": {\"type\": \"rule\", \"color\": \"black\", \"strokeWidth\": 2, \"x2Offset\": 30, \"xOffset\": -30}, \"encoding\": {\"x\": {\"axis\": {\"labelAngle\": -20, \"title\": \"Column\"}, \"field\": \"column_name\", \"sort\": {\"field\": \"bar_sort_order\", \"order\": \"ascending\"}, \"type\": \"nominal\"}, \"x2\": {\"field\": \"lead\"}, \"y\": {\"axis\": {\"labelExpr\": \"format(1 / (1 + pow(2, -1*datum.value)), '.2r')\", \"orient\": \"right\", \"title\": \"Probability\"}, \"field\": \"sum\", \"scale\": {\"zero\": false}, \"type\": \"quantitative\"}}}], \"data\": {\"name\": \"data-e2e437eb18e117736e5c931d02add6f8\"}, \"height\": 450, \"params\": [{\"name\": \"record_number\", \"bind\": {\"input\": \"range\", \"max\": 9, \"min\": 0, \"step\": 1}, \"value\": 0}], \"resolve\": {\"axis\": {\"y\": \"independent\"}}, \"title\": {\"text\": \"Match weights waterfall chart\", \"subtitle\": \"How each comparison contributes to the final match score\"}, \"transform\": [{\"filter\": \"(datum.record_number == record_number)\"}, {\"filter\": \"(datum.bayes_factor !== 1.0)\"}, {\"window\": [{\"op\": \"sum\", \"field\": \"log2_bayes_factor\", \"as\": \"sum\"}, {\"op\": \"lead\", \"field\": \"column_name\", \"as\": \"lead\"}], \"frame\": [null, 0]}, {\"calculate\": \"datum.column_name === \\\"Final score\\\" ? datum.sum - datum.log2_bayes_factor : datum.sum\", \"as\": \"sum\"}, {\"calculate\": \"datum.lead === null ? datum.column_name : datum.lead\", \"as\": \"lead\"}, {\"calculate\": \"datum.column_name === \\\"Final score\\\" || datum.column_name === \\\"Prior match weight\\\" ? 
0 : datum.sum - datum.log2_bayes_factor\", \"as\": \"previous_sum\"}, {\"calculate\": \"datum.sum > datum.previous_sum ? datum.column_name : \\\"\\\"\", \"as\": \"top_label\"}, {\"calculate\": \"datum.sum < datum.previous_sum ? datum.column_name : \\\"\\\"\", \"as\": \"bottom_label\"}, {\"calculate\": \"datum.sum > datum.previous_sum ? datum.sum : datum.previous_sum\", \"as\": \"sum_top\"}, {\"calculate\": \"datum.sum < datum.previous_sum ? datum.sum : datum.previous_sum\", \"as\": \"sum_bottom\"}, {\"calculate\": \"(datum.sum + datum.previous_sum) / 2\", \"as\": \"center\"}, {\"calculate\": \"(datum.log2_bayes_factor > 0 ? \\\"+\\\" : \\\"\\\") + datum.log2_bayes_factor\", \"as\": \"text_log2_bayes_factor\"}, {\"calculate\": \"datum.sum < datum.previous_sum ? 4 : -4\", \"as\": \"dy\"}, {\"calculate\": \"datum.sum < datum.previous_sum ? \\\"top\\\" : \\\"bottom\\\"\", \"as\": \"baseline\"}, {\"calculate\": \"1. / (1 + pow(2, -1.*datum.sum))\", \"as\": \"prob\"}, {\"calculate\": \"0*datum.sum\", \"as\": \"zero\"}], \"width\": {\"step\": 75}, \"$schema\": \"https://vega.github.io/schema/vega-lite/v5.9.3.json\", \"datasets\": {\"data-e2e437eb18e117736e5c931d02add6f8\": [{\"column_name\": \"Prior\", \"label_for_charts\": \"Starting match weight (prior)\", \"sql_condition\": null, \"log2_bayes_factor\": -10.886123785487664, \"bayes_factor\": 0.0005283846640354178, \"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": null, \"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 0, \"record_number\": 0}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.19446648964219015, \"u_probability\": 0.8793180275797036, \"bayes_factor\": 0.2211560363176601, \"log2_bayes_factor\": -2.1768634746417304, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.52 
times less likely to be a match\", \"column_name\": \"given_name\", \"value_l\": \" westbrook\", \"value_r\": \" jake\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 1, \"record_number\": 0}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.19446648964219015, \"u_probability\": 0.8793180275797036, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.52 times less likely to be a match\", \"column_name\": \"tf_given_name\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 2, \"record_number\": 0}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.1703762483746646, \"u_probability\": 0.9209724213754031, \"bayes_factor\": 0.18499603725399344, \"log2_bayes_factor\": -2.4344337273633045, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  5.41 times less likely to be a match\", \"column_name\": \"surname\", \"value_l\": \" jake\", \"value_r\": \" westbrook\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 3, \"record_number\": 0}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.1703762483746646, \"u_probability\": 0.9209724213754031, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  5.41 times less likely to be a match\", \"column_name\": \"tf_surname\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 4, \"record_number\": 0}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.0579791679989962, \"u_probability\": 
0.9983736321700071, \"bayes_factor\": 0.05807361706155645, \"log2_bayes_factor\": -4.105973296008658, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  17.22 times less likely to be a match\", \"column_name\": \"date_of_birth\", \"value_l\": \"19001115\", \"value_r\": \"19501111\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 5, \"record_number\": 0}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.06263928986863126, \"u_probability\": 0.9992469042694486, \"bayes_factor\": 0.0626864988032432, \"log2_bayes_factor\": -3.9957014358545835, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  15.95 times less likely to be a match\", \"column_name\": \"soc_sec_id\", \"value_l\": \"2330929\", \"value_r\": \"3733536\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 6, \"record_number\": 0}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.23177218624119303, \"u_probability\": 0.9850071371465009, \"bayes_factor\": 0.23530000697519957, \"log2_bayes_factor\": -2.0874267315581068, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.25 times less likely to be a match\", \"column_name\": \"street_number\", \"value_l\": \" 24\", \"value_r\": \" 15\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 7, \"record_number\": 0}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.23177218624119303, \"u_probability\": 0.9850071371465009, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.25 times less likely to be a match\", 
\"column_name\": \"tf_street_number\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 8, \"record_number\": 0}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.22987656328699965, \"u_probability\": 0.9987117531485983, \"bayes_factor\": 0.23017308303649886, \"log2_bayes_factor\": -2.119208963590452, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.34 times less likely to be a match\", \"column_name\": \"postcode\", \"value_l\": \"2704\", \"value_r\": \"2074\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 9, \"record_number\": 0}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.22987656328699965, \"u_probability\": 0.9987117531485983, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.34 times less likely to be a match\", \"column_name\": \"tf_postcode\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 10, \"record_number\": 0}, {\"column_name\": \"Final score\", \"label_for_charts\": \"Final score\", \"sql_condition\": null, \"log2_bayes_factor\": -27.8057314145045, \"bayes_factor\": 4.262268386159965e-09, \"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": null, \"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 11, \"record_number\": 0}, {\"column_name\": \"Prior\", \"label_for_charts\": \"Starting match weight (prior)\", \"sql_condition\": null, \"log2_bayes_factor\": -10.886123785487664, \"bayes_factor\": 0.0005283846640354178, \"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": null, 
\"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 0, \"record_number\": 1}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.19446648964219015, \"u_probability\": 0.8793180275797036, \"bayes_factor\": 0.2211560363176601, \"log2_bayes_factor\": -2.1768634746417304, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.52 times less likely to be a match\", \"column_name\": \"given_name\", \"value_l\": \" wilde\", \"value_r\": \" kyra\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 1, \"record_number\": 1}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.19446648964219015, \"u_probability\": 0.8793180275797036, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.52 times less likely to be a match\", \"column_name\": \"tf_given_name\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 2, \"record_number\": 1}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.1703762483746646, \"u_probability\": 0.9209724213754031, \"bayes_factor\": 0.18499603725399344, \"log2_bayes_factor\": -2.4344337273633045, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  5.41 times less likely to be a match\", \"column_name\": \"surname\", \"value_l\": \" kyra\", \"value_r\": \" wilde\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 3, \"record_number\": 1}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.1703762483746646, \"u_probability\": 
0.9209724213754031, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  5.41 times less likely to be a match\", \"column_name\": \"tf_surname\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 4, \"record_number\": 1}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.0579791679989962, \"u_probability\": 0.9983736321700071, \"bayes_factor\": 0.05807361706155645, \"log2_bayes_factor\": -4.105973296008658, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  17.22 times less likely to be a match\", \"column_name\": \"date_of_birth\", \"value_l\": \"19090815\", \"value_r\": \"19220601\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 5, \"record_number\": 1}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.06263928986863126, \"u_probability\": 0.9992469042694486, \"bayes_factor\": 0.0626864988032432, \"log2_bayes_factor\": -3.9957014358545835, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  15.95 times less likely to be a match\", \"column_name\": \"soc_sec_id\", \"value_l\": \"5230360\", \"value_r\": \"6073461\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 6, \"record_number\": 1}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.23177218624119303, \"u_probability\": 0.9850071371465009, \"bayes_factor\": 0.23530000697519957, \"log2_bayes_factor\": -2.0874267315581068, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.25 times less likely to be a match\", \"column_name\": 
\"street_number\", \"value_l\": \" 26\", \"value_r\": \" 62\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 7, \"record_number\": 1}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.23177218624119303, \"u_probability\": 0.9850071371465009, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.25 times less likely to be a match\", \"column_name\": \"tf_street_number\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 8, \"record_number\": 1}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.22987656328699965, \"u_probability\": 0.9987117531485983, \"bayes_factor\": 0.23017308303649886, \"log2_bayes_factor\": -2.119208963590452, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.34 times less likely to be a match\", \"column_name\": \"postcode\", \"value_l\": \"3859\", \"value_r\": \"3595\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 9, \"record_number\": 1}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.22987656328699965, \"u_probability\": 0.9987117531485983, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.34 times less likely to be a match\", \"column_name\": \"tf_postcode\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 10, \"record_number\": 1}, {\"column_name\": \"Final score\", \"label_for_charts\": \"Final score\", \"sql_condition\": null, \"log2_bayes_factor\": -27.8057314145045, \"bayes_factor\": 4.262268386159965e-09, 
\"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": null, \"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 11, \"record_number\": 1}, {\"column_name\": \"Prior\", \"label_for_charts\": \"Starting match weight (prior)\", \"sql_condition\": null, \"log2_bayes_factor\": -10.886123785487664, \"bayes_factor\": 0.0005283846640354178, \"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": null, \"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 0, \"record_number\": 2}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.19446648964219015, \"u_probability\": 0.8793180275797036, \"bayes_factor\": 0.2211560363176601, \"log2_bayes_factor\": -2.1768634746417304, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.52 times less likely to be a match\", \"column_name\": \"given_name\", \"value_l\": \" wilde\", \"value_r\": \" kyra\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 1, \"record_number\": 2}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.19446648964219015, \"u_probability\": 0.8793180275797036, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.52 times less likely to be a match\", \"column_name\": \"tf_given_name\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 2, \"record_number\": 2}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.1703762483746646, \"u_probability\": 0.9209724213754031, \"bayes_factor\": 0.18499603725399344, 
\"log2_bayes_factor\": -2.4344337273633045, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  5.41 times less likely to be a match\", \"column_name\": \"surname\", \"value_l\": \" kyra\", \"value_r\": \" everett\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 3, \"record_number\": 2}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.1703762483746646, \"u_probability\": 0.9209724213754031, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  5.41 times less likely to be a match\", \"column_name\": \"tf_surname\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 4, \"record_number\": 2}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.0579791679989962, \"u_probability\": 0.9983736321700071, \"bayes_factor\": 0.05807361706155645, \"log2_bayes_factor\": -4.105973296008658, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  17.22 times less likely to be a match\", \"column_name\": \"date_of_birth\", \"value_l\": \"19090815\", \"value_r\": \"19220601\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 5, \"record_number\": 2}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.06263928986863126, \"u_probability\": 0.9992469042694486, \"bayes_factor\": 0.0626864988032432, \"log2_bayes_factor\": -3.9957014358545835, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  15.95 times less likely to be a match\", \"column_name\": \"soc_sec_id\", \"value_l\": \"5230360\", \"value_r\": \"6073461\", 
\"term_frequency_adjustment\": false, \"bar_sort_order\": 6, \"record_number\": 2}, {\"sql_condition\": \"\\\"street_number_l\\\" = \\\"street_number_r\\\"\", \"label_for_charts\": \"Exact match on street_number\", \"m_probability\": 0.768227813758807, \"u_probability\": 0.014992862853499144, \"bayes_factor\": 51.239567870756076, \"log2_bayes_factor\": 5.679186403639793, \"comparison_vector_value\": 1, \"bayes_factor_description\": \"If comparison level is `exact match on street_number` then comparison is 51.24 times more likely to be a match\", \"column_name\": \"street_number\", \"value_l\": \" 26\", \"value_r\": \" 26\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 7, \"record_number\": 2}, {\"sql_condition\": \"\\\"street_number_l\\\" = \\\"street_number_r\\\"\", \"label_for_charts\": \"Term freq adjustment on street_number with weight {cl.tf_adjustment_weight}\", \"m_probability\": null, \"u_probability\": null, \"bayes_factor\": 1.2494052377915952, \"log2_bayes_factor\": 0.32124148313525763, \"comparison_vector_value\": 1, \"bayes_factor_description\": \"Term frequency adjustment on street_number makes comparison 1.25 times more likely to be a match\", \"column_name\": \"tf_street_number\", \"value_l\": \" 26\", \"value_r\": \" 26\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 8, \"record_number\": 2}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.22987656328699965, \"u_probability\": 0.9987117531485983, \"bayes_factor\": 0.23017308303649886, \"log2_bayes_factor\": -2.119208963590452, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.34 times less likely to be a match\", \"column_name\": \"postcode\", \"value_l\": \"3859\", \"value_r\": \"3889\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 9, \"record_number\": 2}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other 
comparisons\", \"m_probability\": 0.22987656328699965, \"u_probability\": 0.9987117531485983, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.34 times less likely to be a match\", \"column_name\": \"tf_postcode\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 10, \"record_number\": 2}, {\"column_name\": \"Final score\", \"label_for_charts\": \"Final score\", \"sql_condition\": null, \"log2_bayes_factor\": -19.717876796171343, \"bayes_factor\": 1.1596518723924452e-06, \"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": null, \"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 11, \"record_number\": 2}, {\"column_name\": \"Prior\", \"label_for_charts\": \"Starting match weight (prior)\", \"sql_condition\": null, \"log2_bayes_factor\": -10.886123785487664, \"bayes_factor\": 0.0005283846640354178, \"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": null, \"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 0, \"record_number\": 3}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.19446648964219015, \"u_probability\": 0.8793180275797036, \"bayes_factor\": 0.2211560363176601, \"log2_bayes_factor\": -2.1768634746417304, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.52 times less likely to be a match\", \"column_name\": \"given_name\", \"value_l\": \" mikhaili\", \"value_r\": \" elly\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 1, \"record_number\": 3}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", 
\"m_probability\": 0.19446648964219015, \"u_probability\": 0.8793180275797036, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.52 times less likely to be a match\", \"column_name\": \"tf_given_name\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 2, \"record_number\": 3}, {\"sql_condition\": \"\\\"surname_l\\\" = \\\"surname_r\\\"\", \"label_for_charts\": \"Exact match on surname\", \"m_probability\": 0.5618584392848496, \"u_probability\": 0.0027005710796134397, \"bayes_factor\": 208.05171303444237, \"log2_bayes_factor\": 7.70079835691789, \"comparison_vector_value\": 4, \"bayes_factor_description\": \"If comparison level is `exact match on surname` then comparison is 208.05 times more likely to be a match\", \"column_name\": \"surname\", \"value_l\": \" reid\", \"value_r\": \" reid\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 3, \"record_number\": 3}, {\"sql_condition\": \"\\\"surname_l\\\" = \\\"surname_r\\\"\", \"label_for_charts\": \"Term freq adjustment on surname with weight {cl.tf_adjustment_weight}\", \"m_probability\": null, \"u_probability\": null, \"bayes_factor\": 0.27005710796134397, \"log2_bayes_factor\": -1.8886635740530093, \"comparison_vector_value\": 4, \"bayes_factor_description\": \"Term frequency adjustment on surname makes comparison  3.70 times less likely to be a match\", \"column_name\": \"tf_surname\", \"value_l\": \" reid\", \"value_r\": \" reid\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 4, \"record_number\": 3}, {\"sql_condition\": \"try_strptime(\\\"date_of_birth_l\\\", '%Y%m%d') IS NULL OR try_strptime(\\\"date_of_birth_r\\\", '%Y%m%d') IS NULL\", \"label_for_charts\": \"transformed date_of_birth is NULL\", \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": -1, \"bayes_factor_description\": 
\"If comparison level is `transformed date_of_birth is null` then comparison is 1.00 times more likely to be a match\", \"column_name\": \"date_of_birth\", \"value_l\": \"19850523\", \"value_r\": \"\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 5, \"record_number\": 3}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.06263928986863126, \"u_probability\": 0.9992469042694486, \"bayes_factor\": 0.0626864988032432, \"log2_bayes_factor\": -3.9957014358545835, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  15.95 times less likely to be a match\", \"column_name\": \"soc_sec_id\", \"value_l\": \"2111602\", \"value_r\": \"6391700\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 6, \"record_number\": 3}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.23177218624119303, \"u_probability\": 0.9850071371465009, \"bayes_factor\": 0.23530000697519957, \"log2_bayes_factor\": -2.0874267315581068, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.25 times less likely to be a match\", \"column_name\": \"street_number\", \"value_l\": \" 46\", \"value_r\": \" 58\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 7, \"record_number\": 3}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.23177218624119303, \"u_probability\": 0.9850071371465009, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.25 times less likely to be a match\", \"column_name\": \"tf_street_number\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 8, \"record_number\": 3}, 
{\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.22987656328699965, \"u_probability\": 0.9987117531485983, \"bayes_factor\": 0.23017308303649886, \"log2_bayes_factor\": -2.119208963590452, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.34 times less likely to be a match\", \"column_name\": \"postcode\", \"value_l\": \"4806\", \"value_r\": \"4860\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 9, \"record_number\": 3}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.22987656328699965, \"u_probability\": 0.9987117531485983, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.34 times less likely to be a match\", \"column_name\": \"tf_postcode\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 10, \"record_number\": 3}, {\"column_name\": \"Final score\", \"label_for_charts\": \"Final score\", \"sql_condition\": null, \"log2_bayes_factor\": -15.453189608267657, \"bayes_factor\": 2.229083818523644e-05, \"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": null, \"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 11, \"record_number\": 3}, {\"column_name\": \"Prior\", \"label_for_charts\": \"Starting match weight (prior)\", \"sql_condition\": null, \"log2_bayes_factor\": -10.886123785487664, \"bayes_factor\": 0.0005283846640354178, \"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": null, \"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 0, \"record_number\": 4}, {\"sql_condition\": 
\"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.19446648964219015, \"u_probability\": 0.8793180275797036, \"bayes_factor\": 0.2211560363176601, \"log2_bayes_factor\": -2.1768634746417304, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.52 times less likely to be a match\", \"column_name\": \"given_name\", \"value_l\": \" whitbe\", \"value_r\": \" alexa-ose\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 1, \"record_number\": 4}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.19446648964219015, \"u_probability\": 0.8793180275797036, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.52 times less likely to be a match\", \"column_name\": \"tf_given_name\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 2, \"record_number\": 4}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.1703762483746646, \"u_probability\": 0.9209724213754031, \"bayes_factor\": 0.18499603725399344, \"log2_bayes_factor\": -2.4344337273633045, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  5.41 times less likely to be a match\", \"column_name\": \"surname\", \"value_l\": \" alexa-rose\", \"value_r\": \" white\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 3, \"record_number\": 4}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.1703762483746646, \"u_probability\": 0.9209724213754031, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` 
then comparison is  5.41 times less likely to be a match\", \"column_name\": \"tf_surname\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 4, \"record_number\": 4}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.0579791679989962, \"u_probability\": 0.9983736321700071, \"bayes_factor\": 0.05807361706155645, \"log2_bayes_factor\": -4.105973296008658, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  17.22 times less likely to be a match\", \"column_name\": \"date_of_birth\", \"value_l\": \"19300526\", \"value_r\": \"19160822\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 5, \"record_number\": 4}, {\"sql_condition\": \"\\\"soc_sec_id_l\\\" = \\\"soc_sec_id_r\\\"\", \"label_for_charts\": \"Exact match on soc_sec_id\", \"m_probability\": 0.8590418481224762, \"u_probability\": 0.0004563889599186916, \"bayes_factor\": 1882.2581691623734, \"log2_bayes_factor\": 10.878248805303441, \"comparison_vector_value\": 2, \"bayes_factor_description\": \"If comparison level is `exact match on soc_sec_id` then comparison is 1,882.26 times more likely to be a match\", \"column_name\": \"soc_sec_id\", \"value_l\": \"2502613\", \"value_r\": \"2502613\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 6, \"record_number\": 4}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.23177218624119303, \"u_probability\": 0.9850071371465009, \"bayes_factor\": 0.23530000697519957, \"log2_bayes_factor\": -2.0874267315581068, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.25 times less likely to be a match\", \"column_name\": \"street_number\", \"value_l\": \" 22\", \"value_r\": \" 43\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 7, 
\"record_number\": 4}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.23177218624119303, \"u_probability\": 0.9850071371465009, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.25 times less likely to be a match\", \"column_name\": \"tf_street_number\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 8, \"record_number\": 4}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.22987656328699965, \"u_probability\": 0.9987117531485983, \"bayes_factor\": 0.23017308303649886, \"log2_bayes_factor\": -2.119208963590452, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.34 times less likely to be a match\", \"column_name\": \"postcode\", \"value_l\": \"3040\", \"value_r\": \"3041\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 9, \"record_number\": 4}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.22987656328699965, \"u_probability\": 0.9987117531485983, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.34 times less likely to be a match\", \"column_name\": \"tf_postcode\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 10, \"record_number\": 4}, {\"column_name\": \"Final score\", \"label_for_charts\": \"Final score\", \"sql_condition\": null, \"log2_bayes_factor\": -12.931781173346474, \"bayes_factor\": 0.00012798113855733566, \"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": null, \"bayes_factor_description\": null, \"value_l\": \"\", 
\"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 11, \"record_number\": 4}, {\"column_name\": \"Prior\", \"label_for_charts\": \"Starting match weight (prior)\", \"sql_condition\": null, \"log2_bayes_factor\": -10.886123785487664, \"bayes_factor\": 0.0005283846640354178, \"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": null, \"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 0, \"record_number\": 5}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.19446648964219015, \"u_probability\": 0.8793180275797036, \"bayes_factor\": 0.2211560363176601, \"log2_bayes_factor\": -2.1768634746417304, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.52 times less likely to be a match\", \"column_name\": \"given_name\", \"value_l\": \" zarran\", \"value_r\": \" bradshaw\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 1, \"record_number\": 5}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.19446648964219015, \"u_probability\": 0.8793180275797036, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.52 times less likely to be a match\", \"column_name\": \"tf_given_name\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 2, \"record_number\": 5}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.1703762483746646, \"u_probability\": 0.9209724213754031, \"bayes_factor\": 0.18499603725399344, \"log2_bayes_factor\": -2.4344337273633045, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other 
comparisons` then comparison is  5.41 times less likely to be a match\", \"column_name\": \"surname\", \"value_l\": \" bradshaw\", \"value_r\": \" zarrna\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 3, \"record_number\": 5}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.1703762483746646, \"u_probability\": 0.9209724213754031, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  5.41 times less likely to be a match\", \"column_name\": \"tf_surname\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 4, \"record_number\": 5}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.0579791679989962, \"u_probability\": 0.9983736321700071, \"bayes_factor\": 0.05807361706155645, \"log2_bayes_factor\": -4.105973296008658, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  17.22 times less likely to be a match\", \"column_name\": \"date_of_birth\", \"value_l\": \"19350707\", \"value_r\": \"19550120\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 5, \"record_number\": 5}, {\"sql_condition\": \"\\\"soc_sec_id_l\\\" = \\\"soc_sec_id_r\\\"\", \"label_for_charts\": \"Exact match on soc_sec_id\", \"m_probability\": 0.8590418481224762, \"u_probability\": 0.0004563889599186916, \"bayes_factor\": 1882.2581691623734, \"log2_bayes_factor\": 10.878248805303441, \"comparison_vector_value\": 2, \"bayes_factor_description\": \"If comparison level is `exact match on soc_sec_id` then comparison is 1,882.26 times more likely to be a match\", \"column_name\": \"soc_sec_id\", \"value_l\": \"7831798\", \"value_r\": \"7831798\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 6, \"record_number\": 5}, 
{\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.23177218624119303, \"u_probability\": 0.9850071371465009, \"bayes_factor\": 0.23530000697519957, \"log2_bayes_factor\": -2.0874267315581068, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.25 times less likely to be a match\", \"column_name\": \"street_number\", \"value_l\": \" 89\", \"value_r\": \" 63\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 7, \"record_number\": 5}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.23177218624119303, \"u_probability\": 0.9850071371465009, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.25 times less likely to be a match\", \"column_name\": \"tf_street_number\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 8, \"record_number\": 5}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.22987656328699965, \"u_probability\": 0.9987117531485983, \"bayes_factor\": 0.23017308303649886, \"log2_bayes_factor\": -2.119208963590452, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.34 times less likely to be a match\", \"column_name\": \"postcode\", \"value_l\": \"6174\", \"value_r\": \"6147\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 9, \"record_number\": 5}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.22987656328699965, \"u_probability\": 0.9987117531485983, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other 
comparisons` then comparison is  4.34 times less likely to be a match\", \"column_name\": \"tf_postcode\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 10, \"record_number\": 5}, {\"column_name\": \"Final score\", \"label_for_charts\": \"Final score\", \"sql_condition\": null, \"log2_bayes_factor\": -12.931781173346474, \"bayes_factor\": 0.00012798113855733566, \"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": null, \"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 11, \"record_number\": 5}, {\"column_name\": \"Prior\", \"label_for_charts\": \"Starting match weight (prior)\", \"sql_condition\": null, \"log2_bayes_factor\": -10.886123785487664, \"bayes_factor\": 0.0005283846640354178, \"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": null, \"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 0, \"record_number\": 6}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.19446648964219015, \"u_probability\": 0.8793180275797036, \"bayes_factor\": 0.2211560363176601, \"log2_bayes_factor\": -2.1768634746417304, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.52 times less likely to be a match\", \"column_name\": \"given_name\", \"value_l\": \" amy\", \"value_r\": \" chandelr\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 1, \"record_number\": 6}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.19446648964219015, \"u_probability\": 0.8793180275797036, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then 
comparison is  4.52 times less likely to be a match\", \"column_name\": \"tf_given_name\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 2, \"record_number\": 6}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.1703762483746646, \"u_probability\": 0.9209724213754031, \"bayes_factor\": 0.18499603725399344, \"log2_bayes_factor\": -2.4344337273633045, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  5.41 times less likely to be a match\", \"column_name\": \"surname\", \"value_l\": \" chandler\", \"value_r\": \" ay\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 3, \"record_number\": 6}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.1703762483746646, \"u_probability\": 0.9209724213754031, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  5.41 times less likely to be a match\", \"column_name\": \"tf_surname\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 4, \"record_number\": 6}, {\"sql_condition\": \"\\\"date_of_birth_l\\\" = \\\"date_of_birth_r\\\"\", \"label_for_charts\": \"Exact match on date_of_birth\", \"m_probability\": 0.9298272546074744, \"u_probability\": 0.0005239066567033322, \"bayes_factor\": 1774.7956486340258, \"log2_bayes_factor\": 10.793437205800899, \"comparison_vector_value\": 5, \"bayes_factor_description\": \"If comparison level is `exact match on date_of_birth` then comparison is 1,774.80 times more likely to be a match\", \"column_name\": \"date_of_birth\", \"value_l\": \"19510715\", \"value_r\": \"19510715\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 5, \"record_number\": 6}, {\"sql_condition\": \"ELSE\", 
\"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.06263928986863126, \"u_probability\": 0.9992469042694486, \"bayes_factor\": 0.0626864988032432, \"log2_bayes_factor\": -3.9957014358545835, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  15.95 times less likely to be a match\", \"column_name\": \"soc_sec_id\", \"value_l\": \"8026179\", \"value_r\": \"1609739\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 6, \"record_number\": 6}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.23177218624119303, \"u_probability\": 0.9850071371465009, \"bayes_factor\": 0.23530000697519957, \"log2_bayes_factor\": -2.0874267315581068, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.25 times less likely to be a match\", \"column_name\": \"street_number\", \"value_l\": \" \", \"value_r\": \" 65\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 7, \"record_number\": 6}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.23177218624119303, \"u_probability\": 0.9850071371465009, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.25 times less likely to be a match\", \"column_name\": \"tf_street_number\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 8, \"record_number\": 6}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.22987656328699965, \"u_probability\": 0.9987117531485983, \"bayes_factor\": 0.23017308303649886, \"log2_bayes_factor\": -2.119208963590452, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all 
other comparisons` then comparison is  4.34 times less likely to be a match\", \"column_name\": \"postcode\", \"value_l\": \"2068\", \"value_r\": \"2086\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 9, \"record_number\": 6}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.22987656328699965, \"u_probability\": 0.9987117531485983, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.34 times less likely to be a match\", \"column_name\": \"tf_postcode\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 10, \"record_number\": 6}, {\"column_name\": \"Final score\", \"label_for_charts\": \"Final score\", \"sql_condition\": null, \"log2_bayes_factor\": -12.906320912694943, \"bayes_factor\": 0.00013025975938520842, \"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": null, \"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 11, \"record_number\": 6}, {\"column_name\": \"Prior\", \"label_for_charts\": \"Starting match weight (prior)\", \"sql_condition\": null, \"log2_bayes_factor\": -10.886123785487664, \"bayes_factor\": 0.0005283846640354178, \"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": null, \"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 0, \"record_number\": 7}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.19446648964219015, \"u_probability\": 0.8793180275797036, \"bayes_factor\": 0.2211560363176601, \"log2_bayes_factor\": -2.1768634746417304, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then 
comparison is  4.52 times less likely to be a match\", \"column_name\": \"given_name\", \"value_l\": \" spicer\", \"value_r\": \" anika\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 1, \"record_number\": 7}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.19446648964219015, \"u_probability\": 0.8793180275797036, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.52 times less likely to be a match\", \"column_name\": \"tf_given_name\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 2, \"record_number\": 7}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.1703762483746646, \"u_probability\": 0.9209724213754031, \"bayes_factor\": 0.18499603725399344, \"log2_bayes_factor\": -2.4344337273633045, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  5.41 times less likely to be a match\", \"column_name\": \"surname\", \"value_l\": \" anika\", \"value_r\": \" spicer\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 3, \"record_number\": 7}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.1703762483746646, \"u_probability\": 0.9209724213754031, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  5.41 times less likely to be a match\", \"column_name\": \"tf_surname\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 4, \"record_number\": 7}, {\"sql_condition\": \"\\\"date_of_birth_l\\\" = \\\"date_of_birth_r\\\"\", \"label_for_charts\": \"Exact match on date_of_birth\", 
\"m_probability\": 0.9298272546074744, \"u_probability\": 0.0005239066567033322, \"bayes_factor\": 1774.7956486340258, \"log2_bayes_factor\": 10.793437205800899, \"comparison_vector_value\": 5, \"bayes_factor_description\": \"If comparison level is `exact match on date_of_birth` then comparison is 1,774.80 times more likely to be a match\", \"column_name\": \"date_of_birth\", \"value_l\": \"19861012\", \"value_r\": \"19861012\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 5, \"record_number\": 7}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.06263928986863126, \"u_probability\": 0.9992469042694486, \"bayes_factor\": 0.0626864988032432, \"log2_bayes_factor\": -3.9957014358545835, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  15.95 times less likely to be a match\", \"column_name\": \"soc_sec_id\", \"value_l\": \"5087622\", \"value_r\": \"1434508\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 6, \"record_number\": 7}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.23177218624119303, \"u_probability\": 0.9850071371465009, \"bayes_factor\": 0.23530000697519957, \"log2_bayes_factor\": -2.0874267315581068, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.25 times less likely to be a match\", \"column_name\": \"street_number\", \"value_l\": \" 39\", \"value_r\": \" 3\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 7, \"record_number\": 7}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.23177218624119303, \"u_probability\": 0.9850071371465009, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then 
comparison is  4.25 times less likely to be a match\", \"column_name\": \"tf_street_number\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 8, \"record_number\": 7}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.22987656328699965, \"u_probability\": 0.9987117531485983, \"bayes_factor\": 0.23017308303649886, \"log2_bayes_factor\": -2.119208963590452, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.34 times less likely to be a match\", \"column_name\": \"postcode\", \"value_l\": \"2076\", \"value_r\": \"2067\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 9, \"record_number\": 7}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.22987656328699965, \"u_probability\": 0.9987117531485983, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.34 times less likely to be a match\", \"column_name\": \"tf_postcode\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 10, \"record_number\": 7}, {\"column_name\": \"Final score\", \"label_for_charts\": \"Final score\", \"sql_condition\": null, \"log2_bayes_factor\": -12.906320912694943, \"bayes_factor\": 0.00013025975938520842, \"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": null, \"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 11, \"record_number\": 7}, {\"column_name\": \"Prior\", \"label_for_charts\": \"Starting match weight (prior)\", \"sql_condition\": null, \"log2_bayes_factor\": -10.886123785487664, \"bayes_factor\": 0.0005283846640354178, \"comparison_vector_value\": null, 
\"m_probability\": null, \"u_probability\": null, \"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 0, \"record_number\": 8}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.19446648964219015, \"u_probability\": 0.8793180275797036, \"bayes_factor\": 0.2211560363176601, \"log2_bayes_factor\": -2.1768634746417304, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.52 times less likely to be a match\", \"column_name\": \"given_name\", \"value_l\": \" joel\", \"value_r\": \" ryan\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 1, \"record_number\": 8}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.19446648964219015, \"u_probability\": 0.8793180275797036, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.52 times less likely to be a match\", \"column_name\": \"tf_given_name\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 2, \"record_number\": 8}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.1703762483746646, \"u_probability\": 0.9209724213754031, \"bayes_factor\": 0.18499603725399344, \"log2_bayes_factor\": -2.4344337273633045, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  5.41 times less likely to be a match\", \"column_name\": \"surname\", \"value_l\": \" ryan\", \"value_r\": \" joel\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 3, \"record_number\": 8}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 
0.1703762483746646, \"u_probability\": 0.9209724213754031, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  5.41 times less likely to be a match\", \"column_name\": \"tf_surname\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 4, \"record_number\": 8}, {\"sql_condition\": \"\\\"date_of_birth_l\\\" = \\\"date_of_birth_r\\\"\", \"label_for_charts\": \"Exact match on date_of_birth\", \"m_probability\": 0.9298272546074744, \"u_probability\": 0.0005239066567033322, \"bayes_factor\": 1774.7956486340258, \"log2_bayes_factor\": 10.793437205800899, \"comparison_vector_value\": 5, \"bayes_factor_description\": \"If comparison level is `exact match on date_of_birth` then comparison is 1,774.80 times more likely to be a match\", \"column_name\": \"date_of_birth\", \"value_l\": \"19720615\", \"value_r\": \"19720615\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 5, \"record_number\": 8}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.06263928986863126, \"u_probability\": 0.9992469042694486, \"bayes_factor\": 0.0626864988032432, \"log2_bayes_factor\": -3.9957014358545835, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  15.95 times less likely to be a match\", \"column_name\": \"soc_sec_id\", \"value_l\": \"9499826\", \"value_r\": \"7563426\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 6, \"record_number\": 8}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.23177218624119303, \"u_probability\": 0.9850071371465009, \"bayes_factor\": 0.23530000697519957, \"log2_bayes_factor\": -2.0874267315581068, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all 
other comparisons` then comparison is  4.25 times less likely to be a match\", \"column_name\": \"street_number\", \"value_l\": \" 62\", \"value_r\": \" 26\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 7, \"record_number\": 8}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.23177218624119303, \"u_probability\": 0.9850071371465009, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.25 times less likely to be a match\", \"column_name\": \"tf_street_number\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 8, \"record_number\": 8}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.22987656328699965, \"u_probability\": 0.9987117531485983, \"bayes_factor\": 0.23017308303649886, \"log2_bayes_factor\": -2.119208963590452, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.34 times less likely to be a match\", \"column_name\": \"postcode\", \"value_l\": \"370\", \"value_r\": \"3070\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 9, \"record_number\": 8}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.22987656328699965, \"u_probability\": 0.9987117531485983, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.34 times less likely to be a match\", \"column_name\": \"tf_postcode\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 10, \"record_number\": 8}, {\"column_name\": \"Final score\", \"label_for_charts\": \"Final score\", \"sql_condition\": null, 
\"log2_bayes_factor\": -12.906320912694943, \"bayes_factor\": 0.00013025975938520842, \"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": null, \"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 11, \"record_number\": 8}, {\"column_name\": \"Prior\", \"label_for_charts\": \"Starting match weight (prior)\", \"sql_condition\": null, \"log2_bayes_factor\": -10.886123785487664, \"bayes_factor\": 0.0005283846640354178, \"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": null, \"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 0, \"record_number\": 9}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.19446648964219015, \"u_probability\": 0.8793180275797036, \"bayes_factor\": 0.2211560363176601, \"log2_bayes_factor\": -2.1768634746417304, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.52 times less likely to be a match\", \"column_name\": \"given_name\", \"value_l\": \" flynn\", \"value_r\": \" weller\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 1, \"record_number\": 9}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.19446648964219015, \"u_probability\": 0.8793180275797036, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.52 times less likely to be a match\", \"column_name\": \"tf_given_name\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 2, \"record_number\": 9}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 
0.1703762483746646, \"u_probability\": 0.9209724213754031, \"bayes_factor\": 0.18499603725399344, \"log2_bayes_factor\": -2.4344337273633045, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  5.41 times less likely to be a match\", \"column_name\": \"surname\", \"value_l\": \" welqer\", \"value_r\": \" ebony\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 3, \"record_number\": 9}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.1703762483746646, \"u_probability\": 0.9209724213754031, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  5.41 times less likely to be a match\", \"column_name\": \"tf_surname\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 4, \"record_number\": 9}, {\"sql_condition\": \"\\\"date_of_birth_l\\\" = \\\"date_of_birth_r\\\"\", \"label_for_charts\": \"Exact match on date_of_birth\", \"m_probability\": 0.9298272546074744, \"u_probability\": 0.0005239066567033322, \"bayes_factor\": 1774.7956486340258, \"log2_bayes_factor\": 10.793437205800899, \"comparison_vector_value\": 5, \"bayes_factor_description\": \"If comparison level is `exact match on date_of_birth` then comparison is 1,774.80 times more likely to be a match\", \"column_name\": \"date_of_birth\", \"value_l\": \"19700106\", \"value_r\": \"19700106\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 5, \"record_number\": 9}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.06263928986863126, \"u_probability\": 0.9992469042694486, \"bayes_factor\": 0.0626864988032432, \"log2_bayes_factor\": -3.9957014358545835, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other 
comparisons` then comparison is  15.95 times less likely to be a match\", \"column_name\": \"soc_sec_id\", \"value_l\": \"9438862\", \"value_r\": \"2182752\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 6, \"record_number\": 9}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.23177218624119303, \"u_probability\": 0.9850071371465009, \"bayes_factor\": 0.23530000697519957, \"log2_bayes_factor\": -2.0874267315581068, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.25 times less likely to be a match\", \"column_name\": \"street_number\", \"value_l\": \" \", \"value_r\": \" 23\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 7, \"record_number\": 9}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.23177218624119303, \"u_probability\": 0.9850071371465009, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.25 times less likely to be a match\", \"column_name\": \"tf_street_number\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 8, \"record_number\": 9}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other comparisons\", \"m_probability\": 0.22987656328699965, \"u_probability\": 0.9987117531485983, \"bayes_factor\": 0.23017308303649886, \"log2_bayes_factor\": -2.119208963590452, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.34 times less likely to be a match\", \"column_name\": \"postcode\", \"value_l\": \"2804\", \"value_r\": \"2870\", \"term_frequency_adjustment\": false, \"bar_sort_order\": 9, \"record_number\": 9}, {\"sql_condition\": \"ELSE\", \"label_for_charts\": \"All other 
comparisons\", \"m_probability\": 0.22987656328699965, \"u_probability\": 0.9987117531485983, \"bayes_factor\": 1.0, \"log2_bayes_factor\": 0.0, \"comparison_vector_value\": 0, \"bayes_factor_description\": \"If comparison level is `all other comparisons` then comparison is  4.34 times less likely to be a match\", \"column_name\": \"tf_postcode\", \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": true, \"bar_sort_order\": 10, \"record_number\": 9}, {\"column_name\": \"Final score\", \"label_for_charts\": \"Final score\", \"sql_condition\": null, \"log2_bayes_factor\": -12.906320912694943, \"bayes_factor\": 0.00013025975938520842, \"comparison_vector_value\": null, \"m_probability\": null, \"u_probability\": null, \"bayes_factor_description\": null, \"value_l\": \"\", \"value_r\": \"\", \"term_frequency_adjustment\": null, \"bar_sort_order\": 11, \"record_number\": 9}]}}, {\"mode\": \"vega-lite\"});\n",
              "</script>"
            ],
            "text/plain": [
              "alt.LayerChart(...)"
            ]
          },
          "execution_count": 47,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "records = linker.evaluation.prediction_errors_from_labels_column(\n",
        "    \"cluster\"\n",
        ").as_record_dict(limit=10)\n",
        "linker.visualisations.waterfall_chart(records)"
      ]
    }
  ],
  "metadata": {
    "kernelspec": {
      "display_name": ".venv",
      "language": "python",
      "name": "python3"
    },
    "language_info": {
      "codemirror_mode": {
        "name": "ipython",
        "version": 3
      },
      "file_extension": ".py",
      "mimetype": "text/x-python",
      "name": "python",
      "nbconvert_exporter": "python",
      "pygments_lexer": "ipython3",
      "version": "3.10.8"
    },
    "widgets": {
      "application/vnd.jupyter.widget-state+json": {
        "state": {
          "94aaeff2f888492ea321d4e4492526ff": {
            "model_module": "@jupyter-widgets/controls",
            "model_module_version": "2.0.0",
            "model_name": "FloatProgressModel",
            "state": {
              "_dom_classes": [],
              "_model_module": "@jupyter-widgets/controls",
              "_model_module_version": "2.0.0",
              "_model_name": "FloatProgressModel",
              "_view_count": null,
              "_view_module": "@jupyter-widgets/controls",
              "_view_module_version": "2.0.0",
              "_view_name": "ProgressView",
              "bar_style": "",
              "description": "",
              "description_allow_html": false,
              "layout": "IPY_MODEL_bdf3a462cd3d48bda4269ac1cc8ed9ef",
              "max": 100,
              "min": 0,
              "orientation": "horizontal",
              "style": "IPY_MODEL_e05a7090510949ac956ea05719a3b8c2",
              "tabbable": null,
              "tooltip": null,
              "value": 100
            }
          },
          "b179423ef9d24cb1ac973b4b55daa86c": {
            "model_module": "@jupyter-widgets/base",
            "model_module_version": "2.0.0",
            "model_name": "LayoutModel",
            "state": {
              "_model_module": "@jupyter-widgets/base",
              "_model_module_version": "2.0.0",
              "_model_name": "LayoutModel",
              "_view_count": null,
              "_view_module": "@jupyter-widgets/base",
              "_view_module_version": "2.0.0",
              "_view_name": "LayoutView",
              "align_content": null,
              "align_items": null,
              "align_self": null,
              "border_bottom": null,
              "border_left": null,
              "border_right": null,
              "border_top": null,
              "bottom": null,
              "display": null,
              "flex": null,
              "flex_flow": null,
              "grid_area": null,
              "grid_auto_columns": null,
              "grid_auto_flow": null,
              "grid_auto_rows": null,
              "grid_column": null,
              "grid_gap": null,
              "grid_row": null,
              "grid_template_areas": null,
              "grid_template_columns": null,
              "grid_template_rows": null,
              "height": null,
              "justify_content": null,
              "justify_items": null,
              "left": null,
              "margin": null,
              "max_height": null,
              "max_width": null,
              "min_height": null,
              "min_width": null,
              "object_fit": null,
              "object_position": null,
              "order": null,
              "overflow": null,
              "padding": null,
              "right": null,
              "top": null,
              "visibility": null,
              "width": "auto"
            }
          },
          "bdf3a462cd3d48bda4269ac1cc8ed9ef": {
            "model_module": "@jupyter-widgets/base",
            "model_module_version": "2.0.0",
            "model_name": "LayoutModel",
            "state": {
              "_model_module": "@jupyter-widgets/base",
              "_model_module_version": "2.0.0",
              "_model_name": "LayoutModel",
              "_view_count": null,
              "_view_module": "@jupyter-widgets/base",
              "_view_module_version": "2.0.0",
              "_view_name": "LayoutView",
              "align_content": null,
              "align_items": null,
              "align_self": null,
              "border_bottom": null,
              "border_left": null,
              "border_right": null,
              "border_top": null,
              "bottom": null,
              "display": null,
              "flex": null,
              "flex_flow": null,
              "grid_area": null,
              "grid_auto_columns": null,
              "grid_auto_flow": null,
              "grid_auto_rows": null,
              "grid_column": null,
              "grid_gap": null,
              "grid_row": null,
              "grid_template_areas": null,
              "grid_template_columns": null,
              "grid_template_rows": null,
              "height": null,
              "justify_content": null,
              "justify_items": null,
              "left": null,
              "margin": null,
              "max_height": null,
              "max_width": null,
              "min_height": null,
              "min_width": null,
              "object_fit": null,
              "object_position": null,
              "order": null,
              "overflow": null,
              "padding": null,
              "right": null,
              "top": null,
              "visibility": null,
              "width": "auto"
            }
          },
          "db3fd6bdb9884f5a88fd4cf5d39330d4": {
            "model_module": "@jupyter-widgets/controls",
            "model_module_version": "2.0.0",
            "model_name": "ProgressStyleModel",
            "state": {
              "_model_module": "@jupyter-widgets/controls",
              "_model_module_version": "2.0.0",
              "_model_name": "ProgressStyleModel",
              "_view_count": null,
              "_view_module": "@jupyter-widgets/base",
              "_view_module_version": "2.0.0",
              "_view_name": "StyleView",
              "bar_color": "black",
              "description_width": ""
            }
          },
          "e05a7090510949ac956ea05719a3b8c2": {
            "model_module": "@jupyter-widgets/controls",
            "model_module_version": "2.0.0",
            "model_name": "ProgressStyleModel",
            "state": {
              "_model_module": "@jupyter-widgets/controls",
              "_model_module_version": "2.0.0",
              "_model_name": "ProgressStyleModel",
              "_view_count": null,
              "_view_module": "@jupyter-widgets/base",
              "_view_module_version": "2.0.0",
              "_view_name": "StyleView",
              "bar_color": "black",
              "description_width": ""
            }
          },
          "e181cb7618b74e4bbf9f2e144b68b87e": {
            "model_module": "@jupyter-widgets/controls",
            "model_module_version": "2.0.0",
            "model_name": "FloatProgressModel",
            "state": {
              "_dom_classes": [],
              "_model_module": "@jupyter-widgets/controls",
              "_model_module_version": "2.0.0",
              "_model_name": "FloatProgressModel",
              "_view_count": null,
              "_view_module": "@jupyter-widgets/controls",
              "_view_module_version": "2.0.0",
              "_view_name": "ProgressView",
              "bar_style": "",
              "description": "",
              "description_allow_html": false,
              "layout": "IPY_MODEL_b179423ef9d24cb1ac973b4b55daa86c",
              "max": 100,
              "min": 0,
              "orientation": "horizontal",
              "style": "IPY_MODEL_db3fd6bdb9884f5a88fd4cf5d39330d4",
              "tabbable": null,
              "tooltip": null,
              "value": 100
            }
          }
        },
        "version_major": 2,
        "version_minor": 0
      }
    }
  },
  "nbformat": 4,
  "nbformat_minor": 4
}