{
  "cells": [
    {
      "attachments": {},
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "# `threshold_selection_tool_from_labels_table`\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 4,
      "metadata": {
        "tags": [
          "hide_input"
        ]
      },
      "outputs": [
        {
          "data": {
            "text/html": [
              "\n",
              "<style>\n",
              "  #altair-viz-880e5556d99448c9902aa8407067a62c.vega-embed {\n",
              "    width: 100%;\n",
              "    display: flex;\n",
              "  }\n",
              "\n",
              "  #altair-viz-880e5556d99448c9902aa8407067a62c.vega-embed details,\n",
              "  #altair-viz-880e5556d99448c9902aa8407067a62c.vega-embed details summary {\n",
              "    position: relative;\n",
              "  }\n",
              "</style>\n",
              "<div id=\"altair-viz-880e5556d99448c9902aa8407067a62c\"></div>\n",
              "<script type=\"text/javascript\">\n",
              "  var VEGA_DEBUG = (typeof VEGA_DEBUG == \"undefined\") ? {} : VEGA_DEBUG;\n",
              "  (function(spec, embedOpt){\n",
              "    let outputDiv = document.currentScript.previousElementSibling;\n",
              "    if (outputDiv.id !== \"altair-viz-880e5556d99448c9902aa8407067a62c\") {\n",
              "      outputDiv = document.getElementById(\"altair-viz-880e5556d99448c9902aa8407067a62c\");\n",
              "    }\n",
              "    const paths = {\n",
              "      \"vega\": \"https://cdn.jsdelivr.net/npm/vega@5?noext\",\n",
              "      \"vega-lib\": \"https://cdn.jsdelivr.net/npm/vega-lib?noext\",\n",
              "      \"vega-lite\": \"https://cdn.jsdelivr.net/npm/vega-lite@5.17.0?noext\",\n",
              "      \"vega-embed\": \"https://cdn.jsdelivr.net/npm/vega-embed@6?noext\",\n",
              "    };\n",
              "\n",
              "    function maybeLoadScript(lib, version) {\n",
              "      var key = `${lib.replace(\"-\", \"\")}_version`;\n",
              "      return (VEGA_DEBUG[key] == version) ?\n",
              "        Promise.resolve(paths[lib]) :\n",
              "        new Promise(function(resolve, reject) {\n",
              "          var s = document.createElement('script');\n",
              "          document.getElementsByTagName(\"head\")[0].appendChild(s);\n",
              "          s.async = true;\n",
              "          s.onload = () => {\n",
              "            VEGA_DEBUG[key] = version;\n",
              "            return resolve(paths[lib]);\n",
              "          };\n",
              "          s.onerror = () => reject(`Error loading script: ${paths[lib]}`);\n",
              "          s.src = paths[lib];\n",
              "        });\n",
              "    }\n",
              "\n",
              "    function showError(err) {\n",
              "      outputDiv.innerHTML = `<div class=\"error\" style=\"color:red;\">${err}</div>`;\n",
              "      throw err;\n",
              "    }\n",
              "\n",
              "    function displayChart(vegaEmbed) {\n",
              "      vegaEmbed(outputDiv, spec, embedOpt)\n",
              "        .catch(err => showError(`Javascript Error: ${err.message}<br>This usually means there's a typo in your chart specification. See the javascript console for the full traceback.`));\n",
              "    }\n",
              "\n",
              "    if(typeof define === \"function\" && define.amd) {\n",
              "      requirejs.config({paths});\n",
              "      require([\"vega-embed\"], displayChart, err => showError(`Error loading script: ${err.message}`));\n",
              "    } else {\n",
              "      maybeLoadScript(\"vega\", \"5\")\n",
              "        .then(() => maybeLoadScript(\"vega-lite\", \"5.17.0\"))\n",
              "        .then(() => maybeLoadScript(\"vega-embed\", \"6\"))\n",
              "        .catch(showError)\n",
              "        .then(() => displayChart(vegaEmbed));\n",
              "    }\n",
              "  })({\"config\": {\"view\": {\"continuousWidth\": 300, \"continuousHeight\": 300, \"discreteHeight\": {\"step\": 150}, \"discreteWidth\": {\"step\": 150}}, \"axis\": {\"gridWidth\": 0.5, \"labelFontSize\": 12, \"titleFontSize\": 16}, \"axisX\": {\"format\": \"+.0f\", \"grid\": false, \"offset\": 20, \"values\": {\"expr\": \"[-25,-20,-15,-10,-5,0,5,10,15,20,25]\"}}, \"axisY\": {\"title\": \"Match probability threshold\", \"titleFontSize\": 16}, \"concat\": {\"spacing\": 40}}, \"hconcat\": [{\"vconcat\": [{\"layer\": [{\"layer\": [{\"mark\": {\"type\": \"rule\"}, \"encoding\": {\"opacity\": {\"condition\": {\"param\": \"threshold\", \"value\": 0.3, \"empty\": false}, \"value\": 0}, \"x\": {\"axis\": {\"orient\": \"bottom\"}, \"field\": \"truth_threshold\", \"scale\": {\"nice\": false}, \"title\": null, \"type\": \"quantitative\"}}, \"params\": [{\"name\": \"threshold\", \"select\": {\"type\": \"point\", \"encodings\": [\"x\"], \"fields\": [\"truth_threshold\"], \"nearest\": true, \"on\": \"mouseover\", \"toggle\": false}, \"value\": null}]}, {\"mark\": {\"type\": \"rule\"}, \"encoding\": {\"opacity\": {\"condition\": {\"param\": \"threshold\", \"value\": 0.3, \"empty\": false}, \"value\": 0}, \"y\": {\"axis\": {\"orient\": \"right\"}, \"field\": \"match_probability\", \"title\": \" \", \"type\": \"quantitative\"}}, \"params\": [{\"name\": \"prob\", \"select\": {\"type\": \"point\", \"encodings\": [\"y\"], \"fields\": [\"match_probability\"], \"nearest\": true, \"on\": \"mouseover\", \"toggle\": false}}]}]}, {\"layer\": [{\"mark\": {\"type\": \"text\", \"fontSize\": 14, \"fontWeight\": \"bold\", \"xOffset\": 25, \"yOffset\": 10}, \"encoding\": {\"text\": {\"aggregate\": \"min\", \"field\": \"truth_threshold\", \"format\": \"+.2f\"}, \"y\": {\"axis\": {\"orient\": \"left\"}, \"field\": \"match_probability\", \"title\": \"Match probability threshold\", \"type\": \"quantitative\"}}, \"transform\": [{\"filter\": {\"param\": \"threshold\", \"empty\": 
false}}]}, {\"mark\": {\"type\": \"text\", \"fontSize\": 14, \"xOffset\": -25, \"yOffset\": -10}, \"encoding\": {\"text\": {\"aggregate\": \"min\", \"field\": \"match_probability\", \"format\": \".3f\"}}, \"transform\": [{\"filter\": {\"param\": \"threshold\", \"empty\": false}}]}, {\"mark\": {\"type\": \"line\", \"color\": \"red\", \"opacity\": 0.5}}, {\"mark\": {\"type\": \"line\", \"color\": \"green\", \"opacity\": 0.5, \"strokeWidth\": 3}, \"transform\": [{\"filter\": \"datum.truth_threshold >= threshold.truth_threshold\"}]}, {\"mark\": {\"type\": \"point\", \"color\": \"green\", \"size\": 100}, \"encoding\": {\"opacity\": {\"condition\": {\"param\": \"threshold\", \"value\": 1, \"empty\": false}, \"value\": 0}}}], \"encoding\": {\"x\": {\"field\": \"truth_threshold\", \"type\": \"quantitative\", \"title\": \"Match weight threshold\", \"axis\": {\"orient\": \"top\"}}, \"y\": {\"field\": \"match_probability\", \"type\": \"quantitative\", \"title\": \"Match probability threshold\", \"axis\": {\"orient\": \"left\", \"titlePadding\": 10}}}}, {\"mark\": {\"type\": \"text\", \"align\": \"left\", \"color\": \"red\", \"fontSize\": 12, \"text\": \"Non-match\", \"x\": 0, \"y\": \"height\", \"yOffset\": 10}, \"data\": {\"values\": [{}]}}, {\"mark\": {\"type\": \"text\", \"align\": \"right\", \"color\": \"green\", \"fontSize\": 12, \"fontWeight\": \"bold\", \"text\": \"Match\", \"x\": \"width\", \"y\": 0, \"yOffset\": -10}, \"data\": {\"values\": [{}]}}], \"description\": \"Match weight vs probability\"}, {\"hconcat\": [{\"layer\": [{\"mark\": {\"type\": \"rect\", \"opacity\": 0.5}, \"encoding\": {\"color\": {\"field\": \"count\", \"legend\": null, \"scale\": {\"scheme\": \"reds\", \"zero\": true}, \"type\": \"quantitative\"}}, \"transform\": [{\"filter\": \"datum.predicted == 0\"}]}, {\"mark\": {\"type\": \"rect\", \"opacity\": 0.5}, \"encoding\": {\"color\": {\"field\": \"count\", \"legend\": null, \"scale\": {\"scheme\": \"greens\", \"zero\": true}, \"type\": 
\"quantitative\"}}, \"transform\": [{\"filter\": \"datum.predicted == 1\"}]}, {\"mark\": {\"type\": \"text\", \"fontSize\": 14, \"yOffset\": -40}, \"encoding\": {\"color\": {\"condition\": [{\"test\": \"datum.predicted==1 && datum.actual==1\", \"value\": \"darkgreen\"}, {\"test\": \"datum.predicted==0 && datum.actual==0\", \"value\": \"darkred\"}], \"value\": \"black\"}, \"opacity\": {\"condition\": {\"test\": \"datum.predicted != datum.actual\", \"value\": 1}, \"value\": 0.5}, \"text\": {\"field\": \"confusion_label\", \"type\": \"nominal\"}}}, {\"mark\": {\"type\": \"text\", \"fontSize\": 28, \"fontWeight\": \"bold\", \"yOffset\": 10}, \"encoding\": {\"color\": {\"condition\": [{\"test\": \"datum.predicted==1 && datum.actual==1\", \"value\": \"darkgreen\"}, {\"test\": \"datum.predicted==0 && datum.actual==0\", \"value\": \"darkred\"}], \"value\": \"black\"}, \"text\": {\"field\": \"count\", \"format\": \",\", \"type\": \"nominal\"}}}], \"description\": \"Confusion matrix\", \"encoding\": {\"x\": {\"field\": \"actual\", \"type\": \"nominal\", \"title\": \"Actual\", \"axis\": {\"domain\": false, \"labelAngle\": 0, \"labelExpr\": \"datum.label == 1 ? 'Match' : 'Non-match'\", \"labelFontSize\": 18, \"labelPadding\": 10, \"orient\": \"top\", \"ticks\": false, \"titleAngle\": 0, \"titleFontSize\": 20}, \"sort\": \"-x\"}, \"y\": {\"field\": \"predicted\", \"type\": \"nominal\", \"title\": \"Predicted\", \"axis\": {\"domain\": false, \"labelExpr\": \"datum.label == 1 ? 
'Match' : 'Non-match'\", \"labelFontSize\": 18, \"labelPadding\": 10, \"ticks\": false, \"titleAngle\": 0, \"titleFontSize\": 20, \"titlePadding\": -30}, \"sort\": \"-y\"}}, \"resolve\": {\"scale\": {\"color\": \"independent\"}}, \"transform\": [{\"filter\": {\"or\": [{\"param\": \"threshold\", \"empty\": false}, {\"and\": [{\"param\": \"threshold\", \"empty\": true}, \"datum.truth_threshold == datum.median_threshold\"]}]}}]}], \"transform\": [{\"fold\": [\"tp\", \"tn\", \"fp\", \"fn\"], \"as\": [\"label\", \"count\"]}, {\"calculate\": \"datum.label === 'tp' ? 'True Positive (TP)' : datum.label === 'tn' ? 'True Negative (TN)' : datum.label === 'fp' ? 'False Positive (FP)' : 'False Negative (FN)'\", \"as\": \"confusion_label\"}, {\"calculate\": \"datum.label === 'tp' || datum.label === 'fp' ? 1 : 0\", \"as\": \"predicted\"}, {\"calculate\": \"datum.label === 'tp' || datum.label === 'fn' ? 1 : 0\", \"as\": \"actual\"}, {\"joinaggregate\": [{\"op\": \"median\", \"field\": \"truth_threshold\", \"as\": \"median_threshold\"}]}]}]}, {\"layer\": [{\"layer\": [{\"mark\": {\"type\": \"point\", \"size\": 100}, \"encoding\": {\"opacity\": {\"condition\": {\"param\": \"threshold\", \"value\": 1, \"empty\": false}, \"value\": 0}, \"tooltip\": [{\"field\": \"truth_threshold\", \"format\": \".3f\", \"title\": \"Match weight threshold\", \"type\": \"quantitative\"}, {\"field\": \"match_probability\", \"format\": \".3%\", \"title\": \"Match probability threshold\", \"type\": \"quantitative\"}, {\"field\": \"precision\", \"format\": \".4f\", \"title\": \"Precision\", \"type\": \"quantitative\"}, {\"field\": \"recall\", \"format\": \".4f\", \"title\": \"Recall (TPR)\", \"type\": \"quantitative\"}, {\"field\": \"fp_rate\", \"format\": \".4f\", \"title\": \"FPR\", \"type\": \"quantitative\"}], \"x\": {\"axis\": {\"orient\": \"top\"}, \"field\": \"truth_threshold\", \"title\": \"Match weight threshold\"}}, \"params\": [{\"name\": \"metric\", \"select\": {\"type\": \"point\", \"fields\": 
[\"metric\"]}, \"bind\": \"legend\", \"value\": [{\"metric\": \"precision\"}, {\"metric\": \"recall\"}]}, {\"name\": \"threshold\", \"select\": {\"type\": \"point\", \"encodings\": [\"x\"], \"fields\": [\"truth_threshold\"], \"nearest\": true, \"on\": \"mouseover\", \"toggle\": false}, \"value\": null}], \"transform\": [{\"filter\": {\"param\": \"metric\", \"empty\": true}}]}, {\"mark\": {\"type\": \"line\"}, \"encoding\": {\"opacity\": {\"condition\": {\"param\": \"metric\", \"value\": 1}, \"value\": 0.1}, \"x\": {\"axis\": {\"orient\": \"bottom\"}, \"field\": \"truth_threshold\", \"title\": null}}}], \"encoding\": {\"color\": {\"field\": \"metric\", \"type\": \"nominal\", \"sort\": [\"precision\", \"recall\", \"f1\"], \"title\": [\"Performance\", \"Metric\"], \"legend\": {\"fillColor\": \"whitesmoke\", \"labelExpr\": \"{'precision': 'Precision (PPV)', 'recall': 'Recall (TPR)', 'specificity': 'Specificity (TNR)', 'accuracy': 'Accuracy', 'npv': 'NPV', 'f1': 'F1', 'f2': 'F2', 'f0_5': 'F0.5', 'p4': 'P4', 'phi': '\\u03c6 (MCC)'}[datum.value]\", \"labelFontSize\": 14, \"legendX\": 800, \"legendY\": 160, \"orient\": \"none\", \"padding\": 10, \"titleFontSize\": 16, \"titlePadding\": 15}}, \"x\": {\"type\": \"quantitative\", \"field\": \"truth_threshold\"}, \"y\": {\"field\": \"value\", \"type\": \"quantitative\", \"axis\": {\"labelFontSize\": 12, \"title\": \"Performance metric score\", \"titleFontSize\": 18, \"titlePadding\": 10, \"values\": {\"expr\": \"[0.5,0.55,0.6,0.65,0.7,0.75,0.8,0.85,0.9,0.95,1.0]\"}}, \"scale\": {\"domain\": [0.5, 1]}}}}, {\"layer\": [{\"mark\": {\"type\": \"rule\", \"color\": \"gray\"}, \"encoding\": {\"x\": {\"field\": \"truth_threshold\", \"title\": null, \"type\": \"quantitative\"}}}, {\"layer\": [{\"mark\": {\"type\": \"rect\", \"fill\": \"whitesmoke\", \"x\": 200, \"x2\": 10, \"y2Offset\": 20, \"yOffset\": -20}, \"encoding\": {\"y2\": {\"field\": \"score_index\"}}}, {\"mark\": {\"type\": \"text\", \"align\": \"right\", \"baseline\": 
\"middle\", \"fontSize\": 16, \"x\": 200, \"xOffset\": -10}}], \"encoding\": {\"color\": {\"field\": \"metric\", \"sort\": [\"precision\", \"recall\", \"f1\"]}, \"text\": {\"field\": \"y_text\"}, \"y\": {\"field\": \"score_index\", \"type\": \"quantitative\"}}, \"transform\": [{\"filter\": {\"param\": \"metric\", \"empty\": true}}]}, {\"mark\": {\"type\": \"text\", \"fontSize\": 14, \"fontWeight\": \"bold\", \"xOffset\": 20, \"y\": 0, \"yOffset\": -10}, \"encoding\": {\"text\": {\"condition\": {\"param\": \"threshold\", \"aggregate\": \"min\", \"empty\": false, \"field\": \"truth_threshold\", \"format\": \"+.2f\", \"type\": \"nominal\"}, \"value\": \"\"}, \"x\": {\"field\": \"truth_threshold\", \"type\": \"quantitative\"}}}], \"transform\": [{\"filter\": {\"param\": \"threshold\", \"empty\": false}}]}], \"description\": \"Accuracy chart\", \"height\": 700, \"transform\": [{\"fold\": [\"precision\", \"recall\", \"f1\"], \"as\": [\"metric\", \"value\"]}, {\"calculate\": \"0.6375 - 0.025*indexof(['precision', 'recall', 'f1'], datum.metric)\", \"as\": \"score_index\"}, {\"calculate\": \"{'precision': 'Precision (PPV)', 'recall': 'Recall (TPR)', 'specificity': 'Specificity (TNR)', 'accuracy': 'Accuracy', 'npv': 'NPV', 'f1': 'F1', 'f2': 'F2', 'f0_5': 'F0.5', 'p4': 'P4', 'phi': '\\u03c6 (MCC)'}[datum.metric]\", \"as\": \"metric_text\"}, {\"calculate\": \"datum.metric_text + ' = ' + format(datum.value, ',.3g')\", \"as\": \"y_text\"}], \"width\": 500}], \"data\": {\"name\": \"data-d655f0cd53ee561d3cd788a27c7bab03\"}, \"title\": {\"text\": \"Match Threshold Selection Tool\", \"anchor\": \"middle\", \"baseline\": \"line-bottom\", \"fontSize\": 28, \"subtitle\": [\"Hover over either line graph to show Confusion Matrix (bottom left) and selected performance metrics (right).\", \"\", \"Click a legend value to show a specific evaluation metric. 
Shift + Click to show multiple metrics\"], \"subtitleFontSize\": 14, \"subtitleFontStyle\": \"italic\"}, \"$schema\": \"https://vega.github.io/schema/vega-lite/v5.14.1.json\", \"datasets\": {\"data-d655f0cd53ee561d3cd788a27c7bab03\": [{\"truth_threshold\": -23.800000354647636, \"match_probability\": 6.846773588489456e-08, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1055.0, \"fp\": 90.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9213973799126638, \"fp_rate\": 0.07860262008733625, \"fn_rate\": 0.2880354505169867, \"precision\": 0.94140625, \"recall\": 0.7119645494830132, \"specificity\": 0.9213973799126638, \"npv\": 0.6432926829268293, \"accuracy\": 0.7874685138539043, \"f1\": 0.8107653490328006, \"f2\": 0.7484472049689441, \"f0_5\": 0.8844036697247707, \"p4\": 0.7832976799979975, \"phi\": 0.6085442007563051}, {\"truth_threshold\": -22.70000033825636, \"match_probability\": 1.467637948991862e-07, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1077.0, \"fp\": 68.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9406113537117904, \"fp_rate\": 0.059388646288209605, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9550858652575958, \"recall\": 0.7119645494830132, \"specificity\": 0.9406113537117904, \"npv\": 0.648014440433213, \"accuracy\": 0.7943954659949622, \"f1\": 0.8157968970380818, \"f2\": 0.750155633948952, \"f0_5\": 0.8940274514653147, \"p4\": 0.7908413564901972, \"phi\": 0.6273505612520337}, {\"truth_threshold\": -21.700000323355198, \"match_probability\": 2.9352754975091214e-07, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1083.0, \"fp\": 62.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 
0.9458515283842794, \"fp_rate\": 0.05414847161572053, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9588859416445623, \"recall\": 0.7119645494830132, \"specificity\": 0.9458515283842794, \"npv\": 0.6492805755395683, \"accuracy\": 0.7962846347607053, \"f1\": 0.817179994348686, \"f2\": 0.7506229235880398, \"f0_5\": 0.896688577452561, \"p4\": 0.792886883910619, \"phi\": 0.6325043185815227}, {\"truth_threshold\": -21.600000321865082, \"match_probability\": 3.1459503204353755e-07, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1088.0, \"fp\": 57.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9502183406113537, \"fp_rate\": 0.04978165938864629, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9620758483033932, \"recall\": 0.7119645494830132, \"specificity\": 0.9502183406113537, \"npv\": 0.6503287507471608, \"accuracy\": 0.7978589420654912, \"f1\": 0.8183361629881154, \"f2\": 0.7510127765659084, \"f0_5\": 0.8989183140619172, \"p4\": 0.7945877557823284, \"phi\": 0.6368075433805553}, {\"truth_threshold\": -20.60000030696392, \"match_probability\": 6.29189872645777e-07, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1094.0, \"fp\": 51.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9554585152838428, \"fp_rate\": 0.0445414847161572, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9659318637274549, \"recall\": 0.7119645494830132, \"specificity\": 0.9554585152838428, \"npv\": 0.6515783204288267, \"accuracy\": 0.7997481108312342, \"f1\": 0.8197278911564626, \"f2\": 0.75148113501715, \"f0_5\": 0.9016086793864572, \"p4\": 0.7966244062798371, \"phi\": 0.6419817284271657}, {\"truth_threshold\": -19.000000283122063, \"match_probability\": 1.907344620533969e-06, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, 
\"tp\": 1446.0, \"tn\": 1096.0, \"fp\": 49.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9572052401746725, \"fp_rate\": 0.04279475982532751, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9672240802675586, \"recall\": 0.7119645494830132, \"specificity\": 0.9572052401746725, \"npv\": 0.6519928613920285, \"accuracy\": 0.8003778337531486, \"f1\": 0.8201928530913216, \"f2\": 0.7516373843434868, \"f0_5\": 0.9025090500561728, \"p4\": 0.7973022397990062, \"phi\": 0.6437089952787838}, {\"truth_threshold\": -17.900000266730785, \"match_probability\": 4.088473825324779e-06, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1097.0, \"fp\": 48.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9580786026200874, \"fp_rate\": 0.04192139737991266, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9678714859437751, \"recall\": 0.7119645494830132, \"specificity\": 0.9580786026200874, \"npv\": 0.6521997621878716, \"accuracy\": 0.8006926952141058, \"f1\": 0.8204255319148936, \"f2\": 0.751715533374922, \"f0_5\": 0.9029599100786811, \"p4\": 0.7976409617025867, \"phi\": 0.6445731096055997}, {\"truth_threshold\": -17.600000262260437, \"match_probability\": 5.03349696795731e-06, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1100.0, \"fp\": 45.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9606986899563319, \"fp_rate\": 0.039301310043668124, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9698189134808853, \"recall\": 0.7119645494830132, \"specificity\": 0.9606986899563319, \"npv\": 0.6528189910979229, \"accuracy\": 0.8016372795969773, \"f1\": 0.8211243611584327, \"f2\": 0.7519500780031201, \"f0_5\": 0.9043151969981238, \"p4\": 0.7986563533248476, \"phi\": 
0.6471673893914208}, {\"truth_threshold\": -17.50000026077032, \"match_probability\": 5.394766530610173e-06, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1108.0, \"fp\": 37.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9676855895196507, \"fp_rate\": 0.032314410480349345, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9750505731625084, \"recall\": 0.7119645494830132, \"specificity\": 0.9676855895196507, \"npv\": 0.6544595392793857, \"accuracy\": 0.8041561712846348, \"f1\": 0.8229937393284007, \"f2\": 0.7525762464869367, \"f0_5\": 0.9079492653522542, \"p4\": 0.8013584652743565, \"phi\": 0.6540998665530485}, {\"truth_threshold\": -16.900000251829624, \"match_probability\": 8.176914304005986e-06, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1110.0, \"fp\": 35.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9694323144104804, \"fp_rate\": 0.03056768558951965, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9763673193787981, \"recall\": 0.7119645494830132, \"specificity\": 0.9694323144104804, \"npv\": 0.6548672566371682, \"accuracy\": 0.8047858942065491, \"f1\": 0.8234624145785877, \"f2\": 0.7527329515877147, \"f0_5\": 0.9088623507228158, \"p4\": 0.8020327397013851, \"phi\": 0.6558363061606292}, {\"truth_threshold\": -16.50000024586916, \"match_probability\": 1.0789474965962542e-05, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1111.0, \"fp\": 34.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9703056768558952, \"fp_rate\": 0.029694323144104803, \"fn_rate\": 0.2880354505169867, \"precision\": 0.977027027027027, \"recall\": 0.7119645494830132, \"specificity\": 0.9703056768558952, \"npv\": 
0.6550707547169812, \"accuracy\": 0.8051007556675063, \"f1\": 0.8236969524352037, \"f2\": 0.7528113286130779, \"f0_5\": 0.90931958244246, \"p4\": 0.8023696912661274, \"phi\": 0.6567050301458078}, {\"truth_threshold\": -15.800000235438347, \"match_probability\": 1.7527435818536736e-05, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1120.0, \"fp\": 25.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9781659388646288, \"fp_rate\": 0.021834061135371178, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9830047586675731, \"recall\": 0.7119645494830132, \"specificity\": 0.9781659388646288, \"npv\": 0.656891495601173, \"accuracy\": 0.8079345088161209, \"f1\": 0.8258138206739006, \"f2\": 0.7535174570088587, \"f0_5\": 0.9134554643082754, \"p4\": 0.8053967630362511, \"phi\": 0.6645388735433893}, {\"truth_threshold\": -15.400000229477882, \"match_probability\": 2.312746079632102e-05, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1121.0, \"fp\": 24.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9790393013100437, \"fp_rate\": 0.02096069868995633, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9836734693877551, \"recall\": 0.7119645494830132, \"specificity\": 0.9790393013100437, \"npv\": 0.6570926143024619, \"accuracy\": 0.8082493702770781, \"f1\": 0.8260497000856898, \"f2\": 0.7535959974984365, \"f0_5\": 0.9139173302995829, \"p4\": 0.8057325018854461, \"phi\": 0.6654110243207023}, {\"truth_threshold\": -15.20000022649765, \"match_probability\": 2.6566384864664307e-05, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1126.0, \"fp\": 19.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9834061135371179, 
\"fp_rate\": 0.016593886462882096, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9870307167235495, \"recall\": 0.7119645494830132, \"specificity\": 0.9834061135371179, \"npv\": 0.6580946814728229, \"accuracy\": 0.809823677581864, \"f1\": 0.8272311212814645, \"f2\": 0.75398894566691, \"f0_5\": 0.9162336839437334, \"p4\": 0.8074094203617235, \"phi\": 0.6697770344487317}, {\"truth_threshold\": -14.300000213086605, \"match_probability\": 4.957348695121048e-05, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1126.0, \"fp\": 19.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.9834061135371179, \"fp_rate\": 0.016593886462882096, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9870218579234973, \"recall\": 0.7114721811915312, \"specificity\": 0.9834061135371179, \"npv\": 0.6577102803738317, \"accuracy\": 0.8095088161209067, \"f1\": 0.82689556509299, \"f2\": 0.7535460992907801, \"f0_5\": 0.9160644097882592, \"p4\": 0.8071048961802441, \"phi\": 0.6693357668739984}, {\"truth_threshold\": -13.200000196695328, \"match_probability\": 0.00010625707305470121, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1135.0, \"fp\": 10.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.9912663755458515, \"fp_rate\": 0.008733624454148471, \"fn_rate\": 0.28852781880846873, \"precision\": 0.993127147766323, \"recall\": 0.7114721811915312, \"specificity\": 0.9912663755458515, \"npv\": 0.6595002905287624, \"accuracy\": 0.8123425692695214, \"f1\": 0.8290304073436604, \"f2\": 0.7542540975049588, \"f0_5\": 0.9202649344032607, \"p4\": 0.8101156090778194, \"phi\": 0.6772196571827369}, {\"truth_threshold\": -12.800000190734863, \"match_probability\": 0.00014020228918616167, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, 
\"tn\": 1137.0, \"fp\": 8.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.9930131004366812, \"fp_rate\": 0.0069868995633187774, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9944941500344116, \"recall\": 0.7114721811915312, \"specificity\": 0.9930131004366812, \"npv\": 0.6598955310504934, \"accuracy\": 0.8129722921914357, \"f1\": 0.8295063145809415, \"f2\": 0.7544116111517176, \"f0_5\": 0.9212036210633686, \"p4\": 0.8107834022242488, \"phi\": 0.6789756245799222}, {\"truth_threshold\": -12.400000184774399, \"match_probability\": 0.00018498974370122882, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1138.0, \"fp\": 7.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.993886462882096, \"fp_rate\": 0.00611353711790393, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9951790633608816, \"recall\": 0.7114721811915312, \"specificity\": 0.993886462882096, \"npv\": 0.660092807424594, \"accuracy\": 0.8132871536523929, \"f1\": 0.8297444731553258, \"f2\": 0.7544903926482874, \"f0_5\": 0.9216736828677127, \"p4\": 0.8111171302195401, \"phi\": 0.6798541595642643}, {\"truth_threshold\": -11.700000174343586, \"match_probability\": 0.0003004820136373637, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1139.0, \"fp\": 6.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.994759825327511, \"fp_rate\": 0.005240174672489083, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9958649207443143, \"recall\": 0.7114721811915312, \"specificity\": 0.994759825327511, \"npv\": 0.6602898550724637, \"accuracy\": 0.8136020151133502, \"f1\": 0.8299827685238369, \"f2\": 0.7545691906005222, \"f0_5\": 0.9221442246330568, \"p4\": 0.8114507463687338, \"phi\": 0.680733063624895}, 
{\"truth_threshold\": -11.300000168383121, \"match_probability\": 0.00039645033391533577, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1142.0, \"fp\": 3.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.9973799126637555, \"fp_rate\": 0.0026200873362445414, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9979281767955801, \"recall\": 0.7114721811915312, \"specificity\": 0.9973799126637555, \"npv\": 0.6608796296296297, \"accuracy\": 0.8145465994962217, \"f1\": 0.8306984765737281, \"f2\": 0.7548056832427915, \"f0_5\": 0.9235587370573949, \"p4\": 0.81245092776752, \"phi\": 0.683372001937977}, {\"truth_threshold\": -10.900000162422657, \"match_probability\": 0.0005230530993675534, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1143.0, \"fp\": 2.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.9982532751091703, \"fp_rate\": 0.0017467248908296944, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9986178299930891, \"recall\": 0.7114721811915312, \"specificity\": 0.9982532751091703, \"npv\": 0.6610757663389243, \"accuracy\": 0.8148614609571788, \"f1\": 0.8309373202990225, \"f2\": 0.7548845470692718, \"f0_5\": 0.9240312060365775, \"p4\": 0.8127841005555416, \"phi\": 0.6842523939858662}, {\"truth_threshold\": -10.700000159442425, \"match_probability\": 0.0006007835088396779, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1444.0, \"tn\": 1143.0, \"fp\": 2.0, \"fn\": 587.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7109798129000492, \"tn_rate\": 0.9982532751091703, \"fp_rate\": 0.0017467248908296944, \"fn_rate\": 0.28902018709995075, \"precision\": 0.9986168741355463, \"recall\": 0.7109798129000492, \"specificity\": 0.9982532751091703, \"npv\": 0.6606936416184971, 
\"accuracy\": 0.8145465994962217, \"f1\": 0.8306010928961749, \"f2\": 0.754440961337513, \"f0_5\": 0.9238643634037108, \"p4\": 0.8124788095466353, \"phi\": 0.6838163737767555}, {\"truth_threshold\": -10.200000151991844, \"match_probability\": 0.0008494248089972806, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1443.0, \"tn\": 1143.0, \"fp\": 2.0, \"fn\": 588.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7104874446085672, \"tn_rate\": 0.9982532751091703, \"fp_rate\": 0.0017467248908296944, \"fn_rate\": 0.2895125553914328, \"precision\": 0.9986159169550173, \"recall\": 0.7104874446085672, \"specificity\": 0.9982532751091703, \"npv\": 0.6603119584055459, \"accuracy\": 0.8142317380352645, \"f1\": 0.830264672036824, \"f2\": 0.7539972828926743, \"f0_5\": 0.9236973498911791, \"p4\": 0.8121735251016357, \"phi\": 0.6833805796370901}, {\"truth_threshold\": -9.100000135600567, \"match_probability\": 0.0018190150448253225, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1443.0, \"tn\": 1144.0, \"fp\": 1.0, \"fn\": 588.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7104874446085672, \"tn_rate\": 0.9991266375545852, \"fp_rate\": 0.0008733624454148472, \"fn_rate\": 0.2895125553914328, \"precision\": 0.9993074792243767, \"recall\": 0.7104874446085672, \"specificity\": 0.9991266375545852, \"npv\": 0.6605080831408776, \"accuracy\": 0.8145465994962217, \"f1\": 0.8305035971223022, \"f2\": 0.7540760869565217, \"f0_5\": 0.9241706161137441, \"p4\": 0.8125064984715595, \"phi\": 0.6842619488798015}, {\"truth_threshold\": -8.600000128149986, \"match_probability\": 0.0025705389597152823, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1440.0, \"tn\": 1144.0, \"fp\": 1.0, \"fn\": 591.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7090103397341211, \"tn_rate\": 0.9991266375545852, \"fp_rate\": 
0.0008733624454148472, \"fn_rate\": 0.29098966026587886, \"precision\": 0.9993060374739764, \"recall\": 0.7090103397341211, \"specificity\": 0.9991266375545852, \"npv\": 0.6593659942363113, \"accuracy\": 0.8136020151133502, \"f1\": 0.8294930875576036, \"f2\": 0.7527443805541035, \"f0_5\": 0.9236690186016677, \"p4\": 0.811590547208778, \"phi\": 0.6829568226176045}, {\"truth_threshold\": -8.400000125169754, \"match_probability\": 0.0029516456585356845, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1439.0, \"tn\": 1144.0, \"fp\": 1.0, \"fn\": 592.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7085179714426391, \"tn_rate\": 0.9991266375545852, \"fp_rate\": 0.0008733624454148472, \"fn_rate\": 0.2914820285573609, \"precision\": 0.9993055555555556, \"recall\": 0.7085179714426391, \"specificity\": 0.9991266375545852, \"npv\": 0.6589861751152074, \"accuracy\": 0.8132871536523929, \"f1\": 0.829155862863728, \"f2\": 0.7523002927645337, \"f0_5\": 0.923501476062123, \"p4\": 0.8112852415850365, \"phi\": 0.6825222299358593}, {\"truth_threshold\": -7.600000113248825, \"match_probability\": 0.0051278965144870335, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1439.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 592.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7085179714426391, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2914820285573609, \"precision\": 1.0, \"recall\": 0.7085179714426391, \"specificity\": 1.0, \"npv\": 0.6591824985607369, \"accuracy\": 0.8136020151133502, \"f1\": 0.8293948126801153, \"f2\": 0.7523789605772248, \"f0_5\": 0.9239758571978939, \"p4\": 0.8116179257342173, \"phi\": 0.6834051848579609}, {\"truth_threshold\": -7.400000110268593, \"match_probability\": 0.005885918232687788, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1437.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 594.0, \"P_rate\": 0.6394836272040302, 
\"N_rate\": 0.36051636934280396, \"tp_rate\": 0.707533234859675, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.29246676514032494, \"precision\": 1.0, \"recall\": 0.707533234859675, \"specificity\": 1.0, \"npv\": 0.6584243818286372, \"accuracy\": 0.8129722921914357, \"f1\": 0.828719723183391, \"f2\": 0.7514904298713524, \"f0_5\": 0.9236405707674509, \"p4\": 0.811007239776182, \"phi\": 0.6825372757481436}, {\"truth_threshold\": -6.800000101327896, \"match_probability\": 0.00889438522932807, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1436.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 595.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.707040866568193, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.292959133431807, \"precision\": 1.0, \"recall\": 0.707040866568193, \"specificity\": 1.0, \"npv\": 0.6580459770114943, \"accuracy\": 0.8126574307304786, \"f1\": 0.8283818863570811, \"f2\": 0.7510460251046025, \"f0_5\": 0.9234726688102894, \"p4\": 0.8107019041426428, \"phi\": 0.6821036562194343}, {\"truth_threshold\": -6.70000009983778, \"match_probability\": 0.009526684411466419, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1428.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 603.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7031019202363368, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2968980797636632, \"precision\": 1.0, \"recall\": 0.7031019202363368, \"specificity\": 1.0, \"npv\": 0.6550343249427918, \"accuracy\": 0.8101385390428212, \"f1\": 0.8256721595836947, \"f2\": 0.7474874371859297, \"f0_5\": 0.9221232080588919, \"p4\": 0.8082593661032169, \"phi\": 0.6786426833673147}, {\"truth_threshold\": -6.400000095367432, \"match_probability\": 0.011702953955477532, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1424.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 607.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.7011324470704087, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.29886755292959133, \"precision\": 1.0, \"recall\": 0.7011324470704087, \"specificity\": 1.0, \"npv\": 0.6535388127853882, \"accuracy\": 0.8088790931989924, \"f1\": 0.8243125904486251, \"f2\": 0.7457059069962296, \"f0_5\": 0.9214442862689272, \"p4\": 0.8070381719383619, \"phi\": 0.6769174743376838}, {\"truth_threshold\": -6.3000000938773155, \"match_probability\": 0.012532388771145032, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1423.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 608.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7006400787789266, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.29935992122107336, \"precision\": 1.0, \"recall\": 0.7006400787789266, \"specificity\": 1.0, \"npv\": 0.6531660011409013, \"accuracy\": 0.8085642317380353, \"f1\": 0.8239722061378112, \"f2\": 0.74526029119095, \"f0_5\": 0.9212741162760585, \"p4\": 0.8067328787708493, \"phi\": 0.6764867171608602}, {\"truth_threshold\": -5.700000084936619, \"match_probability\": 0.01887356650421064, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1422.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 609.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7001477104874446, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2998522895125554, \"precision\": 1.0, \"recall\": 0.7001477104874446, \"specificity\": 1.0, \"npv\": 0.6527936145952109, \"accuracy\": 0.8082493702770781, \"f1\": 0.8236316246741964, \"f2\": 0.7448145820238844, \"f0_5\": 0.9211037699183832, \"p4\": 0.8064275873033679, \"phi\": 0.676056177162564}, {\"truth_threshold\": -5.600000083446503, \"match_probability\": 0.02020082327925431, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1405.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 626.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, 
\"tp_rate\": 0.6917774495322502, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3082225504677499, \"precision\": 1.0, \"recall\": 0.6917774495322502, \"specificity\": 1.0, \"npv\": 0.6465273856578204, \"accuracy\": 0.802896725440806, \"f1\": 0.8178114086146682, \"f2\": 0.737223213348725, \"f0_5\": 0.9181806299830088, \"p4\": 0.8012376730750075, \"phi\": 0.6687698153349331}, {\"truth_threshold\": -5.300000078976154, \"match_probability\": 0.024754544222716376, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1393.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 638.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6858690300344658, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.31413096996553425, \"precision\": 1.0, \"recall\": 0.6858690300344658, \"specificity\": 1.0, \"npv\": 0.6421761076836792, \"accuracy\": 0.7991183879093199, \"f1\": 0.8136682242990654, \"f2\": 0.7318482715141326, \"f0_5\": 0.9160857556227805, \"p4\": 0.7975738525329583, \"phi\": 0.6636630953189379}, {\"truth_threshold\": -4.90000007301569, \"match_probability\": 0.032407497325934585, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1391.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 640.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6848842934515017, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3151157065484983, \"precision\": 1.0, \"recall\": 0.6848842934515017, \"specificity\": 1.0, \"npv\": 0.6414565826330533, \"accuracy\": 0.7984886649874056, \"f1\": 0.8129748684979544, \"f2\": 0.7309511297950604, \"f0_5\": 0.9157340355497038, \"p4\": 0.7969631540364932, \"phi\": 0.6628148598035907}, {\"truth_threshold\": -4.800000071525574, \"match_probability\": 0.03465289308554322, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1390.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 641.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 
0.6843919251600197, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3156080748399803, \"precision\": 1.0, \"recall\": 0.6843919251600197, \"specificity\": 1.0, \"npv\": 0.641097424412094, \"accuracy\": 0.7981738035264484, \"f1\": 0.8126278865828706, \"f2\": 0.7305024174900148, \"f0_5\": 0.9155578975102094, \"p4\": 0.7966577964206586, \"phi\": 0.6623910480286727}, {\"truth_threshold\": -4.500000067055225, \"match_probability\": 0.04232371044088178, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1389.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 642.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6838995568685377, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.31610044313146235, \"precision\": 1.0, \"recall\": 0.6838995568685377, \"specificity\": 1.0, \"npv\": 0.6407386681589256, \"accuracy\": 0.7978589420654912, \"f1\": 0.8122807017543859, \"f2\": 0.7300536108483129, \"f0_5\": 0.9153815737445631, \"p4\": 0.7963524329131265, \"phi\": 0.6619674396995868}, {\"truth_threshold\": -4.300000064074993, \"match_probability\": 0.048313119674570026, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1388.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 643.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6834071885770556, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3165928114229444, \"precision\": 1.0, \"recall\": 0.6834071885770556, \"specificity\": 1.0, \"npv\": 0.6403803131991052, \"accuracy\": 0.797544080604534, \"f1\": 0.8119333138344546, \"f2\": 0.7296047098402019, \"f0_5\": 0.9152050639588554, \"p4\": 0.7960470632785187, \"phi\": 0.6615440344100268}, {\"truth_threshold\": -4.200000062584877, \"match_probability\": 0.05160178526561565, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1382.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 649.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6804529788281635, 
\"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.31954702117183653, \"precision\": 1.0, \"recall\": 0.6804529788281635, \"specificity\": 1.0, \"npv\": 0.6382385730211817, \"accuracy\": 0.7956549118387909, \"f1\": 0.8098447113975974, \"f2\": 0.7269093204292026, \"f0_5\": 0.9141420822860167, \"p4\": 0.7942147035798486, \"phi\": 0.6590078438192518}, {\"truth_threshold\": -3.8000000566244125, \"match_probability\": 0.06698457743861425, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1379.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 652.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6789758739537174, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3210241260462826, \"precision\": 1.0, \"recall\": 0.6789758739537174, \"specificity\": 1.0, \"npv\": 0.6371730662214803, \"accuracy\": 0.7947103274559194, \"f1\": 0.8087976539589443, \"f2\": 0.7255603493633589, \"f0_5\": 0.9136080561812641, \"p4\": 0.7932984218488349, \"phi\": 0.6577424568153551}, {\"truth_threshold\": -3.6000000536441803, \"match_probability\": 0.0761862214703254, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1377.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 654.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6779911373707533, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.32200886262924666, \"precision\": 1.0, \"recall\": 0.6779911373707533, \"specificity\": 1.0, \"npv\": 0.6364647026125625, \"accuracy\": 0.7940806045340051, \"f1\": 0.8080985915492958, \"f2\": 0.7246605620461004, \"f0_5\": 0.9132510943095902, \"p4\": 0.7926875252637993, \"phi\": 0.6568998611817706}, {\"truth_threshold\": -3.300000049173832, \"match_probability\": 0.09217307161544283, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1375.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 656.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6770064007877893, \"tn_rate\": 1.0, 
\"fp_rate\": 0.0, \"fn_rate\": 0.3229935992122107, \"precision\": 1.0, \"recall\": 0.6770064007877893, \"specificity\": 1.0, \"npv\": 0.6357579122709606, \"accuracy\": 0.7934508816120907, \"f1\": 0.8073987081620669, \"f2\": 0.7237603958311402, \"f0_5\": 0.9128933740539105, \"p4\": 0.7920765927688658, \"phi\": 0.6560580583751121}, {\"truth_threshold\": -3.200000047683716, \"match_probability\": 0.09813940308831819, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1374.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 657.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6765140324963073, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.32348596750369274, \"precision\": 1.0, \"recall\": 0.6765140324963073, \"specificity\": 1.0, \"npv\": 0.6354051054384018, \"accuracy\": 0.7931360201511335, \"f1\": 0.8070484581497798, \"f2\": 0.7233101705622236, \"f0_5\": 0.912714228776405, \"p4\": 0.7917711124557005, \"phi\": 0.6556374532841107}, {\"truth_threshold\": -3.1000000461935997, \"match_probability\": 0.10444750015659417, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1370.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 661.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6745445593303792, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3254554406696209, \"precision\": 1.0, \"recall\": 0.6745445593303792, \"specificity\": 1.0, \"npv\": 0.6339977851605758, \"accuracy\": 0.7918765743073047, \"f1\": 0.8056453984122317, \"f2\": 0.7215083210448704, \"f0_5\": 0.9119957395819465, \"p4\": 0.7905490918185237, \"phi\": 0.6539569990508374}, {\"truth_threshold\": -2.7000000402331352, \"match_probability\": 0.13336855415354743, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1363.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 668.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.671097981290005, \"tn_rate\": 1.0, \"fp_rate\": 0.0, 
\"fn_rate\": 0.3289020187099951, \"precision\": 1.0, \"recall\": 0.671097981290005, \"specificity\": 1.0, \"npv\": 0.631549917264203, \"accuracy\": 0.7896725440806045, \"f1\": 0.803182086034178, \"f2\": 0.7183514282702645, \"f0_5\": 0.9107309902445543, \"p4\": 0.7884101358393227, \"phi\": 0.6510237127477586}, {\"truth_threshold\": -2.600000038743019, \"match_probability\": 0.1415855743659812, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1356.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 675.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6676514032496307, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.33234859675036926, \"precision\": 1.0, \"recall\": 0.6676514032496307, \"specificity\": 1.0, \"npv\": 0.6291208791208791, \"accuracy\": 0.7874685138539043, \"f1\": 0.8007085916740478, \"f2\": 0.7151898734177216, \"f0_5\": 0.9094567404426559, \"p4\": 0.7862705739141664, \"phi\": 0.6480998671182523}, {\"truth_threshold\": -2.500000037252903, \"match_probability\": 0.15022110152606716, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1355.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 676.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6671590349581487, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3328409650418513, \"precision\": 1.0, \"recall\": 0.6671590349581487, \"specificity\": 1.0, \"npv\": 0.628775398132894, \"accuracy\": 0.7871536523929471, \"f1\": 0.8003544004725339, \"f2\": 0.7147378415444667, \"f0_5\": 0.9092739229633606, \"p4\": 0.7859648678428045, \"phi\": 0.6476829377278417}, {\"truth_threshold\": -2.400000035762787, \"match_probability\": 0.1592855907727143, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1351.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 680.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6651895617922206, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 
0.3348104382077794, \"precision\": 1.0, \"recall\": 0.6651895617922206, \"specificity\": 1.0, \"npv\": 0.6273972602739726, \"accuracy\": 0.7858942065491183, \"f1\": 0.7989355410999409, \"f2\": 0.7129287598944591, \"f0_5\": 0.9085406859448554, \"p4\": 0.7847418977635621, \"phi\": 0.6460171117170842}, {\"truth_threshold\": -2.3000000342726707, \"match_probability\": 0.16878839957195682, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1346.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 685.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6627277203348104, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3372722796651896, \"precision\": 1.0, \"recall\": 0.6627277203348104, \"specificity\": 1.0, \"npv\": 0.6256830601092896, \"accuracy\": 0.7843198992443325, \"f1\": 0.7971572401539828, \"f2\": 0.7106652587117213, \"f0_5\": 0.9076196898179366, \"p4\": 0.7832128384966869, \"phi\": 0.6439390561833764}, {\"truth_threshold\": -2.2000000327825546, \"match_probability\": 0.1787376058900962, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1344.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 687.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6617429837518464, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.33825701624815363, \"precision\": 1.0, \"recall\": 0.6617429837518464, \"specificity\": 1.0, \"npv\": 0.625, \"accuracy\": 0.7836901763224181, \"f1\": 0.7964444444444444, \"f2\": 0.7097591888466414, \"f0_5\": 0.907249898744431, \"p4\": 0.7826011005327976, \"phi\": 0.6431091391396206}, {\"truth_threshold\": -2.1000000312924385, \"match_probability\": 0.18913982061899084, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1340.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 691.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6597735105859183, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.34022648941408173, 
\"precision\": 1.0, \"recall\": 0.6597735105859183, \"specificity\": 1.0, \"npv\": 0.6236383442265795, \"accuracy\": 0.7824307304785895, \"f1\": 0.7950163156333432, \"f2\": 0.7079459002535926, \"f0_5\": 0.9065079150317954, \"p4\": 0.7813774176935412, \"phi\": 0.6414515256091918}, {\"truth_threshold\": -2.0000000298023224, \"match_probability\": 0.19999999669481672, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1338.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 693.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6587887740029542, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3412112259970458, \"precision\": 1.0, \"recall\": 0.6587887740029542, \"specificity\": 1.0, \"npv\": 0.6229597388465724, \"accuracy\": 0.781801007556675, \"f1\": 0.7943009795191451, \"f2\": 0.7070386810399493, \"f0_5\": 0.9061357171881349, \"p4\": 0.7807654687830268, \"phi\": 0.6406238230099892}, {\"truth_threshold\": -1.700000025331974, \"match_probability\": 0.2353489599091234, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1335.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 696.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6573116691285081, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.34268833087149186, \"precision\": 1.0, \"recall\": 0.6573116691285081, \"specificity\": 1.0, \"npv\": 0.6219445953286258, \"accuracy\": 0.7808564231738035, \"f1\": 0.7932263814616756, \"f2\": 0.7056771328893118, \"f0_5\": 0.9055759055759056, \"p4\": 0.7798474053553527, \"phi\": 0.6393836407517114}, {\"truth_threshold\": -1.600000023841858, \"match_probability\": 0.24805074388621665, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1333.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 698.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6563269325455441, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3436730674544559, \"precision\": 1.0, 
\"recall\": 0.6563269325455441, \"specificity\": 1.0, \"npv\": 0.6212696690179056, \"accuracy\": 0.7802267002518891, \"f1\": 0.7925089179548157, \"f2\": 0.7047689542138099, \"f0_5\": 0.9052016840961564, \"p4\": 0.7792352667284765, \"phi\": 0.6385577625791636}, {\"truth_threshold\": -1.4000000208616257, \"match_probability\": 0.2747995717943022, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1332.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 699.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6558345642540621, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.34416543574593794, \"precision\": 1.0, \"recall\": 0.6558345642540621, \"specificity\": 1.0, \"npv\": 0.6209327548806941, \"accuracy\": 0.779911838790932, \"f1\": 0.792149866190901, \"f2\": 0.7043147208121827, \"f0_5\": 0.9050142682429678, \"p4\": 0.778929167747172, \"phi\": 0.6381450953570468}, {\"truth_threshold\": -1.3000000193715096, \"match_probability\": 0.2888262766358852, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1327.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 704.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6533727227966519, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3466272772033481, \"precision\": 1.0, \"recall\": 0.6533727227966519, \"specificity\": 1.0, \"npv\": 0.6192536506219578, \"accuracy\": 0.7783375314861462, \"f1\": 0.7903513996426444, \"f2\": 0.7020421119458259, \"f0_5\": 0.904074124540128, \"p4\": 0.7773983659139201, \"phi\": 0.6360844627945531}, {\"truth_threshold\": -1.2000000178813934, \"match_probability\": 0.3032695424040186, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1322.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 709.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6509108813392418, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.34908911866075826, \"precision\": 1.0, \"recall\": 
0.6509108813392418, \"specificity\": 1.0, \"npv\": 0.6175836030204962, \"accuracy\": 0.7767632241813602, \"f1\": 0.7885475693408888, \"f2\": 0.6997670971839932, \"f0_5\": 0.9031288427380789, \"p4\": 0.775867028931697, \"phi\": 0.6340283016890773}, {\"truth_threshold\": -1.1000000163912773, \"match_probability\": 0.318111997717226, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1320.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 711.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6499261447562777, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3500738552437223, \"precision\": 1.0, \"recall\": 0.6499261447562777, \"specificity\": 1.0, \"npv\": 0.6169181034482759, \"accuracy\": 0.7761335012594458, \"f1\": 0.7878245299910475, \"f2\": 0.6988564167725541, \"f0_5\": 0.9027492819039803, \"p4\": 0.7752543370502095, \"phi\": 0.6332070787700438}, {\"truth_threshold\": -1.0000000149011612, \"match_probability\": 0.33333333103806717, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1319.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 712.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6494337764647957, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.35056622353520434, \"precision\": 1.0, \"recall\": 0.6494337764647957, \"specificity\": 1.0, \"npv\": 0.6165858912224017, \"accuracy\": 0.7758186397984886, \"f1\": 0.7874626865671642, \"f2\": 0.6984009319072328, \"f0_5\": 0.9025591898179828, \"p4\": 0.7749479564070448, \"phi\": 0.6327967318590355}, {\"truth_threshold\": -0.9000000134110451, \"match_probability\": 0.34891031813411577, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1316.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 715.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6479566715903495, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3520433284096504, \"precision\": 1.0, \"recall\": 0.6479566715903495, 
\"specificity\": 1.0, \"npv\": 0.6155913978494624, \"accuracy\": 0.7748740554156172, \"f1\": 0.7863758589781894, \"f2\": 0.6970338983050848, \"f0_5\": 0.9019876627827279, \"p4\": 0.774028672532505, \"phi\": 0.6315667448577293}, {\"truth_threshold\": -0.7000000104308128, \"match_probability\": 0.38102425962470177, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1314.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 717.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6469719350073855, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.35302806499261447, \"precision\": 1.0, \"recall\": 0.6469719350073855, \"specificity\": 1.0, \"npv\": 0.6149301825993555, \"accuracy\": 0.7742443324937027, \"f1\": 0.7856502242152467, \"f2\": 0.6961220597584233, \"f0_5\": 0.901605599011939, \"p4\": 0.7734156957074743, \"phi\": 0.6307476279232052}, {\"truth_threshold\": -0.6000000089406967, \"match_probability\": 0.3975010577814427, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1309.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 722.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6445100935499753, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3554899064500246, \"precision\": 1.0, \"recall\": 0.6445100935499753, \"specificity\": 1.0, \"npv\": 0.6132833422603107, \"accuracy\": 0.7726700251889169, \"f1\": 0.7838323353293413, \"f2\": 0.6938407717587194, \"f0_5\": 0.9006467593229668, \"p4\": 0.7718828151071816, \"phi\": 0.6287028744111437}, {\"truth_threshold\": -0.5000000074505806, \"match_probability\": 0.41421356112001384, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1306.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 725.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6430329886755293, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.35696701132447073, \"precision\": 1.0, \"recall\": 0.6430329886755293, \"specificity\": 
1.0, \"npv\": 0.6122994652406417, \"accuracy\": 0.7717254408060453, \"f1\": 0.7827389871141744, \"f2\": 0.6924708377518558, \"f0_5\": 0.9000689179875948, \"p4\": 0.7709627754494935, \"phi\": 0.627478091329186}, {\"truth_threshold\": -0.4000000059604645, \"match_probability\": 0.4311259267559445, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1300.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 731.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6400787789266371, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3599212210733629, \"precision\": 1.0, \"recall\": 0.6400787789266371, \"specificity\": 1.0, \"npv\": 0.6103411513859275, \"accuracy\": 0.7698362720403022, \"f1\": 0.7805463824677275, \"f2\": 0.6897283531409168, \"f0_5\": 0.8989074816761167, \"p4\": 0.7691219621523272, \"phi\": 0.6250331342479231}, {\"truth_threshold\": -0.30000000447034836, \"match_probability\": 0.4482004805735527, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1297.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 734.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.638601674052191, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.36139832594780896, \"precision\": 1.0, \"recall\": 0.638601674052191, \"specificity\": 1.0, \"npv\": 0.6093666844065992, \"accuracy\": 0.7688916876574308, \"f1\": 0.7794471153846154, \"f2\": 0.6883558008703959, \"f0_5\": 0.8983238675716858, \"p4\": 0.7682011740290052, \"phi\": 0.6238129405308033}, {\"truth_threshold\": -0.20000000298023224, \"match_probability\": 0.4653980381052749, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1284.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 747.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6322008862629247, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.36779911373707536, \"precision\": 1.0, \"recall\": 0.6322008862629247, \"specificity\": 1.0, \"npv\": 
0.6051797040169133, \"accuracy\": 0.7647984886649875, \"f1\": 0.7746606334841629, \"f2\": 0.6823979591836735, \"f0_5\": 0.895772289660946, \"p4\": 0.7642079467115986, \"phi\": 0.6185427594175095}, {\"truth_threshold\": 0.0, \"match_probability\": 0.5, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1283.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 748.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6317085179714427, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3682914820285574, \"precision\": 1.0, \"recall\": 0.6317085179714427, \"specificity\": 1.0, \"npv\": 0.6048600105652404, \"accuracy\": 0.7644836272040302, \"f1\": 0.7742908871454436, \"f2\": 0.6819389816094398, \"f0_5\": 0.8955744799664945, \"p4\": 0.7639005528137026, \"phi\": 0.6181385126768588}, {\"truth_threshold\": 0.10000000149011612, \"match_probability\": 0.5173217450900928, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1279.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 752.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6297390448055146, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3702609551944855, \"precision\": 1.0, \"recall\": 0.6297390448055146, \"specificity\": 1.0, \"npv\": 0.6035846072746441, \"accuracy\": 0.7632241813602015, \"f1\": 0.7728096676737161, \"f2\": 0.6801020950760396, \"f0_5\": 0.8947810270043375, \"p4\": 0.7626706427097226, \"phi\": 0.6165231496419628}, {\"truth_threshold\": 0.20000000298023224, \"match_probability\": 0.5346019618947252, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1272.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 759.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6262924667651403, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.37370753323485967, \"precision\": 1.0, \"recall\": 0.6262924667651403, \"specificity\": 1.0, \"npv\": 0.6013655462184874, \"accuracy\": 0.7610201511335013, 
\"f1\": 0.7702089009990918, \"f2\": 0.6768837803320562, \"f0_5\": 0.8933839022334598, \"p4\": 0.7605169691954441, \"phi\": 0.6137024615957984}, {\"truth_threshold\": 0.30000000447034836, \"match_probability\": 0.5517995194264473, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1271.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 760.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6258000984736583, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3741999015263417, \"precision\": 1.0, \"recall\": 0.6258000984736583, \"specificity\": 1.0, \"npv\": 0.6010498687664042, \"accuracy\": 0.760705289672544, \"f1\": 0.7698364627498486, \"f2\": 0.6764236295902075, \"f0_5\": 0.893183415319747, \"p4\": 0.7602091587940459, \"phi\": 0.6133001443515198}, {\"truth_threshold\": 0.5000000074505806, \"match_probability\": 0.5857864388799862, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1268.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 763.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6243229935992122, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.37567700640078777, \"precision\": 1.0, \"recall\": 0.6243229935992122, \"specificity\": 1.0, \"npv\": 0.600104821802935, \"accuracy\": 0.7597607052896725, \"f1\": 0.7687177932706881, \"f2\": 0.6750425894378195, \"f0_5\": 0.892580599746586, \"p4\": 0.7592855072439135, \"phi\": 0.6120941421230318}, {\"truth_threshold\": 0.6000000089406967, \"match_probability\": 0.6024989422185573, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1266.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 765.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6233382570162481, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3766617429837518, \"precision\": 1.0, \"recall\": 0.6233382570162481, \"specificity\": 1.0, \"npv\": 0.599476439790576, \"accuracy\": 0.7591309823677582, \"f1\": 0.7679708826205641, 
\"f2\": 0.6741214057507987, \"f0_5\": 0.8921775898520085, \"p4\": 0.7586695530830421, \"phi\": 0.6112909283650163}, {\"truth_threshold\": 0.7000000104308128, \"match_probability\": 0.6189757403752982, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1255.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 776.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6179222058099458, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.38207779419005417, \"precision\": 1.0, \"recall\": 0.6179222058099458, \"specificity\": 1.0, \"npv\": 0.5960437272254034, \"accuracy\": 0.7556675062972292, \"f1\": 0.7638466220328667, \"f2\": 0.6690478729075594, \"f0_5\": 0.8899446886966388, \"p4\": 0.7552790297360157, \"phi\": 0.6068843832941353}, {\"truth_threshold\": 0.800000011920929, \"match_probability\": 0.6351831076021942, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1249.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 782.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6149679960610537, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3850320039389463, \"precision\": 1.0, \"recall\": 0.6149679960610537, \"specificity\": 1.0, \"npv\": 0.5941878567721848, \"accuracy\": 0.7537783375314862, \"f1\": 0.7615853658536585, \"f2\": 0.6662754721007148, \"f0_5\": 0.8887149565959869, \"p4\": 0.7534275803790252, \"phi\": 0.6044886397303119}, {\"truth_threshold\": 0.9000000134110451, \"match_probability\": 0.6510896818658842, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1246.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 785.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6134908911866076, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3865091088133924, \"precision\": 1.0, \"recall\": 0.6134908911866076, \"specificity\": 1.0, \"npv\": 0.5932642487046632, \"accuracy\": 0.7528337531486146, \"f1\": 0.7604516325907843, \"f2\": 
0.6648879402347919, \"f0_5\": 0.8880969351389879, \"p4\": 0.7525012807216611, \"phi\": 0.6032928083832734}, {\"truth_threshold\": 1.0000000149011612, \"match_probability\": 0.6666666689619328, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1241.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 790.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6110290497291975, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3889709502708026, \"precision\": 1.0, \"recall\": 0.6110290497291975, \"specificity\": 1.0, \"npv\": 0.5917312661498708, \"accuracy\": 0.7512594458438288, \"f1\": 0.758557457212714, \"f2\": 0.6625734116390817, \"f0_5\": 0.8870621872766261, \"p4\": 0.7509565686139796, \"phi\": 0.6013027467512604}, {\"truth_threshold\": 1.1000000163912773, \"match_probability\": 0.681888002282774, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1238.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 793.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6095519448547514, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.39044805514524866, \"precision\": 1.0, \"recall\": 0.6095519448547514, \"specificity\": 1.0, \"npv\": 0.5908152734778122, \"accuracy\": 0.7503148614609572, \"f1\": 0.757418170694402, \"f2\": 0.6611835077974791, \"f0_5\": 0.8864384934841758, \"p4\": 0.7500292006663175, \"phi\": 0.6001104889920623}, {\"truth_threshold\": 1.2000000178813934, \"match_probability\": 0.6967304575959814, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1232.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 799.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6065977351058592, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3934022648941408, \"precision\": 1.0, \"recall\": 0.6065977351058592, \"specificity\": 1.0, \"npv\": 0.5889917695473251, \"accuracy\": 0.7484256926952141, \"f1\": 0.7551333129022372, \"f2\": 0.6584010260795211, 
\"f0_5\": 0.8851846529673804, \"p4\": 0.748173210499427, \"phi\": 0.5977299335012423}, {\"truth_threshold\": 1.3000000193715096, \"match_probability\": 0.7111737233641148, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1217.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 814.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5992122107336287, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.40078778926637126, \"precision\": 1.0, \"recall\": 0.5992122107336287, \"specificity\": 1.0, \"npv\": 0.5844818785094436, \"accuracy\": 0.7437027707808564, \"f1\": 0.749384236453202, \"f2\": 0.6514291831709667, \"f0_5\": 0.8820118857805479, \"p4\": 0.7435255360881731, \"phi\": 0.5918012154054669}, {\"truth_threshold\": 1.5000000223517418, \"match_probability\": 0.7387961280260511, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1216.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 815.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5987198424421467, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4012801575578533, \"precision\": 1.0, \"recall\": 0.5987198424421467, \"specificity\": 1.0, \"npv\": 0.5841836734693877, \"accuracy\": 0.7433879093198993, \"f1\": 0.7489990760702187, \"f2\": 0.6509635974304069, \"f0_5\": 0.8817984046410442, \"p4\": 0.7432152820546354, \"phi\": 0.5914070991600171}, {\"truth_threshold\": 1.600000023841858, \"match_probability\": 0.7519492561137834, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1213.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 818.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5972427375677006, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.40275726243229937, \"precision\": 1.0, \"recall\": 0.5972427375677006, \"specificity\": 1.0, \"npv\": 0.5832908813041263, \"accuracy\": 0.7424433249370277, \"f1\": 0.747842170160296, \"f2\": 0.6495662418335654, \"f0_5\": 
0.8811564724684005, \"p4\": 0.742284201345094, \"phi\": 0.590225586321326}, {\"truth_threshold\": 1.700000025331974, \"match_probability\": 0.7646510400908766, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1200.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 831.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5908419497784343, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4091580502215657, \"precision\": 1.0, \"recall\": 0.5908419497784343, \"specificity\": 1.0, \"npv\": 0.5794534412955465, \"accuracy\": 0.7383501259445844, \"f1\": 0.7428040854224698, \"f2\": 0.6435006435006435, \"f0_5\": 0.8783487044356609, \"p4\": 0.7382438098538624, \"phi\": 0.5851199886013844}, {\"truth_threshold\": 1.8000000268220901, \"match_probability\": 0.7768953900182098, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1195.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 836.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5883801083210242, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4116198916789759, \"precision\": 1.0, \"recall\": 0.5883801083210242, \"specificity\": 1.0, \"npv\": 0.5779909136799596, \"accuracy\": 0.7367758186397985, \"f1\": 0.7408555486670799, \"f2\": 0.6411632149372251, \"f0_5\": 0.8772573777712523, \"p4\": 0.7366872427429624, \"phi\": 0.5831623756721471}, {\"truth_threshold\": 1.9000000283122063, \"match_probability\": 0.7886787621992872, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1190.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 841.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.585918266863614, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.414081733136386, \"precision\": 1.0, \"recall\": 0.585918266863614, \"specificity\": 1.0, \"npv\": 0.5765357502517623, \"accuracy\": 0.7352015113350125, \"f1\": 0.7389009624340267, \"f2\": 0.6388232767876315, \"f0_5\": 0.8761596230304816, \"p4\": 
0.7351291857832581, \"phi\": 0.5812080759697219}, {\"truth_threshold\": 2.0000000298023224, \"match_probability\": 0.8000000033051833, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1189.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 842.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.585425898572132, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.41457410142786805, \"precision\": 1.0, \"recall\": 0.585425898572132, \"specificity\": 1.0, \"npv\": 0.5762455963764469, \"accuracy\": 0.7348866498740554, \"f1\": 0.7385093167701864, \"f2\": 0.6383549876516698, \"f0_5\": 0.8759392957123914, \"p4\": 0.7348173920177556, \"phi\": 0.5808176099748659}, {\"truth_threshold\": 2.1000000312924385, \"match_probability\": 0.8108601793810092, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1183.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 848.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5824716888232397, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4175283111767602, \"precision\": 1.0, \"recall\": 0.5824716888232397, \"specificity\": 1.0, \"npv\": 0.57451078775715, \"accuracy\": 0.7329974811083123, \"f1\": 0.7361543248288737, \"f2\": 0.6355431395723649, \"f0_5\": 0.8746118586426143, \"p4\": 0.732945325152551, \"phi\": 0.578477543896111}, {\"truth_threshold\": 2.2000000327825546, \"match_probability\": 0.8212623941099038, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1179.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 852.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5805022156573116, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4194977843426883, \"precision\": 1.0, \"recall\": 0.5805022156573116, \"specificity\": 1.0, \"npv\": 0.5733600400600901, \"accuracy\": 0.7317380352644837, \"f1\": 0.7345794392523365, \"f2\": 0.6336665591744598, \"f0_5\": 0.8737216540684749, \"p4\": 0.7316960140750485, \"phi\": 
0.5769200755947459}, {\"truth_threshold\": 2.3000000342726707, \"match_probability\": 0.8312116004280432, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1164.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 867.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5731166912850812, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.42688330871491875, \"precision\": 1.0, \"recall\": 0.5731166912850812, \"specificity\": 1.0, \"npv\": 0.5690854870775348, \"accuracy\": 0.7270151133501259, \"f1\": 0.7286384976525822, \"f2\": 0.6266149870801033, \"f0_5\": 0.8703454463885151, \"p4\": 0.7270016744799519, \"phi\": 0.571097532311457}, {\"truth_threshold\": 2.400000035762787, \"match_probability\": 0.8407144092272857, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1151.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 880.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5667159034958149, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.43328409650418515, \"precision\": 1.0, \"recall\": 0.5667159034958149, \"specificity\": 1.0, \"npv\": 0.5654320987654321, \"accuracy\": 0.7229219143576826, \"f1\": 0.7234443746071653, \"f2\": 0.6204851752021563, \"f0_5\": 0.8673700075357951, \"p4\": 0.7229205464573797, \"phi\": 0.5660736371863528}, {\"truth_threshold\": 2.500000037252903, \"match_probability\": 0.8497788984739328, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1148.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 883.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5652387986213688, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4347612013786312, \"precision\": 1.0, \"recall\": 0.5652387986213688, \"specificity\": 1.0, \"npv\": 0.564595660749507, \"accuracy\": 0.7219773299748111, \"f1\": 0.7222396980182447, \"f2\": 0.6190681622088007, \"f0_5\": 0.8666767325985203, \"p4\": 0.7219769863862414, \"phi\": 0.5649171381617742}, 
{\"truth_threshold\": 2.600000038743019, \"match_probability\": 0.8584144256340188, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1147.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 884.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5647464303298868, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.43525356967011325, \"precision\": 1.0, \"recall\": 0.5647464303298868, \"specificity\": 1.0, \"npv\": 0.5643173977328734, \"accuracy\": 0.7216624685138538, \"f1\": 0.7218376337319069, \"f2\": 0.6185956207528853, \"f0_5\": 0.8664450823387219, \"p4\": 0.7216623155682867, \"phi\": 0.5645318732743893}, {\"truth_threshold\": 2.7000000402331352, \"match_probability\": 0.8666314458464526, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1144.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 887.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5632693254554406, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4367306745445593, \"precision\": 1.0, \"recall\": 0.5632693254554406, \"specificity\": 1.0, \"npv\": 0.5634842519685039, \"accuracy\": 0.7207178841309824, \"f1\": 0.7206299212598425, \"f2\": 0.6171773845489857, \"f0_5\": 0.8657484486151051, \"p4\": 0.7207178457145617, \"phi\": 0.5633767784627467}, {\"truth_threshold\": 2.8000000417232513, \"match_probability\": 0.8744413378412453, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1136.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 895.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5593303791235844, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.44066962087641554, \"precision\": 1.0, \"recall\": 0.5593303791235844, \"specificity\": 1.0, \"npv\": 0.5612745098039216, \"accuracy\": 0.718198992443325, \"f1\": 0.7173981686138301, \"f2\": 0.6133909287257019, \"f0_5\": 0.8638783269961977, \"p4\": 0.7181958416012424, \"phi\": 0.560301601247963}, {\"truth_threshold\": 
2.9000000432133675, \"match_probability\": 0.8818562391739494, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1135.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 896.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5588380108321024, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.44116198916789756, \"precision\": 1.0, \"recall\": 0.5588380108321024, \"specificity\": 1.0, \"npv\": 0.560999510044096, \"accuracy\": 0.7178841309823678, \"f1\": 0.7169930511686671, \"f2\": 0.612917161680527, \"f0_5\": 0.8636432810835489, \"p4\": 0.717880234989325, \"phi\": 0.5599177174110734}, {\"truth_threshold\": 3.1000000461935997, \"match_probability\": 0.8955524998434058, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1109.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 922.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5460364352535697, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.45396356474643035, \"precision\": 1.0, \"recall\": 0.5460364352535697, \"specificity\": 1.0, \"npv\": 0.5539429124334785, \"accuracy\": 0.7096977329974811, \"f1\": 0.7063694267515923, \"f2\": 0.6005631972273368, \"f0_5\": 0.8574300293799288, \"p4\": 0.7096451676361052, \"phi\": 0.5499754660338557}, {\"truth_threshold\": 3.200000047683716, \"match_probability\": 0.9018605969116819, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1105.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 926.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5440669620876416, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.45593303791235845, \"precision\": 1.0, \"recall\": 0.5440669620876416, \"specificity\": 1.0, \"npv\": 0.5528730082085949, \"accuracy\": 0.7084382871536524, \"f1\": 0.704719387755102, \"f2\": 0.5986564091450861, \"f0_5\": 0.8564563633545187, \"p4\": 0.7083729914338502, \"phi\": 0.5484523115060287}, {\"truth_threshold\": 3.300000049173832, 
\"match_probability\": 0.9078269283845571, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1103.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 928.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5430822255046776, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4569177744953225, \"precision\": 1.0, \"recall\": 0.5430822255046776, \"specificity\": 1.0, \"npv\": 0.5523396044380126, \"accuracy\": 0.707808564231738, \"f1\": 0.7038927887683472, \"f2\": 0.597702395144684, \"f0_5\": 0.8559677169020643, \"p4\": 0.7077363543593591, \"phi\": 0.5476913561601727}, {\"truth_threshold\": 3.400000050663948, \"match_probability\": 0.9134653434169965, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1101.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 930.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5420974889217134, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.45790251107828656, \"precision\": 1.0, \"recall\": 0.5420974889217134, \"specificity\": 1.0, \"npv\": 0.5518072289156627, \"accuracy\": 0.7071788413098237, \"f1\": 0.7030651340996169, \"f2\": 0.5967479674796748, \"f0_5\": 0.8554778554778555, \"p4\": 0.7070993471091068, \"phi\": 0.5469308120448416}, {\"truth_threshold\": 3.500000052154064, \"match_probability\": 0.9187896995557598, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1094.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 937.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5386509108813392, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.46134908911866074, \"precision\": 1.0, \"recall\": 0.5386509108813392, \"specificity\": 1.0, \"npv\": 0.5499519692603266, \"accuracy\": 0.7049748110831234, \"f1\": 0.70016, \"f2\": 0.5934042091559991, \"f0_5\": 0.8537537068830966, \"p4\": 0.7048668608108392, \"phi\": 0.544272109503198}, {\"truth_threshold\": 3.6000000536441803, \"match_probability\": 
0.9238137785296746, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1093.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 938.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5381585425898572, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.46184145741014276, \"precision\": 1.0, \"recall\": 0.5381585425898572, \"specificity\": 1.0, \"npv\": 0.5496879500720115, \"accuracy\": 0.7046599496221663, \"f1\": 0.6997439180537772, \"f2\": 0.5929261147878919, \"f0_5\": 0.8535061689832891, \"p4\": 0.7045475528853299, \"phi\": 0.5438926972206558}, {\"truth_threshold\": 3.7000000551342964, \"match_probability\": 0.9285512128432143, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1089.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 942.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5361890694239291, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4638109305760709, \"precision\": 1.0, \"recall\": 0.5361890694239291, \"specificity\": 1.0, \"npv\": 0.5486344034499281, \"accuracy\": 0.7034005037783375, \"f1\": 0.698076923076923, \"f2\": 0.5910126994464344, \"f0_5\": 0.8525129168623767, \"p4\": 0.7032693518148777, \"phi\": 0.5423760413585481}, {\"truth_threshold\": 3.8000000566244125, \"match_probability\": 0.9330154225613858, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1084.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 947.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.533727227966519, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.46627277203348105, \"precision\": 1.0, \"recall\": 0.533727227966519, \"specificity\": 1.0, \"npv\": 0.5473231357552581, \"accuracy\": 0.7018261964735516, \"f1\": 0.6959871589085073, \"f2\": 0.5886185925282363, \"f0_5\": 0.8512643317103816, \"p4\": 0.701669388939399, \"phi\": 0.5404824326919393}, {\"truth_threshold\": 3.9000000581145287, \"match_probability\": 0.9372195616099515, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1074.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 957.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5288035450516987, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4711964549483013, \"precision\": 1.0, \"recall\": 0.5288035450516987, \"specificity\": 1.0, \"npv\": 0.5447193149381542, \"accuracy\": 0.6986775818639799, \"f1\": 0.6917874396135266, \"f2\": 0.58382257012394, \"f0_5\": 0.8487434803224277, \"p4\": 0.698461896288862, \"phi\": 0.5367024359898404}, {\"truth_threshold\": 4.000000059604645, \"match_probability\": 0.9411764728755594, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1067.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 964.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5253569670113245, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.47464303298867555, \"precision\": 1.0, \"recall\": 0.5253569670113245, \"specificity\": 1.0, \"npv\": 0.5429113323850165, \"accuracy\": 0.6964735516372796, \"f1\": 0.6888315041962556, \"f2\": 0.5804591448155805, \"f0_5\": 0.8469598348944277, \"p4\": 0.6962104664501566, \"phi\": 0.5340620291107292}, {\"truth_threshold\": 4.200000062584877, \"match_probability\": 0.9483982147343843, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1030.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1001.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5071393402264894, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.49286065977351057, \"precision\": 1.0, \"recall\": 0.5071393402264894, \"specificity\": 1.0, \"npv\": 0.5335507921714818, \"accuracy\": 0.684823677581864, \"f1\": 0.6729826853969291, \"f2\": 0.5625955866287962, \"f0_5\": 0.8372622337831247, \"p4\": 0.6842191143036372, \"phi\": 0.5201774665622936}, {\"truth_threshold\": 4.300000064074993, \"match_probability\": 0.9516868803254299, \"total_clerical_labels\": 
3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1025.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1006.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5046774987690793, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.49532250123092075, \"precision\": 1.0, \"recall\": 0.5046774987690793, \"specificity\": 1.0, \"npv\": 0.5323105532310554, \"accuracy\": 0.6832493702770781, \"f1\": 0.6708115183246073, \"f2\": 0.5601705104382992, \"f0_5\": 0.8359158375468928, \"p4\": 0.6825861647803277, \"phi\": 0.518309905918297}, {\"truth_threshold\": 4.400000065565109, \"match_probability\": 0.9547759482410569, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1020.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1011.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5022156573116692, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4977843426883309, \"precision\": 1.0, \"recall\": 0.5022156573116692, \"specificity\": 1.0, \"npv\": 0.5310760667903525, \"accuracy\": 0.6816750629722922, \"f1\": 0.6686332350049164, \"f2\": 0.5577427821522309, \"f0_5\": 0.8345606283750614, \"p4\": 0.6809500591436524, \"phi\": 0.5164443009324556}]}}, {\"mode\": \"vega-lite\"});\n",
              "</script>"
            ],
            "text/plain": [
              "alt.HConcatChart(...)"
            ]
          },
          "execution_count": 4,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "chart"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "\n",
        "!!! info \"At a glance\"\n",
        "    **Useful for:** Selecting an optimal match weight threshold for generating linked clusters.\n",
        "\n",
        "    **API Documentation:** [accuracy_analysis_from_labels_table()](../api_docs/evaluation.md#splink.internals.linker_components.evaluation.LinkerEvalution.accuracy_analysis_from_labels_table)\n",
        "\n",
        "    **What is needed to generate the chart?** A `linker` with some data and a corresponding labelled dataset"
      ]
    },
    {
      "attachments": {},
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "### What the chart shows\n",
        "\n",
        "For a given match weight threshold, a record pair with a score above this threshold will be labelled a match and below the threshold will be labelled a non-match. Lowering the threshold to the extreme ensures many more matches are generated - this maximises the True Positives (high recall) but at the expense of some False Positives (low precision).\n",
        "\n",
        "You can then see the effect on the confusion matrix of raising the match threshold. As more predicted matches become non-matches at the higher threshold, True Positives become False Negatives, but False Positives become True Negatives.\n",
        "\n",
        "This demonstrates the trade-off between Type 1 (FP) and Type 2 (FN) errors when selecting a match threshold, or precision vs recall.\n",
        "\n",
        "This chart adds further context to [accuracy_analysis_from_labels_table](./accuracy_analysis_from_labels_table.ipynb) showing:\n",
        "\n",
        "-  the relationship between match weight and match probability\n",
        "-  various accuracy metrics comparing the Splink scores against clerical labels\n",
        "-  the confusion matrix of the predictions and the labels"
      ]
    },
    {
      "attachments": {},
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "### How to interpret the chart\n",
        "\n",
        "**Precision** can be maximised by **increasing** the match threshold (reducing false positives).\n",
        "\n",
        "**Recall** can be maximised by **decreasing** the match threshold (reducing false negatives). \n",
        "\n",
        "Additional metrics can be used to find the optimal compromise between these two, looking for the threshold at which peak accuracy is achieved. "
      ]
    },
    {
      "attachments": {},
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "### Actions to take as a result of the chart\n",
        "\n",
        "Having identified an optimal match weight threshold, this can be applied when generating linked clusters using [cluster_pairwise_predictions_at_threshold()](../api_docs/clustering.md#splink.clustering.cluster_pairwise_predictions_at_threshold)."
      ]
    },
    {
      "attachments": {},
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "## Worked Example"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 3,
      "metadata": {
        "tags": [
          "hide_output"
        ]
      },
      "outputs": [
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "Probability two random records match is estimated to be  0.000821.\n",
            "This means that amongst all possible pairwise record comparisons, one in 1,218.29 are expected to match.  With 499,500 total possible comparisons, we expect a total of around 410.00 matching pairs\n",
            "You are using the default value for `max_pairs`, which may be too small and thus lead to inaccurate estimates for your model's u-parameters. Consider increasing to 1e8 or 1e9, which will result in more accurate estimates, but with a longer run time.\n",
            "----- Estimating u probabilities using random sampling -----\n",
            "u probability not trained for dob - Abs difference of 'transformed dob <= 1 month' (comparison vector value: 1). This usually means the comparison level was never observed in the training data.\n",
            "\n",
            "Estimated u probabilities using random sampling\n",
            "\n",
            "Your model is not yet fully trained. Missing estimates for:\n",
            "    - first_name (no m values are trained).\n",
            "    - surname (no m values are trained).\n",
            "    - dob (some u values are not trained, no m values are trained).\n",
            "    - city (no m values are trained).\n",
            "    - email (no m values are trained).\n",
            "\n",
            "----- Starting EM training session -----\n",
            "\n",
            "Estimating the m probabilities of the model by blocking on:\n",
            "(l.\"first_name\" = r.\"first_name\") AND (l.\"surname\" = r.\"surname\")\n",
            "\n",
            "Parameter estimates will be made for the following comparison(s):\n",
            "    - dob\n",
            "    - city\n",
            "    - email\n",
            "\n",
            "Parameter estimates cannot be made for the following comparison(s) since they are used in the blocking rules: \n",
            "    - first_name\n",
            "    - surname\n",
            "\n",
            "WARNING:\n",
            "Level Abs difference of 'transformed dob <= 1 month' on comparison dob not observed in dataset, unable to train m value\n",
            "\n",
            "WARNING:\n",
            "Level Jaro-Winkler distance of transformed email >= 0.88 on comparison email not observed in dataset, unable to train m value\n",
            "\n",
            "Iteration 1: Largest change in params was -0.51 in the m_probability of dob, level `Exact match on dob`\n",
            "Iteration 2: Largest change in params was 0.0782 in probability_two_random_records_match\n",
            "Iteration 3: Largest change in params was 0.0205 in probability_two_random_records_match\n",
            "Iteration 4: Largest change in params was 0.00737 in probability_two_random_records_match\n",
            "Iteration 5: Largest change in params was 0.00323 in probability_two_random_records_match\n",
            "Iteration 6: Largest change in params was 0.00161 in probability_two_random_records_match\n",
            "Iteration 7: Largest change in params was 0.000862 in probability_two_random_records_match\n",
            "Iteration 8: Largest change in params was 0.000482 in probability_two_random_records_match\n",
            "Iteration 9: Largest change in params was 0.000276 in probability_two_random_records_match\n",
            "Iteration 10: Largest change in params was 0.00016 in probability_two_random_records_match\n",
            "Iteration 11: Largest change in params was 9.35e-05 in probability_two_random_records_match\n",
            "\n",
            "EM converged after 11 iterations\n",
            "m probability not trained for dob - Abs difference of 'transformed dob <= 1 month' (comparison vector value: 1). This usually means the comparison level was never observed in the training data.\n",
            "m probability not trained for email - Jaro-Winkler distance of transformed email >= 0.88 (comparison vector value: 1). This usually means the comparison level was never observed in the training data.\n",
            "\n",
            "Your model is not yet fully trained. Missing estimates for:\n",
            "    - first_name (no m values are trained).\n",
            "    - surname (no m values are trained).\n",
            "    - dob (some u values are not trained, some m values are not trained).\n",
            "    - email (some m values are not trained).\n",
            "\n",
            "----- Starting EM training session -----\n",
            "\n",
            "Estimating the m probabilities of the model by blocking on:\n",
            "l.\"dob\" = r.\"dob\"\n",
            "\n",
            "Parameter estimates will be made for the following comparison(s):\n",
            "    - first_name\n",
            "    - surname\n",
            "    - city\n",
            "    - email\n",
            "\n",
            "Parameter estimates cannot be made for the following comparison(s) since they are used in the blocking rules: \n",
            "    - dob\n",
            "\n",
            "WARNING:\n",
            "Level Jaro-Winkler distance of transformed email >= 0.88 on comparison email not observed in dataset, unable to train m value\n",
            "\n",
            "Iteration 1: Largest change in params was 0.588 in probability_two_random_records_match\n",
            "Iteration 2: Largest change in params was 0.128 in probability_two_random_records_match\n",
            "Iteration 3: Largest change in params was 0.0558 in the m_probability of first_name, level `All other comparisons`\n",
            "Iteration 4: Largest change in params was 0.0183 in probability_two_random_records_match\n",
            "Iteration 5: Largest change in params was 0.00723 in probability_two_random_records_match\n",
            "Iteration 6: Largest change in params was 0.00319 in probability_two_random_records_match\n",
            "Iteration 7: Largest change in params was 0.00149 in probability_two_random_records_match\n",
            "Iteration 8: Largest change in params was 0.000709 in probability_two_random_records_match\n",
            "Iteration 9: Largest change in params was 0.000343 in probability_two_random_records_match\n",
            "Iteration 10: Largest change in params was 0.000168 in probability_two_random_records_match\n",
            "Iteration 11: Largest change in params was 8.47e-05 in probability_two_random_records_match\n",
            "\n",
            "EM converged after 11 iterations\n",
            "m probability not trained for email - Jaro-Winkler distance of transformed email >= 0.88 (comparison vector value: 1). This usually means the comparison level was never observed in the training data.\n",
            "\n",
            "Your model is not yet fully trained. Missing estimates for:\n",
            "    - dob (some u values are not trained, some m values are not trained).\n",
            "    - email (some m values are not trained).\n"
          ]
        },
        {
          "data": {
            "text/html": [
              "\n",
              "<style>\n",
              "  #altair-viz-04716d206ea149eaa4d743f0935a1710.vega-embed {\n",
              "    width: 100%;\n",
              "    display: flex;\n",
              "  }\n",
              "\n",
              "  #altair-viz-04716d206ea149eaa4d743f0935a1710.vega-embed details,\n",
              "  #altair-viz-04716d206ea149eaa4d743f0935a1710.vega-embed details summary {\n",
              "    position: relative;\n",
              "  }\n",
              "</style>\n",
              "<div id=\"altair-viz-04716d206ea149eaa4d743f0935a1710\"></div>\n",
              "<script type=\"text/javascript\">\n",
              "  var VEGA_DEBUG = (typeof VEGA_DEBUG == \"undefined\") ? {} : VEGA_DEBUG;\n",
              "  (function(spec, embedOpt){\n",
              "    let outputDiv = document.currentScript.previousElementSibling;\n",
              "    if (outputDiv.id !== \"altair-viz-04716d206ea149eaa4d743f0935a1710\") {\n",
              "      outputDiv = document.getElementById(\"altair-viz-04716d206ea149eaa4d743f0935a1710\");\n",
              "    }\n",
              "    const paths = {\n",
              "      \"vega\": \"https://cdn.jsdelivr.net/npm/vega@5?noext\",\n",
              "      \"vega-lib\": \"https://cdn.jsdelivr.net/npm/vega-lib?noext\",\n",
              "      \"vega-lite\": \"https://cdn.jsdelivr.net/npm/vega-lite@5.17.0?noext\",\n",
              "      \"vega-embed\": \"https://cdn.jsdelivr.net/npm/vega-embed@6?noext\",\n",
              "    };\n",
              "\n",
              "    function maybeLoadScript(lib, version) {\n",
              "      var key = `${lib.replace(\"-\", \"\")}_version`;\n",
              "      return (VEGA_DEBUG[key] == version) ?\n",
              "        Promise.resolve(paths[lib]) :\n",
              "        new Promise(function(resolve, reject) {\n",
              "          var s = document.createElement('script');\n",
              "          document.getElementsByTagName(\"head\")[0].appendChild(s);\n",
              "          s.async = true;\n",
              "          s.onload = () => {\n",
              "            VEGA_DEBUG[key] = version;\n",
              "            return resolve(paths[lib]);\n",
              "          };\n",
              "          s.onerror = () => reject(`Error loading script: ${paths[lib]}`);\n",
              "          s.src = paths[lib];\n",
              "        });\n",
              "    }\n",
              "\n",
              "    function showError(err) {\n",
              "      outputDiv.innerHTML = `<div class=\"error\" style=\"color:red;\">${err}</div>`;\n",
              "      throw err;\n",
              "    }\n",
              "\n",
              "    function displayChart(vegaEmbed) {\n",
              "      vegaEmbed(outputDiv, spec, embedOpt)\n",
              "        .catch(err => showError(`Javascript Error: ${err.message}<br>This usually means there's a typo in your chart specification. See the javascript console for the full traceback.`));\n",
              "    }\n",
              "\n",
              "    if(typeof define === \"function\" && define.amd) {\n",
              "      requirejs.config({paths});\n",
              "      require([\"vega-embed\"], displayChart, err => showError(`Error loading script: ${err.message}`));\n",
              "    } else {\n",
              "      maybeLoadScript(\"vega\", \"5\")\n",
              "        .then(() => maybeLoadScript(\"vega-lite\", \"5.17.0\"))\n",
              "        .then(() => maybeLoadScript(\"vega-embed\", \"6\"))\n",
              "        .catch(showError)\n",
              "        .then(() => displayChart(vegaEmbed));\n",
              "    }\n",
              "  })({\"config\": {\"view\": {\"continuousWidth\": 300, \"continuousHeight\": 300, \"discreteHeight\": {\"step\": 150}, \"discreteWidth\": {\"step\": 150}}, \"axis\": {\"gridWidth\": 0.5, \"labelFontSize\": 12, \"titleFontSize\": 16}, \"axisX\": {\"format\": \"+.0f\", \"grid\": false, \"offset\": 20, \"values\": {\"expr\": \"[-25,-20,-15,-10,-5,0,5,10,15,20,25]\"}}, \"axisY\": {\"title\": \"Match probability threshold\", \"titleFontSize\": 16}, \"concat\": {\"spacing\": 40}}, \"hconcat\": [{\"vconcat\": [{\"layer\": [{\"layer\": [{\"mark\": {\"type\": \"rule\"}, \"encoding\": {\"opacity\": {\"condition\": {\"param\": \"threshold\", \"value\": 0.3, \"empty\": false}, \"value\": 0}, \"x\": {\"axis\": {\"orient\": \"bottom\"}, \"field\": \"truth_threshold\", \"scale\": {\"nice\": false}, \"title\": null, \"type\": \"quantitative\"}}, \"params\": [{\"name\": \"threshold\", \"select\": {\"type\": \"point\", \"encodings\": [\"x\"], \"fields\": [\"truth_threshold\"], \"nearest\": true, \"on\": \"mouseover\", \"toggle\": false}, \"value\": null}]}, {\"mark\": {\"type\": \"rule\"}, \"encoding\": {\"opacity\": {\"condition\": {\"param\": \"threshold\", \"value\": 0.3, \"empty\": false}, \"value\": 0}, \"y\": {\"axis\": {\"orient\": \"right\"}, \"field\": \"match_probability\", \"title\": \" \", \"type\": \"quantitative\"}}, \"params\": [{\"name\": \"prob\", \"select\": {\"type\": \"point\", \"encodings\": [\"y\"], \"fields\": [\"match_probability\"], \"nearest\": true, \"on\": \"mouseover\", \"toggle\": false}}]}]}, {\"layer\": [{\"mark\": {\"type\": \"text\", \"fontSize\": 14, \"fontWeight\": \"bold\", \"xOffset\": 25, \"yOffset\": 10}, \"encoding\": {\"text\": {\"aggregate\": \"min\", \"field\": \"truth_threshold\", \"format\": \"+.2f\"}, \"y\": {\"axis\": {\"orient\": \"left\"}, \"field\": \"match_probability\", \"title\": \"Match probability threshold\", \"type\": \"quantitative\"}}, \"transform\": [{\"filter\": {\"param\": \"threshold\", \"empty\": 
false}}]}, {\"mark\": {\"type\": \"text\", \"fontSize\": 14, \"xOffset\": -25, \"yOffset\": -10}, \"encoding\": {\"text\": {\"aggregate\": \"min\", \"field\": \"match_probability\", \"format\": \".3f\"}}, \"transform\": [{\"filter\": {\"param\": \"threshold\", \"empty\": false}}]}, {\"mark\": {\"type\": \"line\", \"color\": \"red\", \"opacity\": 0.5}}, {\"mark\": {\"type\": \"line\", \"color\": \"green\", \"opacity\": 0.5, \"strokeWidth\": 3}, \"transform\": [{\"filter\": \"datum.truth_threshold >= threshold.truth_threshold\"}]}, {\"mark\": {\"type\": \"point\", \"color\": \"green\", \"size\": 100}, \"encoding\": {\"opacity\": {\"condition\": {\"param\": \"threshold\", \"value\": 1, \"empty\": false}, \"value\": 0}}}], \"encoding\": {\"x\": {\"field\": \"truth_threshold\", \"type\": \"quantitative\", \"title\": \"Match weight threshold\", \"axis\": {\"orient\": \"top\"}}, \"y\": {\"field\": \"match_probability\", \"type\": \"quantitative\", \"title\": \"Match probability threshold\", \"axis\": {\"orient\": \"left\", \"titlePadding\": 10}}}}, {\"mark\": {\"type\": \"text\", \"align\": \"left\", \"color\": \"red\", \"fontSize\": 12, \"text\": \"Non-match\", \"x\": 0, \"y\": \"height\", \"yOffset\": 10}, \"data\": {\"values\": [{}]}}, {\"mark\": {\"type\": \"text\", \"align\": \"right\", \"color\": \"green\", \"fontSize\": 12, \"fontWeight\": \"bold\", \"text\": \"Match\", \"x\": \"width\", \"y\": 0, \"yOffset\": -10}, \"data\": {\"values\": [{}]}}], \"description\": \"Match weight vs probability\"}, {\"hconcat\": [{\"layer\": [{\"mark\": {\"type\": \"rect\", \"opacity\": 0.5}, \"encoding\": {\"color\": {\"field\": \"count\", \"legend\": null, \"scale\": {\"scheme\": \"reds\", \"zero\": true}, \"type\": \"quantitative\"}}, \"transform\": [{\"filter\": \"datum.predicted == 0\"}]}, {\"mark\": {\"type\": \"rect\", \"opacity\": 0.5}, \"encoding\": {\"color\": {\"field\": \"count\", \"legend\": null, \"scale\": {\"scheme\": \"greens\", \"zero\": true}, \"type\": 
\"quantitative\"}}, \"transform\": [{\"filter\": \"datum.predicted == 1\"}]}, {\"mark\": {\"type\": \"text\", \"fontSize\": 14, \"yOffset\": -40}, \"encoding\": {\"color\": {\"condition\": [{\"test\": \"datum.predicted==1 && datum.actual==1\", \"value\": \"darkgreen\"}, {\"test\": \"datum.predicted==0 && datum.actual==0\", \"value\": \"darkred\"}], \"value\": \"black\"}, \"opacity\": {\"condition\": {\"test\": \"datum.predicted != datum.actual\", \"value\": 1}, \"value\": 0.5}, \"text\": {\"field\": \"confusion_label\", \"type\": \"nominal\"}}}, {\"mark\": {\"type\": \"text\", \"fontSize\": 28, \"fontWeight\": \"bold\", \"yOffset\": 10}, \"encoding\": {\"color\": {\"condition\": [{\"test\": \"datum.predicted==1 && datum.actual==1\", \"value\": \"darkgreen\"}, {\"test\": \"datum.predicted==0 && datum.actual==0\", \"value\": \"darkred\"}], \"value\": \"black\"}, \"text\": {\"field\": \"count\", \"format\": \",\", \"type\": \"nominal\"}}}], \"description\": \"Confusion matrix\", \"encoding\": {\"x\": {\"field\": \"actual\", \"type\": \"nominal\", \"title\": \"Actual\", \"axis\": {\"domain\": false, \"labelAngle\": 0, \"labelExpr\": \"datum.label == 1 ? 'Match' : 'Non-match'\", \"labelFontSize\": 18, \"labelPadding\": 10, \"orient\": \"top\", \"ticks\": false, \"titleAngle\": 0, \"titleFontSize\": 20}, \"sort\": \"-x\"}, \"y\": {\"field\": \"predicted\", \"type\": \"nominal\", \"title\": \"Predicted\", \"axis\": {\"domain\": false, \"labelExpr\": \"datum.label == 1 ? 
'Match' : 'Non-match'\", \"labelFontSize\": 18, \"labelPadding\": 10, \"ticks\": false, \"titleAngle\": 0, \"titleFontSize\": 20, \"titlePadding\": -30}, \"sort\": \"-y\"}}, \"resolve\": {\"scale\": {\"color\": \"independent\"}}, \"transform\": [{\"filter\": {\"or\": [{\"param\": \"threshold\", \"empty\": false}, {\"and\": [{\"param\": \"threshold\", \"empty\": true}, \"datum.truth_threshold == datum.median_threshold\"]}]}}]}], \"transform\": [{\"fold\": [\"tp\", \"tn\", \"fp\", \"fn\"], \"as\": [\"label\", \"count\"]}, {\"calculate\": \"datum.label === 'tp' ? 'True Positive (TP)' : datum.label === 'tn' ? 'True Negative (TN)' : datum.label === 'fp' ? 'False Positive (FP)' : 'False Negative (FN)'\", \"as\": \"confusion_label\"}, {\"calculate\": \"datum.label === 'tp' || datum.label === 'fp' ? 1 : 0\", \"as\": \"predicted\"}, {\"calculate\": \"datum.label === 'tp' || datum.label === 'fn' ? 1 : 0\", \"as\": \"actual\"}, {\"joinaggregate\": [{\"op\": \"median\", \"field\": \"truth_threshold\", \"as\": \"median_threshold\"}]}]}]}, {\"layer\": [{\"layer\": [{\"mark\": {\"type\": \"point\", \"size\": 100}, \"encoding\": {\"opacity\": {\"condition\": {\"param\": \"threshold\", \"value\": 1, \"empty\": false}, \"value\": 0}, \"tooltip\": [{\"field\": \"truth_threshold\", \"format\": \".3f\", \"title\": \"Match weight threshold\", \"type\": \"quantitative\"}, {\"field\": \"match_probability\", \"format\": \".3%\", \"title\": \"Match probability threshold\", \"type\": \"quantitative\"}, {\"field\": \"precision\", \"format\": \".4f\", \"title\": \"Precision\", \"type\": \"quantitative\"}, {\"field\": \"recall\", \"format\": \".4f\", \"title\": \"Recall (TPR)\", \"type\": \"quantitative\"}, {\"field\": \"fp_rate\", \"format\": \".4f\", \"title\": \"FPR\", \"type\": \"quantitative\"}], \"x\": {\"axis\": {\"orient\": \"top\"}, \"field\": \"truth_threshold\", \"title\": \"Match weight threshold\"}}, \"params\": [{\"name\": \"metric\", \"select\": {\"type\": \"point\", \"fields\": 
[\"metric\"]}, \"bind\": \"legend\", \"value\": [{\"metric\": \"precision\"}, {\"metric\": \"recall\"}]}, {\"name\": \"threshold\", \"select\": {\"type\": \"point\", \"encodings\": [\"x\"], \"fields\": [\"truth_threshold\"], \"nearest\": true, \"on\": \"mouseover\", \"toggle\": false}, \"value\": null}], \"transform\": [{\"filter\": {\"param\": \"metric\", \"empty\": true}}]}, {\"mark\": {\"type\": \"line\"}, \"encoding\": {\"opacity\": {\"condition\": {\"param\": \"metric\", \"value\": 1}, \"value\": 0.1}, \"x\": {\"axis\": {\"orient\": \"bottom\"}, \"field\": \"truth_threshold\", \"title\": null}}}], \"encoding\": {\"color\": {\"field\": \"metric\", \"type\": \"nominal\", \"sort\": [\"precision\", \"recall\", \"f1\"], \"title\": [\"Performance\", \"Metric\"], \"legend\": {\"fillColor\": \"whitesmoke\", \"labelExpr\": \"{'precision': 'Precision (PPV)', 'recall': 'Recall (TPR)', 'specificity': 'Specificity (TNR)', 'accuracy': 'Accuracy', 'npv': 'NPV', 'f1': 'F1', 'f2': 'F2', 'f0_5': 'F0.5', 'p4': 'P4', 'phi': '\\u03c6 (MCC)'}[datum.value]\", \"labelFontSize\": 14, \"legendX\": 800, \"legendY\": 160, \"orient\": \"none\", \"padding\": 10, \"titleFontSize\": 16, \"titlePadding\": 15}}, \"x\": {\"type\": \"quantitative\", \"field\": \"truth_threshold\"}, \"y\": {\"field\": \"value\", \"type\": \"quantitative\", \"axis\": {\"labelFontSize\": 12, \"title\": \"Performance metric score\", \"titleFontSize\": 18, \"titlePadding\": 10, \"values\": {\"expr\": \"[0.5,0.55,0.6,0.65,0.7,0.75,0.8,0.85,0.9,0.95,1.0]\"}}, \"scale\": {\"domain\": [0.5, 1]}}}}, {\"layer\": [{\"mark\": {\"type\": \"rule\", \"color\": \"gray\"}, \"encoding\": {\"x\": {\"field\": \"truth_threshold\", \"title\": null, \"type\": \"quantitative\"}}}, {\"layer\": [{\"mark\": {\"type\": \"rect\", \"fill\": \"whitesmoke\", \"x\": 200, \"x2\": 10, \"y2Offset\": 20, \"yOffset\": -20}, \"encoding\": {\"y2\": {\"field\": \"score_index\"}}}, {\"mark\": {\"type\": \"text\", \"align\": \"right\", \"baseline\": 
\"middle\", \"fontSize\": 16, \"x\": 200, \"xOffset\": -10}}], \"encoding\": {\"color\": {\"field\": \"metric\", \"sort\": [\"precision\", \"recall\", \"f1\"]}, \"text\": {\"field\": \"y_text\"}, \"y\": {\"field\": \"score_index\", \"type\": \"quantitative\"}}, \"transform\": [{\"filter\": {\"param\": \"metric\", \"empty\": true}}]}, {\"mark\": {\"type\": \"text\", \"fontSize\": 14, \"fontWeight\": \"bold\", \"xOffset\": 20, \"y\": 0, \"yOffset\": -10}, \"encoding\": {\"text\": {\"condition\": {\"param\": \"threshold\", \"aggregate\": \"min\", \"empty\": false, \"field\": \"truth_threshold\", \"format\": \"+.2f\", \"type\": \"nominal\"}, \"value\": \"\"}, \"x\": {\"field\": \"truth_threshold\", \"type\": \"quantitative\"}}}], \"transform\": [{\"filter\": {\"param\": \"threshold\", \"empty\": false}}]}], \"description\": \"Accuracy chart\", \"height\": 700, \"transform\": [{\"fold\": [\"precision\", \"recall\", \"f1\"], \"as\": [\"metric\", \"value\"]}, {\"calculate\": \"0.6375 - 0.025*indexof(['precision', 'recall', 'f1'], datum.metric)\", \"as\": \"score_index\"}, {\"calculate\": \"{'precision': 'Precision (PPV)', 'recall': 'Recall (TPR)', 'specificity': 'Specificity (TNR)', 'accuracy': 'Accuracy', 'npv': 'NPV', 'f1': 'F1', 'f2': 'F2', 'f0_5': 'F0.5', 'p4': 'P4', 'phi': '\\u03c6 (MCC)'}[datum.metric]\", \"as\": \"metric_text\"}, {\"calculate\": \"datum.metric_text + ' = ' + format(datum.value, ',.3g')\", \"as\": \"y_text\"}], \"width\": 500}], \"data\": {\"name\": \"data-d655f0cd53ee561d3cd788a27c7bab03\"}, \"title\": {\"text\": \"Match Threshold Selection Tool\", \"anchor\": \"middle\", \"baseline\": \"line-bottom\", \"fontSize\": 28, \"subtitle\": [\"Hover over either line graph to show Confusion Matrix (bottom left) and selected performance metrics (right).\", \"\", \"Click a legend value to show a specific evaluation metric. 
Shift + Click to show multiple metrics\"], \"subtitleFontSize\": 14, \"subtitleFontStyle\": \"italic\"}, \"$schema\": \"https://vega.github.io/schema/vega-lite/v5.14.1.json\", \"datasets\": {\"data-d655f0cd53ee561d3cd788a27c7bab03\": [{\"truth_threshold\": -23.800000354647636, \"match_probability\": 6.846773588489456e-08, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1055.0, \"fp\": 90.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9213973799126638, \"fp_rate\": 0.07860262008733625, \"fn_rate\": 0.2880354505169867, \"precision\": 0.94140625, \"recall\": 0.7119645494830132, \"specificity\": 0.9213973799126638, \"npv\": 0.6432926829268293, \"accuracy\": 0.7874685138539043, \"f1\": 0.8107653490328006, \"f2\": 0.7484472049689441, \"f0_5\": 0.8844036697247707, \"p4\": 0.7832976799979975, \"phi\": 0.6085442007563051}, {\"truth_threshold\": -22.70000033825636, \"match_probability\": 1.467637948991862e-07, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1077.0, \"fp\": 68.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9406113537117904, \"fp_rate\": 0.059388646288209605, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9550858652575958, \"recall\": 0.7119645494830132, \"specificity\": 0.9406113537117904, \"npv\": 0.648014440433213, \"accuracy\": 0.7943954659949622, \"f1\": 0.8157968970380818, \"f2\": 0.750155633948952, \"f0_5\": 0.8940274514653147, \"p4\": 0.7908413564901972, \"phi\": 0.6273505612520337}, {\"truth_threshold\": -21.700000323355198, \"match_probability\": 2.9352754975091214e-07, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1083.0, \"fp\": 62.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 
0.9458515283842794, \"fp_rate\": 0.05414847161572053, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9588859416445623, \"recall\": 0.7119645494830132, \"specificity\": 0.9458515283842794, \"npv\": 0.6492805755395683, \"accuracy\": 0.7962846347607053, \"f1\": 0.817179994348686, \"f2\": 0.7506229235880398, \"f0_5\": 0.896688577452561, \"p4\": 0.792886883910619, \"phi\": 0.6325043185815227}, {\"truth_threshold\": -21.600000321865082, \"match_probability\": 3.1459503204353755e-07, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1088.0, \"fp\": 57.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9502183406113537, \"fp_rate\": 0.04978165938864629, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9620758483033932, \"recall\": 0.7119645494830132, \"specificity\": 0.9502183406113537, \"npv\": 0.6503287507471608, \"accuracy\": 0.7978589420654912, \"f1\": 0.8183361629881154, \"f2\": 0.7510127765659084, \"f0_5\": 0.8989183140619172, \"p4\": 0.7945877557823284, \"phi\": 0.6368075433805553}, {\"truth_threshold\": -20.60000030696392, \"match_probability\": 6.29189872645777e-07, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1094.0, \"fp\": 51.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9554585152838428, \"fp_rate\": 0.0445414847161572, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9659318637274549, \"recall\": 0.7119645494830132, \"specificity\": 0.9554585152838428, \"npv\": 0.6515783204288267, \"accuracy\": 0.7997481108312342, \"f1\": 0.8197278911564626, \"f2\": 0.75148113501715, \"f0_5\": 0.9016086793864572, \"p4\": 0.7966244062798371, \"phi\": 0.6419817284271657}, {\"truth_threshold\": -19.000000283122063, \"match_probability\": 1.907344620533969e-06, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, 
\"tp\": 1446.0, \"tn\": 1096.0, \"fp\": 49.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9572052401746725, \"fp_rate\": 0.04279475982532751, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9672240802675586, \"recall\": 0.7119645494830132, \"specificity\": 0.9572052401746725, \"npv\": 0.6519928613920285, \"accuracy\": 0.8003778337531486, \"f1\": 0.8201928530913216, \"f2\": 0.7516373843434868, \"f0_5\": 0.9025090500561728, \"p4\": 0.7973022397990062, \"phi\": 0.6437089952787838}, {\"truth_threshold\": -17.900000266730785, \"match_probability\": 4.088473825324779e-06, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1097.0, \"fp\": 48.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9580786026200874, \"fp_rate\": 0.04192139737991266, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9678714859437751, \"recall\": 0.7119645494830132, \"specificity\": 0.9580786026200874, \"npv\": 0.6521997621878716, \"accuracy\": 0.8006926952141058, \"f1\": 0.8204255319148936, \"f2\": 0.751715533374922, \"f0_5\": 0.9029599100786811, \"p4\": 0.7976409617025867, \"phi\": 0.6445731096055997}, {\"truth_threshold\": -17.600000262260437, \"match_probability\": 5.03349696795731e-06, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1100.0, \"fp\": 45.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9606986899563319, \"fp_rate\": 0.039301310043668124, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9698189134808853, \"recall\": 0.7119645494830132, \"specificity\": 0.9606986899563319, \"npv\": 0.6528189910979229, \"accuracy\": 0.8016372795969773, \"f1\": 0.8211243611584327, \"f2\": 0.7519500780031201, \"f0_5\": 0.9043151969981238, \"p4\": 0.7986563533248476, \"phi\": 
0.6471673893914208}, {\"truth_threshold\": -17.50000026077032, \"match_probability\": 5.394766530610173e-06, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1108.0, \"fp\": 37.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9676855895196507, \"fp_rate\": 0.032314410480349345, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9750505731625084, \"recall\": 0.7119645494830132, \"specificity\": 0.9676855895196507, \"npv\": 0.6544595392793857, \"accuracy\": 0.8041561712846348, \"f1\": 0.8229937393284007, \"f2\": 0.7525762464869367, \"f0_5\": 0.9079492653522542, \"p4\": 0.8013584652743565, \"phi\": 0.6540998665530485}, {\"truth_threshold\": -16.900000251829624, \"match_probability\": 8.176914304005986e-06, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1110.0, \"fp\": 35.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9694323144104804, \"fp_rate\": 0.03056768558951965, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9763673193787981, \"recall\": 0.7119645494830132, \"specificity\": 0.9694323144104804, \"npv\": 0.6548672566371682, \"accuracy\": 0.8047858942065491, \"f1\": 0.8234624145785877, \"f2\": 0.7527329515877147, \"f0_5\": 0.9088623507228158, \"p4\": 0.8020327397013851, \"phi\": 0.6558363061606292}, {\"truth_threshold\": -16.50000024586916, \"match_probability\": 1.0789474965962542e-05, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1111.0, \"fp\": 34.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9703056768558952, \"fp_rate\": 0.029694323144104803, \"fn_rate\": 0.2880354505169867, \"precision\": 0.977027027027027, \"recall\": 0.7119645494830132, \"specificity\": 0.9703056768558952, \"npv\": 
0.6550707547169812, \"accuracy\": 0.8051007556675063, \"f1\": 0.8236969524352037, \"f2\": 0.7528113286130779, \"f0_5\": 0.90931958244246, \"p4\": 0.8023696912661274, \"phi\": 0.6567050301458078}, {\"truth_threshold\": -15.800000235438347, \"match_probability\": 1.7527435818536736e-05, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1120.0, \"fp\": 25.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9781659388646288, \"fp_rate\": 0.021834061135371178, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9830047586675731, \"recall\": 0.7119645494830132, \"specificity\": 0.9781659388646288, \"npv\": 0.656891495601173, \"accuracy\": 0.8079345088161209, \"f1\": 0.8258138206739006, \"f2\": 0.7535174570088587, \"f0_5\": 0.9134554643082754, \"p4\": 0.8053967630362511, \"phi\": 0.6645388735433893}, {\"truth_threshold\": -15.400000229477882, \"match_probability\": 2.312746079632102e-05, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1121.0, \"fp\": 24.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9790393013100437, \"fp_rate\": 0.02096069868995633, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9836734693877551, \"recall\": 0.7119645494830132, \"specificity\": 0.9790393013100437, \"npv\": 0.6570926143024619, \"accuracy\": 0.8082493702770781, \"f1\": 0.8260497000856898, \"f2\": 0.7535959974984365, \"f0_5\": 0.9139173302995829, \"p4\": 0.8057325018854461, \"phi\": 0.6654110243207023}, {\"truth_threshold\": -15.20000022649765, \"match_probability\": 2.6566384864664307e-05, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1126.0, \"fp\": 19.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9834061135371179, 
\"fp_rate\": 0.016593886462882096, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9870307167235495, \"recall\": 0.7119645494830132, \"specificity\": 0.9834061135371179, \"npv\": 0.6580946814728229, \"accuracy\": 0.809823677581864, \"f1\": 0.8272311212814645, \"f2\": 0.75398894566691, \"f0_5\": 0.9162336839437334, \"p4\": 0.8074094203617235, \"phi\": 0.6697770344487317}, {\"truth_threshold\": -14.300000213086605, \"match_probability\": 4.957348695121048e-05, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1126.0, \"fp\": 19.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.9834061135371179, \"fp_rate\": 0.016593886462882096, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9870218579234973, \"recall\": 0.7114721811915312, \"specificity\": 0.9834061135371179, \"npv\": 0.6577102803738317, \"accuracy\": 0.8095088161209067, \"f1\": 0.82689556509299, \"f2\": 0.7535460992907801, \"f0_5\": 0.9160644097882592, \"p4\": 0.8071048961802441, \"phi\": 0.6693357668739984}, {\"truth_threshold\": -13.200000196695328, \"match_probability\": 0.00010625707305470121, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1135.0, \"fp\": 10.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.9912663755458515, \"fp_rate\": 0.008733624454148471, \"fn_rate\": 0.28852781880846873, \"precision\": 0.993127147766323, \"recall\": 0.7114721811915312, \"specificity\": 0.9912663755458515, \"npv\": 0.6595002905287624, \"accuracy\": 0.8123425692695214, \"f1\": 0.8290304073436604, \"f2\": 0.7542540975049588, \"f0_5\": 0.9202649344032607, \"p4\": 0.8101156090778194, \"phi\": 0.6772196571827369}, {\"truth_threshold\": -12.800000190734863, \"match_probability\": 0.00014020228918616167, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, 
\"tn\": 1137.0, \"fp\": 8.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.9930131004366812, \"fp_rate\": 0.0069868995633187774, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9944941500344116, \"recall\": 0.7114721811915312, \"specificity\": 0.9930131004366812, \"npv\": 0.6598955310504934, \"accuracy\": 0.8129722921914357, \"f1\": 0.8295063145809415, \"f2\": 0.7544116111517176, \"f0_5\": 0.9212036210633686, \"p4\": 0.8107834022242488, \"phi\": 0.6789756245799222}, {\"truth_threshold\": -12.400000184774399, \"match_probability\": 0.00018498974370122882, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1138.0, \"fp\": 7.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.993886462882096, \"fp_rate\": 0.00611353711790393, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9951790633608816, \"recall\": 0.7114721811915312, \"specificity\": 0.993886462882096, \"npv\": 0.660092807424594, \"accuracy\": 0.8132871536523929, \"f1\": 0.8297444731553258, \"f2\": 0.7544903926482874, \"f0_5\": 0.9216736828677127, \"p4\": 0.8111171302195401, \"phi\": 0.6798541595642643}, {\"truth_threshold\": -11.700000174343586, \"match_probability\": 0.0003004820136373637, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1139.0, \"fp\": 6.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.994759825327511, \"fp_rate\": 0.005240174672489083, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9958649207443143, \"recall\": 0.7114721811915312, \"specificity\": 0.994759825327511, \"npv\": 0.6602898550724637, \"accuracy\": 0.8136020151133502, \"f1\": 0.8299827685238369, \"f2\": 0.7545691906005222, \"f0_5\": 0.9221442246330568, \"p4\": 0.8114507463687338, \"phi\": 0.680733063624895}, 
{\"truth_threshold\": -11.300000168383121, \"match_probability\": 0.00039645033391533577, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1142.0, \"fp\": 3.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.9973799126637555, \"fp_rate\": 0.0026200873362445414, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9979281767955801, \"recall\": 0.7114721811915312, \"specificity\": 0.9973799126637555, \"npv\": 0.6608796296296297, \"accuracy\": 0.8145465994962217, \"f1\": 0.8306984765737281, \"f2\": 0.7548056832427915, \"f0_5\": 0.9235587370573949, \"p4\": 0.81245092776752, \"phi\": 0.683372001937977}, {\"truth_threshold\": -10.900000162422657, \"match_probability\": 0.0005230530993675534, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1143.0, \"fp\": 2.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.9982532751091703, \"fp_rate\": 0.0017467248908296944, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9986178299930891, \"recall\": 0.7114721811915312, \"specificity\": 0.9982532751091703, \"npv\": 0.6610757663389243, \"accuracy\": 0.8148614609571788, \"f1\": 0.8309373202990225, \"f2\": 0.7548845470692718, \"f0_5\": 0.9240312060365775, \"p4\": 0.8127841005555416, \"phi\": 0.6842523939858662}, {\"truth_threshold\": -10.700000159442425, \"match_probability\": 0.0006007835088396779, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1444.0, \"tn\": 1143.0, \"fp\": 2.0, \"fn\": 587.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7109798129000492, \"tn_rate\": 0.9982532751091703, \"fp_rate\": 0.0017467248908296944, \"fn_rate\": 0.28902018709995075, \"precision\": 0.9986168741355463, \"recall\": 0.7109798129000492, \"specificity\": 0.9982532751091703, \"npv\": 0.6606936416184971, 
\"accuracy\": 0.8145465994962217, \"f1\": 0.8306010928961749, \"f2\": 0.754440961337513, \"f0_5\": 0.9238643634037108, \"p4\": 0.8124788095466353, \"phi\": 0.6838163737767555}, {\"truth_threshold\": -10.200000151991844, \"match_probability\": 0.0008494248089972806, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1443.0, \"tn\": 1143.0, \"fp\": 2.0, \"fn\": 588.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7104874446085672, \"tn_rate\": 0.9982532751091703, \"fp_rate\": 0.0017467248908296944, \"fn_rate\": 0.2895125553914328, \"precision\": 0.9986159169550173, \"recall\": 0.7104874446085672, \"specificity\": 0.9982532751091703, \"npv\": 0.6603119584055459, \"accuracy\": 0.8142317380352645, \"f1\": 0.830264672036824, \"f2\": 0.7539972828926743, \"f0_5\": 0.9236973498911791, \"p4\": 0.8121735251016357, \"phi\": 0.6833805796370901}, {\"truth_threshold\": -9.100000135600567, \"match_probability\": 0.0018190150448253225, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1443.0, \"tn\": 1144.0, \"fp\": 1.0, \"fn\": 588.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7104874446085672, \"tn_rate\": 0.9991266375545852, \"fp_rate\": 0.0008733624454148472, \"fn_rate\": 0.2895125553914328, \"precision\": 0.9993074792243767, \"recall\": 0.7104874446085672, \"specificity\": 0.9991266375545852, \"npv\": 0.6605080831408776, \"accuracy\": 0.8145465994962217, \"f1\": 0.8305035971223022, \"f2\": 0.7540760869565217, \"f0_5\": 0.9241706161137441, \"p4\": 0.8125064984715595, \"phi\": 0.6842619488798015}, {\"truth_threshold\": -8.600000128149986, \"match_probability\": 0.0025705389597152823, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1440.0, \"tn\": 1144.0, \"fp\": 1.0, \"fn\": 591.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7090103397341211, \"tn_rate\": 0.9991266375545852, \"fp_rate\": 
0.0008733624454148472, \"fn_rate\": 0.29098966026587886, \"precision\": 0.9993060374739764, \"recall\": 0.7090103397341211, \"specificity\": 0.9991266375545852, \"npv\": 0.6593659942363113, \"accuracy\": 0.8136020151133502, \"f1\": 0.8294930875576036, \"f2\": 0.7527443805541035, \"f0_5\": 0.9236690186016677, \"p4\": 0.811590547208778, \"phi\": 0.6829568226176045}, {\"truth_threshold\": -8.400000125169754, \"match_probability\": 0.0029516456585356845, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1439.0, \"tn\": 1144.0, \"fp\": 1.0, \"fn\": 592.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7085179714426391, \"tn_rate\": 0.9991266375545852, \"fp_rate\": 0.0008733624454148472, \"fn_rate\": 0.2914820285573609, \"precision\": 0.9993055555555556, \"recall\": 0.7085179714426391, \"specificity\": 0.9991266375545852, \"npv\": 0.6589861751152074, \"accuracy\": 0.8132871536523929, \"f1\": 0.829155862863728, \"f2\": 0.7523002927645337, \"f0_5\": 0.923501476062123, \"p4\": 0.8112852415850365, \"phi\": 0.6825222299358593}, {\"truth_threshold\": -7.600000113248825, \"match_probability\": 0.0051278965144870335, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1439.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 592.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7085179714426391, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2914820285573609, \"precision\": 1.0, \"recall\": 0.7085179714426391, \"specificity\": 1.0, \"npv\": 0.6591824985607369, \"accuracy\": 0.8136020151133502, \"f1\": 0.8293948126801153, \"f2\": 0.7523789605772248, \"f0_5\": 0.9239758571978939, \"p4\": 0.8116179257342173, \"phi\": 0.6834051848579609}, {\"truth_threshold\": -7.400000110268593, \"match_probability\": 0.005885918232687788, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1437.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 594.0, \"P_rate\": 0.6394836272040302, 
\"N_rate\": 0.36051636934280396, \"tp_rate\": 0.707533234859675, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.29246676514032494, \"precision\": 1.0, \"recall\": 0.707533234859675, \"specificity\": 1.0, \"npv\": 0.6584243818286372, \"accuracy\": 0.8129722921914357, \"f1\": 0.828719723183391, \"f2\": 0.7514904298713524, \"f0_5\": 0.9236405707674509, \"p4\": 0.811007239776182, \"phi\": 0.6825372757481436}, {\"truth_threshold\": -6.800000101327896, \"match_probability\": 0.00889438522932807, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1436.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 595.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.707040866568193, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.292959133431807, \"precision\": 1.0, \"recall\": 0.707040866568193, \"specificity\": 1.0, \"npv\": 0.6580459770114943, \"accuracy\": 0.8126574307304786, \"f1\": 0.8283818863570811, \"f2\": 0.7510460251046025, \"f0_5\": 0.9234726688102894, \"p4\": 0.8107019041426428, \"phi\": 0.6821036562194343}, {\"truth_threshold\": -6.70000009983778, \"match_probability\": 0.009526684411466419, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1428.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 603.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7031019202363368, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2968980797636632, \"precision\": 1.0, \"recall\": 0.7031019202363368, \"specificity\": 1.0, \"npv\": 0.6550343249427918, \"accuracy\": 0.8101385390428212, \"f1\": 0.8256721595836947, \"f2\": 0.7474874371859297, \"f0_5\": 0.9221232080588919, \"p4\": 0.8082593661032169, \"phi\": 0.6786426833673147}, {\"truth_threshold\": -6.400000095367432, \"match_probability\": 0.011702953955477532, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1424.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 607.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.7011324470704087, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.29886755292959133, \"precision\": 1.0, \"recall\": 0.7011324470704087, \"specificity\": 1.0, \"npv\": 0.6535388127853882, \"accuracy\": 0.8088790931989924, \"f1\": 0.8243125904486251, \"f2\": 0.7457059069962296, \"f0_5\": 0.9214442862689272, \"p4\": 0.8070381719383619, \"phi\": 0.6769174743376838}, {\"truth_threshold\": -6.3000000938773155, \"match_probability\": 0.012532388771145032, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1423.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 608.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7006400787789266, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.29935992122107336, \"precision\": 1.0, \"recall\": 0.7006400787789266, \"specificity\": 1.0, \"npv\": 0.6531660011409013, \"accuracy\": 0.8085642317380353, \"f1\": 0.8239722061378112, \"f2\": 0.74526029119095, \"f0_5\": 0.9212741162760585, \"p4\": 0.8067328787708493, \"phi\": 0.6764867171608602}, {\"truth_threshold\": -5.700000084936619, \"match_probability\": 0.01887356650421064, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1422.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 609.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7001477104874446, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2998522895125554, \"precision\": 1.0, \"recall\": 0.7001477104874446, \"specificity\": 1.0, \"npv\": 0.6527936145952109, \"accuracy\": 0.8082493702770781, \"f1\": 0.8236316246741964, \"f2\": 0.7448145820238844, \"f0_5\": 0.9211037699183832, \"p4\": 0.8064275873033679, \"phi\": 0.676056177162564}, {\"truth_threshold\": -5.600000083446503, \"match_probability\": 0.02020082327925431, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1405.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 626.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, 
\"tp_rate\": 0.6917774495322502, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3082225504677499, \"precision\": 1.0, \"recall\": 0.6917774495322502, \"specificity\": 1.0, \"npv\": 0.6465273856578204, \"accuracy\": 0.802896725440806, \"f1\": 0.8178114086146682, \"f2\": 0.737223213348725, \"f0_5\": 0.9181806299830088, \"p4\": 0.8012376730750075, \"phi\": 0.6687698153349331}, {\"truth_threshold\": -5.300000078976154, \"match_probability\": 0.024754544222716376, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1393.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 638.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6858690300344658, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.31413096996553425, \"precision\": 1.0, \"recall\": 0.6858690300344658, \"specificity\": 1.0, \"npv\": 0.6421761076836792, \"accuracy\": 0.7991183879093199, \"f1\": 0.8136682242990654, \"f2\": 0.7318482715141326, \"f0_5\": 0.9160857556227805, \"p4\": 0.7975738525329583, \"phi\": 0.6636630953189379}, {\"truth_threshold\": -4.90000007301569, \"match_probability\": 0.032407497325934585, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1391.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 640.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6848842934515017, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3151157065484983, \"precision\": 1.0, \"recall\": 0.6848842934515017, \"specificity\": 1.0, \"npv\": 0.6414565826330533, \"accuracy\": 0.7984886649874056, \"f1\": 0.8129748684979544, \"f2\": 0.7309511297950604, \"f0_5\": 0.9157340355497038, \"p4\": 0.7969631540364932, \"phi\": 0.6628148598035907}, {\"truth_threshold\": -4.800000071525574, \"match_probability\": 0.03465289308554322, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1390.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 641.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 
0.6843919251600197, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3156080748399803, \"precision\": 1.0, \"recall\": 0.6843919251600197, \"specificity\": 1.0, \"npv\": 0.641097424412094, \"accuracy\": 0.7981738035264484, \"f1\": 0.8126278865828706, \"f2\": 0.7305024174900148, \"f0_5\": 0.9155578975102094, \"p4\": 0.7966577964206586, \"phi\": 0.6623910480286727}, {\"truth_threshold\": -4.500000067055225, \"match_probability\": 0.04232371044088178, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1389.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 642.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6838995568685377, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.31610044313146235, \"precision\": 1.0, \"recall\": 0.6838995568685377, \"specificity\": 1.0, \"npv\": 0.6407386681589256, \"accuracy\": 0.7978589420654912, \"f1\": 0.8122807017543859, \"f2\": 0.7300536108483129, \"f0_5\": 0.9153815737445631, \"p4\": 0.7963524329131265, \"phi\": 0.6619674396995868}, {\"truth_threshold\": -4.300000064074993, \"match_probability\": 0.048313119674570026, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1388.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 643.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6834071885770556, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3165928114229444, \"precision\": 1.0, \"recall\": 0.6834071885770556, \"specificity\": 1.0, \"npv\": 0.6403803131991052, \"accuracy\": 0.797544080604534, \"f1\": 0.8119333138344546, \"f2\": 0.7296047098402019, \"f0_5\": 0.9152050639588554, \"p4\": 0.7960470632785187, \"phi\": 0.6615440344100268}, {\"truth_threshold\": -4.200000062584877, \"match_probability\": 0.05160178526561565, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1382.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 649.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6804529788281635, 
\"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.31954702117183653, \"precision\": 1.0, \"recall\": 0.6804529788281635, \"specificity\": 1.0, \"npv\": 0.6382385730211817, \"accuracy\": 0.7956549118387909, \"f1\": 0.8098447113975974, \"f2\": 0.7269093204292026, \"f0_5\": 0.9141420822860167, \"p4\": 0.7942147035798486, \"phi\": 0.6590078438192518}, {\"truth_threshold\": -3.8000000566244125, \"match_probability\": 0.06698457743861425, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1379.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 652.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6789758739537174, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3210241260462826, \"precision\": 1.0, \"recall\": 0.6789758739537174, \"specificity\": 1.0, \"npv\": 0.6371730662214803, \"accuracy\": 0.7947103274559194, \"f1\": 0.8087976539589443, \"f2\": 0.7255603493633589, \"f0_5\": 0.9136080561812641, \"p4\": 0.7932984218488349, \"phi\": 0.6577424568153551}, {\"truth_threshold\": -3.6000000536441803, \"match_probability\": 0.0761862214703254, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1377.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 654.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6779911373707533, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.32200886262924666, \"precision\": 1.0, \"recall\": 0.6779911373707533, \"specificity\": 1.0, \"npv\": 0.6364647026125625, \"accuracy\": 0.7940806045340051, \"f1\": 0.8080985915492958, \"f2\": 0.7246605620461004, \"f0_5\": 0.9132510943095902, \"p4\": 0.7926875252637993, \"phi\": 0.6568998611817706}, {\"truth_threshold\": -3.300000049173832, \"match_probability\": 0.09217307161544283, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1375.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 656.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6770064007877893, \"tn_rate\": 1.0, 
\"fp_rate\": 0.0, \"fn_rate\": 0.3229935992122107, \"precision\": 1.0, \"recall\": 0.6770064007877893, \"specificity\": 1.0, \"npv\": 0.6357579122709606, \"accuracy\": 0.7934508816120907, \"f1\": 0.8073987081620669, \"f2\": 0.7237603958311402, \"f0_5\": 0.9128933740539105, \"p4\": 0.7920765927688658, \"phi\": 0.6560580583751121}, {\"truth_threshold\": -3.200000047683716, \"match_probability\": 0.09813940308831819, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1374.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 657.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6765140324963073, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.32348596750369274, \"precision\": 1.0, \"recall\": 0.6765140324963073, \"specificity\": 1.0, \"npv\": 0.6354051054384018, \"accuracy\": 0.7931360201511335, \"f1\": 0.8070484581497798, \"f2\": 0.7233101705622236, \"f0_5\": 0.912714228776405, \"p4\": 0.7917711124557005, \"phi\": 0.6556374532841107}, {\"truth_threshold\": -3.1000000461935997, \"match_probability\": 0.10444750015659417, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1370.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 661.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6745445593303792, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3254554406696209, \"precision\": 1.0, \"recall\": 0.6745445593303792, \"specificity\": 1.0, \"npv\": 0.6339977851605758, \"accuracy\": 0.7918765743073047, \"f1\": 0.8056453984122317, \"f2\": 0.7215083210448704, \"f0_5\": 0.9119957395819465, \"p4\": 0.7905490918185237, \"phi\": 0.6539569990508374}, {\"truth_threshold\": -2.7000000402331352, \"match_probability\": 0.13336855415354743, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1363.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 668.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.671097981290005, \"tn_rate\": 1.0, \"fp_rate\": 0.0, 
\"fn_rate\": 0.3289020187099951, \"precision\": 1.0, \"recall\": 0.671097981290005, \"specificity\": 1.0, \"npv\": 0.631549917264203, \"accuracy\": 0.7896725440806045, \"f1\": 0.803182086034178, \"f2\": 0.7183514282702645, \"f0_5\": 0.9107309902445543, \"p4\": 0.7884101358393227, \"phi\": 0.6510237127477586}, {\"truth_threshold\": -2.600000038743019, \"match_probability\": 0.1415855743659812, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1356.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 675.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6676514032496307, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.33234859675036926, \"precision\": 1.0, \"recall\": 0.6676514032496307, \"specificity\": 1.0, \"npv\": 0.6291208791208791, \"accuracy\": 0.7874685138539043, \"f1\": 0.8007085916740478, \"f2\": 0.7151898734177216, \"f0_5\": 0.9094567404426559, \"p4\": 0.7862705739141664, \"phi\": 0.6480998671182523}, {\"truth_threshold\": -2.500000037252903, \"match_probability\": 0.15022110152606716, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1355.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 676.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6671590349581487, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3328409650418513, \"precision\": 1.0, \"recall\": 0.6671590349581487, \"specificity\": 1.0, \"npv\": 0.628775398132894, \"accuracy\": 0.7871536523929471, \"f1\": 0.8003544004725339, \"f2\": 0.7147378415444667, \"f0_5\": 0.9092739229633606, \"p4\": 0.7859648678428045, \"phi\": 0.6476829377278417}, {\"truth_threshold\": -2.400000035762787, \"match_probability\": 0.1592855907727143, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1351.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 680.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6651895617922206, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 
0.3348104382077794, \"precision\": 1.0, \"recall\": 0.6651895617922206, \"specificity\": 1.0, \"npv\": 0.6273972602739726, \"accuracy\": 0.7858942065491183, \"f1\": 0.7989355410999409, \"f2\": 0.7129287598944591, \"f0_5\": 0.9085406859448554, \"p4\": 0.7847418977635621, \"phi\": 0.6460171117170842}, {\"truth_threshold\": -2.3000000342726707, \"match_probability\": 0.16878839957195682, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1346.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 685.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6627277203348104, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3372722796651896, \"precision\": 1.0, \"recall\": 0.6627277203348104, \"specificity\": 1.0, \"npv\": 0.6256830601092896, \"accuracy\": 0.7843198992443325, \"f1\": 0.7971572401539828, \"f2\": 0.7106652587117213, \"f0_5\": 0.9076196898179366, \"p4\": 0.7832128384966869, \"phi\": 0.6439390561833764}, {\"truth_threshold\": -2.2000000327825546, \"match_probability\": 0.1787376058900962, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1344.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 687.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6617429837518464, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.33825701624815363, \"precision\": 1.0, \"recall\": 0.6617429837518464, \"specificity\": 1.0, \"npv\": 0.625, \"accuracy\": 0.7836901763224181, \"f1\": 0.7964444444444444, \"f2\": 0.7097591888466414, \"f0_5\": 0.907249898744431, \"p4\": 0.7826011005327976, \"phi\": 0.6431091391396206}, {\"truth_threshold\": -2.1000000312924385, \"match_probability\": 0.18913982061899084, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1340.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 691.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6597735105859183, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.34022648941408173, 
\"precision\": 1.0, \"recall\": 0.6597735105859183, \"specificity\": 1.0, \"npv\": 0.6236383442265795, \"accuracy\": 0.7824307304785895, \"f1\": 0.7950163156333432, \"f2\": 0.7079459002535926, \"f0_5\": 0.9065079150317954, \"p4\": 0.7813774176935412, \"phi\": 0.6414515256091918}, {\"truth_threshold\": -2.0000000298023224, \"match_probability\": 0.19999999669481672, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1338.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 693.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6587887740029542, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3412112259970458, \"precision\": 1.0, \"recall\": 0.6587887740029542, \"specificity\": 1.0, \"npv\": 0.6229597388465724, \"accuracy\": 0.781801007556675, \"f1\": 0.7943009795191451, \"f2\": 0.7070386810399493, \"f0_5\": 0.9061357171881349, \"p4\": 0.7807654687830268, \"phi\": 0.6406238230099892}, {\"truth_threshold\": -1.700000025331974, \"match_probability\": 0.2353489599091234, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1335.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 696.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6573116691285081, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.34268833087149186, \"precision\": 1.0, \"recall\": 0.6573116691285081, \"specificity\": 1.0, \"npv\": 0.6219445953286258, \"accuracy\": 0.7808564231738035, \"f1\": 0.7932263814616756, \"f2\": 0.7056771328893118, \"f0_5\": 0.9055759055759056, \"p4\": 0.7798474053553527, \"phi\": 0.6393836407517114}, {\"truth_threshold\": -1.600000023841858, \"match_probability\": 0.24805074388621665, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1333.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 698.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6563269325455441, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3436730674544559, \"precision\": 1.0, 
\"recall\": 0.6563269325455441, \"specificity\": 1.0, \"npv\": 0.6212696690179056, \"accuracy\": 0.7802267002518891, \"f1\": 0.7925089179548157, \"f2\": 0.7047689542138099, \"f0_5\": 0.9052016840961564, \"p4\": 0.7792352667284765, \"phi\": 0.6385577625791636}, {\"truth_threshold\": -1.4000000208616257, \"match_probability\": 0.2747995717943022, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1332.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 699.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6558345642540621, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.34416543574593794, \"precision\": 1.0, \"recall\": 0.6558345642540621, \"specificity\": 1.0, \"npv\": 0.6209327548806941, \"accuracy\": 0.779911838790932, \"f1\": 0.792149866190901, \"f2\": 0.7043147208121827, \"f0_5\": 0.9050142682429678, \"p4\": 0.778929167747172, \"phi\": 0.6381450953570468}, {\"truth_threshold\": -1.3000000193715096, \"match_probability\": 0.2888262766358852, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1327.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 704.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6533727227966519, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3466272772033481, \"precision\": 1.0, \"recall\": 0.6533727227966519, \"specificity\": 1.0, \"npv\": 0.6192536506219578, \"accuracy\": 0.7783375314861462, \"f1\": 0.7903513996426444, \"f2\": 0.7020421119458259, \"f0_5\": 0.904074124540128, \"p4\": 0.7773983659139201, \"phi\": 0.6360844627945531}, {\"truth_threshold\": -1.2000000178813934, \"match_probability\": 0.3032695424040186, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1322.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 709.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6509108813392418, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.34908911866075826, \"precision\": 1.0, \"recall\": 
0.6509108813392418, \"specificity\": 1.0, \"npv\": 0.6175836030204962, \"accuracy\": 0.7767632241813602, \"f1\": 0.7885475693408888, \"f2\": 0.6997670971839932, \"f0_5\": 0.9031288427380789, \"p4\": 0.775867028931697, \"phi\": 0.6340283016890773}, {\"truth_threshold\": -1.1000000163912773, \"match_probability\": 0.318111997717226, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1320.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 711.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6499261447562777, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3500738552437223, \"precision\": 1.0, \"recall\": 0.6499261447562777, \"specificity\": 1.0, \"npv\": 0.6169181034482759, \"accuracy\": 0.7761335012594458, \"f1\": 0.7878245299910475, \"f2\": 0.6988564167725541, \"f0_5\": 0.9027492819039803, \"p4\": 0.7752543370502095, \"phi\": 0.6332070787700438}, {\"truth_threshold\": -1.0000000149011612, \"match_probability\": 0.33333333103806717, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1319.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 712.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6494337764647957, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.35056622353520434, \"precision\": 1.0, \"recall\": 0.6494337764647957, \"specificity\": 1.0, \"npv\": 0.6165858912224017, \"accuracy\": 0.7758186397984886, \"f1\": 0.7874626865671642, \"f2\": 0.6984009319072328, \"f0_5\": 0.9025591898179828, \"p4\": 0.7749479564070448, \"phi\": 0.6327967318590355}, {\"truth_threshold\": -0.9000000134110451, \"match_probability\": 0.34891031813411577, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1316.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 715.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6479566715903495, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3520433284096504, \"precision\": 1.0, \"recall\": 0.6479566715903495, 
\"specificity\": 1.0, \"npv\": 0.6155913978494624, \"accuracy\": 0.7748740554156172, \"f1\": 0.7863758589781894, \"f2\": 0.6970338983050848, \"f0_5\": 0.9019876627827279, \"p4\": 0.774028672532505, \"phi\": 0.6315667448577293}, {\"truth_threshold\": -0.7000000104308128, \"match_probability\": 0.38102425962470177, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1314.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 717.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6469719350073855, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.35302806499261447, \"precision\": 1.0, \"recall\": 0.6469719350073855, \"specificity\": 1.0, \"npv\": 0.6149301825993555, \"accuracy\": 0.7742443324937027, \"f1\": 0.7856502242152467, \"f2\": 0.6961220597584233, \"f0_5\": 0.901605599011939, \"p4\": 0.7734156957074743, \"phi\": 0.6307476279232052}, {\"truth_threshold\": -0.6000000089406967, \"match_probability\": 0.3975010577814427, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1309.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 722.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6445100935499753, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3554899064500246, \"precision\": 1.0, \"recall\": 0.6445100935499753, \"specificity\": 1.0, \"npv\": 0.6132833422603107, \"accuracy\": 0.7726700251889169, \"f1\": 0.7838323353293413, \"f2\": 0.6938407717587194, \"f0_5\": 0.9006467593229668, \"p4\": 0.7718828151071816, \"phi\": 0.6287028744111437}, {\"truth_threshold\": -0.5000000074505806, \"match_probability\": 0.41421356112001384, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1306.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 725.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6430329886755293, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.35696701132447073, \"precision\": 1.0, \"recall\": 0.6430329886755293, \"specificity\": 
1.0, \"npv\": 0.6122994652406417, \"accuracy\": 0.7717254408060453, \"f1\": 0.7827389871141744, \"f2\": 0.6924708377518558, \"f0_5\": 0.9000689179875948, \"p4\": 0.7709627754494935, \"phi\": 0.627478091329186}, {\"truth_threshold\": -0.4000000059604645, \"match_probability\": 0.4311259267559445, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1300.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 731.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6400787789266371, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3599212210733629, \"precision\": 1.0, \"recall\": 0.6400787789266371, \"specificity\": 1.0, \"npv\": 0.6103411513859275, \"accuracy\": 0.7698362720403022, \"f1\": 0.7805463824677275, \"f2\": 0.6897283531409168, \"f0_5\": 0.8989074816761167, \"p4\": 0.7691219621523272, \"phi\": 0.6250331342479231}, {\"truth_threshold\": -0.30000000447034836, \"match_probability\": 0.4482004805735527, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1297.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 734.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.638601674052191, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.36139832594780896, \"precision\": 1.0, \"recall\": 0.638601674052191, \"specificity\": 1.0, \"npv\": 0.6093666844065992, \"accuracy\": 0.7688916876574308, \"f1\": 0.7794471153846154, \"f2\": 0.6883558008703959, \"f0_5\": 0.8983238675716858, \"p4\": 0.7682011740290052, \"phi\": 0.6238129405308033}, {\"truth_threshold\": -0.20000000298023224, \"match_probability\": 0.4653980381052749, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1284.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 747.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6322008862629247, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.36779911373707536, \"precision\": 1.0, \"recall\": 0.6322008862629247, \"specificity\": 1.0, \"npv\": 
0.6051797040169133, \"accuracy\": 0.7647984886649875, \"f1\": 0.7746606334841629, \"f2\": 0.6823979591836735, \"f0_5\": 0.895772289660946, \"p4\": 0.7642079467115986, \"phi\": 0.6185427594175095}, {\"truth_threshold\": 0.0, \"match_probability\": 0.5, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1283.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 748.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6317085179714427, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3682914820285574, \"precision\": 1.0, \"recall\": 0.6317085179714427, \"specificity\": 1.0, \"npv\": 0.6048600105652404, \"accuracy\": 0.7644836272040302, \"f1\": 0.7742908871454436, \"f2\": 0.6819389816094398, \"f0_5\": 0.8955744799664945, \"p4\": 0.7639005528137026, \"phi\": 0.6181385126768588}, {\"truth_threshold\": 0.10000000149011612, \"match_probability\": 0.5173217450900928, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1279.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 752.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6297390448055146, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3702609551944855, \"precision\": 1.0, \"recall\": 0.6297390448055146, \"specificity\": 1.0, \"npv\": 0.6035846072746441, \"accuracy\": 0.7632241813602015, \"f1\": 0.7728096676737161, \"f2\": 0.6801020950760396, \"f0_5\": 0.8947810270043375, \"p4\": 0.7626706427097226, \"phi\": 0.6165231496419628}, {\"truth_threshold\": 0.20000000298023224, \"match_probability\": 0.5346019618947252, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1272.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 759.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6262924667651403, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.37370753323485967, \"precision\": 1.0, \"recall\": 0.6262924667651403, \"specificity\": 1.0, \"npv\": 0.6013655462184874, \"accuracy\": 0.7610201511335013, 
\"f1\": 0.7702089009990918, \"f2\": 0.6768837803320562, \"f0_5\": 0.8933839022334598, \"p4\": 0.7605169691954441, \"phi\": 0.6137024615957984}, {\"truth_threshold\": 0.30000000447034836, \"match_probability\": 0.5517995194264473, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1271.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 760.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6258000984736583, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3741999015263417, \"precision\": 1.0, \"recall\": 0.6258000984736583, \"specificity\": 1.0, \"npv\": 0.6010498687664042, \"accuracy\": 0.760705289672544, \"f1\": 0.7698364627498486, \"f2\": 0.6764236295902075, \"f0_5\": 0.893183415319747, \"p4\": 0.7602091587940459, \"phi\": 0.6133001443515198}, {\"truth_threshold\": 0.5000000074505806, \"match_probability\": 0.5857864388799862, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1268.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 763.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6243229935992122, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.37567700640078777, \"precision\": 1.0, \"recall\": 0.6243229935992122, \"specificity\": 1.0, \"npv\": 0.600104821802935, \"accuracy\": 0.7597607052896725, \"f1\": 0.7687177932706881, \"f2\": 0.6750425894378195, \"f0_5\": 0.892580599746586, \"p4\": 0.7592855072439135, \"phi\": 0.6120941421230318}, {\"truth_threshold\": 0.6000000089406967, \"match_probability\": 0.6024989422185573, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1266.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 765.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6233382570162481, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3766617429837518, \"precision\": 1.0, \"recall\": 0.6233382570162481, \"specificity\": 1.0, \"npv\": 0.599476439790576, \"accuracy\": 0.7591309823677582, \"f1\": 0.7679708826205641, 
\"f2\": 0.6741214057507987, \"f0_5\": 0.8921775898520085, \"p4\": 0.7586695530830421, \"phi\": 0.6112909283650163}, {\"truth_threshold\": 0.7000000104308128, \"match_probability\": 0.6189757403752982, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1255.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 776.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6179222058099458, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.38207779419005417, \"precision\": 1.0, \"recall\": 0.6179222058099458, \"specificity\": 1.0, \"npv\": 0.5960437272254034, \"accuracy\": 0.7556675062972292, \"f1\": 0.7638466220328667, \"f2\": 0.6690478729075594, \"f0_5\": 0.8899446886966388, \"p4\": 0.7552790297360157, \"phi\": 0.6068843832941353}, {\"truth_threshold\": 0.800000011920929, \"match_probability\": 0.6351831076021942, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1249.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 782.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6149679960610537, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3850320039389463, \"precision\": 1.0, \"recall\": 0.6149679960610537, \"specificity\": 1.0, \"npv\": 0.5941878567721848, \"accuracy\": 0.7537783375314862, \"f1\": 0.7615853658536585, \"f2\": 0.6662754721007148, \"f0_5\": 0.8887149565959869, \"p4\": 0.7534275803790252, \"phi\": 0.6044886397303119}, {\"truth_threshold\": 0.9000000134110451, \"match_probability\": 0.6510896818658842, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1246.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 785.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6134908911866076, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3865091088133924, \"precision\": 1.0, \"recall\": 0.6134908911866076, \"specificity\": 1.0, \"npv\": 0.5932642487046632, \"accuracy\": 0.7528337531486146, \"f1\": 0.7604516325907843, \"f2\": 
0.6648879402347919, \"f0_5\": 0.8880969351389879, \"p4\": 0.7525012807216611, \"phi\": 0.6032928083832734}, {\"truth_threshold\": 1.0000000149011612, \"match_probability\": 0.6666666689619328, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1241.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 790.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6110290497291975, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3889709502708026, \"precision\": 1.0, \"recall\": 0.6110290497291975, \"specificity\": 1.0, \"npv\": 0.5917312661498708, \"accuracy\": 0.7512594458438288, \"f1\": 0.758557457212714, \"f2\": 0.6625734116390817, \"f0_5\": 0.8870621872766261, \"p4\": 0.7509565686139796, \"phi\": 0.6013027467512604}, {\"truth_threshold\": 1.1000000163912773, \"match_probability\": 0.681888002282774, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1238.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 793.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6095519448547514, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.39044805514524866, \"precision\": 1.0, \"recall\": 0.6095519448547514, \"specificity\": 1.0, \"npv\": 0.5908152734778122, \"accuracy\": 0.7503148614609572, \"f1\": 0.757418170694402, \"f2\": 0.6611835077974791, \"f0_5\": 0.8864384934841758, \"p4\": 0.7500292006663175, \"phi\": 0.6001104889920623}, {\"truth_threshold\": 1.2000000178813934, \"match_probability\": 0.6967304575959814, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1232.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 799.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6065977351058592, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3934022648941408, \"precision\": 1.0, \"recall\": 0.6065977351058592, \"specificity\": 1.0, \"npv\": 0.5889917695473251, \"accuracy\": 0.7484256926952141, \"f1\": 0.7551333129022372, \"f2\": 0.6584010260795211, 
\"f0_5\": 0.8851846529673804, \"p4\": 0.748173210499427, \"phi\": 0.5977299335012423}, {\"truth_threshold\": 1.3000000193715096, \"match_probability\": 0.7111737233641148, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1217.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 814.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5992122107336287, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.40078778926637126, \"precision\": 1.0, \"recall\": 0.5992122107336287, \"specificity\": 1.0, \"npv\": 0.5844818785094436, \"accuracy\": 0.7437027707808564, \"f1\": 0.749384236453202, \"f2\": 0.6514291831709667, \"f0_5\": 0.8820118857805479, \"p4\": 0.7435255360881731, \"phi\": 0.5918012154054669}, {\"truth_threshold\": 1.5000000223517418, \"match_probability\": 0.7387961280260511, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1216.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 815.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5987198424421467, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4012801575578533, \"precision\": 1.0, \"recall\": 0.5987198424421467, \"specificity\": 1.0, \"npv\": 0.5841836734693877, \"accuracy\": 0.7433879093198993, \"f1\": 0.7489990760702187, \"f2\": 0.6509635974304069, \"f0_5\": 0.8817984046410442, \"p4\": 0.7432152820546354, \"phi\": 0.5914070991600171}, {\"truth_threshold\": 1.600000023841858, \"match_probability\": 0.7519492561137834, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1213.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 818.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5972427375677006, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.40275726243229937, \"precision\": 1.0, \"recall\": 0.5972427375677006, \"specificity\": 1.0, \"npv\": 0.5832908813041263, \"accuracy\": 0.7424433249370277, \"f1\": 0.747842170160296, \"f2\": 0.6495662418335654, \"f0_5\": 
0.8811564724684005, \"p4\": 0.742284201345094, \"phi\": 0.590225586321326}, {\"truth_threshold\": 1.700000025331974, \"match_probability\": 0.7646510400908766, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1200.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 831.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5908419497784343, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4091580502215657, \"precision\": 1.0, \"recall\": 0.5908419497784343, \"specificity\": 1.0, \"npv\": 0.5794534412955465, \"accuracy\": 0.7383501259445844, \"f1\": 0.7428040854224698, \"f2\": 0.6435006435006435, \"f0_5\": 0.8783487044356609, \"p4\": 0.7382438098538624, \"phi\": 0.5851199886013844}, {\"truth_threshold\": 1.8000000268220901, \"match_probability\": 0.7768953900182098, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1195.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 836.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5883801083210242, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4116198916789759, \"precision\": 1.0, \"recall\": 0.5883801083210242, \"specificity\": 1.0, \"npv\": 0.5779909136799596, \"accuracy\": 0.7367758186397985, \"f1\": 0.7408555486670799, \"f2\": 0.6411632149372251, \"f0_5\": 0.8772573777712523, \"p4\": 0.7366872427429624, \"phi\": 0.5831623756721471}, {\"truth_threshold\": 1.9000000283122063, \"match_probability\": 0.7886787621992872, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1190.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 841.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.585918266863614, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.414081733136386, \"precision\": 1.0, \"recall\": 0.585918266863614, \"specificity\": 1.0, \"npv\": 0.5765357502517623, \"accuracy\": 0.7352015113350125, \"f1\": 0.7389009624340267, \"f2\": 0.6388232767876315, \"f0_5\": 0.8761596230304816, \"p4\": 
0.7351291857832581, \"phi\": 0.5812080759697219}, {\"truth_threshold\": 2.0000000298023224, \"match_probability\": 0.8000000033051833, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1189.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 842.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.585425898572132, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.41457410142786805, \"precision\": 1.0, \"recall\": 0.585425898572132, \"specificity\": 1.0, \"npv\": 0.5762455963764469, \"accuracy\": 0.7348866498740554, \"f1\": 0.7385093167701864, \"f2\": 0.6383549876516698, \"f0_5\": 0.8759392957123914, \"p4\": 0.7348173920177556, \"phi\": 0.5808176099748659}, {\"truth_threshold\": 2.1000000312924385, \"match_probability\": 0.8108601793810092, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1183.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 848.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5824716888232397, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4175283111767602, \"precision\": 1.0, \"recall\": 0.5824716888232397, \"specificity\": 1.0, \"npv\": 0.57451078775715, \"accuracy\": 0.7329974811083123, \"f1\": 0.7361543248288737, \"f2\": 0.6355431395723649, \"f0_5\": 0.8746118586426143, \"p4\": 0.732945325152551, \"phi\": 0.578477543896111}, {\"truth_threshold\": 2.2000000327825546, \"match_probability\": 0.8212623941099038, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1179.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 852.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5805022156573116, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4194977843426883, \"precision\": 1.0, \"recall\": 0.5805022156573116, \"specificity\": 1.0, \"npv\": 0.5733600400600901, \"accuracy\": 0.7317380352644837, \"f1\": 0.7345794392523365, \"f2\": 0.6336665591744598, \"f0_5\": 0.8737216540684749, \"p4\": 0.7316960140750485, \"phi\": 
0.5769200755947459}, {\"truth_threshold\": 2.3000000342726707, \"match_probability\": 0.8312116004280432, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1164.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 867.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5731166912850812, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.42688330871491875, \"precision\": 1.0, \"recall\": 0.5731166912850812, \"specificity\": 1.0, \"npv\": 0.5690854870775348, \"accuracy\": 0.7270151133501259, \"f1\": 0.7286384976525822, \"f2\": 0.6266149870801033, \"f0_5\": 0.8703454463885151, \"p4\": 0.7270016744799519, \"phi\": 0.571097532311457}, {\"truth_threshold\": 2.400000035762787, \"match_probability\": 0.8407144092272857, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1151.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 880.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5667159034958149, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.43328409650418515, \"precision\": 1.0, \"recall\": 0.5667159034958149, \"specificity\": 1.0, \"npv\": 0.5654320987654321, \"accuracy\": 0.7229219143576826, \"f1\": 0.7234443746071653, \"f2\": 0.6204851752021563, \"f0_5\": 0.8673700075357951, \"p4\": 0.7229205464573797, \"phi\": 0.5660736371863528}, {\"truth_threshold\": 2.500000037252903, \"match_probability\": 0.8497788984739328, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1148.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 883.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5652387986213688, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4347612013786312, \"precision\": 1.0, \"recall\": 0.5652387986213688, \"specificity\": 1.0, \"npv\": 0.564595660749507, \"accuracy\": 0.7219773299748111, \"f1\": 0.7222396980182447, \"f2\": 0.6190681622088007, \"f0_5\": 0.8666767325985203, \"p4\": 0.7219769863862414, \"phi\": 0.5649171381617742}, 
{\"truth_threshold\": 2.600000038743019, \"match_probability\": 0.8584144256340188, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1147.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 884.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5647464303298868, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.43525356967011325, \"precision\": 1.0, \"recall\": 0.5647464303298868, \"specificity\": 1.0, \"npv\": 0.5643173977328734, \"accuracy\": 0.7216624685138538, \"f1\": 0.7218376337319069, \"f2\": 0.6185956207528853, \"f0_5\": 0.8664450823387219, \"p4\": 0.7216623155682867, \"phi\": 0.5645318732743893}, {\"truth_threshold\": 2.7000000402331352, \"match_probability\": 0.8666314458464526, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1144.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 887.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5632693254554406, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4367306745445593, \"precision\": 1.0, \"recall\": 0.5632693254554406, \"specificity\": 1.0, \"npv\": 0.5634842519685039, \"accuracy\": 0.7207178841309824, \"f1\": 0.7206299212598425, \"f2\": 0.6171773845489857, \"f0_5\": 0.8657484486151051, \"p4\": 0.7207178457145617, \"phi\": 0.5633767784627467}, {\"truth_threshold\": 2.8000000417232513, \"match_probability\": 0.8744413378412453, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1136.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 895.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5593303791235844, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.44066962087641554, \"precision\": 1.0, \"recall\": 0.5593303791235844, \"specificity\": 1.0, \"npv\": 0.5612745098039216, \"accuracy\": 0.718198992443325, \"f1\": 0.7173981686138301, \"f2\": 0.6133909287257019, \"f0_5\": 0.8638783269961977, \"p4\": 0.7181958416012424, \"phi\": 0.560301601247963}, {\"truth_threshold\": 
2.9000000432133675, \"match_probability\": 0.8818562391739494, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1135.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 896.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5588380108321024, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.44116198916789756, \"precision\": 1.0, \"recall\": 0.5588380108321024, \"specificity\": 1.0, \"npv\": 0.560999510044096, \"accuracy\": 0.7178841309823678, \"f1\": 0.7169930511686671, \"f2\": 0.612917161680527, \"f0_5\": 0.8636432810835489, \"p4\": 0.717880234989325, \"phi\": 0.5599177174110734}, {\"truth_threshold\": 3.1000000461935997, \"match_probability\": 0.8955524998434058, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1109.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 922.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5460364352535697, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.45396356474643035, \"precision\": 1.0, \"recall\": 0.5460364352535697, \"specificity\": 1.0, \"npv\": 0.5539429124334785, \"accuracy\": 0.7096977329974811, \"f1\": 0.7063694267515923, \"f2\": 0.6005631972273368, \"f0_5\": 0.8574300293799288, \"p4\": 0.7096451676361052, \"phi\": 0.5499754660338557}, {\"truth_threshold\": 3.200000047683716, \"match_probability\": 0.9018605969116819, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1105.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 926.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5440669620876416, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.45593303791235845, \"precision\": 1.0, \"recall\": 0.5440669620876416, \"specificity\": 1.0, \"npv\": 0.5528730082085949, \"accuracy\": 0.7084382871536524, \"f1\": 0.704719387755102, \"f2\": 0.5986564091450861, \"f0_5\": 0.8564563633545187, \"p4\": 0.7083729914338502, \"phi\": 0.5484523115060287}, {\"truth_threshold\": 3.300000049173832, 
\"match_probability\": 0.9078269283845571, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1103.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 928.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5430822255046776, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4569177744953225, \"precision\": 1.0, \"recall\": 0.5430822255046776, \"specificity\": 1.0, \"npv\": 0.5523396044380126, \"accuracy\": 0.707808564231738, \"f1\": 0.7038927887683472, \"f2\": 0.597702395144684, \"f0_5\": 0.8559677169020643, \"p4\": 0.7077363543593591, \"phi\": 0.5476913561601727}, {\"truth_threshold\": 3.400000050663948, \"match_probability\": 0.9134653434169965, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1101.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 930.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5420974889217134, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.45790251107828656, \"precision\": 1.0, \"recall\": 0.5420974889217134, \"specificity\": 1.0, \"npv\": 0.5518072289156627, \"accuracy\": 0.7071788413098237, \"f1\": 0.7030651340996169, \"f2\": 0.5967479674796748, \"f0_5\": 0.8554778554778555, \"p4\": 0.7070993471091068, \"phi\": 0.5469308120448416}, {\"truth_threshold\": 3.500000052154064, \"match_probability\": 0.9187896995557598, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1094.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 937.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5386509108813392, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.46134908911866074, \"precision\": 1.0, \"recall\": 0.5386509108813392, \"specificity\": 1.0, \"npv\": 0.5499519692603266, \"accuracy\": 0.7049748110831234, \"f1\": 0.70016, \"f2\": 0.5934042091559991, \"f0_5\": 0.8537537068830966, \"p4\": 0.7048668608108392, \"phi\": 0.544272109503198}, {\"truth_threshold\": 3.6000000536441803, \"match_probability\": 
0.9238137785296746, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1093.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 938.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5381585425898572, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.46184145741014276, \"precision\": 1.0, \"recall\": 0.5381585425898572, \"specificity\": 1.0, \"npv\": 0.5496879500720115, \"accuracy\": 0.7046599496221663, \"f1\": 0.6997439180537772, \"f2\": 0.5929261147878919, \"f0_5\": 0.8535061689832891, \"p4\": 0.7045475528853299, \"phi\": 0.5438926972206558}, {\"truth_threshold\": 3.7000000551342964, \"match_probability\": 0.9285512128432143, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1089.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 942.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5361890694239291, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4638109305760709, \"precision\": 1.0, \"recall\": 0.5361890694239291, \"specificity\": 1.0, \"npv\": 0.5486344034499281, \"accuracy\": 0.7034005037783375, \"f1\": 0.698076923076923, \"f2\": 0.5910126994464344, \"f0_5\": 0.8525129168623767, \"p4\": 0.7032693518148777, \"phi\": 0.5423760413585481}, {\"truth_threshold\": 3.8000000566244125, \"match_probability\": 0.9330154225613858, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1084.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 947.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.533727227966519, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.46627277203348105, \"precision\": 1.0, \"recall\": 0.533727227966519, \"specificity\": 1.0, \"npv\": 0.5473231357552581, \"accuracy\": 0.7018261964735516, \"f1\": 0.6959871589085073, \"f2\": 0.5886185925282363, \"f0_5\": 0.8512643317103816, \"p4\": 0.701669388939399, \"phi\": 0.5404824326919393}, {\"truth_threshold\": 3.9000000581145287, \"match_probability\": 0.9372195616099515, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1074.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 957.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5288035450516987, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4711964549483013, \"precision\": 1.0, \"recall\": 0.5288035450516987, \"specificity\": 1.0, \"npv\": 0.5447193149381542, \"accuracy\": 0.6986775818639799, \"f1\": 0.6917874396135266, \"f2\": 0.58382257012394, \"f0_5\": 0.8487434803224277, \"p4\": 0.698461896288862, \"phi\": 0.5367024359898404}, {\"truth_threshold\": 4.000000059604645, \"match_probability\": 0.9411764728755594, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1067.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 964.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5253569670113245, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.47464303298867555, \"precision\": 1.0, \"recall\": 0.5253569670113245, \"specificity\": 1.0, \"npv\": 0.5429113323850165, \"accuracy\": 0.6964735516372796, \"f1\": 0.6888315041962556, \"f2\": 0.5804591448155805, \"f0_5\": 0.8469598348944277, \"p4\": 0.6962104664501566, \"phi\": 0.5340620291107292}, {\"truth_threshold\": 4.200000062584877, \"match_probability\": 0.9483982147343843, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1030.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1001.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5071393402264894, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.49286065977351057, \"precision\": 1.0, \"recall\": 0.5071393402264894, \"specificity\": 1.0, \"npv\": 0.5335507921714818, \"accuracy\": 0.684823677581864, \"f1\": 0.6729826853969291, \"f2\": 0.5625955866287962, \"f0_5\": 0.8372622337831247, \"p4\": 0.6842191143036372, \"phi\": 0.5201774665622936}, {\"truth_threshold\": 4.300000064074993, \"match_probability\": 0.9516868803254299, \"total_clerical_labels\": 
3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1025.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1006.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5046774987690793, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.49532250123092075, \"precision\": 1.0, \"recall\": 0.5046774987690793, \"specificity\": 1.0, \"npv\": 0.5323105532310554, \"accuracy\": 0.6832493702770781, \"f1\": 0.6708115183246073, \"f2\": 0.5601705104382992, \"f0_5\": 0.8359158375468928, \"p4\": 0.6825861647803277, \"phi\": 0.518309905918297}, {\"truth_threshold\": 4.400000065565109, \"match_probability\": 0.9547759482410569, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1020.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1011.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5022156573116692, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4977843426883309, \"precision\": 1.0, \"recall\": 0.5022156573116692, \"specificity\": 1.0, \"npv\": 0.5310760667903525, \"accuracy\": 0.6816750629722922, \"f1\": 0.6686332350049164, \"f2\": 0.5577427821522309, \"f0_5\": 0.8345606283750614, \"p4\": 0.6809500591436524, \"phi\": 0.5164443009324556}]}}, {\"mode\": \"vega-lite\"});\n",
              "</script>"
            ],
            "text/plain": [
              "alt.HConcatChart(...)"
            ]
          },
          "execution_count": 3,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "import splink.comparison_library as cl\n",
        "from splink import DuckDBAPI, Linker, SettingsCreator, block_on, splink_datasets\n",
        "from splink.datasets import splink_dataset_labels\n",
        "\n",
        "db_api = DuckDBAPI()\n",
        "\n",
        "df = splink_datasets.fake_1000\n",
        "\n",
        "settings = SettingsCreator(\n",
        "    link_type=\"dedupe_only\",\n",
        "    comparisons=[\n",
        "        cl.JaroWinklerAtThresholds(\"first_name\", [0.9, 0.7]),\n",
        "        cl.JaroAtThresholds(\"surname\", [0.9, 0.7]),\n",
        "        cl.DateOfBirthComparison(\n",
        "            \"dob\",\n",
        "            input_is_string=True,\n",
        "            datetime_metrics=[\"year\", \"month\"],\n",
        "            datetime_thresholds=[1, 1],\n",
        "        ),\n",
        "        cl.ExactMatch(\"city\").configure(term_frequency_adjustments=True),\n",
        "        cl.EmailComparison(\"email\"),\n",
        "    ],\n",
        "    blocking_rules_to_generate_predictions=[\n",
        "        block_on(\"substr(first_name,1,1)\"),\n",
        "        block_on(\"substr(surname, 1,1)\"),\n",
        "    ],\n",
        ")\n",
        "\n",
        "linker = Linker(df, settings, db_api)\n",
        "\n",
        "linker.training.estimate_probability_two_random_records_match(\n",
        "    [block_on(\"first_name\", \"surname\")], recall=0.7\n",
        ")\n",
        "linker.training.estimate_u_using_random_sampling(max_pairs=1e6)\n",
        "\n",
        "blocking_rule_for_training = block_on(\"first_name\", \"surname\")\n",
        "\n",
        "linker.training.estimate_parameters_using_expectation_maximisation(\n",
        "    blocking_rule_for_training\n",
        ")\n",
        "\n",
        "blocking_rule_for_training = block_on(\"dob\")\n",
        "linker.training.estimate_parameters_using_expectation_maximisation(\n",
        "    blocking_rule_for_training\n",
        ")\n",
        "\n",
        "df_labels = splink_dataset_labels.fake_1000_labels\n",
        "labels_table = linker.table_management.register_labels_table(df_labels)\n",
        "\n",
        "chart = linker.evaluation.accuracy_analysis_from_labels_table(\n",
        "    labels_table, output_type=\"threshold_selection\", add_metrics=[\"f1\"]\n",
        ")\n",
        "chart\n"
      ]
    }
  ],
  "metadata": {
    "kernelspec": {
      "display_name": "base",
      "language": "python",
      "name": "python3"
    },
    "language_info": {
      "codemirror_mode": {
        "name": "ipython",
        "version": 3
      },
      "file_extension": ".py",
      "mimetype": "text/x-python",
      "name": "python",
      "nbconvert_exporter": "python",
      "pygments_lexer": "ipython3",
      "version": "3.10.8"
    },
    "orig_nbformat": 4
  },
  "nbformat": 4,
  "nbformat_minor": 2
}
